Compare commits
2 Commits: v0.17.2 ... features/k

| Author | SHA1 | Date |
|---|---|---|
|  | 702774edea |  |
|  | 7713dd4063 |  |

13 .codecov.yml
@@ -19,16 +19,3 @@ comment: off
 #   annotations in files that seemingly have nothing to do with the PR.
 github_checks:
   annotations: false
-
-# Attempt to fix "Missing base commit" messages in the codecov UI.
-# Because we do not run full tests on package PRs, package PRs' merge
-# commits on `develop` don't have coverage info. It appears that
-# codecov will give you an error if the pseudo-base's coverage data
-# doesn't all apply properly to the real PR base.
-#
-# See here for docs:
-# https://docs.codecov.com/docs/comparing-commits#pseudo-comparison
-# See here for another potential solution:
-# https://community.codecov.com/t/2480/15
-codecov:
-  allow_coverage_offsets: true
38 .coveragerc Normal file
@@ -0,0 +1,38 @@
# -*- conf -*-
# .coveragerc to control coverage.py
[run]
parallel = True
concurrency = multiprocessing
branch = True
source =
    bin
    lib
omit =
    lib/spack/spack/test/*
    lib/spack/docs/*
    lib/spack/external/*
    share/spack/qa/*

[report]
# Regexes for lines to exclude from consideration
exclude_lines =
    # Have to re-enable the standard pragma
    pragma: no cover

    # Don't complain about missing debug-only code:
    def __repr__
    if self\.debug

    # Don't complain if tests don't hit defensive assertion code:
    raise AssertionError
    raise NotImplementedError

    # Don't complain if non-runnable code isn't run:
    if 0:
    if False:
    if __name__ == .__main__.:

ignore_errors = True

[html]
directory = htmlcov
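Because this `[run]` section enables `parallel = True` with multiprocessing concurrency, each process writes its own `.coverage.*` data file, which must be merged before a report is produced. A minimal sketch of that workflow with the standard coverage.py CLI (the test command here is illustrative; Spack's CI actually drives this via its QA scripts):

```console
$ coverage run bin/spack unit-test   # each process writes a .coverage.<host>.<pid> fragment
$ coverage combine                   # merge the per-process data files
$ coverage report                    # honors the [report] excludes above
$ coverage html                      # writes to htmlcov/, per the [html] section
```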
42 .github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
@@ -0,0 +1,42 @@
---
name: "\U0001F41E Bug report"
about: Report a bug in the core of Spack (command not working as expected, etc.)
labels: "bug,triage"
---

<!-- Explain, in a clear and concise way, the command you ran and the result you were trying to achieve.
Example: "I ran `spack find` to list all the installed packages and ..." -->

### Steps to reproduce the issue

```console
$ spack <command1> <spec>
$ spack <command2> <spec>
...
```

### Error Message

<!-- If Spack reported an error, provide the error message. If it did not report an error but the output appears incorrect, provide the incorrect output. If there was no error message and no output but the result is incorrect, describe how it does not match what you expect. -->
```console
$ spack --debug --stacktrace <command>
```

### Information on your system

<!-- Please include the output of `spack debug report` -->

<!-- If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well. -->

### Additional information

<!-- These boxes can be checked by replacing [ ] with [x] or by clicking them after submitting the issue. -->
- [ ] I have run `spack debug report` and reported the version of Spack/Python/Platform
- [ ] I have searched the issues of this repo and believe this is not a duplicate
- [ ] I have run the failing commands in debug mode and reported the output

<!-- We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!

If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on our Slack first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

Other than that, thanks for taking the time to contribute to Spack! -->
58 .github/ISSUE_TEMPLATE/bug_report.yml vendored
@@ -1,58 +0,0 @@
name: "\U0001F41E Bug report"
description: Report a bug in the core of Spack (command not working as expected, etc.)
labels: [bug, triage]
body:
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce
      description: |
        Explain, in a clear and concise way, the command you ran and the result you were trying to achieve.
        Example: "I ran `spack find` to list all the installed packages and ..."
      placeholder: |
        ```console
        $ spack <command1> <spec>
        $ spack <command2> <spec>
        ...
        ```
    validations:
      required: true
  - type: textarea
    id: error
    attributes:
      label: Error message
      description: |
        If Spack reported an error, provide the error message. If it did not report an error but the output appears incorrect, provide the incorrect output. If there was no error message and no output but the result is incorrect, describe how it does not match what you expect.
      placeholder: |
        ```console
        $ spack --debug --stacktrace <command>
        ```
  - type: textarea
    id: information
    attributes:
      label: Information on your system
      description: Please include the output of `spack debug report`
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have run `spack debug report` and reported the version of Spack/Python/Platform
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
        - label: I have run the failing commands in debug mode and reported the output
          required: true
  - type: markdown
    attributes:
      value: |
        We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!
        If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on [our Slack](https://slack.spack.io/) first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

        Other than that, thanks for taking the time to contribute to Spack!
43 .github/ISSUE_TEMPLATE/build_error.md vendored Normal file
@@ -0,0 +1,43 @@
---
name: "\U0001F4A5 Build error"
about: Some package in Spack didn't build correctly
title: "Installation issue: "
labels: "build-error"
---

<!-- Thanks for taking the time to report this build failure. To proceed with the report please:

1. Title the issue "Installation issue: <name-of-the-package>".
2. Provide the information required below.

We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively! -->

### Steps to reproduce the issue

<!-- Fill in the exact spec you are trying to build and the relevant part of the error message -->
```console
$ spack install <spec>
...
```

### Information on your system

<!-- Please include the output of `spack debug report` -->

<!-- If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well. -->

### Additional information

<!-- Please upload the following files. They should be present in the stage directory of the failing build. Also upload any config.log or similar file if one exists. -->
* [spack-build-out.txt]()
* [spack-build-env.txt]()

<!-- Some packages have maintainers who have volunteered to debug build failures. Run `spack maintainers <name-of-the-package>` and @mention them here if they exist. -->

### General information

<!-- These boxes can be checked by replacing [ ] with [x] or by clicking them after submitting the issue. -->
- [ ] I have run `spack debug report` and reported the version of Spack/Python/Platform
- [ ] I have run `spack maintainers <name-of-the-package>` and @mentioned any maintainers
- [ ] I have uploaded the build log and environment files
- [ ] I have searched the issues of this repo and believe this is not a duplicate
64 .github/ISSUE_TEMPLATE/build_error.yml vendored
@@ -1,64 +0,0 @@
name: "\U0001F4A5 Build error"
description: Some package in Spack didn't build correctly
title: "Installation issue: "
labels: [build-error]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to report this build failure. To proceed with the report please:
        1. Title the issue `Installation issue: <name-of-the-package>`.
        2. Provide the information required below.

        We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce the issue
      description: |
        Fill in the exact spec you are trying to build and the relevant part of the error message
      placeholder: |
        ```console
        $ spack install <spec>
        ...
        ```
    validations:
      required: true
  - type: textarea
    id: information
    attributes:
      label: Information on your system
      description: Please include the output of `spack debug report`
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well.
  - type: textarea
    id: additional_information
    attributes:
      label: Additional information
      description: |
        Please upload the following files:
        * **`spack-build-out.txt`**
        * **`spack-build-env.txt`**

        They should be present in the stage directory of the failing build. Also upload any `config.log` or similar file if one exists.
  - type: markdown
    attributes:
      value: |
        Some packages have maintainers who have volunteered to debug build failures. Run `spack maintainers <name-of-the-package>` and **@mention** them here if they exist.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have run `spack debug report` and reported the version of Spack/Python/Platform
          required: true
        - label: I have run `spack maintainers <name-of-the-package>` and **@mentioned** any maintainers
          required: true
        - label: I have uploaded the build log and environment files
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
1 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1 +0,0 @@
blank_issues_enabled: true
33 .github/ISSUE_TEMPLATE/feature_request.md vendored Normal file
@@ -0,0 +1,33 @@
---
name: "\U0001F38A Feature request"
about: Suggest adding a feature that is not yet in Spack
labels: feature

---

<!--*Please add a concise summary of your suggestion here.*-->

### Rationale

<!--*Is your feature request related to a problem? Please describe it!*-->

### Description

<!--*Describe the solution you'd like and the alternatives you have considered.*-->

### Additional information
<!--*Add any other context about the feature request here.*-->

### General information

- [ ] I have run `spack --version` and reported the version of Spack
- [ ] I have searched the issues of this repo and believe this is not a duplicate

<!--If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on our Slack first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

Other than that, thanks for taking the time to contribute to Spack!
-->
41 .github/ISSUE_TEMPLATE/feature_request.yml vendored
@@ -1,41 +0,0 @@
name: "\U0001F38A Feature request"
description: Suggest adding a feature that is not yet in Spack
labels: [feature]
body:
  - type: textarea
    id: summary
    attributes:
      label: Summary
      description: Please add a concise summary of your suggestion here.
    validations:
      required: true
  - type: textarea
    id: rationale
    attributes:
      label: Rationale
      description: Is your feature request related to a problem? Please describe it!
  - type: textarea
    id: description
    attributes:
      label: Description
      description: Describe the solution you'd like and the alternatives you have considered.
  - type: textarea
    id: additional_information
    attributes:
      label: Additional information
      description: Add any other context about the feature request here.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have run `spack --version` and reported the version of Spack
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
  - type: markdown
    attributes:
      value: |
        If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on [our Slack](https://slack.spack.io/) first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

        Other than that, thanks for taking the time to contribute to Spack!
275 .github/workflows/bootstrap.yml vendored
@@ -1,275 +0,0 @@
name: Bootstrapping

on:
  pull_request:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the
      # built-in repository or documentation
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
      - '!var/spack/repos/builtin/packages/python/**'
      - '!var/spack/repos/builtin/packages/re2c/**'
      - 'lib/spack/docs/**'
  schedule:
    # nightly at 2:16 AM
    - cron: '16 2 * * *'

jobs:

  fedora-clingo-sources:
    runs-on: ubuntu-latest
    container: "fedora:latest"
    steps:
      - name: Install dependencies
        run: |
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch unzip which xz python3 python3-devel tree \
              cmake bison bison-devel libstdc++-static
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd spack-test
          chown -R spack-test .
      - name: Bootstrap clingo
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust github-actions
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  ubuntu-clingo-sources:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    steps:
      - name: Install dependencies
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          apt-get update -y && apt-get upgrade -y
          apt-get install -y \
              bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
              make patch unzip xz-utils python3 python3-dev tree \
              cmake bison
      - name: Work around CVE-2022-24765
        run: |
          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
          # a breaking behavior. See:
          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
          # - https://github.com/actions/checkout/issues/760
          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
          git config --global --add safe.directory /__w/spack/spack
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd -m spack-test
          chown -R spack-test .
      - name: Bootstrap clingo
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust github-actions
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  opensuse-clingo-sources:
    runs-on: ubuntu-latest
    container: "opensuse/leap:latest"
    steps:
      - name: Install dependencies
        run: |
          zypper update -y
          zypper install -y \
              bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
              make patch unzip which xz python3 python3-devel tree \
              cmake bison
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust github-actions
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  macos-clingo-sources:
    runs-on: macos-latest
    steps:
      - name: Install dependencies
        run: |
          brew install cmake bison@2.7 tree
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
          spack bootstrap untrust github-actions
          spack external find --not-buildable cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  macos-clingo-binaries:
    runs-on: macos-latest
    strategy:
      matrix:
        python-version: ['3.5', '3.6', '3.7', '3.8', '3.9']
    steps:
      - name: Install dependencies
        run: |
          brew install tree
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
      - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  ubuntu-clingo-binaries:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
    steps:
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
      - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  ubuntu-gnupg-binaries:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    steps:
      - name: Install dependencies
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          apt-get update -y && apt-get upgrade -y
          apt-get install -y \
              bzip2 curl file g++ gcc patchelf gfortran git gzip \
              make patch unzip xz-utils python3 python3-dev tree
      - name: Work around CVE-2022-24765
        run: |
          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
          # a breaking behavior. See:
          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
          # - https://github.com/actions/checkout/issues/760
          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
          git config --global --add safe.directory /__w/spack/spack
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd -m spack-test
          chown -R spack-test .
      - name: Bootstrap GnuPG
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d gpg list
          tree ~/.spack/bootstrap/store/

  ubuntu-gnupg-sources:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    steps:
      - name: Install dependencies
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          apt-get update -y && apt-get upgrade -y
          apt-get install -y \
              bzip2 curl file g++ gcc patchelf gfortran git gzip \
              make patch unzip xz-utils python3 python3-dev tree \
              gawk
      - name: Work around CVE-2022-24765
        run: |
          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
          # a breaking behavior. See:
          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
          # - https://github.com/actions/checkout/issues/760
          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
          git config --global --add safe.directory /__w/spack/spack
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd -m spack-test
          chown -R spack-test .
      - name: Bootstrap GnuPG
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack solve zlib
          spack bootstrap untrust github-actions
          spack -d gpg list
          tree ~/.spack/bootstrap/store/

  macos-gnupg-binaries:
    runs-on: macos-latest
    steps:
      - name: Install dependencies
        run: |
          brew install tree
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d gpg list
          tree ~/.spack/bootstrap/store/

  macos-gnupg-sources:
    runs-on: macos-latest
    steps:
      - name: Install dependencies
        run: |
          brew install gawk tree
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
          spack solve zlib
          spack bootstrap untrust github-actions
          spack -d gpg list
          tree ~/.spack/bootstrap/store/
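All of these jobs exercise the same bootstrap path on different platforms. The source-bootstrap check can be reproduced locally from a Spack checkout using only the commands the workflow itself runs:

```console
$ source share/spack/setup-env.sh
$ spack bootstrap untrust github-actions   # force bootstrap from sources, not prebuilt binaries
$ spack external find cmake bison          # reuse system build tools
$ spack -d solve zlib                      # the first solve triggers the clingo bootstrap
$ tree ~/.spack/bootstrap/store/           # inspect what got bootstrapped
```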
91 .github/workflows/build-containers.yml vendored
@@ -1,91 +0,0 @@
name: Containers

on:
  # This Workflow can be triggered manually
  workflow_dispatch:
  # Build new Spack develop containers nightly.
  schedule:
    - cron: '34 0 * * *'
  # Run on pull requests that modify this file
  pull_request:
    branches:
      - develop
    paths:
      - '.github/workflows/build-containers.yml'
  # Let's also build & tag Spack containers on releases.
  release:
    types: [published]

jobs:
  deploy-images:
    runs-on: ubuntu-latest
    permissions:
      packages: write
    strategy:
      # Even if one container fails to build we still want the others
      # to continue their builds.
      fail-fast: false
      # A matrix of Dockerfile paths, associated tags, and which architectures
      # they support.
      matrix:
        dockerfile: [[amazon-linux, amazonlinux-2.dockerfile, 'linux/amd64,linux/arm64'],
                     [centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
                     [leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
                     [ubuntu-xenial, ubuntu-1604.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le'],
                     [ubuntu-bionic, ubuntu-1804.dockerfile, 'linux/amd64,linux/arm64,linux/ppc64le']]
    name: Build ${{ matrix.dockerfile[0] }}
    steps:
      - name: Checkout
        uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2

      - name: Set Container Tag Normal (Nightly)
        run: |
          container="${{ matrix.dockerfile[0] }}:latest"
          echo "container=${container}" >> $GITHUB_ENV
          echo "versioned=${container}" >> $GITHUB_ENV

      # On a new release create a container with the same tag as the release.
      - name: Set Container Tag on Release
        if: github.event_name == 'release'
        run: |
          versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
          echo "versioned=${versioned}" >> $GITHUB_ENV

      - name: Check ${{ matrix.dockerfile[1] }} Exists
        run: |
          printf "Preparing to build ${{ env.container }} from ${{ matrix.dockerfile[1] }}"
          if [ ! -f "share/spack/docker/${{ matrix.dockerfile[1]}}" ]; then
              printf "Dockerfile ${{ matrix.dockerfile[0]}} does not exist"
              exit 1;
          fi

      - name: Set up QEMU
        uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # @v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Log in to DockerHub
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # @v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[1] }}
        uses: docker/build-push-action@a66e35b9cbcf4ad0ea91ffcaf7bbad63ad9e0229 # @v2
        with:
          file: share/spack/docker/${{matrix.dockerfile[1]}}
          platforms: ${{ matrix.dockerfile[2] }}
          push: ${{ github.event_name != 'pull_request' }}
          tags: |
            spack/${{ env.container }}
            spack/${{ env.versioned }}
            ghcr.io/spack/${{ env.container }}
            ghcr.io/spack/${{ env.versioned }}
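The QEMU/Buildx/build-push-action steps above are roughly equivalent to a single multi-arch `docker buildx build`. A hedged sketch for one matrix entry (ubuntu-bionic), assuming you have already logged in to the target registry; this is an approximation of what the action does, not the workflow's literal command:

```console
$ docker buildx create --use     # one-time: enable a buildx builder instance
$ docker buildx build \
    --file share/spack/docker/ubuntu-1804.dockerfile \
    --platform linux/amd64,linux/arm64,linux/ppc64le \
    --tag spack/ubuntu-bionic:latest \
    --push .
```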
77 .github/workflows/linux_build_tests.yaml vendored Normal file
@@ -0,0 +1,77 @@
name: linux builds

on:
  push:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the built-in repository
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/lz4/**'
      - '!var/spack/repos/builtin/packages/mpich/**'
      - '!var/spack/repos/builtin/packages/tut/**'
      - '!var/spack/repos/builtin/packages/py-setuptools/**'
      - '!var/spack/repos/builtin/packages/openjpeg/**'
      - '!var/spack/repos/builtin/packages/r-rcpp/**'
      - '!var/spack/repos/builtin/packages/ruby-rake/**'
      # Don't run if we only modified documentation
      - 'lib/spack/docs/**'
  pull_request:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the built-in repository
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/lz4/**'
      - '!var/spack/repos/builtin/packages/mpich/**'
      - '!var/spack/repos/builtin/packages/tut/**'
      - '!var/spack/repos/builtin/packages/py-setuptools/**'
      - '!var/spack/repos/builtin/packages/openjpeg/**'
      - '!var/spack/repos/builtin/packages/r-rcpp/**'
      - '!var/spack/repos/builtin/packages/ruby-rake/**'
      # Don't run if we only modified documentation
      - 'lib/spack/docs/**'

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        package:
          - lz4                   # MakefilePackage
          - mpich~fortran         # AutotoolsPackage
          - 'tut%gcc@:10.99.99'   # WafPackage
          - py-setuptools         # PythonPackage
          - openjpeg              # CMakePackage
          - r-rcpp                # RPackage
          - ruby-rake             # RubyPackage
    steps:
      - uses: actions/checkout@v2
      - uses: actions/cache@v2.1.6
        with:
          path: ~/.ccache
          key: ccache-build-${{ matrix.package }}
          restore-keys: |
            ccache-build-${{ matrix.package }}
      - uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install System Packages
        run: |
          sudo apt-get update
          sudo apt-get -yqq install ccache gfortran perl perl-base r-base r-base-core r-base-dev ruby findutils openssl libssl-dev libpciaccess-dev
          R --version
          perl --version
          ruby --version
      - name: Copy Configuration
        run: |
          ccache -M 300M && ccache -z
          # Set up external deps for build tests, b/c they take too long to compile
          cp share/spack/qa/configuration/*.yaml etc/spack/
      - name: Run the build test
        run: |
          . share/spack/setup-env.sh
          SPEC=${{ matrix.package }} share/spack/qa/run-build-tests
          ccache -s
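Each matrix entry just sets `SPEC` and calls the QA driver, so a single build test can be reproduced locally. A minimal sketch using only commands from the workflow:

```console
$ . share/spack/setup-env.sh
$ cp share/spack/qa/configuration/*.yaml etc/spack/   # mark slow dependencies as externals
$ SPEC=lz4 share/spack/qa/run-build-tests             # substitute any spec from the matrix
```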
26 .github/workflows/macos_python.yml vendored
@@ -24,8 +24,8 @@ jobs:
     name: gcc with clang
     runs-on: macos-latest
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
-    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+    - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
       with:
         python-version: 3.9
     - name: spack install
@@ -39,8 +39,8 @@ jobs:
     runs-on: macos-latest
     timeout-minutes: 700
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
-    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+    - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
       with:
         python-version: 3.9
     - name: spack install
@@ -52,8 +52,8 @@ jobs:
     name: scipy, mpl, pd
     runs-on: macos-latest
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
-    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+    - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
       with:
         python-version: 3.9
     - name: spack install
@@ -62,3 +62,17 @@ jobs:
       spack install -v --fail-fast py-scipy %apple-clang
       spack install -v --fail-fast py-matplotlib %apple-clang
       spack install -v --fail-fast py-pandas %apple-clang
+
+  install_mpi4py_clang:
+    name: mpi4py, petsc4py
+    runs-on: macos-latest
+    steps:
+    - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.9
+    - name: spack install
+      run: |
+        . .github/workflows/install_spack.sh
+        spack install -v --fail-fast py-mpi4py %apple-clang
+        spack install -v --fail-fast py-petsc4py %apple-clang
11 .github/workflows/setup_git.sh vendored
@@ -1,8 +1,9 @@
-#!/bin/bash -e
+#!/usr/bin/env sh
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"

-# create a local pr base branch
-if [[ -n $GITHUB_BASE_REF ]]; then
-  git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}"
+# With fetch-depth: 0 we have a remote develop
+# but not a local branch. Don't do this on develop
+if [ "$(git branch --show-current)" != "develop" ]
+then
+  git branch develop origin/develop
 fi
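For reference, the unit-test jobs consume this helper exactly as shown in the workflow steps elsewhere in this diff:

```console
$ git --version
$ git fetch --unshallow
$ . .github/workflows/setup_git.sh   # sets a test identity and a local develop branch
```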
166 .github/workflows/unit_tests.yaml vendored
@@ -15,8 +15,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
-    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+    - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
       with:
         python-version: 3.9
     - name: Install Python Packages
@@ -31,15 +31,15 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+    - uses: actions/checkout@v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+    - uses: actions/setup-python@v2
       with:
         python-version: 3.9
     - name: Install Python packages
       run: |
-        pip install --upgrade pip six setuptools types-six
+        pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black types-six
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
@@ -48,6 +48,26 @@ jobs:
     - name: Run style tests
       run: |
         share/spack/qa/run-style-tests
+  # Build the documentation
+  documentation:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2
+    - uses: actions/setup-python@v2
+      with:
+        python-version: 3.9
+    - name: Install System packages
+      run: |
+        sudo apt-get -y update
+        sudo apt-get install -y coreutils ninja-build graphviz
+    - name: Install Python packages
+      run: |
+        pip install --upgrade pip six setuptools
+        pip install --upgrade -r lib/spack/docs/requirements.txt
+    - name: Build documentation
+      run: |
+        share/spack/qa/run-doc-tests
+
   # Check which files have been updated by the PR
   changes:
     runs-on: ubuntu-latest
@@ -57,12 +77,12 @@ jobs:
       packages: ${{ steps.filter.outputs.packages }}
       with_coverage: ${{ steps.coverage.outputs.with_coverage }}
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+    - uses: actions/checkout@v2
      if: ${{ github.event_name == 'push' }}
       with:
         fetch-depth: 0
       # For pull requests it's not necessary to checkout the code
-    - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721
+    - uses: dorny/paths-filter@v2
       id: filter
       with:
         # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
@@ -92,17 +112,17 @@ jobs:

   # Run unit tests with different configurations on linux
   unittests:
-    needs: [ validate, style, changes ]
+    needs: [ validate, style, documentation, changes ]
     runs-on: ubuntu-latest
     strategy:
       matrix:
         python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
         concretizer: ['original', 'clingo']
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+    - uses: actions/checkout@v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+    - uses: actions/setup-python@v2
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -110,28 +130,37 @@ jobs:
         sudo apt-get -y update
         # Needed for unit tests
         sudo apt-get -y install \
-          coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
-          patchelf cmake bison libbison-dev kcov
+          coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
+          patchelf
+        # Needed for kcov
+        sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
+        sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
     - name: Install Python packages
       run: |
-        pip install --upgrade pip six setuptools codecov coverage[toml]
-        # ensure style checks are not skipped in unit tests for python >= 3.6
-        # note that true/false (i.e., 1/0) are opposite in conditions in python and bash
-        if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
-          pip install --upgrade flake8 isort>=4.3.5 mypy>=0.900 black
-        fi
+        pip install --upgrade pip six setuptools codecov coverage
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
         git --version
         . .github/workflows/setup_git.sh
-    - name: Bootstrap clingo
+    - name: Install kcov for bash script coverage
+      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
+      env:
+        KCOV_VERSION: 34
+      run: |
+        KCOV_ROOT=$(mktemp -d)
+        wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
+        tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
+        mkdir -p ${KCOV_ROOT}/build
+        cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
+        make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
+    - name: Bootstrap clingo from sources
       if: ${{ matrix.concretizer == 'clingo' }}
       env:
         SPACK_PYTHON: python
       run: |
         . share/spack/setup-env.sh
         spack bootstrap untrust spack-install
         spack external find --not-buildable cmake bison
         spack -v solve zlib
     - name: Run unit tests (full suite with coverage)
       if: ${{ needs.changes.outputs.with_coverage == 'true' }}
@@ -151,34 +180,48 @@ jobs:
         SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+    - uses: codecov/codecov-action@v1
       if: ${{ needs.changes.outputs.with_coverage == 'true' }}
       with:
         flags: unittests,linux,${{ matrix.concretizer }}
   # Test shell integration
   shell:
-    needs: [ validate, style, changes ]
+    needs: [ validate, style, documentation, changes ]
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+    - uses: actions/checkout@v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+    - uses: actions/setup-python@v2
       with:
         python-version: 3.9
     - name: Install System packages
       run: |
         sudo apt-get -y update
         # Needed for shell tests
-        sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
+        sudo apt-get install -y coreutils csh zsh tcsh fish dash bash
+        # Needed for kcov
+        sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
+        sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
     - name: Install Python packages
       run: |
-        pip install --upgrade pip six setuptools codecov coverage[toml]
+        pip install --upgrade pip six setuptools codecov coverage
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
         git --version
         . .github/workflows/setup_git.sh
+    - name: Install kcov for bash script coverage
+      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
+      env:
+        KCOV_VERSION: 38
+      run: |
+        KCOV_ROOT=$(mktemp -d)
+        wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
+        tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
+        mkdir -p ${KCOV_ROOT}/build
+        cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
+        make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
     - name: Run shell tests (without coverage)
       if: ${{ needs.changes.outputs.with_coverage == 'false' }}
       run: |
@@ -189,13 +232,13 @@ jobs:
         COVERAGE: true
       run: |
         share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+    - uses: codecov/codecov-action@v1
       if: ${{ needs.changes.outputs.with_coverage == 'true' }}
       with:
         flags: shelltests,linux
   # Test for Python2.6 run on Centos 6
   centos6:
-    needs: [ validate, style, changes ]
+    needs: [ validate, style, documentation, changes ]
     runs-on: ubuntu-latest
     container: spack/github-actions:centos6
     steps:
@@ -205,32 +248,28 @@ jobs:
       if: ${{ needs.changes.outputs.with_coverage == 'true' }}
       env:
         HOME: /home/spack-test
         SPACK_TEST_SOLVER: original
       run: |
         whoami && echo $HOME && cd $HOME
-        git clone "${{ github.server_url }}/${{ github.repository }}.git" && cd spack
-        git fetch origin "${{ github.ref }}:test-branch"
+        git clone https://github.com/spack/spack.git && cd spack
+        git fetch origin ${{ github.ref }}:test-branch
         git checkout test-branch
         . .github/workflows/setup_git.sh
-        bin/spack unit-test -x
+        share/spack/qa/run-unit-tests
     - name: Run unit tests (only package tests)
       if: ${{ needs.changes.outputs.with_coverage == 'false' }}
       env:
         HOME: /home/spack-test
         ONLY_PACKAGES: true
         SPACK_TEST_SOLVER: original
       run: |
         whoami && echo $HOME && cd $HOME
-        git clone "${{ github.server_url }}/${{ github.repository }}.git" && cd spack
-        git fetch origin "${{ github.ref }}:test-branch"
+        git clone https://github.com/spack/spack.git && cd spack
+        git fetch origin ${{ github.ref }}:test-branch
         git checkout test-branch
         . .github/workflows/setup_git.sh
-        bin/spack unit-test -x -m "not maybeslow" -k "package_sanity"
+        share/spack/qa/run-unit-tests

   # Test RHEL8 UBI with platform Python. This job is run
   # only on PRs modifying core Spack
   rhel8-platform-python:
-    needs: [ validate, style, changes ]
+    needs: [ validate, style, documentation, changes ]
     runs-on: ubuntu-latest
     if: ${{ needs.changes.outputs.with_coverage == 'true' }}
     container: registry.access.redhat.com/ubi8/ubi
@@ -240,7 +279,7 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+    - uses: actions/checkout@v2
     - name: Setup repo and non-root user
       run: |
         git --version
@@ -252,17 +291,16 @@ jobs:
       shell: runuser -u spack-test -- bash {0}
       run: |
         source share/spack/setup-env.sh
-        spack -d solve zlib
         spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
   # Test for the clingo based solver (using clingo-cffi)
   clingo-cffi:
-    needs: [ validate, style, changes ]
+    needs: [ validate, style, documentation, changes ]
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+    - uses: actions/checkout@v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+    - uses: actions/setup-python@v2
       with:
         python-version: 3.9
     - name: Install System packages
@@ -271,10 +309,24 @@ jobs:
         # Needed for unit tests
         sudo apt-get -y install \
           coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
-          patchelf kcov
+          patchelf
+        # Needed for kcov
+        sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
+        sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
+    - name: Install kcov for bash script coverage
+      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
+      env:
+        KCOV_VERSION: 34
+      run: |
+        KCOV_ROOT=$(mktemp -d)
+        wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
+        tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
+        mkdir -p ${KCOV_ROOT}/build
+        cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
+        make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
     - name: Install Python packages
       run: |
-        pip install --upgrade pip six setuptools codecov coverage[toml] clingo
+        pip install --upgrade pip six setuptools codecov coverage clingo
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
@@ -296,54 +348,48 @@ jobs:
         SPACK_TEST_SOLVER: clingo
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+    - uses: codecov/codecov-action@v1
       if: ${{ needs.changes.outputs.with_coverage == 'true' }}
       with:
         flags: unittests,linux,clingo
   # Run unit tests on MacOS
   build:
-    needs: [ validate, style, changes ]
+    needs: [ validate, style, documentation, changes ]
     runs-on: macos-latest
     strategy:
       matrix:
         python-version: [3.8]
     steps:
-    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+    - uses: actions/checkout@v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+    - uses: actions/setup-python@v2
       with:
         python-version: ${{ matrix.python-version }}
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools
-        pip install --upgrade codecov coverage[toml]
+        pip install --upgrade codecov coverage
+        pip install --upgrade flake8 isort>=4.3.5 mypy>=0.800
     - name: Setup Homebrew packages
       run: |
         brew install dash fish gcc gnupg2 kcov
     - name: Run unit tests
       env:
         SPACK_TEST_SOLVER: clingo
       run: |
         git --version
         . .github/workflows/setup_git.sh
         . share/spack/setup-env.sh
         $(which spack) bootstrap untrust spack-install
         $(which spack) solve zlib
         if [ "${{ needs.changes.outputs.with_coverage }}" == "true" ]
         then
           coverage run $(which spack) unit-test -x
           coverage combine
           coverage xml
-          # Delete the symlink going from ./lib/spack/docs/_spack_root back to
-          # the initial directory, since it causes ELOOP errors with codecov/actions@2
-          rm lib/spack/docs/_spack_root
         else
           echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
           $(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
         fi
-    - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+    - uses: codecov/codecov-action@v1
       if: ${{ needs.changes.outputs.with_coverage == 'true' }}
       with:
-        files: ./coverage.xml
+        file: ./coverage.xml
         flags: unittests,macos
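On one side of this diff, kcov is built from source (rather than taken from apt) so that bash script coverage can be collected with a known-good version. Once installed, kcov wraps a script invocation and writes its coverage report to an output directory; a hedged sketch with illustrative paths (the real invocation lives inside Spack's QA scripts, not in this workflow):

```console
$ kcov --version                                  # confirm the freshly built binary is on PATH
$ kcov /tmp/kcov-out share/spack/qa/run-shell-tests   # run the script, collect coverage into /tmp/kcov-out
```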
4 .gitignore vendored
@@ -136,7 +136,6 @@ venv/
 ENV/
 env.bak/
 venv.bak/
-!/lib/spack/env

 # Spyder project settings
 .spyderproject
@@ -210,9 +209,6 @@ tramp
 /eshell/history
 /eshell/lastdir

-# zsh byte-compiled files
-*.zwc
-
 # elpa packages
 /elpa/
35 .mypy.ini Normal file
@@ -0,0 +1,35 @@
[mypy]
python_version = 3.7
files=lib/spack/llnl/**/*.py,lib/spack/spack/**/*.py
mypy_path=bin,lib/spack,lib/spack/external,var/spack/repos/builtin
# This and a generated import file allows supporting packages
namespace_packages=True
# To avoid re-factoring all the externals, ignore errors and missing imports
# globally, then turn back on in spack and spack submodules
ignore_errors=True
ignore_missing_imports=True

[mypy-spack.*]
ignore_errors=False
ignore_missing_imports=False

[mypy-packages.*]
ignore_errors=False
ignore_missing_imports=False

[mypy-llnl.*]
ignore_errors=False
ignore_missing_imports=False

[mypy-spack.test.packages]
ignore_errors=True

# ignore errors in fake import path for packages
[mypy-spack.pkg.*]
ignore_errors=True
ignore_missing_imports=True

# jinja has syntax in it that requires python3 and causes a parse error
# skip importing it
[mypy-jinja2]
follow_imports=skip
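With `files=` and `mypy_path=` set in this config, a bare `mypy` run from the repository root picks up the right targets automatically. A minimal sketch, assuming mypy is installed at the version the style job pins:

```console
$ pip install "mypy>=0.800"
$ mypy          # reads .mypy.ini; checks lib/spack/llnl and lib/spack/spack per files=
```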
.readthedocs.yml
@@ -2,7 +2,6 @@ version: 2

 sphinx:
   configuration: lib/spack/docs/conf.py
-  fail_on_warning: true

 python:
   version: 3.7
225
CHANGELOG.md
225
CHANGELOG.md
@@ -1,228 +1,3 @@
|
||||
# v0.17.2 (2022-04-13)
|
||||
|
||||
### Spack bugfixes
|
||||
* Fix --reuse with upstreams set in an environment (#29680)
|
||||
* config add: fix parsing of validator error to infer type from oneOf (#29475)
|
||||
* Fix spack -C command_line_scope used in conjunction with other flags (#28418)
|
||||
* Use Spec.constrain to construct spec lists for stacks (#28783)
|
||||
* Fix bug occurring when searching for inherited patches in packages (#29574)
|
||||
* Fixed a few bugs when manipulating symlinks (#28318, #29515, #29636)
|
||||
* Fixed a few minor bugs affecting command prompt, terminal title and argument completion (#28279, #28278, #28939, #29405, #29070, #29402)
|
||||
* Fixed a few bugs affecting the spack ci command (#29518, #29419)
|
||||
* Fix handling of Intel compiler environment (#29439)
|
||||
* Fix a few edge cases when reindexing the DB (#28764)
|
||||
* Remove "Known issues" from documentation (#29664)
|
||||
* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)
|
||||
|
||||
# v0.17.1 (2021-12-23)
|
||||
|
||||
### Spack Bugfixes
|
||||
* Allow locks to work under high contention (#27846)
|
||||
* Improve errors messages from clingo (#27707 #27970)
|
||||
* Respect package permissions for sbang (#25764)
|
||||
* Fix --enable-locks behavior (#24675)
|
||||
* Fix log-format reporter ignoring install errors (#25961)
|
||||
* Fix overloaded argparse keys (#27379)
|
||||
* Allow style commands to run with targets other than "develop" (#27472)
|
||||
* Log lock messages to debug level, instead of verbose level (#27408)
|
||||
* Handle invalid unicode while logging (#21447)
|
||||
* spack audit: fix API calls to variants (#27713)
|
||||
* Provide meaningful message for empty environment installs (#28031)
|
||||
* Added opensuse leap containers to spack containerize (#27837)
|
||||
* Revert "patches: make re-applied patches idempotent" (#27625)
|
||||
* MANPATH can use system defaults (#21682)
|
||||
* Add "setdefault" subcommand to `spack module tcl` (#14686)
|
||||
* Regenerate views when specs already installed (#28113)
|
||||
|
||||
### Package bugfixes
|
||||
* Fix external package detection for OpenMPI (#27255)
|
||||
* Update the UPC++ package to 2021.9.0 (#26996)
|
||||
* Added py-vermin v1.3.2 (#28072)
|
||||
|
||||
# v0.17.0 (2021-11-05)
|
||||
|
||||
`v0.17.0` is a major feature release.
|
||||
|
||||
## Major features in this release
|
||||
|
||||
1. **New concretizer is now default**
|
||||
The new concretizer introduced as an experimental feature in `v0.16.0`
|
||||
is now the default (#25502). The new concretizer is based on the
|
||||
[clingo](https://github.com/potassco/clingo) logic programming system,
|
||||
and it enables us to do much higher quality and faster dependency solving
|
||||
The old concretizer is still available via the `concretizer: original`
|
||||
setting, but it is deprecated and will be removed in `v0.18.0`.
|
||||
|
||||
2. **Binary Bootstrapping**
|
||||
To make it easier to use the new concretizer and binary packages,
|
||||
Spack now bootstraps `clingo` and `GnuPG` from public binaries. If it
|
||||
is not able to bootstrap them from binaries, it installs them from
|
||||
source code. With these changes, you should still be able to clone Spack
|
||||
and start using it almost immediately. (#21446, #22354, #22489, #22606,
|
||||
#22720, #22720, #23677, #23946, #24003, #25138, #25607, #25964, #26029,
|
||||
#26399, #26599).
|
||||
|
||||
3. **Reuse existing packages (experimental)**
|
||||
The most wanted feature from our
|
||||
[2020 user survey](https://spack.io/spack-user-survey-2020/) and
|
||||
the most wanted Spack feature of all time (#25310). `spack install`,
|
||||
`spack spec`, and `spack concretize` now have a `--reuse` option, which
|
||||
causes Spack to minimize the number of rebuilds it does. The `--reuse`
|
||||
option will try to find existing installations and binary packages locally
|
||||
and in registered mirrors, and will prefer to use them over building new
|
||||
versions. This will allow users to build from source *far* less than in
|
||||
prior versions of Spack. This feature will continue to be improved, with
|
||||
configuration options and better CLI expected in `v0.17.1`. It will become
|
||||
the *default* concretization mode in `v0.18.0`.
|
||||
|
||||
4. **Better error messages**
|
||||
We have improved the error messages generated by the new concretizer by
|
||||
using *unsatisfiable cores*. Spack will now print a summary of the types
|
||||
of constraints that were violated to make a spec unsatisfiable (#26719).
|
||||
|
||||
5. **Conditional variants**
|
||||
Variants can now have a `when="<spec>"` clause, allowing them to be
|
||||
conditional based on the version or other attributes of a package (#24858).
|
||||
|
||||
6. **Git commit versions**
|
||||
In an environment and on the command-line, you can now provide a full,
|
||||
40-character git commit as a version for any package with a top-level
|
||||
`git` URL. e.g., `spack install hdf5@45bb27f58240a8da7ebb4efc821a1a964d7712a8`.
|
||||
Spack will compare the commit to tags in the git repository to understand
|
||||
what versions it is ahead of or behind.
|
||||
|
||||
7. **Override local config and cache directories**

   You can now set `SPACK_DISABLE_LOCAL_CONFIG` to disable the `~/.spack` and
   `/etc/spack` configuration scopes. `SPACK_USER_CACHE_PATH` allows you to
   move caches out of `~/.spack` as well (#27022, #26735). This addresses
   common problems where users could not isolate CI environments from local
   configuration.

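   For example, to isolate a CI run (mirroring the documentation added in
   this release):

   ```console
   $ export SPACK_DISABLE_LOCAL_CONFIG=true
   $ export SPACK_USER_CACHE_PATH=/tmp/spack
   ```
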
8. **Improvements to Spack Containerize**

   For added reproducibility, you can now pin the Spack version used by
   `spack containerize` (#21910). The container build will then use the
   Spack version pinned at recipe-creation time instead of the latest Spack
   version.

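   A sketch of what the pin can look like in `spack.yaml` (assuming the
   `container:images` schema shown in the container docs diff further down
   this compare):

   ```yaml
   spack:
     container:
       images:
         os: ubuntu:18.04
         spack:
           ref: v0.17.0    # pin Spack at recipe-creation time
   ```
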
9. **New commands for dealing with tags**

   The `spack tags` command allows you to list tags on packages (#26136), and you
   can list tests and filter by tag with `spack test list` (#26842).

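   For example (a quick sketch):

   ```console
   $ spack tags         # list all tags defined on packages
   $ spack test list    # list available test suites
   ```
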
## Other new features of note

* Copy and relocate environment views as stand-alone installations (#24832)
* `spack diff` command can diff two installed specs (#22283, #25169)
* `spack -c <config>` can set one-off config parameters on CLI (#22251)
* `spack load --list` is an alias for `spack find --loaded` (#27184)
* `spack gpg` can export private key with `--secret` (#22557)
* `spack style` automatically bootstraps dependencies (#24819)
* `spack style --fix` automatically invokes `isort` (#24071)
* build dependencies can be installed from build caches with `--include-build-deps` (#19955)
* `spack audit` command for checking package constraints (#23053)
* Spack can now fetch from `CVS` repositories (yep, really) (#23212)
* `spack monitor` lets you upload analysis about installations to a
  [spack monitor server](https://github.com/spack/spack-monitor) (#23804, #24321,
  #23777, #25928)
* `spack python --path` shows which `python` Spack is using (#22006)
* `spack env activate --temp` can create temporary environments (#25388)
* `--preferred` and `--latest` options for `spack checksum` (#25830)
* `cc` is now pure POSIX and runs on Alpine (#26259)
* `SPACK_PYTHON` environment variable sets which `python` spack uses (#21222)
* `SPACK_SKIP_MODULES` lets you source `setup-env.sh` faster if you don't need modules (#24545)

## Major internal refactors

* `spec.yaml` files are now `spec.json`, yielding a large speed improvement (#22845)
* Splicing allows Spack specs to store mixed build provenance (#20262)
* More extensive hooks API for installations (#21930)
* New internal API for getting the active environment (#25439)

## Performance Improvements

* Parallelize separate concretization in environments; a previously 55-minute
  E4S solve now takes 2.5 minutes (#26264)
* Drastically improve YamlFilesystemView file removal performance via batching (#24355)
* Speed up spec comparison (#21618)
* Speed up environment activation (#25633)

## Archspec improvements

* support for new generic `x86_64_v2`, `x86_64_v3`, `x86_64_v4` targets
  (see [archspec#31](https://github.com/archspec/archspec-json/pull/31))
* `spack arch --generic` lets you get the best generic architecture for
  your node (#27061)
* added support for aocc (#20124), the `arm` compiler on `graviton2` (#24904)
  and on `a64fx` (#24524)

## Infrastructure, buildcaches, and services

* Add support for GCS Bucket Mirrors (#26382)
* Add `spackbot` to help package maintainers with notifications. See
  [spack.github.io/spackbot](https://spack.github.io/spackbot/)
* Reproducible pipeline builds with `spack ci rebuild` (#22887)
* Removed redundant concretizations from GitLab pipeline generation (#26622)
* Spack CI no longer generates jobs for unbuilt specs (#20435)
* Every pull request pipeline has its own buildcache (#25529)
* `--no-add` installs only specified specs and only if already present in… (#22657)
* Add environment-aware `spack buildcache sync` command (#25470)
* Binary cache installation speedups and improvements (#19690, #20768)

## Deprecations and Removals

* `spack setup` was deprecated in v0.16.0 and has now been removed.
  Use `spack develop` and `spack dev-build` instead.
* Remove unused `--dependencies` flag from `spack load` (#25731)
* Remove stubs for `spack module [refresh|find|rm|loads]`, all of which
  were deprecated in 2018.

## Notable Bugfixes

* Deactivate previous env before activating new one (#25409)
* Many fixes to error codes from `spack install` (#21319, #27012, #25314)
* config add: infer type based on JSON schema validation errors (#27035)
* `spack config edit` now works even if `spack.yaml` is broken (#24689)

## Packages

* Allow non-empty version ranges like `1.1.0:1.1` (#26402)
* Remove `.99`'s from many version ranges (#26422)
* Python: use platform-specific site packages dir (#25998)
* `CachedCMakePackage` for using *.cmake initial config files (#19316)
* `lua-lang` allows swapping `lua` and `luajit` (#22492)
* Better support for `ld.gold` and `ld.lld` (#25626)
* build times are now stored as metadata in `$prefix/.spack` (#21179)
* post-install tests can be reused in smoke tests (#20298)
* Packages can use `pypi` attribute to infer `homepage`/`url`/`list_url` (#17587)
* Use gnuconfig package for `config.guess` file replacement (#26035)
* patches: make re-applied patches idempotent (#26784)

## Spack community stats

* 5969 total packages, 920 new since `v0.16.0`
  * 358 new Python packages, 175 new R packages
* 513 people contributed to this release
  * 490 committers to packages
  * 105 committers to core
* Lots of GPU updates:
  * ~77 CUDA-related commits
  * ~66 AMD-related updates
  * ~27 OneAPI-related commits
  * 30 commits from AMD toolchain support
* `spack test` usage in packages is increasing
  * 1669 packages with tests (mostly generic python tests)
  * 93 packages with their own tests

# v0.16.3 (2021-09-21)

* clang/llvm: fix version detection (#19978)
* Fix use of quotes in Python build system (#22279)
* Cray: fix extracting paths from module files (#23472)
* Use AWS CloudFront for source mirror (#23978)
* Ensure all roots of an installed environment are marked explicit in db (#24277)
* Fix fetching for Python 3.8 and 3.9 (#24686)
* locks: only open lockfiles once instead of for every lock held (#24794)
* Remove the EOL centos:6 docker image

# v0.16.2 (2021-05-22)

* Major performance improvement for `spack load` and other commands (#23661)

@@ -1,10 +1,9 @@
# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack

[![Unit Tests](https://github.com/spack/spack/workflows/linux%20tests/badge.svg)](https://github.com/spack/spack/actions)
[![Bootstrapping](https://github.com/spack/spack/actions/workflows/bootstrap.yml/badge.svg)](https://github.com/spack/spack/actions/workflows/bootstrap.yml)
[![Unit Tests](https://github.com/spack/spack/workflows/linux%20builds/badge.svg)](https://github.com/spack/spack/actions)
[![macOS Builds (nightly)](https://github.com/spack/spack/workflows/macOS%20builds%20nightly/badge.svg)](https://github.com/spack/spack/actions?query=workflow%3A%22macOS+builds+nightly%22)
[![codecov](https://codecov.io/gh/spack/spack/branch/develop/graph/badge.svg)](https://codecov.io/gh/spack/spack)
[![Containers](https://github.com/spack/spack/actions/workflows/build-containers.yml/badge.svg)](https://github.com/spack/spack/actions/workflows/build-containers.yml)
[![Read the Docs](https://readthedocs.org/projects/spack/badge/?version=latest)](https://spack.readthedocs.io)
[![Slack](https://slack.spack.io/badge.svg)](https://slack.spack.io)

@@ -27,7 +26,7 @@ for examples and highlights.
To install spack and your first package, make sure you have Python.
Then:

    $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
    $ git clone https://github.com/spack/spack.git
    $ cd spack/bin
    $ ./spack install zlib

@@ -37,8 +36,6 @@ Documentation
[**Full documentation**](https://spack.readthedocs.io/) is available, or
run `spack help` or `spack help --all`.

For a cheat sheet on Spack syntax, run `spack help --spec`.

Tutorial
----------------

24
SECURITY.md
@@ -1,24 +0,0 @@
# Security Policy

## Supported Versions

We provide security updates for the following releases.
For more on Spack's release structure, see
[`README.md`](https://github.com/spack/spack#releases).

| Version | Supported          |
| ------- | ------------------ |
| develop | :white_check_mark: |
| 0.16.x  | :white_check_mark: |

## Reporting a Vulnerability

To report a vulnerability or other security
issue, email maintainers@spack.io.

You can expect to hear back within two days.
If your security issue is accepted, we will do
our best to release a fix within a week. If
fixing the issue will take longer than this,
we will discuss timeline options with you.

23
bin/spack
@@ -28,7 +28,6 @@ exit 1
from __future__ import print_function

import os
import os.path
import sys

min_python3 = (3, 5)
@@ -71,28 +70,6 @@ if "ruamel.yaml" in sys.modules:
if "ruamel" in sys.modules:
    del sys.modules["ruamel"]

# The following code is here to avoid failures when updating
# the develop version, due to spurious argparse.pyc files remaining
# in the libs/spack/external directory, see:
# https://github.com/spack/spack/pull/25376
# TODO: Remove in v0.18.0 or later
try:
    import argparse
except ImportError:
    argparse_pyc = os.path.join(spack_external_libs, 'argparse.pyc')
    if not os.path.exists(argparse_pyc):
        raise
    try:
        os.remove(argparse_pyc)
        import argparse  # noqa
    except Exception:
        msg = ('The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. '
               'Either delete it manually or ask some administrator to '
               'delete it for you.')
        print(msg.format(argparse_pyc))
        sys.exit(1)


import spack.main  # noqa

# Once we've set up the system path, run the spack main method

@@ -1,32 +0,0 @@
bootstrap:
  # If set to false Spack will not bootstrap missing software,
  # but will instead raise an error.
  enable: true
  # Root directory for bootstrapping work. The software bootstrapped
  # by Spack is installed in a "store" subfolder of this root directory
  root: $user_cache_path/bootstrap
  # Methods that can be used to bootstrap software. Each method may or
  # may not be able to bootstrap all of the software that Spack needs,
  # depending on its type.
  sources:
  - name: 'github-actions'
    type: buildcache
    description: |
      Buildcache generated from a public workflow using Github Actions.
      The sha256 checksum of binaries is checked before installation.
    info:
      url: https://mirror.spack.io/bootstrap/github-actions/v0.1
      homepage: https://github.com/alalazo/spack-bootstrap-mirrors
      releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
  # This method is just Spack bootstrapping the software it needs from sources.
  # It has been added here so that users can selectively disable bootstrapping
  # from sources by "untrusting" it.
  - name: spack-install
    type: install
    description: |
      Specs built from sources by Spack. May take a long time.
  trusted:
    # By default we trust bootstrapping from sources and from binaries
    # produced on Github via the workflow
    github-actions: true
    spack-install: true

@@ -42,8 +42,8 @@ config:
  # (i.e., ``$TMP`` or ``$TMPDIR``).
  #
  # Another option that prevents conflicts and potential permission issues is
  # to specify `$user_cache_path/stage`, which ensures each user builds in their
  # home directory.
  # to specify `~/.spack/stage`, which ensures each user builds in their home
  # directory.
  #
  # A more traditional path uses the value of `$spack/var/spack/stage`, which
  # builds directly inside Spack's instance without staging them in a
@@ -60,13 +60,13 @@ config:
  # identifies Spack staging to avoid accidentally wiping out non-Spack work.
  build_stage:
    - $tempdir/$user/spack-stage
    - $user_cache_path/stage
    - ~/.spack/stage
    # - $spack/var/spack/stage

  # Directory in which to run tests and store test results.
  # Tests will be stored in directories named by date/time and package
  # name/hash.
  test_stage: $user_cache_path/test
  test_stage: ~/.spack/test

  # Cache directory for already downloaded source tarballs and archived
  # repositories. This can be purged with `spack clean --downloads`.
@@ -75,7 +75,7 @@ config:

  # Cache directory for miscellaneous files, like the package index.
  # This can be purged with `spack clean --misc-cache`
  misc_cache: $user_cache_path/cache
  misc_cache: ~/.spack/cache


  # Timeout in seconds used for downloading sources etc. This only applies
@@ -134,10 +134,6 @@ config:
  # enabling locks.
  locks: true

  # The default url fetch method to use.
  # If set to 'curl', Spack will require curl on the user's system
  # If set to 'urllib', Spack will use python built-in libs to fetch
  url_fetch_method: urllib

  # The maximum number of jobs to use for the build system (e.g. `make`), when
  # the -j flag is not given on the command line. Defaults to 16 when not set.
@@ -160,10 +156,11 @@ config:
  # sufficiently for many specs.
  #
  # 'clingo': Uses a logic solver under the hood to solve DAGs with full
  #           backtracking and optimization for user preferences. Spack will
  #           try to bootstrap the logic solver, if not already available.
  #           backtracking and optimization for user preferences.
  #
  concretizer: clingo
  # 'clingo' currently requires the clingo ASP solver to be installed and
  # built with python bindings. 'original' is built in.
  concretizer: original

  # How long to wait to lock the Spack installation database. This lock is used
@@ -190,8 +187,3 @@ config:
  # Set to 'false' to allow installation on filesystems that don't allow setgid bit
  # manipulation by unprivileged user (e.g. AFS)
  allow_sgid: true

  # Whether to set the terminal title to display status information during
  # building and installing packages. This gives information about Spack's
  # current progress as well as the current and total number of packages.
  terminal_title: false

@@ -43,7 +43,6 @@ packages:
    opencl: [pocl]
    onedal: [intel-oneapi-dal]
    osmesa: [mesa+osmesa, mesa18+osmesa]
    pbs: [openpbs, torque]
    pil: [py-pillow]
    pkgconfig: [pkgconf, pkg-config]
    rpc: [libtirpc]

@@ -2,7 +2,7 @@
#

# You can set these variables from the command line.
SPHINXOPTS = -W --keep-going
SPHINXOPTS = -W
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build

@@ -31,13 +31,13 @@ colorized output with a flag

.. code-block:: console

   $ spack --color always find | less -R
   $ spack --color always | less -R

or an environment variable

.. code-block:: console

   $ SPACK_COLOR=always spack find | less -R
   $ SPACK_COLOR=always spack | less -R

--------------------------
Listing available packages

@@ -188,34 +188,6 @@ configuration a **spec**. In the commands above, ``mpileaks`` and
``mpileaks@3.0.4`` are both valid *specs*. We'll talk more about how
you can use them to customize an installation in :ref:`sec-specs`.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Reusing installed dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. warning::

   The ``--reuse`` option described here is experimental, and it will
   likely be replaced with a different option and configuration settings
   in the next Spack release.

By default, when you run ``spack install``, Spack tries to build a new
version of the package you asked for, along with updated versions of
its dependencies. This gets you the latest versions and configurations,
but it can result in unwanted rebuilds if you update Spack frequently.

If you want Spack to try hard to reuse existing installations as dependencies,
you can add the ``--reuse`` option:

.. code-block:: console

   $ spack install --reuse mpich

This will not do anything if ``mpich`` is already installed. If ``mpich``
is not installed, but dependencies like ``hwloc`` and ``libfabric`` are,
``mpich`` will be built with the installed versions, if possible.
You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
will be reused and what will be built before you install.

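For example (a quick sketch):

.. code-block:: console

   $ spack spec -I mpich
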
.. _cmd-spack-uninstall:

^^^^^^^^^^^^^^^^^^^
@@ -723,136 +695,6 @@ structured the way you want:

   }


^^^^^^^^^^^^^^
``spack diff``
^^^^^^^^^^^^^^

It's often the case that you have two versions of a spec that you need to
disambiguate. Let's say that we've installed two variants of zlib, one with
and one without the optimize variant:

.. code-block:: console

   $ spack install zlib
   $ spack install zlib -optimize

When we do ``spack find`` we see the two versions.

.. code-block:: console

   $ spack find zlib
   ==> 2 installed packages
   -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
   zlib@1.2.11  zlib@1.2.11


Let's now say that we want to uninstall zlib. We run the command, and hit a
problem very quickly since we have two!

.. code-block:: console

   $ spack uninstall zlib
   ==> Error: zlib matches multiple packages:

   -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
   efzjziy zlib@1.2.11  sl7m27m zlib@1.2.11

   ==> Error: You can either:
       a) use a more specific spec, or
       b) specify the spec by its hash (e.g. `spack uninstall /hash`), or
       c) use `spack uninstall --all` to uninstall ALL matching specs.

Oh no! We can see from the above that we have two different versions of zlib installed,
and the only difference between the two is the hash. This is a good use case for
``spack diff``, which can easily show us the "diff" or set difference
between properties for two packages. Let's try it out.
Since the only difference we see in the ``spack find`` view is the hash, let's use
``spack diff`` to look for more detail. We will provide the two hashes:

.. code-block:: console

   $ spack diff /efzjziy /sl7m27m
   ==> Warning: This interface is subject to change.

   --- zlib@1.2.11efzjziyc3dmb5h5u5azsthgbgog5mj7g
   +++ zlib@1.2.11sl7m27mzkbejtkrajigj3a3m37ygv4u2
   @@ variant_value @@
   -  zlib optimize False
   +  zlib optimize True

The output is colored, and written in the style of a git diff. This means that you
can copy and paste it into a GitHub markdown as a code block with language "diff"
and it will render nicely! Here is an example:

.. code-block:: md

   ```diff
   --- zlib@1.2.11/efzjziyc3dmb5h5u5azsthgbgog5mj7g
   +++ zlib@1.2.11/sl7m27mzkbejtkrajigj3a3m37ygv4u2
   @@ variant_value @@
   -  zlib optimize False
   +  zlib optimize True
   ```

Awesome! Now let's read the diff. It tells us that our first zlib was built with ``~optimize``
(``False``) and the second was built with ``+optimize`` (``True``). You can't see it in the docs
here, but the output above is also colored based on the content being an addition (+) or
subtraction (-).

This is a small example, but you will be able to see differences for any attributes on the
installation spec. Running ``spack diff A B`` means we'll see which spec attributes are on
``B`` but not on ``A`` (green) and which are on ``A`` but not on ``B`` (red). Here is another
example with an additional difference type, ``version``:

.. code-block:: console

   $ spack diff python@2.7.8 python@3.8.11
   ==> Warning: This interface is subject to change.

   --- python@2.7.8/tsxdi6gl4lihp25qrm4d6nys3nypufbf
   +++ python@3.8.11/yjtseru4nbpllbaxb46q7wfkyxbuvzxx
   @@ variant_value @@
   -  python patches a8c52415a8b03c0e5f28b5d52ae498f7a7e602007db2b9554df28cd5685839b8
   +  python patches 0d98e93189bc278fbc37a50ed7f183bd8aaf249a8e1670a465f0db6bb4f8cf87
   @@ version @@
   -  openssl 1.0.2u
   +  openssl 1.1.1k
   -  python 2.7.8
   +  python 3.8.11

Let's say that we were only interested in one kind of attribute above, ``version``.
We can ask the command to only output this attribute. To do this, you'd add
the ``--attribute`` parameter, which defaults to all attributes. Here is how you
would filter to show just versions:

.. code-block:: console

   $ spack diff --attribute version python@2.7.8 python@3.8.11
   ==> Warning: This interface is subject to change.

   --- python@2.7.8/tsxdi6gl4lihp25qrm4d6nys3nypufbf
   +++ python@3.8.11/yjtseru4nbpllbaxb46q7wfkyxbuvzxx
   @@ version @@
   -  openssl 1.0.2u
   +  openssl 1.1.1k
   -  python 2.7.8
   +  python 3.8.11

And you can add as many attributes as you'd like with multiple ``--attribute`` arguments
(for lots of attributes, you can use ``-a`` for short). Finally, if you want to view the
data as json (and possibly pipe into an output file) just add ``--json``:

.. code-block:: console

   $ spack diff --json python@2.7.8 python@3.8.11

This data will be much longer because along with the differences for ``A`` vs. ``B`` and
``B`` vs. ``A``, the JSON output also shows the intersection.

------------------------
Using installed packages
------------------------
@@ -896,9 +738,8 @@ your path:

These commands will add appropriate directories to your ``PATH``,
``MANPATH``, ``CPATH``, and ``LD_LIBRARY_PATH`` according to the
:ref:`prefix inspections <customize-env-modifications>` defined in your
modules configuration.
When you no longer want to use a package, you can type unload or
unuse similarly:
modules configuration. When you no longer want to use a package, you
can type unload or unuse similarly:

.. code-block:: console

@@ -939,22 +780,6 @@ first ``libelf`` above, you would run:

   $ spack load /qmm4kso

To see which packages you have loaded in your environment, you can
use ``spack find --loaded``.

.. code-block:: console

   $ spack find --loaded
   ==> 2 installed packages
   -- linux-debian7 / gcc@4.4.7 ------------------------------------
   libelf@0.8.13

   -- linux-debian7 / intel@15.0.0 ---------------------------------
   libelf@0.8.13

You can also use ``spack load --list`` to get the same output, but it
does not have the full set of query options that ``spack find`` offers.

We'll learn more about Spack's spec syntax in the next section.

@@ -1694,7 +1519,6 @@ and it will be added to the ``PYTHONPATH`` in your current shell:

Now ``import numpy`` will succeed for as long as you keep your current
session open.
The loaded packages can be checked using ``spack find --loaded``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Loading Extensions via Modules

@@ -63,7 +63,6 @@ on these ideas for each distinct build system that Spack supports:

   build_systems/intelpackage
   build_systems/rocmpackage
   build_systems/custompackage
   build_systems/multiplepackage

For reference, the :py:mod:`Build System API docs <spack.build_systems>`
provide a list of build systems and methods/attributes that can be

@@ -112,44 +112,20 @@ phase runs:

.. code-block:: console

   $ autoreconf --install --verbose --force -I <aclocal-prefix>/share/aclocal

In case you need to add more arguments, override ``autoreconf_extra_args``
in your ``package.py`` on class scope like this:

.. code-block:: python

   autoreconf_extra_args = ["-Im4"]

   $ libtoolize
   $ aclocal
   $ autoreconf --install --verbose --force

All you need to do is add a few Autotools dependencies to the package.
Most stable releases will come with a ``configure`` script, but if you
check out a commit from the ``master`` branch, you would want to add:
check out a commit from the ``develop`` branch, you would want to add:

.. code-block:: python

   depends_on('autoconf', type='build', when='@master')
   depends_on('automake', type='build', when='@master')
   depends_on('libtool', type='build', when='@master')

It is typically redundant to list the ``m4`` macro processor package as a
dependency, since ``autoconf`` already depends on it.

"""""""""""""""""""""""""""""""
Using a custom autoreconf phase
"""""""""""""""""""""""""""""""

In some cases, it might be necessary to replace the default implementation
of the autoreconf phase with one that runs a script interpreter. In this
example, the ``bash`` shell is used to run the ``autogen.sh`` script.

.. code-block:: python

   def autoreconf(self, spec, prefix):
       which('bash')('autogen.sh')

"""""""""""""""""""""""""""""""""""""""
patching configure or Makefile.in files
"""""""""""""""""""""""""""""""""""""""

   depends_on('autoconf', type='build', when='@develop')
   depends_on('automake', type='build', when='@develop')
   depends_on('libtool', type='build', when='@develop')
   depends_on('m4', type='build', when='@develop')

In some cases, developers might need to distribute a patch that modifies
one of the files used to generate ``configure`` or ``Makefile.in``.

@@ -159,57 +135,6 @@ create a new patch that directly modifies ``configure``. That way,
Spack can use the secondary patch and additional build system
dependencies aren't necessary.

""""""""""""""""""""""""""""
Old Autotools helper scripts
""""""""""""""""""""""""""""

Autotools based tarballs come with helper scripts such as ``config.sub`` and
``config.guess``. It is the responsibility of the developers to keep these files
up to date so that they run on every platform, but for very old software
releases this is impossible. In these cases Spack can help to replace these
files with newer ones, without having to add the heavy dependency on
``automake``.

Automatic helper script replacement is currently enabled by default on
``ppc64le`` and ``aarch64``, as these are the known cases where old scripts fail.
On these targets, ``AutotoolsPackage`` adds a build dependency on ``gnuconfig``,
which is a very light-weight package with newer versions of the helper files.
Spack then tries to run all the helper scripts it can find in the release, and
replaces them on failure with the helper scripts from ``gnuconfig``.

To opt out of this feature, use the following setting:

.. code-block:: python

   patch_config_files = False

To enable it conditionally on different architectures, define a property and
make the package depend on ``gnuconfig`` as a build dependency:

.. code-block:: python

   depends_on('gnuconfig', when='@1.0:')

   @property
   def patch_config_files(self):
       return self.spec.satisfies("@1.0:")

.. note::

   On some exotic architectures it is necessary to use system provided
   ``config.sub`` and ``config.guess`` files. In this case, the most
   transparent solution is to mark the ``gnuconfig`` package as external and
   non-buildable, with a prefix set to the directory containing the files:

   .. code-block:: yaml

      gnuconfig:
        buildable: false
        externals:
        - spec: gnuconfig@master
          prefix: /usr/share/configure_files/


""""""""""""""""
force_autoreconf
""""""""""""""""

@@ -399,29 +324,8 @@ options:

   --with-libfabric=</path/to/libfabric>

"""""""""""""""""""""""
The ``variant`` keyword
"""""""""""""""""""""""

When Spack variants and configure flags do not correspond one-to-one, the
``variant`` keyword can be passed to ``with_or_without`` and
``enable_or_disable``. For example:

.. code-block:: python

   variant('debug_tools', default=False)
   config_args += self.enable_or_disable('debug-tools', variant='debug_tools')

Or when one variant controls multiple flags:

.. code-block:: python

   variant('debug_tools', default=False)
   config_args += self.with_or_without('memchecker', variant='debug_tools')
   config_args += self.with_or_without('profiler', variant='debug_tools')

""""""""""""""""""""
Activation overrides
activation overrides
""""""""""""""""""""

Finally, the behavior of either ``with_or_without`` or

@@ -130,8 +130,8 @@ Adding flags to cmake
To add additional flags to the ``cmake`` call, simply override the
``cmake_args`` function. The following example defines values for the flags
``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and
:meth:`~spack.build_systems.cmake.CMakePackage.define_from_variant` helper functions:
and without the :py:meth:`~.CMakePackage.define` and
:py:meth:`~.CMakePackage.define_from_variant` helper functions:

.. code-block:: python

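The body of that example is cut off by the hunk above; a minimal sketch of
such a ``cmake_args`` override (the flag values here are illustrative):

.. code-block:: python

   def cmake_args(self):
       args = [
           '-DWHATEVER:STRING=somevalue',
           self.define('ENABLE_BROKEN_FEATURE', False),
           self.define_from_variant('DETECT_HDF5', 'hdf5'),
           self.define_from_variant('THREADS'),
       ]
       return args
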
@@ -1,350 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _multiplepackage:

----------------------
Multiple Build Systems
----------------------

Quite frequently, a package will change build systems from one version to the
next. For example, a small project that once used a single Makefile to build
may now require Autotools to handle the increased number of files that need to
be compiled. Or, a package that once used Autotools may switch to CMake for
Windows support. In this case, it becomes a bit more challenging to write a
single build recipe for this package in Spack.

There are several ways that this can be handled in Spack:

#. Subclass the new build system, and override phases as needed (preferred)
#. Subclass ``Package`` and implement ``install`` as needed
#. Create separate ``*-cmake``, ``*-autotools``, etc. packages for each build system
#. Rename the old package to ``*-legacy`` and create a new package
#. Move the old package to a ``legacy`` repository and create a new package
#. Drop older versions that only support the older build system

Of these options, 1 is preferred, and will be demonstrated in this
documentation. Options 3-5 have issues with concretization, so shouldn't be
used. Options 4-5 also don't support more than two build systems. Option 6 only
works if the old versions are no longer needed. Option 1 is preferred over 2
because it makes it easier to drop the old build system entirely.

The exact syntax of the package depends on which build systems you need to
support. Below are a couple of common examples.

^^^^^^^^^^^^^^^^^^^^^
Makefile -> Autotools
^^^^^^^^^^^^^^^^^^^^^

Let's say we have the following package:

.. code-block:: python

   class Foo(MakefilePackage):
       version("1.2.0", sha256="...")

       def edit(self, spec, prefix):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       def install(self, spec, prefix):
           install_tree(".", prefix)


The package subclasses from :ref:`makefilepackage`, which has three phases:

#. ``edit`` (does nothing by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

In this case, the ``install`` phase needed to be overridden because the
Makefile did not have an install target. We also modify the Makefile to use
Spack's compiler wrappers. The default ``build`` phase is not changed.

Starting with version 1.3.0, we want to use Autotools to build instead.
:ref:`autotoolspackage` has four phases:

#. ``autoreconf`` (does nothing if a configure script already exists)
#. ``configure`` (runs ``./configure --prefix=...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

If the only version we need to support is 1.3.0, the package would look as
simple as:

.. code-block:: python

   class Foo(AutotoolsPackage):
       version("1.3.0", sha256="...")

       def configure_args(self):
           return ["--enable-shared"]


In this case, we use the default methods for each phase and only override
``configure_args`` to specify additional flags to pass to ``./configure``.

If we wanted to write a single package that supports both versions 1.2.0 and
1.3.0, it would look something like:

.. code-block:: python

   class Foo(AutotoolsPackage):
       version("1.3.0", sha256="...")
       version("1.2.0", sha256="...", deprecated=True)

       def configure_args(self):
           return ["--enable-shared"]

       # Remove the following once version 1.2.0 is dropped
       @when("@:1.2")
       def patch(self):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       @when("@:1.2")
       def autoreconf(self, spec, prefix):
           pass

       @when("@:1.2")
       def configure(self, spec, prefix):
           pass

       @when("@:1.2")
       def install(self, spec, prefix):
           install_tree(".", prefix)


There are a few interesting things to note here:

* We added ``deprecated=True`` to version 1.2.0. This signifies that version
  1.2.0 is deprecated and shouldn't be used. However, if a user still relies
  on version 1.2.0, it's still there and builds just fine.
* We moved the contents of the ``edit`` phase to the ``patch`` function. Since
  ``AutotoolsPackage`` doesn't have an ``edit`` phase, the only way for this
  step to be executed is to move it to the ``patch`` function, which always
  gets run.
* The ``autoreconf`` and ``configure`` phases become no-ops. Since the old
  Makefile-based build system doesn't use these, we ignore these phases when
  building ``foo@1.2.0``.
* The ``@when`` decorator is used to override these phases only for older
  versions. The default methods are used for ``foo@1.3:``.

Once a new Spack release comes out, version 1.2.0 and everything below the
comment can be safely deleted. The result is the same as if we had written a
package for version 1.3.0 from scratch.

^^^^^^^^^^^^^^^^^^
Autotools -> CMake
^^^^^^^^^^^^^^^^^^

Let's say we have the following package:

.. code-block:: python

   class Bar(AutotoolsPackage):
       version("1.2.0", sha256="...")

       def configure_args(self):
           return ["--enable-shared"]


The package subclasses from :ref:`autotoolspackage`, which has four phases:

#. ``autoreconf`` (does nothing if a configure script already exists)
#. ``configure`` (runs ``./configure --prefix=...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

In this case, we use the default methods for each phase and only override
``configure_args`` to specify additional flags to pass to ``./configure``.

Starting with version 1.3.0, we want to use CMake to build instead.
:ref:`cmakepackage` has three phases:

#. ``cmake`` (runs ``cmake ...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

If the only version we need to support is 1.3.0, the package would look as
simple as:

.. code-block:: python

   class Bar(CMakePackage):
       version("1.3.0", sha256="...")

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]


In this case, we use the default methods for each phase and only override
``cmake_args`` to specify additional flags to pass to ``cmake``.

If we wanted to write a single package that supports both versions 1.2.0 and
1.3.0, it would look something like:

.. code-block:: python

   class Bar(CMakePackage):
       version("1.3.0", sha256="...")
       version("1.2.0", sha256="...", deprecated=True)

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]

       # Remove the following once version 1.2.0 is dropped
       def configure_args(self):
           return ["--enable-shared"]

       @when("@:1.2")
       def cmake(self, spec, prefix):
           configure("--prefix=" + prefix, *self.configure_args())


There are a few interesting things to note here:

* We added ``deprecated=True`` to version 1.2.0. This signifies that version
  1.2.0 is deprecated and shouldn't be used. However, if a user still relies
  on version 1.2.0, it's still there and builds just fine.
* Since CMake and Autotools are so similar, we only need to override the
  ``cmake`` phase; we can use the default ``build`` and ``install`` phases.
* We override ``cmake`` to run ``./configure`` for older versions.
  ``configure_args`` remains the same.
* The ``@when`` decorator is used to override these phases only for older
  versions. The default methods are used for ``bar@1.3:``.

Once a new Spack release comes out, version 1.2.0 and everything below the
comment can be safely deleted. The result is the same as if we had written a
package for version 1.3.0 from scratch.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Multiple build systems for the same version
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

During the transition from one build system to another, developers often
support multiple build systems at the same time. Spack can only use a single
build system for a single version. To decide which build system to use for a
particular version, take the following things into account:

1. If the developers explicitly state that one build system is preferred over
   another, use that one.
2. If one build system is considered "experimental" while another is considered
   "stable", use the stable build system.
3. Otherwise, use the newer build system.

The developer preference for which build system to use can change over time as
a newer build system becomes stable/recommended.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dropping support for old build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

When older versions of a package don't support a newer build system, it can be
tempting to simply delete them from a package. This significantly reduces
package complexity and makes the build recipe much easier to maintain. However,
other packages or Spack users may rely on these older versions. The recommended
approach is to first support both build systems (as demonstrated above),
:ref:`deprecate <deprecate>` versions that rely on the old build system, and
remove those versions and any phases that needed to be overridden in the next
Spack release.

^^^^^^^^^^^^^^^^^^^^^^^^^^^
Three or more build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^

In rare cases, a package may change build systems multiple times. For example,
a package may start with Makefiles, then switch to Autotools, then switch to
CMake. The same logic used above can be extended to any number of build systems.
For example:

.. code-block:: python

   class Baz(CMakePackage):
       version("1.4.0", sha256="...")  # CMake
       version("1.3.0", sha256="...")  # Autotools
       version("1.2.0", sha256="...")  # Makefile

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]

       # Remove the following once version 1.3.0 is dropped
       def configure_args(self):
           return ["--enable-shared"]

       @when("@1.3")
       def cmake(self, spec, prefix):
           configure("--prefix=" + prefix, *self.configure_args())

       # Remove the following once version 1.2.0 is dropped
       @when("@:1.2")
       def patch(self):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       @when("@:1.2")
       def cmake(self, spec, prefix):
           pass

       @when("@:1.2")
       def install(self, spec, prefix):
           install_tree(".", prefix)


^^^^^^^^^^^^^^^^^^^
Additional examples
^^^^^^^^^^^^^^^^^^^

When writing new packages, it often helps to see examples of existing packages.
Here is an incomplete list of existing Spack packages that have changed build
systems before:

================ ===================== ================
Package          Previous Build System New Build System
================ ===================== ================
amber            custom                CMake
arpack-ng        Autotools             CMake
atk              Autotools             Meson
blast            None                  Autotools
dyninst          Autotools             CMake
evtgen           Autotools             CMake
fish             Autotools             CMake
gdk-pixbuf       Autotools             Meson
glib             Autotools             Meson
glog             Autotools             CMake
gmt              Autotools             CMake
gtkplus          Autotools             Meson
hpl              Makefile              Autotools
interproscan     Perl                  Maven
jasper           Autotools             CMake
kahip            SCons                 CMake
kokkos           Makefile              CMake
kokkos-kernels   Makefile              CMake
leveldb          Makefile              CMake
libdrm           Autotools             Meson
libjpeg-turbo    Autotools             CMake
mesa             Autotools             Meson
metis            None                  CMake
mpifileutils     Autotools             CMake
muparser         Autotools             CMake
mxnet            Makefile              CMake
nest             Autotools             CMake
neuron           Autotools             CMake
nsimd            CMake                 nsconfig
opennurbs        Makefile              CMake
optional-lite    None                  CMake
plasma           Makefile              CMake
preseq           Makefile              Autotools
protobuf         Autotools             CMake
py-pygobject     Autotools             Python
singularity      Autotools             Makefile
span-lite        None                  CMake
ssht             Makefile              CMake
string-view-lite None                  CMake
superlu          Makefile              CMake
superlu-dist     Makefile              CMake
uncrustify       Autotools             CMake
================ ===================== ================

Packages that support multiple build systems can be a bit confusing to write.
Don't hesitate to open an issue or draft pull request and ask for advice from
other Spack developers!

@@ -336,7 +336,7 @@ This would be translated to:

.. code-block:: python

   extends('python')
   depends_on('python@3.5:3', type=('build', 'run'))
   depends_on('python@3.5:3.999', type=('build', 'run'))


Many ``setup.py`` or ``setup.cfg`` files also contain information like::

@@ -568,7 +568,7 @@ check the ``METADATA`` file for lines like::

Lines that use ``Requires-Dist`` are similar to ``install_requires``.
Lines that use ``Provides-Extra`` are similar to ``extra_requires``,
and you can add a variant for those dependencies. The ``~=1.11.0``
syntax is equivalent to ``1.11.0:1.11``.
syntax is equivalent to ``1.11.0:1.11.999``.

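For instance, a ``Requires-Dist: foo (~=1.11.0)`` line would map to something
like this (a sketch; ``py-foo`` is a hypothetical package):

.. code-block:: python

   depends_on('py-foo@1.11.0:1.11', type=('build', 'run'))
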
""""""""""
setuptools

@@ -97,19 +97,15 @@ def setup(sphinx):
# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '3.4'
needs_sphinx = '1.8'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.graphviz',
    'sphinx.ext.intersphinx',
    'sphinx.ext.napoleon',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
    'sphinxcontrib.programoutput',
]
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.graphviz',
              'sphinx.ext.napoleon',
              'sphinx.ext.todo',
              'sphinxcontrib.programoutput']

# Set default graphviz options
graphviz_dot_args = [
@@ -168,19 +164,6 @@ def setup(sphinx):
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '_spack_root', '.spack-env']

nitpicky = True
nitpick_ignore = [
    # Python classes that intersphinx is unable to resolve
    ('py:class', 'argparse.HelpFormatter'),
    ('py:class', 'contextlib.contextmanager'),
    ('py:class', 'module'),
    ('py:class', '_io.BufferedReader'),
    ('py:class', 'unittest.case.TestCase'),
    ('py:class', '_frozen_importlib_external.SourceFileLoader'),
    # Spack classes that are private and we don't want to expose
    ('py:class', 'spack.provider_index._IndexBase'),
]

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

@@ -375,11 +358,3 @@ class SpackStyle(DefaultStyle):

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'


# -- Extension configuration -------------------------------------------------

# sphinx.ext.intersphinx
intersphinx_mapping = {
    "python": ("https://docs.python.org/3", None),
}

@@ -259,16 +259,3 @@ and ld.so will ONLY search for dependencies in the ``RUNPATH`` of
the loading object.

DO NOT MIX the two options within the same install tree.

----------------------
``terminal_title``
----------------------

By setting this option to ``true``, Spack will update the terminal's title to
provide information about its current progress as well as the current and
total package numbers.

To work properly, this requires your terminal to reset its title after
Spack has finished its work, otherwise Spack's status information will
remain in the terminal's title indefinitely. Most terminals should already
be set up this way and clear Spack's status information.

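A minimal sketch of enabling it in ``config.yaml`` (the option appears with
its default value in the ``config.yaml`` hunk earlier in this compare):

.. code-block:: yaml

   config:
     terminal_title: true
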
@@ -402,15 +402,12 @@ Spack-specific variables

Spack understands several special variables. These are:

* ``$env``: name of the currently active :ref:`environment <environments>`
* ``$spack``: path to the prefix of this Spack installation
* ``$tempdir``: default system temporary directory (as specified in
  Python's `tempfile.tempdir
  <https://docs.python.org/2/library/tempfile.html#tempfile.tempdir>`_
  variable)
* ``$user``: name of the current user
* ``$user_cache_path``: user cache directory (``~/.spack`` unless
  :ref:`overridden <local-config-overrides>`)

Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters:

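The example that followed this sentence falls outside the hunk; as a sketch,
both of these spellings expand the same variable (reusing the ``misc_cache``
setting shown earlier):

.. code-block:: yaml

   config:
     misc_cache: $user_cache_path/cache
     # equivalently: misc_cache: ${user_cache_path}/cache
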
@@ -565,39 +562,3 @@ built in and are not overridden by a configuration file. The
command line. ``dirty`` and ``install_tree`` come from the custom
scopes ``./my-scope`` and ``./my-scope-2``, and all other configuration
options come from the default configuration files that ship with Spack.

.. _local-config-overrides:

------------------------------
Overriding Local Configuration
------------------------------

Spack's ``system`` and ``user`` scopes provide ways for administrators and users to set
global defaults for all Spack instances, but for use cases where one wants a clean Spack
installation, these scopes can be undesirable. For example, users may want to opt out of
global system configuration, or they may want to ignore their own home directory
settings when running in a continuous integration environment.

Spack also, by default, keeps various caches and user data in ``~/.spack``, but
users may want to override these locations.

Spack provides three environment variables that allow you to override or opt out of
configuration locations:

* ``SPACK_USER_CONFIG_PATH``: Override the path to use for the
  ``user`` scope (``~/.spack`` by default).
* ``SPACK_SYSTEM_CONFIG_PATH``: Override the path to use for the
  ``system`` scope (``/etc/spack`` by default).
* ``SPACK_DISABLE_LOCAL_CONFIG``: set this environment variable to completely disable
  **both** the system and user configuration directories. Spack will only consider its
  own defaults and ``site`` configuration locations.

And one that allows you to move the default cache location:

* ``SPACK_USER_CACHE_PATH``: Override the default path to use for user data
  (misc_cache, tests, reports, etc.)

With these settings, if you want to isolate Spack in a CI environment, you can do this::

   export SPACK_DISABLE_LOCAL_CONFIG=true
   export SPACK_USER_CACHE_PATH=/tmp/spack

@@ -126,12 +126,12 @@ are currently supported are summarized in the table below:
   * - Ubuntu 18.04
     - ``ubuntu:18.04``
     - ``spack/ubuntu-bionic``
   * - CentOS 6
     - ``centos:6``
     - ``spack/centos6``
   * - CentOS 7
     - ``centos:7``
     - ``spack/centos7``
   * - openSUSE Leap
     - ``opensuse/leap``
     - ``spack/leap15``

All the images are tagged with the corresponding release of Spack:

@@ -200,7 +200,7 @@ Setting Base Images

The ``images`` subsection is used to select both the image where
Spack builds the software and the image where the built software
is installed. This attribute can be set in different ways and
is installed. This attribute can be set in two different ways and
which one to use depends on the use case at hand.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -260,54 +260,10 @@ software is respectively built and installed:

   ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l"]

This is the simplest available method of selecting base images, and we advise
This method of selecting base images is the simplest of the two, and we advise
to use it whenever possible. There are cases though where using Spack official
images is not enough to fit production needs. In these situations users can
extend the recipe to start with the bootstrapping of Spack at a certain pinned
version or manually select which base image to start from in the recipe,
as we'll see next.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Use a Bootstrap Stage for Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In some cases users may want to pin the commit sha that is used for Spack, to ensure later
reproducibility, or start from a fork of the official Spack repository to try a bugfix or
a feature in the early stage of development. This is possible by being just a little more
verbose when specifying information about Spack in the ``spack.yaml`` file:

.. code-block:: yaml

   images:
     os: amazonlinux:2
     spack:
       # URL of the Spack repository to be used in the container image
       url: <to-use-a-fork>
       # Either a commit sha, a branch name or a tag
       ref: <sha/tag/branch>
       # If true turn a branch name or a tag into the corresponding commit
       # sha at the time of recipe generation
       resolve_sha: <true/false>

``url`` specifies the URL from which to clone Spack and defaults to https://github.com/spack/spack.
The ``ref`` attribute can be either a commit sha, a branch name or a tag. The default value in
this case is to use the ``develop`` branch, but it may change in the future to point to the latest stable
release. Finally ``resolve_sha`` transforms branch names or tags into the corresponding commit
shas at the time of recipe generation, to allow for a greater reproducibility of the results
at a later time.

The list of operating systems that can be used to bootstrap Spack can be
obtained with:

.. command-output:: spack containerize --list-os

.. note::

   The ``resolve_sha`` option uses ``git rev-parse`` under the hood and thus it requires
   checking out the corresponding Spack repository in a temporary folder before generating
   the recipe. Recipe generation may take longer when this option is set to true because
   of this additional step.

images is not enough to fit production needs. In these situations users can manually
select which base image to start from in the recipe, as we'll see next.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Use Custom Images Provided by Users

@@ -459,18 +415,6 @@ to customize the generation of container recipes:
     - Version of Spack used in the ``build`` stage
     - Valid tags for ``base:image``
     - Yes, if using constrained selection of base images
   * - ``images:spack:url``
     - Repository from which Spack is cloned
     - Any fork of Spack
     - No
   * - ``images:spack:ref``
     - Reference for the checkout of Spack
     - Either a commit sha, a branch name or a tag
     - No
   * - ``images:spack:resolve_sha``
     - Resolve branches and tags in ``spack.yaml`` to commits in the generated recipe
     - True or False (default: False)
     - No
   * - ``images:build``
     - Image to be used in the ``build`` stage
     - Any valid container image

@@ -338,6 +338,15 @@ Once all of the dependencies are installed, you can try building the documentation:

If you see any warning or error messages, you will have to correct those before
your PR is accepted.

.. note::

   There is also a ``run-doc-tests`` script in ``share/spack/qa``. The only
   difference between running this script and running ``make`` by hand is that
   the script will exit immediately if it encounters an error or warning. This
   is necessary for CI. If you made a lot of documentation changes, it is
   much quicker to run ``make`` by hand so that you can see all of the warnings
   at once.

If you are editing the documentation, you should obviously be running the
documentation tests. But even if you are simply adding a new package, your
changes could cause the documentation tests to fail:

@@ -108,9 +108,9 @@ with a high level view of Spack's directory structure:

     spack/          <- spack module; contains Python code
       analyzers/      <- modules to run analysis on installed packages
       build_systems/  <- modules for different build systems
       cmd/            <- each file in here is a spack subcommand
       compilers/      <- compiler description files
       container/      <- module for spack containerize
       hooks/          <- hook modules to run at different points
       modules/        <- modules for lmod, tcl, etc.

@@ -151,22 +151,24 @@ Package-related modules

Package-related modules
^^^^^^^^^^^^^^^^^^^^^^^

:mod:`spack.package`
  Contains the :class:`~spack.package.Package` class, which
  is the superclass for all packages in Spack. Methods on ``Package``
  implement all phases of the :ref:`package lifecycle
  <package-lifecycle>` and manage the build process.

:mod:`spack.util.naming`
  Contains functions for mapping between Spack package names,
  Python module names, and Python class names. Functions like
  :func:`~spack.util.naming.mod_to_class` handle mapping package
  module names to class names.

:mod:`spack.directives`
  *Directives* are functions that can be called inside a package definition
  to modify the package, like :func:`~spack.directives.depends_on`
  and :func:`~spack.directives.provides`. See :ref:`dependencies`
  and :ref:`virtual-dependencies`.

:mod:`spack.multimethod`
  Implementation of the :func:`@when <spack.multimethod.when>`

@@ -178,27 +180,31 @@ Spec-related modules

Spec-related modules
^^^^^^^^^^^^^^^^^^^^

:mod:`spack.spec`
  Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
  Also implements most of the logic for normalization and concretization
  of specs.

:mod:`spack.parse`
  Contains some base classes for implementing simple recursive descent
  parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
  Used by :class:`~spack.spec.SpecParser`.

:mod:`spack.concretize`
  Contains :class:`~spack.concretize.Concretizer` implementation,
  which allows site administrators to change Spack's :ref:`concretization-policies`.

:mod:`spack.version`
  Implements a simple :class:`~spack.version.Version` class with simple
  comparison semantics. Also implements :class:`~spack.version.VersionRange`
  and :class:`~spack.version.VersionList`. All three are comparable with each
  other and offer union and intersection operations. Spack uses these classes
  to compare versions and to manage version constraints on specs. Comparison
  semantics are similar to the ``LooseVersion`` class in ``distutils`` and to
  the way RPM compares version strings.

:mod:`spack.compilers`
  Submodules contain descriptors for all valid compilers in Spack.

@@ -210,6 +216,15 @@ Spec-related modules

  but compilers aren't fully integrated with the build process
  yet.

:mod:`spack.architecture`
  :func:`architecture.sys_type <spack.architecture.sys_type>` is used
  to determine the host architecture while building.

  .. warning::

     Not yet implemented. Should eventually have architecture
     descriptions for cross-compiling.

^^^^^^^^^^^^^^^^^
Build environment
^^^^^^^^^^^^^^^^^

@@ -217,7 +232,7 @@ Build environment

:mod:`spack.stage`
  Handles creating temporary directories for builds.

:mod:`spack.build_environment`
  This contains utility functions used by the compiler wrapper script,
  ``cc``.

@@ -242,19 +257,22 @@ Unit tests

  Implements Spack's test suite. Add a module and put its name in
  the test suite in ``__init__.py`` to add more unit tests.

:mod:`spack.test.mock_packages`
  This is a fake package hierarchy used to mock up packages for
  Spack's test suite.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Research and Monitoring Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

:mod:`spack.monitor`
  Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
  the ``spack install`` and ``spack analyze`` commands to send build and
  package metadata up to a `Spack Monitor
  <https://github.com/spack/spack-monitor>`_ server.

:mod:`spack.analyzers`
  A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
  that provides base functions to run, save, and (optionally) upload analysis
  results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.

@@ -268,7 +286,7 @@ Other Modules

  tarball URLs.

:mod:`spack.error`
  :class:`~spack.error.SpackError`, the base class for
  Spack's exception hierarchy.

:mod:`llnl.util.tty`

@@ -317,8 +335,8 @@ Writing analyzers

To write an analyzer, you should add a new python file to the
analyzers module directory at ``lib/spack/spack/analyzers``.
Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
to add an analyzer class ``Myanalyzer`` you would write it to
``spack/analyzers/myanalyzer.py`` and import and
use the base as follows:

.. code-block:: python

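   # An illustrative sketch: only the AnalyzerBase import path comes from
   # the text above; the class attributes shown here are hypothetical.
   import spack.analyzers.analyzer_base

   class Myanalyzer(spack.analyzers.analyzer_base.AnalyzerBase):

       name = "myanalyzer"
       outfile = "spack-analyzer-myanalyzer.json"
       description = "my analyzer description"
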
@@ -329,7 +347,7 @@ use the base as follows:

Note that the class name is your module file name, all lowercase
except for the first capital letter. You can look at other analyzers in
that analyzer directory for examples. The guide here will tell you about the basic functions needed.

^^^^^^^^^^^^^^^^^^^^^^^^^
Analyzer Output Directory
^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -338,13 +356,13 @@ Analyzer Output Directory

By default, when you run ``spack analyze run`` an analyzer output directory will
be created in your spack user directory in your ``$HOME``. The reason we output here
is that the install directory might not always be writable.

.. code-block:: console

   ~/.spack/
       analyzers

Result files will be written here, organized in subfolders in the same structure
as the package, with each analyzer owning its own subfolder. For example:

@@ -362,11 +380,11 @@ as the package, with each analyzer owning it's own subfolder. for example:

   │   └── spack-analyzer-install-files.json
   └── libabigail
       └── lib
           └── spack-analyzer-libabigail-libz.so.1.2.11.xml

Notice that for the libabigail analyzer, since results are generated per object,
we honor the object's folder in case there are equivalently named files in
different folders. The result files are typically written as JSON so they can be easily read and uploaded in a future interaction with a monitor.

@@ -408,7 +426,7 @@ and then return the object with a key as the analyzer name. The result data

should be a list of objects, each with a name, ``analyzer_name``, ``install_file``,
and one of ``value`` or ``binary_value``. The install file should be a relative
path, not an absolute path. For example, let's say we extract a metric called
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
We might have data that looks like this:

.. code-block:: python

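   # Hypothetical result data matching the fields described above
   result = {"thebest-analyzer": [
       {
           "name": "metric",
           "analyzer_name": "thebest-analyzer",
           "install_file": "bin/wget",
           "value": "1"
       }
   ]}
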
@@ -464,7 +482,7 @@ Saving Analyzer Results

The analyzer will have ``save_result`` called with the generated result object,
to save it to the filesystem, and, if the user has added the ``--monitor`` flag,
to upload it to a monitor server. If your result follows an accepted result
format and you don't need to parse it further, you don't need to add this
function to your class. However, if your result data is large or otherwise
needs additional parsing, you can define it. If you define the function, it
is useful to know about the ``output_dir`` property, which you can join

@@ -530,7 +548,7 @@ each one (separately) to the monitor:

Notice that this function, if you define it, requires a result object (generated by
``run()``), a monitor (if you want to send), and a boolean ``overwrite`` to be used
to check if a result exists first, and not write to it if the result exists and
overwrite is False. Also notice that since we already saved these files to the
analyzer metadata folder, we return early if a monitor isn't defined, because this
function serves to send results to the monitor. If you haven't saved anything to
the analyzer metadata folder yet, you might want to do that here. You should also
use ``tty.info`` to give the user a message of "Writing result to $DIRNAME."

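A skeleton for such a method might look like the sketch below; the body is
hypothetical, and only the argument meanings come from the description above:

.. code-block:: python

   import llnl.util.tty as tty

   def save_result(self, result, monitor=None, overwrite=False):
       """Save results to the analyzer metadata folder; optionally upload them."""
       if not monitor:
           # results were already saved; this function only talks to the monitor
           return
       tty.info("Writing result to {0}".format(self.output_dir))
       # ... send each result to the monitor server here ...
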
@@ -598,7 +616,7 @@ types of hooks in the ``__init__.py``, and then python files in that folder

can use hook functions. The files are automatically parsed, so if you write
a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``)
you can then write hook functions in that file that will be automatically detected
and run whenever your hook is called. This section will cover the basic kinds
of hooks, and how to write them.

^^^^^^^^^^^^^^
Types of Hooks
^^^^^^^^^^^^^^

@@ -606,7 +624,7 @@ Types of Hooks

The following hooks are currently implemented to make it easy for you,
the developer, to add hooks at different stages of a Spack install or similar.
If there is a hook that you would like and is missing, you can propose to add a new one.

"""""""""""""""""""""
|
||||
@@ -614,9 +632,9 @@ If there is a hook that you would like and is missing, you can propose to add a
|
||||
"""""""""""""""""""""
|
||||
|
||||
A ``pre_install`` hook is run within an install subprocess, directly before
|
||||
the install starts. It expects a single argument of a spec, and is run in
|
||||
the install starts. It expects a single argument of a spec, and is run in
|
||||
a multiprocessing subprocess. Note that if you see ``pre_install`` functions associated with packages these are not hooks
|
||||
as we have defined them here, but rather callback functions associated with
|
||||
as we have defined them here, but rather callback functions associated with
|
||||
a package install.
|
||||
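For illustration only, a minimal hook of this kind might look like the
following sketch; the body is hypothetical, and only the single-``spec``
signature comes from the description above:

.. code-block:: python

   def pre_install(spec):
       """Runs in the install subprocess, directly before the install starts."""
       print("about to install {0}".format(spec.name))
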
@@ -639,7 +657,7 @@ here.

This hook is run at the beginning of ``lib/spack/spack/installer.py``,
in the install function of a ``PackageInstaller``,
and importantly is not part of a build process, but before it. This is when
we have just newly grabbed the task, and are preparing to install. If you
write a hook of this type, you should provide the spec to it.

@@ -648,7 +666,7 @@ write a hook of this type, you should provide the spec to it.

.. code-block:: python

   def on_install_start(spec):
       """On start of an install, we want to...
       """
       print('on_install_start')

""""""""""""""""""""""""""""
|
||||
``on_install_success(spec)``
|
||||
@@ -726,8 +744,8 @@ to trigger after anything is written to a logger. You would add it as follows:
|
||||
   post_install = HookRunner('post_install')

   # hooks related to logging
   post_log_write = HookRunner('post_log_write')  # <- here is my new hook!

You then need to decide what arguments your hook would expect. Since this is
related to logging, let's say that you want a message and a level; the hook
function would then accept those two arguments, as in the sketch below.

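A hook with that signature might look like the following sketch (the
function body is hypothetical):

.. code-block:: python

   def post_log_write(message, level):
       """Runs after a message is written to a log, e.g. to mirror it elsewhere."""
       if level > 0:
           print(message)
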
@@ -757,7 +775,7 @@ In this example, we use it outside of a logger that is already defined:

This is not to say that this would be the best way to implement an integration
with the logger (you'd probably want to write a custom logger, or you could
have the hook defined within the logger), but it serves as an example of writing a hook.

----------
Unit tests
----------

@@ -767,38 +785,6 @@ Unit tests

---------------------
Developer environment
---------------------

.. warning::

   This is an experimental feature. It is expected to change and you should
   not use it in a production environment.

When installing a package, we currently have support to export environment
variables that add debug flags to the build. By default, a package
install will build without any debug flags. However, if you want to add them,
you can export:

.. code-block:: console

   export SPACK_ADD_DEBUG_FLAGS=true
   spack install zlib

If you want to add custom flags, you should export an additional variable:

.. code-block:: console

   export SPACK_ADD_DEBUG_FLAGS=true
   export SPACK_DEBUG_FLAGS="-g"
   spack install zlib

These environment variables will eventually be integrated into Spack so
they are set from the command line.

------------------
Developer commands
------------------

@@ -809,29 +795,6 @@ Developer commands

``spack doc``
^^^^^^^^^^^^^

.. _cmd-spack-style:

^^^^^^^^^^^^^^^
``spack style``
^^^^^^^^^^^^^^^

``spack style`` exists to help developers check imports and style with
mypy, flake8, isort, and (soon) black. To run all style checks, simply do:

.. code-block:: console

   $ spack style

To run automatic fixes for isort, you can do:

.. code-block:: console

   $ spack style --fix

You do not need any of these Python packages installed on your system for
the checks to work! Spack will bootstrap-install them from packages for
your use.

^^^^^^^^^^^^^^^^^^^
``spack unit-test``
^^^^^^^^^^^^^^^^^^^

@@ -910,7 +873,7 @@ just like you would with the normal ``python`` command.

``spack blame``
^^^^^^^^^^^^^^^

Spack blame is a way to quickly see contributors to packages or files
in the spack repository. You should provide a target package name or
file name to the command. Here is an example asking to see contributions
for the package "python":

@@ -920,8 +883,8 @@ for the package "python":

   LAST_COMMIT   LINES  %      AUTHOR        EMAIL
   2 weeks ago   3      0.3    Mickey Mouse  <cheddar@gmouse.org>
   a month ago   927    99.7   Minnie Mouse  <swiss@mouse.org>

   2 weeks ago   930    100.0

By default, you will get a table view (shown above) sorted by date of contribution,

@@ -1292,7 +1255,7 @@ Publishing a release on GitHub

#. Create the release in GitHub.

   * Go to
     `github.com/spack/spack/releases <https://github.com/spack/spack/releases>`_
     and click ``Draft a new release``.

@@ -732,17 +732,13 @@ Configuring environment views

The Spack Environment manifest file has a top-level keyword
``view``. Each entry under that heading is a view descriptor, headed
by a name. The view descriptor contains the root of the view,
optionally the projections for the view, ``select`` and
``exclude`` lists for the view, and link information via ``link`` and
``link_type``. For example, in the following manifest
file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
package. This view selects all packages that depend on MPI, and
excludes those built with the PGI compiler at version 18.5.
All the dependencies of each root spec in the environment will be linked
in the view due to the setting ``link: all``, and the files in the view will
be symlinks to the spack install directories.

.. code-block:: yaml

@@ -755,16 +751,11 @@ be symlinks to the spack install directories.

         exclude: ['%pgi@18.5']
         projections:
           all: {name}/{version}-{compiler.name}
         link: all
         link_type: symlink

For more information on using view projections, see the section on
:ref:`adding_projections_to_views`. The default for the ``select`` and
``exclude`` values is to select everything and exclude nothing. The
default projection is the default view projection (``{}``). The ``link``
defaults to ``all`` but can also be ``roots`` when only the root specs
in the environment are desired in the view. The ``link_type`` defaults
to ``symlink`` but can also take the value of ``hardlink`` or ``copy``.

Any number of views may be defined under the ``view`` heading in a
Spack Environment.

@@ -9,16 +9,21 @@

Getting Started
===============

--------------------
System Prerequisites
--------------------

Spack has the following minimum system requirements, which are assumed to
be present on the machine where Spack is run:

#. Python 2 (2.6 or 2.7) or 3 (3.5 - 3.9) to run Spack
#. A C/C++ compiler for building
#. The ``make`` executable for building
#. The ``tar``, ``gzip``, ``unzip``, ``bzip2``, ``xz`` and optionally ``zstd``
   executables for extracting source code
#. The ``patch`` command to apply patches
#. The ``git`` and ``curl`` commands for fetching
#. If using the ``gpg`` subcommand, ``gnupg2`` is required

These requirements can be easily installed on most modern Linux systems;
on macOS, Xcode is required. Spack is designed to run on HPC

@@ -35,7 +40,7 @@ Getting Spack is easy. You can clone it from the `github repository

.. code-block:: console

   $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git

This will create a directory called ``spack``.

@@ -84,140 +89,6 @@ sourcing time, ensuring future invocations of the ``spack`` command will

continue to use the same consistent python version regardless of changes in
the environment.

^^^^^^^^^^^^^^^^^^^^
Bootstrapping clingo
^^^^^^^^^^^^^^^^^^^^

Spack uses ``clingo`` under the hood to resolve optimal versions and variants of
dependencies when installing a package. Since ``clingo`` itself is a binary,
Spack has to install it on initial use, which is called bootstrapping.

Spack provides two ways of bootstrapping ``clingo``: from pre-built binaries
(default), or from sources. The fastest way to get started is to bootstrap from
pre-built binaries.

.. note::

   When bootstrapping from pre-built binaries, Spack currently requires
   ``patchelf`` on Linux and ``otool`` on macOS. If ``patchelf`` is not in the
   ``PATH``, Spack will build it from sources, and a C++ compiler is required.

The first time you concretize a spec, Spack will bootstrap in the background:

.. code-block:: console

   $ time spack spec zlib
   Input spec
   --------------------------------
   zlib

   Concretized
   --------------------------------
   zlib@1.2.11%gcc@7.5.0+optimize+pic+shared arch=linux-ubuntu18.04-zen

   real    0m20.023s
   user    0m18.351s
   sys     0m0.784s

After this command you'll see that ``clingo`` has been installed for Spack's own use:

.. code-block:: console

   $ spack find -b
   ==> Showing internal bootstrap store at "/root/.spack/bootstrap/store"
   ==> 3 installed packages
   -- linux-rhel5-x86_64 / gcc@9.3.0 -------------------------------
   clingo-bootstrap@spack  python@3.6

   -- linux-ubuntu18.04-zen / gcc@7.5.0 ----------------------------
   patchelf@0.13

Subsequent calls to the concretizer will then be much faster:

.. code-block:: console

   $ time spack spec zlib
   [ ... ]
   real    0m0.490s
   user    0m0.431s
   sys     0m0.041s

If, for security reasons, you cannot bootstrap ``clingo`` from pre-built
binaries, you have to mark this bootstrapping method as untrusted. This makes
Spack fall back to bootstrapping from sources:

.. code-block:: console

   $ spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

You can verify that the new settings are effective with:

.. code-block:: console

   $ spack bootstrap list
   Name: github-actions UNTRUSTED

     Type: buildcache

     Info:
       url: https://mirror.spack.io/bootstrap/github-actions/v0.1
       homepage: https://github.com/alalazo/spack-bootstrap-mirrors
       releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases

     Description:
       Buildcache generated from a public workflow using Github Actions.
       The sha256 checksum of binaries is checked before installation.


   Name: spack-install TRUSTED

     Type: install

     Description:
       Specs built from sources by Spack. May take a long time.

.. note::

   When bootstrapping from sources, Spack requires a full install of Python
   including header files (e.g. ``python3-dev`` on Debian), and a compiler
   with support for C++14 (GCC on Linux, Apple Clang on macOS) and static C++
   standard libraries on Linux.

Spack will build the required software on the first request to concretize a spec:

.. code-block:: console

   $ spack spec zlib
   [+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
   [+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
   [+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
   ==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
   [ ... ]
   zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell

"""""""""""""""""""
|
||||
The Bootstrap Store
|
||||
"""""""""""""""""""
|
||||
|
||||
All the tools Spack needs for its own functioning are installed in a separate store, which lives
|
||||
under the ``${HOME}/.spack`` directory. The software installed there can be queried with:
|
||||
|
||||
.. code-block:: console

   $ spack find --bootstrap
   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
   ==> 3 installed packages
   -- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
   clingo-bootstrap@spack  python@3.6.9  re2c@1.2.1

If needed, the bootstrap store can also be cleaned with:

.. code-block:: console

   $ spack clean -b
   ==> Removing software in "/home/spack/.spack/bootstrap/store"

^^^^^^^^^^^^^^^^^^
Check Installation
^^^^^^^^^^^^^^^^^^

@@ -246,6 +117,53 @@ environment*, especially for ``PATH``. Only software that comes with

the system, or that you know you wish to use with Spack, should be
included. This procedure will avoid many strange build errors.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Optional: Bootstrapping clingo
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack supports using clingo as an external solver to compute which software
needs to be installed. If you have a default compiler supporting C++14, Spack
can automatically bootstrap this tool from sources the first time it is
needed:

.. code-block:: console

   $ spack solve zlib
   [+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
   [+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
   [+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
   ==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
   [ ... ]
   ==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0]
   zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell

If you want to speed up bootstrapping, you may try to search for ``cmake`` and ``bison``
on your system:

.. code-block:: console

   $ spack external find cmake bison
   ==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml
   bison@3.0.4  cmake@3.19.4

^^^^^^^^^^^^^^^^^^^^^^^^^^
Optional: Alternate Prefix
^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -449,34 +367,6 @@ then inject those flags into the compiler command. Compiler flags

entered from the command line will be discussed in more detail in the
following section.

Some compilers also require additional environment configuration.
Examples include Intel's oneAPI and AMD's AOCC compiler suites,
which have custom scripts for loading environment variables and setting paths.
These variables should be specified in the ``environment`` section of the compiler
specification. The operations available to modify the environment are ``set``, ``unset``,
``prepend_path``, ``append_path``, and ``remove_path``. For example:

.. code-block:: yaml

   compilers:
   - compiler:
       modules: []
       operating_system: centos6
       paths:
         cc: /opt/intel/oneapi/compiler/latest/linux/bin/icx
         cxx: /opt/intel/oneapi/compiler/latest/linux/bin/icpx
         f77: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
         fc: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
       spec: oneapi@latest
       environment:
         set:
           MKL_ROOT: "/path/to/mkl/root"
         unset: # A list of environment variables to unset
         - CC
         prepend_path: # Similar for append|remove_path
           LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh

^^^^^^^^^^^^^^^^^^^^^^^
Build Your Own Compiler
^^^^^^^^^^^^^^^^^^^^^^^

@@ -631,9 +521,8 @@ Fortran.

#. Run ``spack compiler find`` to locate Clang.

#. There are different ways to get ``gfortran`` on macOS. For example, you can
   install GCC with Spack (``spack install gcc``), with Homebrew (``brew install
   gcc``), or from a `DMG installer
   <https://github.com/fxcoudert/gfortran-for-macOS/releases>`_.

#. The only thing left to do is to edit ``~/.spack/darwin/compilers.yaml`` to provide
   the path to ``gfortran``:

@@ -654,8 +543,7 @@ Fortran.

   If you used Spack to install GCC, you can get the installation prefix by
   ``spack location -i gcc`` (this will only work if you have a single version
   of GCC installed). Whereas for Homebrew, GCC is installed in
   ``/usr/local/Cellar/gcc/x.y.z``. With the DMG installer, the correct path
   will be ``/usr/local/gfortran``.

^^^^^^^^^^^^^^^^^^^^^
Compiler Verification
^^^^^^^^^^^^^^^^^^^^^

@@ -889,7 +777,7 @@ an OpenMPI installed in /opt/local, one would use:

         buildable: False

In general, Spack is easier to use and more reliable if it builds all of
its own dependencies. However, there are several packages for which one
commonly needs to use system versions:

@@ -39,7 +39,7 @@ package:

.. code-block:: console

   $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
   $ cd spack/bin
   $ ./spack install libelf

@@ -56,6 +56,7 @@ or refer to the full manual below.

   basic_usage
   workflows
   Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
   known_issues

.. toctree::
   :maxdepth: 2

77
lib/spack/docs/known_issues.rst
Normal file

@@ -0,0 +1,77 @@

.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

============
Known Issues
============

This is a list of known bugs in Spack. It provides ways of getting around these
problems if you encounter them.

---------------------------------------------------
Variants are not properly forwarded to dependencies
---------------------------------------------------

**Status:** Expected to be fixed by Spack's new concretizer

Sometimes, a variant of a package can also affect how its dependencies are
built. For example, in order to build MPI support for a package, it may
require that its dependencies are also built with MPI support. In the
``package.py``, this looks like:

.. code-block:: python

   depends_on('hdf5~mpi', when='~mpi')
   depends_on('hdf5+mpi', when='+mpi')

Spack handles this situation properly for *immediate* dependencies, and
builds ``hdf5`` with the same variant you used for the package that
depends on it. However, for *indirect* dependencies (dependencies of
dependencies), Spack does not backtrack up the DAG far enough to handle
this. Users commonly run into this situation when trying to build R with
X11 support:

.. code-block:: console

   $ spack install r+X
   ...
   ==> Error: Invalid spec: 'cairo@1.14.8%gcc@6.2.1+X arch=linux-fedora25-x86_64 ^bzip2@1.0.6%gcc@6.2.1+shared arch=linux-fedora25-x86_64 ^font-util@1.3.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^fontconfig@2.12.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^freetype@2.7.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^gettext@0.19.8.1%gcc@6.2.1+bzip2+curses+git~libunistring+libxml2+tar+xz arch=linux-fedora25-x86_64 ^glib@2.53.1%gcc@6.2.1~libmount arch=linux-fedora25-x86_64 ^inputproto@2.3.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^kbproto@1.0.7%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libffi@3.2.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpng@1.6.29%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpthread-stubs@0.4%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libx11@1.6.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxau@1.0.8%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxcb@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxdmcp@1.1.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxext@1.3.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxml2@2.9.4%gcc@6.2.1~python arch=linux-fedora25-x86_64 ^libxrender@0.9.10%gcc@6.2.1 arch=linux-fedora25-x86_64 ^ncurses@6.0%gcc@6.2.1~symlinks arch=linux-fedora25-x86_64 ^openssl@1.0.2k%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pcre@8.40%gcc@6.2.1+utf arch=linux-fedora25-x86_64 ^pixman@0.34.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pkg-config@0.29.2%gcc@6.2.1+internal_glib arch=linux-fedora25-x86_64 ^python@2.7.13%gcc@6.2.1+shared~tk~ucs4 arch=linux-fedora25-x86_64 ^readline@7.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^renderproto@0.11.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^sqlite@3.18.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^tar^util-macros@1.19.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xcb-proto@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xextproto@7.3.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xproto@7.0.31%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xtrans@1.3.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xz@5.2.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^zlib@1.2.11%gcc@6.2.1+pic+shared arch=linux-fedora25-x86_64'.
   Package cairo requires variant ~X, but spec asked for +X

A workaround is to explicitly activate the variants of dependencies as well:

.. code-block:: console

   $ spack install r+X ^cairo+X ^pango+X

See https://github.com/spack/spack/issues/267 and
https://github.com/spack/spack/issues/2546 for further details.

-----------------------------------------------
depends_on cannot handle recursive dependencies
-----------------------------------------------

**Status:** Not yet a work in progress

Although ``depends_on`` can handle any aspect of Spack's spec syntax,
it currently cannot handle recursive dependencies. If the ``^`` sigil
appears in a ``depends_on`` statement, the concretizer will hang.
For example, something like:

.. code-block:: python

   depends_on('mfem+cuda ^hypre+cuda', when='+cuda')

should be rewritten as:

.. code-block:: python

   depends_on('mfem+cuda', when='+cuda')
   depends_on('hypre+cuda', when='+cuda')

See https://github.com/spack/spack/issues/17660 and
https://github.com/spack/spack/issues/11160 for more details.

@@ -213,18 +213,6 @@ location). The set ``my_custom_lmod_modules`` will install its lmod

modules to ``/path/to/install/custom/lmod/modules`` (and still install
its tcl modules, if any, to the default location).

By default, an architecture-specific directory is added to the root
directory. A module set may override that behavior by setting the
``arch_folder`` config value to ``False``.

.. code-block:: yaml

   modules:
     default:
       roots:
         tcl: /path/to/install/tcl/modules
       arch_folder: false

Obviously, having multiple module sets install modules to the default
location could be confusing to users of your modules. In the next
section, we will discuss enabling and disabling module types (module

@@ -461,36 +449,6 @@ that are already in the LMod hierarchy.

For hierarchies that are deeper than three layers, ``lmod spider`` may have some issues.
See `this discussion on the LMod project <https://github.com/TACC/Lmod/issues/114>`_.

""""""""""""""""""""""
Select default modules
""""""""""""""""""""""

By default, when multiple modules of the same name share a directory,
the highest version number will be the default module. This behavior
of the ``module`` command can be overridden with a symlink named
``default`` to the desired default module. If you wish to configure
default modules with Spack, add a ``defaults`` key to your modules
configuration:

.. code-block:: yaml

   modules:
     my-module-set:
       tcl:
         defaults:
         - gcc@10.2.1
         - hdf5@1.2.10+mpi+hl%gcc

These defaults may be arbitrarily specific. For any package that
satisfies a default, Spack will generate the module file in the
appropriate path, and will generate a default symlink to the module
file as well.

.. warning::
   If Spack is configured to generate multiple default packages in the
   same directory, the last modulefile to be generated will be the
   default module.

.. _customize-env-modifications:

"""""""""""""""""""""""""""""""""""
|
||||
|
||||
@@ -612,7 +612,6 @@ it executable, then runs it with some arguments.
|
||||
installer = Executable(self.stage.archive_file)
|
||||
installer('--prefix=%s' % prefix, 'arg1', 'arg2', 'etc.')
|
||||
|
||||
.. _deprecate:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
Deprecating old versions
^^^^^^^^^^^^^^^^^^^^^^^^

@@ -695,23 +694,20 @@ example, ``py-sphinx-rtd-theme@0.1.10a0``. In this case, numbers are

always considered to be "newer" than letters. This is for consistency
with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_.

Spack versions may also be arbitrary non-numeric strings, for example
``@develop``, ``@master``, ``@local``.

The order on versions is defined as follows. A version string is split
into a list of components based on delimiters such as ``.``, ``-`` etc.
Lists are then ordered lexicographically, where components are ordered
as follows:

#. The following special strings are considered larger than any other
   numeric or non-numeric version component, and satisfy the following
   order between themselves: ``develop > main > master > head > trunk``.

#. Numbers are ordered numerically, are less than special strings, and
   larger than other non-numeric components.

#. All other non-numeric components are less than numeric components,
   and are ordered alphabetically.

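As a quick illustration of these rules, the comparisons below (our own
examples, assuming they are run under ``spack python`` so that Spack's
``lib/spack`` directories are importable) all hold:

.. code-block:: python

   from spack.version import Version

   # special strings are newer than any number
   assert Version('develop') > Version('999')
   # numeric components compare numerically, not lexicographically
   assert Version('1.10') > Version('1.9')
   # non-numeric components sort below numeric ones
   assert Version('1.2.1') > Version('1.2.alpha')
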
The logic behind this sort order is two-fold:

@@ -732,7 +728,7 @@ Version selection

When concretizing, many versions might match a user-supplied spec.
For example, the spec ``python`` matches all available versions of the
package ``python``. Similarly, ``python@3:`` matches all versions of
Python 3 and above. Given a set of versions that match a spec, Spack
concretization uses the following priorities to decide which one to
use:

@@ -1422,60 +1418,6 @@ other similar operations:

       ).with_default('auto').with_non_feature_values('auto'),
   )

^^^^^^^^^^^^^^^^^^^^
Conditional Variants
^^^^^^^^^^^^^^^^^^^^

The variant directive accepts a ``when`` clause. The variant will only
be present on specs that otherwise satisfy the spec listed as the
``when`` clause. For example, the following class has a variant
``bar`` when it is at version 2.0 or higher.

.. code-block:: python

   class Foo(Package):
       ...
       variant('bar', default=False, when='@2.0:', description='help message')

The ``when`` clause follows the same syntax and accepts the same
values as the ``when`` argument of
:py:func:`spack.directives.depends_on`.

^^^^^^^^^^^^^^^^^^^
Overriding Variants
^^^^^^^^^^^^^^^^^^^

Packages may override variants for several reasons, most often to
change the default from a variant defined in a parent class or to
change the conditions under which a variant is present on the spec.

When a variant is defined multiple times, whether in the same package
file or in a subclass and a superclass, the last definition is used
for all attributes **except** for the ``when`` clauses. The ``when``
clauses are accumulated through all invocations, and the variant is
present on the spec if any of the accumulated conditions are
satisfied.

For example, consider the following package:

.. code-block:: python

   class Foo(Package):
       ...
       variant('bar', default=False, when='@1.0', description='help1')
       variant('bar', default=True, when='platform=darwin', description='help2')
       ...

This package ``foo`` has a variant ``bar`` when the spec satisfies
either ``@1.0`` or ``platform=darwin``, but not for other platforms at
other versions. The default for this variant, when it is present, is
always ``True``, regardless of which condition of the variant is
satisfied. This allows packages to override variants in packages or
build system classes from which they inherit, by modifying the variant
values without modifying the ``when`` clause. It also allows a package
to implement ``or`` semantics for a variant ``when`` clause by
duplicating the variant definition.

------------------------------------
Resources (expanding extra tarballs)
------------------------------------

@@ -2120,7 +2062,7 @@ Version ranges

Version ranges
^^^^^^^^^^^^^^

Although some packages require a specific version for their dependencies,
most can be built with a range of versions. For example, if you are
writing a package for a legacy Python module that only works with Python
2.4 through 2.6, this would look like:

@@ -2129,9 +2071,9 @@ writing a package for a legacy Python module that only works with Python

.. code-block:: python

   depends_on('python@2.4:2.6')

Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
you want to specify that a package works with any version of Python 3 (or
higher), this would look like:

.. code-block:: python

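   # any Python 3.x or higher, with no upper bound
   depends_on('python@3:')
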
@@ -2142,30 +2084,29 @@ requires Python 2, you can similarly leave out the lower bound:

.. code-block:: python

   depends_on('python@:2')

Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
``@:3`` means "up to and including any 3.x version".

What if a package can only be built with Python 2.7? You might be
inclined to use:

.. code-block:: python

   depends_on('python@2.7')

However, this would be wrong. Spack assumes that all version constraints
are exact, so it would try to install Python not at ``2.7.18``, but
exactly at ``2.7``, which is a non-existent version. The correct way to
specify this would be:

.. code-block:: python

   depends_on('python@2.7.0:2.7')

A spec can contain a version list of ranges and individual versions
separated by commas. For example, if you need Boost 1.59.0 or newer,
but there are known issues with 1.64.0, 1.65.0, and 1.66.0, you can say:

.. code-block:: python

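   # illustrative reconstruction: 1.59.0 or newer, skipping the 1.64-1.66 series
   depends_on('boost@1.59.0:1.63,1.65.1,1.67.0:')
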
@@ -2882,7 +2823,7 @@ is equivalent to:

   depends_on('elpa+openmp', when='+openmp+elpa')

Constraints from nested context managers are also combined together, but they are rarely
needed or recommended.

.. _install-method:

@@ -2943,52 +2884,52 @@ The package base class, usually specialized for a given build system, determines

actual set of entities available for overriding.
The classes that are currently provided by Spack are:

+----------------------------------------------------------+----------------------------------+
| **Base Class**                                           | **Purpose**                      |
+==========================================================+==================================+
| :class:`~spack.package.Package`                          | General base class not           |
|                                                          | specialized for any build system |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.makefile.MakefilePackage`   | Specialized class for packages   |
|                                                          | built invoking                   |
|                                                          | hand-written Makefiles           |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.autotools.AutotoolsPackage` | Specialized class for packages   |
|                                                          | built using GNU Autotools        |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.cmake.CMakePackage`         | Specialized class for packages   |
|                                                          | built using CMake                |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.cuda.CudaPackage`           | A helper class for packages that |
|                                                          | use CUDA                         |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.qmake.QMakePackage`         | Specialized class for packages   |
|                                                          | built using QMake                |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.rocm.ROCmPackage`           | A helper class for packages that |
|                                                          | use ROCm                         |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.scons.SConsPackage`         | Specialized class for packages   |
|                                                          | built using SCons                |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.waf.WafPackage`             | Specialized class for packages   |
|                                                          | built using Waf                  |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.r.RPackage`                 | Specialized class for            |
|                                                          | R extensions                     |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.octave.OctavePackage`       | Specialized class for            |
|                                                          | Octave packages                  |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.python.PythonPackage`       | Specialized class for            |
|                                                          | Python extensions                |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.perl.PerlPackage`           | Specialized class for            |
|                                                          | Perl extensions                  |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.intel.IntelPackage`         | Specialized class for licensed   |
|                                                          | Intel software                   |
+----------------------------------------------------------+----------------------------------+

.. note::

@@ -2998,7 +2939,7 @@ The classes that are currently provided by Spack are:

   rare cases where manual intervention is needed we need to stress that a
   package base class depends on the *build system* being used, not the language of the package.
   For example, a Python extension installed with CMake would ``extends('python')`` and
   subclass from :class:`~spack.build_systems.cmake.CMakePackage`.

^^^^^^^^^^^^^^^^^^^^^
|
||||
Installation pipeline
|
||||
@@ -4138,7 +4079,7 @@ prefix **before** ``make install``. Builds like this can falsely report
success when an error occurs before the installation is complete. Simple
sanity checks can be used to identify files and/or directories that are
required of a successful installation. Spack checks for the presence of
the files and directories after ``install()`` runs.

If any of the listed files or directories are missing, then the build will
fail and the install prefix will be removed. If they all exist, then Spack
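For illustration, a minimal sketch of such checks in a package (the
``sanity_check_is_file`` and ``sanity_check_is_dir`` attributes are the
relevant hooks; the package name and paths here are hypothetical):

.. code-block:: python

   class Libelf(Package):
       ...
       # Spack checks these paths, relative to prefix, after install()
       sanity_check_is_file = ['include/libelf.h']
       sanity_check_is_dir = ['lib']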
@@ -4252,7 +4193,7 @@ need to use two decorators for each phase test method:
The first decorator tells Spack when in the installation process to
run your test method; namely *after* the provided
installation phase. The second decorator tells Spack to only run the
checks when the ``--test`` option is provided on the command line.
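
As a sketch, assuming the ``run_after`` and ``on_package_attributes``
decorators described here (the method name and ``make('check')`` target
are hypothetical):

.. code-block:: python

   @run_after('build')
   @on_package_attributes(run_tests=True)
   def check_build(self):
       # runs after the build phase, and only when --test is requested
       make('check')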

.. note::

@@ -4326,17 +4267,17 @@ tests can be performed days, even weeks, after the software is installed.

Stand-alone tests are checks that should run relatively quickly -- as
in on the order of at most a few minutes -- and ideally execute all
aspects of the installed software, or at least key functionality.

.. note::

   Execution speed is important because these tests are intended
   to quickly assess whether the installed software works on the
   system.

   Failing stand-alone tests indicate that there is no reason to
   proceed with more resource-intensive tests.

   Passing stand-alone (or smoke) tests can lead to more thorough
   testing, such as extensive unit or regression tests, or tests
   that run at scale. Spack support for more thorough testing is
@@ -4366,7 +4307,7 @@ file such that:

   test_stage: /path/to/stage

The package can access this path **during test processing** using
``self.test_suite.stage``.

.. note::

@@ -4426,9 +4367,9 @@ The signature for ``cache_extra_test_sources`` is:

where ``srcs`` is a string or a list of strings corresponding to
the paths for the files and/or subdirectories, relative to the staged
source, that are to be copied to the corresponding relative test path
under the prefix. All of the contents within each subdirectory will
also be copied.
source, that are to be copied to the corresponding path relative to
``self.install_test_root``. All of the contents within each subdirectory
will also be copied.

For example, a package method for copying everything in the ``tests``
subdirectory plus the ``foo.c`` and ``bar.c`` files from ``examples``
@@ -4436,13 +4377,8 @@ can be implemented as shown below.

.. note::

   The method name ``copy_test_sources`` here is for illustration
   purposes. You are free to use a name that is more suited to your
   package.

The key to copying the files at build time for stand-alone testing
is use of the ``run_after`` directive, which ensures the associated
files are copied **after** the provided build stage.
The ``run_after`` directive ensures associated files are copied
**after** the package is installed by the build process.

.. code-block:: python

@@ -4452,20 +4388,25 @@ can be implemented as shown below.
   @run_after('install')
   def copy_test_sources(self):
       srcs = ['tests',
               join_path('examples', 'foo.c'),
               join_path('examples', 'bar.c')]
       self.cache_extra_test_sources(srcs)

In this case, the method copies the associated files from the build
stage **after** the software is installed to the package's metadata
directory. The result is the directory and files will be cached in
a special test subdirectory under the installation prefix.
paths under ``self.install_test_root`` as follows:

* ``join_path(self.install_test_root, 'tests')`` along with its files
  and subdirectories
* ``join_path(self.install_test_root, 'examples', 'foo.c')``
* ``join_path(self.install_test_root, 'examples', 'bar.c')``

These paths are **automatically copied** to the test stage directory
during stand-alone testing. The package's ``test`` method can access
them using the ``self.test_suite.current_test_cache_dir`` property.
In our example, the method would use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:
where they are available to the package's ``test`` method through the
``self.test_suite.current_test_cache_dir`` property. In our example,
the method can access the directory and files using the following
paths:

* ``join_path(self.test_suite.current_test_cache_dir, 'tests')``
* ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')``
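
A hedged sketch of a ``test`` method consuming those cached sources
(the tool name ``foo-tool`` and the way it consumes ``foo.c`` are
hypothetical; the property and ``run_test`` call follow the text above):

.. code-block:: python

   def test(self):
       cache_dir = self.test_suite.current_test_cache_dir
       # exercise the installed tool on an input cached at build time
       self.run_test('foo-tool',
                     options=[join_path(cache_dir, 'examples', 'foo.c')],
                     purpose='process foo.c from the cached sources',
                     installed=True)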
@@ -4473,8 +4414,9 @@ the copy of each entry listed in ``srcs``, respectively:

.. note::

   Library developers will want to build the associated tests
   against their **installed** libraries before running them.
   Library developers will want to build the associated tests under
   the ``self.test_suite.current_test_cache_dir`` and against their
   **installed** libraries before running them.

.. note::

@@ -4484,6 +4426,11 @@ the copy of each entry listed in ``srcs``, respectively:
   would be appropriate for ensuring the installed software continues
   to work as the underlying system evolves.

.. note::

   You are free to use a method name that is more suitable for
   your package.

.. _cache_custom_files:

"""""""""""""""""""
@@ -4499,7 +4446,7 @@ Examples include:
- expected test output

These extra files should be added to the ``test`` subdirectory of the
package in the Spack repository.

Spack will **automatically copy** the contents of that directory to the
test staging directory for stand-alone testing. The ``test`` method can
@@ -4524,7 +4471,7 @@ The signature for ``get_escaped_text_output`` is:

where ``filename`` is the path to the file containing the expected output.

The ``filename`` for a :ref:`custom file <cache_custom_files>` can be
accessed and used as illustrated by a simplified version of an ``sqlite``
package check:
@@ -4562,8 +4509,7 @@ can retrieve the expected output from ``examples/foo.out`` using:

   def test(self):
       ..
       filename = join_path(self.test_suite.current_test_cache_dir,
                            'examples', 'foo.out')
       filename = join_path(self.install_test_root, 'examples', 'foo.out')
       expected = get_escaped_text_output(filename)
       ..

@@ -4645,10 +4591,10 @@ where each argument has the following meaning:

  Options are a list of strings to be passed to the executable when
  it runs.

  The default is ``[]``, which means no options are provided to the
  executable.

* ``expected`` is an optional list of expected output strings.

  Spack requires every string in ``expected`` to be a regex matching
@@ -4659,31 +4605,31 @@ where each argument has the following meaning:
  The expected output can be :ref:`read from a file
  <expected_test_output_from_file>`.

  The default is ``expected=[]``, so Spack will not check the output.

* ``status`` is the optional expected return code(s).

  A list of return codes corresponding to successful execution can
  be provided (e.g., ``status=[0,3,7]``). Support for non-zero return
  codes allows for basic **expected failure** tests as well as different
  return codes across versions of the software.

  The default is ``status=[0]``, which corresponds to **successful**
  execution in the sense that the executable does not exit with a
  failure code or raise an exception.

* ``installed`` is used to require ``exe`` to be within the package
  prefix.

  If ``True``, then the path for ``exe`` is required to be within the
  package prefix; otherwise, the path is not constrained.

  The default is ``False``, so the fully qualified path for ``exe``
  does **not** need to be within the installation directory.

* ``purpose`` is an optional heading describing the test part.

  Output from the test is written to a test log file so this argument
  serves as a searchable heading in text logs to highlight the start
  of the test part. Having a description can be helpful when debugging
@@ -4698,10 +4644,10 @@ where each argument has the following meaning:
  The default is ``False``, which means the test executable must be
  present for any installable version of the software.

* ``work_dir`` is the path to the directory from which the executable
  will run.

  The default of ``None`` corresponds to the current directory (``'.'``).

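Putting these arguments together, a minimal sketch of a single check
(the executable name and expected version string are hypothetical):

.. code-block:: python

   def test(self):
       self.run_test('example',
                     options=['--version'],
                     expected=[r'1\.0\.\d+'],
                     status=[0],
                     installed=True,
                     purpose='check example reports its version',
                     work_dir='.')
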
"""""""""""""""""""""""""""""""""""""""""

@@ -4731,6 +4677,9 @@ directory paths are provided in the table below.
   * - Test Suite Stage Files
     - ``self.test_suite.stage``
     - ``join_path(self.test_suite.stage, 'results.txt')``
   * - Cached Build-time Files
     - ``self.install_test_root``
     - ``join_path(self.install_test_root, 'examples', 'foo.c')``
   * - Staged Cached Build-time Files
     - ``self.test_suite.current_test_cache_dir``
     - ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')``
@@ -4805,7 +4754,7 @@ where only the outputs for the first of each set are shown:
   Copyright (C) 2018 Free Software Foundation, Inc.
   This is free software; see the source for copying conditions.  There is NO
   warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.

   PASSED
   ...
   ==> [2021-04-26-17:35:20.493921] test: checking mpirun output
@@ -4966,7 +4915,7 @@ This is already part of the boilerplate for packages created with
Filtering functions
^^^^^^^^^^^^^^^^^^^

:py:func:`filter_file(regex, repl, *filenames, **kwargs) <llnl.util.filesystem.filter_file>`
:py:func:`filter_file(regex, repl, *filenames, **kwargs) <spack.filter_file>`
  Works like ``sed`` but with Python regular expression syntax. Takes
  a regular expression, a replacement, and a set of files. ``repl``
  can be a raw string or a callable function. If it is a raw string,
@@ -5004,7 +4953,7 @@ Filtering functions
     filter_file('CXX="c++"', 'CXX="%s"' % self.compiler.cxx,
                 prefix.bin.mpicxx)

:py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <llnl.util.filesystem.change_sed_delimiter>`
:py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <spack.change_sed_delim>`
  Some packages, like TAU, have a build system that can't install
  into directories with, e.g., '@' in the name, because they use
  hard-coded ``sed`` commands in their build.
@@ -5026,14 +4975,14 @@ Filtering functions
File functions
^^^^^^^^^^^^^^

:py:func:`ancestor(dir, n=1) <llnl.util.filesystem.ancestor>`
:py:func:`ancestor(dir, n=1) <spack.ancestor>`
  Get the n\ :sup:`th` ancestor of the directory ``dir``.

:py:func:`can_access(path) <llnl.util.filesystem.can_access>`
:py:func:`can_access(path) <spack.can_access>`
  True if we can read and write to the file at ``path``. Same as
  native python ``os.access(file_name, os.R_OK|os.W_OK)``.

:py:func:`install(src, dest) <llnl.util.filesystem.install>`
:py:func:`install(src, dest) <spack.install>`
  Install a file to a particular location. For example, install a
  header into the ``include`` directory under the install ``prefix``:
@@ -5041,14 +4990,14 @@ File functions

     install('my-header.h', prefix.include)

:py:func:`join_path(*paths) <llnl.util.filesystem.join_path>`
:py:func:`join_path(*paths) <spack.join_path>`
  An alias for ``os.path.join``. This joins paths using the OS path separator.

:py:func:`mkdirp(*paths) <llnl.util.filesystem.mkdirp>`
:py:func:`mkdirp(*paths) <spack.mkdirp>`
  Create each of the directories in ``paths``, creating any parent
  directories if they do not exist.

:py:func:`working_dir(dirname, kwargs) <llnl.util.filesystem.working_dir>`
:py:func:`working_dir(dirname, kwargs) <spack.working_dir>`
  This is a Python `Context Manager
  <https://docs.python.org/2/library/contextlib.html>`_ that makes it
  easier to work with subdirectories in builds. You use this with the
@@ -5090,7 +5039,7 @@ File functions
  The ``create=True`` keyword argument causes the command to create
  the directory if it does not exist.
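
  For instance, a build that must run from a subdirectory could be
  sketched as follows (the ``spack-build`` directory name is just a
  common convention, not required):

  .. code-block:: python

     # enter (and create, if needed) the build directory; the previous
     # working directory is restored when the block exits
     with working_dir('spack-build', create=True):
         cmake('..')
         make()
         make('install')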

:py:func:`touch(path) <llnl.util.filesystem.touch>`
:py:func:`touch(path) <spack.touch>`
  Create an empty file at ``path``.

.. _make-package-findable:

@@ -48,9 +48,9 @@ or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), thou
topics are outside the scope of this document.

Spack's pipelines are now making use of the
`trigger <https://docs.gitlab.com/ee/ci/yaml/#trigger>`_ syntax to run
`trigger <https://docs.gitlab.com/12.9/ee/ci/yaml/README.html#trigger>`_ syntax to run
dynamically generated
`child pipelines <https://docs.gitlab.com/ee/ci/pipelines/parent_child_pipelines.html>`_.
`child pipelines <https://docs.gitlab.com/12.9/ee/ci/parent_child_pipelines.html>`_.
Note that the use of dynamic child pipelines requires running GitLab version
``>= 12.9``.

@@ -335,7 +335,7 @@ merged YAML from all configuration files, use ``spack config get repos``:
   - ~/myrepo
   - $spack/var/spack/repos/builtin

Note that, unlike ``spack repo list``, this does not include the
namespace, which is read from each repo's ``repo.yaml``.

^^^^^^^^^^^^^^^^^^^^^

@@ -1,10 +1,7 @@
# These dependencies should be installed using pip in order
# to build the documentation.

sphinx>=3.4,!=4.1.2
sphinx
sphinxcontrib-programoutput
sphinx-rtd-theme
python-levenshtein
# Restrict to docutils <0.17 to work around a list rendering issue in sphinx.
# https://stackoverflow.com/questions/67542699
docutils <0.17

@@ -8,21 +8,12 @@
# these commands in this directory to install Sphinx and its plugins,
# then build the docs:
#
#     spack env activate .
#     spack install
#     spack env activate .
#     make
#
spack:
  specs:
    # Sphinx
    - "py-sphinx@3.4:4.1.1,4.1.3:"
    - py-sphinx
    - py-sphinxcontrib-programoutput
    - py-docutils@:0.16
    - py-sphinx-rtd-theme
    # VCS
    - git
    - mercurial
    - subversion
    # Plotting
    - graphviz
  concretization: together
@@ -1,18 +0,0 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 2.6/2.7/3.5-3.9, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives
bzip, , , Compress/Decompress archives
xz, , , Compress/Decompress archives
zstd, , Optional, Compress/Decompress archives
file, , , Create/Use Buildcaches
gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories
svn, , Optional, Manage Software Repositories
hg, , Optional, Manage Software Repositories
Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources

@@ -387,7 +387,7 @@ some nice features:
   Spack-built compiler can be given to an IDE without requiring the
   IDE to load that compiler's module.

Unfortunately, Spack's RPATH support does not work in every case. For example:
Unfortunately, Spack's RPATH support does not work in all cases. For example:

#. Software comes in many forms --- not just compiled ELF binaries,
   but also as interpreted code in Python, R, JVM bytecode, etc.

597
lib/spack/env/cc
vendored
@@ -1,5 +1,4 @@
#!/bin/sh
# shellcheck disable=SC2034 # evals in this script fool shellcheck
#!/bin/bash
#
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
@@ -21,41 +20,25 @@
# -Wl,-rpath arguments for dependency /lib directories.
#

# Reset IFS to the default: whitespace-separated lists. When we use
# other separators, we set and reset it.
unset IFS

# Separator for lists whose names end with `_list`.
# We pick the alarm bell character, which is highly unlikely to
# conflict with anything. This is a literal bell character (which
# we have to use since POSIX sh does not convert escape sequences
# like '\a' outside of the format argument of `printf`).
# NOTE: Depending on your editor this may look empty, but it is not.
readonly lsep=''

# This is an array of environment variables that need to be set before
# the script runs. They are set by routines in spack.build_environment
# as part of the package installation process.
readonly params="\
SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
SPACK_DEBUG_LOG_ID
SPACK_COMPILER_SPEC
SPACK_CC_RPATH_ARG
SPACK_CXX_RPATH_ARG
SPACK_F77_RPATH_ARG
SPACK_FC_RPATH_ARG
SPACK_LINKER_ARG
SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS"

# Optional parameters that aren't required to be set

# Boolean (true/false/custom) if we want to add debug flags
# SPACK_ADD_DEBUG_FLAGS

# If a custom flag is requested, it will be defined
# SPACK_DEBUG_FLAGS
parameters=(
    SPACK_ENV_PATH
    SPACK_DEBUG_LOG_DIR
    SPACK_DEBUG_LOG_ID
    SPACK_COMPILER_SPEC
    SPACK_CC_RPATH_ARG
    SPACK_CXX_RPATH_ARG
    SPACK_F77_RPATH_ARG
    SPACK_FC_RPATH_ARG
    SPACK_TARGET_ARGS
    SPACK_DTAGS_TO_ADD
    SPACK_DTAGS_TO_STRIP
    SPACK_LINKER_ARG
    SPACK_SHORT_SPEC
    SPACK_SYSTEM_DIRS
)

# The compiler input variables are checked for sanity later:
#   SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
@@ -67,159 +50,43 @@ SPACK_SYSTEM_DIRS"
# Test command is used to unit test the compiler script.
#   SPACK_TEST_COMMAND

# die MESSAGE
# Print a message and exit with error code 1.
die() {
    echo "[spack cc] ERROR: $*"
# die()
# Prints a message and exits with error 1.
function die {
    echo "$@"
    exit 1
}

# empty VARNAME
# Return whether the variable VARNAME is unset or set to the empty string.
empty() {
    eval "test -z \"\${$1}\""
}
# read input parameters into proper bash arrays.
# SYSTEM_DIRS is delimited by :
IFS=':' read -ra SPACK_SYSTEM_DIRS <<< "${SPACK_SYSTEM_DIRS}"

# setsep LISTNAME
# Set the global variable 'sep' to the separator for a list with name LISTNAME.
# There are three types of lists:
#   1. regular lists end with _list and are separated by $lsep
#   2. directory lists end with _dirs/_DIRS/PATH(S) and are separated by ':'
#   3. any other list is assumed to be separated by spaces: " "
setsep() {
    case "$1" in
        *_dirs|*_DIRS|*PATH|*PATHS)
            sep=':'
            ;;
        *_list)
            sep="$lsep"
            ;;
        *)
            sep=" "
            ;;
    esac
}
# SPACK_<LANG>FLAGS and SPACK_LDLIBS are split by ' '
IFS=' ' read -ra SPACK_FFLAGS <<< "$SPACK_FFLAGS"
IFS=' ' read -ra SPACK_CPPFLAGS <<< "$SPACK_CPPFLAGS"
IFS=' ' read -ra SPACK_CFLAGS <<< "$SPACK_CFLAGS"
IFS=' ' read -ra SPACK_CXXFLAGS <<< "$SPACK_CXXFLAGS"
IFS=' ' read -ra SPACK_LDFLAGS <<< "$SPACK_LDFLAGS"
IFS=' ' read -ra SPACK_LDLIBS <<< "$SPACK_LDLIBS"

# prepend LISTNAME ELEMENT [SEP]
#
# Prepend ELEMENT to the list stored in the variable LISTNAME,
# assuming the list is separated by SEP.
# Handles empty lists and single-element lists.
prepend() {
    varname="$1"
    elt="$2"

    if empty "$varname"; then
        eval "$varname=\"\${elt}\""
    else
        # Get the appropriate separator for the list we're appending to.
        setsep "$varname"
        eval "$varname=\"\${elt}${sep}\${$varname}\""
    fi
}

# append LISTNAME ELEMENT [SEP]
#
# Append ELEMENT to the list stored in the variable LISTNAME,
# assuming the list is separated by SEP.
# Handles empty lists and single-element lists.
append() {
    varname="$1"
    elt="$2"

    if empty "$varname"; then
        eval "$varname=\"\${elt}\""
    else
        # Get the appropriate separator for the list we're appending to.
        setsep "$varname"
        eval "$varname=\"\${$varname}${sep}\${elt}\""
    fi
}

# extend LISTNAME1 LISTNAME2 [PREFIX]
#
# Append the elements stored in the variable LISTNAME2
# to the list stored in LISTNAME1.
# If PREFIX is provided, prepend it to each element.
extend() {
    # Figure out the appropriate IFS for the list we're reading.
    setsep "$2"
    if [ "$sep" != " " ]; then
        IFS="$sep"
    fi
    eval "for elt in \${$2}; do append $1 \"$3\${elt}\"; done"
    unset IFS
}

# preextend LISTNAME1 LISTNAME2 [PREFIX]
#
# Prepend the elements stored in the list at LISTNAME2
# to the list at LISTNAME1, preserving order.
# If PREFIX is provided, prepend it to each element.
preextend() {
    # Figure out the appropriate IFS for the list we're reading.
    setsep "$2"
    if [ "$sep" != " " ]; then
        IFS="$sep"
    fi

    # first, reverse the list to prepend
    _reversed_list=""
    eval "for elt in \${$2}; do prepend _reversed_list \"$3\${elt}\"; done"

    # prepend reversed list to preextend in order
    IFS="${lsep}"
    for elt in $_reversed_list; do prepend "$1" "$3${elt}"; done
    unset IFS
}
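
# Illustrative aside (not part of the wrapper): with the helpers above, a
# list such as include_dirs_list="dirA${lsep}dirB" can be expanded onto
# another list, e.g. `extend args_list include_dirs_list "-I"` appends
# "-IdirA" and "-IdirB" to args_list, with setsep choosing each list's
# separator from its name.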

# system_dir PATH
# test whether a path is a system directory
system_dir() {
    IFS=':'  # SPACK_SYSTEM_DIRS is colon-separated
function system_dir {
    path="$1"
    for sd in $SPACK_SYSTEM_DIRS; do
        if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
    for sd in "${SPACK_SYSTEM_DIRS[@]}"; do
        if [ "${path}" == "${sd}" ] || [ "${path}" == "${sd}/" ]; then
            # success if path starts with a system prefix
            unset IFS
            return 0
        fi
    done
    unset IFS
    return 1  # fail if the path does not start with a system prefix
}

# Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
    die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
fi

# ensure required variables are set
for param in $params; do
    if eval "test -z \"\${${param}:-}\""; then
for param in "${parameters[@]}"; do
    if [[ -z ${!param+x} ]]; then
        die "Spack compiler must be run from Spack! Input '$param' is missing."
    fi
done

# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
    SPACK_ADD_DEBUG_FLAGS="false"
fi

# SPACK_ADD_DEBUG_FLAGS must be true/false/custom
is_valid="false"
for param in "true" "false" "custom"; do
    if [ "$param" = "$SPACK_ADD_DEBUG_FLAGS" ]; then
        is_valid="true"
    fi
done

# Exit with error if we are given an incorrect value
if [ "$is_valid" = "false" ]; then
    die "SPACK_ADD_DEBUG_FLAGS, if defined, must be one of 'true', 'false', or 'custom'."
fi

# Figure out the type of compiler, the language, and the mode so that
# the compiler script knows what to do.
#
@@ -234,42 +101,37 @@ fi
#   ld    link
#   ccld  compile & link

command="${0##*/}"
command=$(basename "$0")
comp="CC"
case "$command" in
    cpp)
        mode=cpp
        debug_flags="-g"
        ;;
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc)
        command="$SPACK_CC"
        language="C"
        comp="CC"
        lang_flags=C
        debug_flags="-g"
        ;;
    c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC)
        command="$SPACK_CXX"
        language="C++"
        comp="CXX"
        lang_flags=CXX
        debug_flags="-g"
        ;;
    ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt)
        command="$SPACK_FC"
        language="Fortran 90"
        comp="FC"
        lang_flags=F
        debug_flags="-g"
        ;;
    f77|xlf|xlf_r|pgf77)
        command="$SPACK_F77"
        language="Fortran 77"
        comp="F77"
        lang_flags=F
        debug_flags="-g"
        ;;
    ld|ld.gold|ld.lld)
    ld)
        mode=ld
        ;;
    *)
@@ -280,7 +142,7 @@ esac
# If any of the arguments below are present, then the mode is vcheck.
# In vcheck mode, nothing is added in terms of extra search paths or
# libraries.
if [ -z "$mode" ] || [ "$mode" = ld ]; then
if [[ -z $mode ]] || [[ $mode == ld ]]; then
    for arg in "$@"; do
        case $arg in
            -v|-V|--version|-dumpversion)
@@ -292,16 +154,16 @@ if [ -z "$mode" ] || [ "$mode" = ld ]; then
fi

# Finish setting up the mode.
if [ -z "$mode" ]; then
if [[ -z $mode ]]; then
    mode=ccld
    for arg in "$@"; do
        if [ "$arg" = "-E" ]; then
        if [[ $arg == -E ]]; then
            mode=cpp
            break
        elif [ "$arg" = "-S" ]; then
        elif [[ $arg == -S ]]; then
            mode=as
            break
        elif [ "$arg" = "-c" ]; then
        elif [[ $arg == -c ]]; then
            mode=cc
            break
        fi
@@ -328,46 +190,42 @@ dtags_to_strip="${SPACK_DTAGS_TO_STRIP}"
linker_arg="${SPACK_LINKER_ARG}"

# Set up rpath variable according to language.
rpath="ERROR: RPATH ARG WAS NOT SET"
eval "rpath=\${SPACK_${comp}_RPATH_ARG:?${rpath}}"
eval rpath=\$SPACK_${comp}_RPATH_ARG

# Dump the mode and exit if the command is dump-mode.
if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
if [[ $SPACK_TEST_COMMAND == dump-mode ]]; then
    echo "$mode"
    exit
fi

# If, say, SPACK_CC is set but SPACK_FC is not, we want to know. Compilers do not
# *have* to set up Fortran executables, so we need to tell the user when a build is
# about to attempt to use them unsuccessfully.
if [ -z "$command" ]; then
    die "Compiler '$SPACK_COMPILER_SPEC' does not have a $language compiler configured."
# Check that at least one of the real commands was actually selected,
# otherwise we don't know what to execute.
if [[ -z $command ]]; then
    die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
fi

#
# Filter '.' and Spack environment directories out of PATH so that
# this script doesn't just call itself
#
new_dirs=""
IFS=':'
for dir in $PATH; do
IFS=':' read -ra env_path <<< "$PATH"
IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
spack_env_dirs+=("" ".")
export PATH=""
for dir in "${env_path[@]}"; do
    addpath=true
    for spack_env_dir in $SPACK_ENV_PATH; do
        case "${dir%%/}" in
            "$spack_env_dir"|'.'|'')
                addpath=false
                break
                ;;
        esac
    for env_dir in "${spack_env_dirs[@]}"; do
        if [[ "$dir" == "$env_dir" ]]; then
            addpath=false
            break
        fi
    done
    if [ $addpath = true ]; then
        append new_dirs "$dir"
    if $addpath; then
        export PATH="${PATH:+$PATH:}$dir"
    fi
done
unset IFS
export PATH="$new_dirs"

if [ "$mode" = vcheck ]; then
if [[ $mode == vcheck ]]; then
    exec "${command}" "$@"
fi

@@ -375,20 +233,16 @@ fi
# It doesn't work with -rpath.
# This variable controls whether they are added.
add_rpaths=true
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
    if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
        for arg in "$@"; do
            if [ "$arg" = "-r" ]; then
                if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
                    add_rpaths=false
                    break
                fi
            elif [ "$arg" = "-Wl,-r" ] && [ "$mode" = ccld ]; then
                add_rpaths=false
                break
            fi
        done
    fi
if [[ ($mode == ld || $mode == ccld) && "$SPACK_SHORT_SPEC" =~ "darwin" ]];
then
    for arg in "$@"; do
        if [[ ($arg == -r && $mode == ld) ||
              ($arg == -r && $mode == ccld) ||
              ($arg == -Wl,-r && $mode == ccld) ]]; then
            add_rpaths=false
            break
        fi
    done
fi

# Save original command for debug logging
@@ -411,22 +265,17 @@ input_command="$*"
# The libs variable is initialized here for completeness, and it is also
# used later to inject flags supplied via `ldlibs` on the command
# line. These come into the wrappers via SPACK_LDLIBS.

# The loop below breaks up the command line into these lists of components.
# The lists are all bell-separated to be as flexible as possible, as their
# contents may come from the command line, from ' '-separated lists,
# ':'-separated lists, etc.
include_dirs_list=""
lib_dirs_list=""
rpath_dirs_list=""
system_include_dirs_list=""
system_lib_dirs_list=""
system_rpath_dirs_list=""
isystem_system_include_dirs_list=""
isystem_include_dirs_list=""
libs_list=""
other_args_list=""

#
includes=()
libdirs=()
rpaths=()
system_includes=()
system_libdirs=()
system_rpaths=()
libs=()
other_args=()
isystem_system_includes=()
isystem_includes=()

while [ $# -ne 0 ]; do

@@ -446,32 +295,32 @@ while [ $# -ne 0 ]; do
            isystem_was_used=true
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            if system_dir "$arg"; then
                append isystem_system_include_dirs_list "$arg"
                isystem_system_includes+=("$arg")
            else
                append isystem_include_dirs_list "$arg"
                isystem_includes+=("$arg")
            fi
            ;;
        -I*)
            arg="${1#-I}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            if system_dir "$arg"; then
                append system_include_dirs_list "$arg"
                system_includes+=("$arg")
            else
                append include_dirs_list "$arg"
                includes+=("$arg")
            fi
            ;;
        -L*)
            arg="${1#-L}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            if system_dir "$arg"; then
                append system_lib_dirs_list "$arg"
                system_libdirs+=("$arg")
            else
                append lib_dirs_list "$arg"
                libdirs+=("$arg")
            fi
            ;;
        -l*)
            # -loopopt=0 is generated erroneously in autoconf <= 2.69,
            # and passed by ifx to the linker, which confuses it with a
            # library. Filter it out.
            # TODO: generalize filtering of args with an env var, so that
            # TODO: we do not have to special case this here.
@@ -482,76 +331,66 @@ while [ $# -ne 0 ]; do
            fi
            arg="${1#-l}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            append other_args_list "-l$arg"
            other_args+=("-l$arg")
            ;;
        -Wl,*)
            arg="${1#-Wl,}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            case "$arg" in
                -rpath=*) rp="${arg#-rpath=}" ;;
                --rpath=*) rp="${arg#--rpath=}" ;;
                -rpath,*) rp="${arg#-rpath,}" ;;
                --rpath,*) rp="${arg#--rpath,}" ;;
                -rpath|--rpath)
                    shift; arg="$1"
                    case "$arg" in
                        -Wl,*)
                            rp="${arg#-Wl,}"
                            ;;
                        *)
                            die "-Wl,-rpath was not followed by -Wl,*"
                            ;;
                    esac
                    ;;
                "$dtags_to_strip")
                    : # We want to explicitly remove this flag
                    ;;
                *)
                    append other_args_list "-Wl,$arg"
                    ;;
            esac
            if [[ "$arg" = -rpath=* ]]; then
                rp="${arg#-rpath=}"
            elif [[ "$arg" = --rpath=* ]]; then
                rp="${arg#--rpath=}"
            elif [[ "$arg" = -rpath,* ]]; then
                rp="${arg#-rpath,}"
            elif [[ "$arg" = --rpath,* ]]; then
                rp="${arg#--rpath,}"
            elif [[ "$arg" =~ ^-?-rpath$ ]]; then
                shift; arg="$1"
                if [[ "$arg" != -Wl,* ]]; then
                    die "-Wl,-rpath was not followed by -Wl,*"
                fi
                rp="${arg#-Wl,}"
            elif [[ "$arg" = "$dtags_to_strip" ]] ; then
                : # We want to explicitly remove this flag
            else
                other_args+=("-Wl,$arg")
            fi
            ;;
        -Xlinker,*)
            arg="${1#-Xlinker,}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi

            case "$arg" in
                -rpath=*) rp="${arg#-rpath=}" ;;
                --rpath=*) rp="${arg#--rpath=}" ;;
                -rpath|--rpath)
                    shift; arg="$1"
                    case "$arg" in
                        -Xlinker,*)
                            rp="${arg#-Xlinker,}"
                            ;;
                        *)
                            die "-Xlinker,-rpath was not followed by -Xlinker,*"
                            ;;
                    esac
                    ;;
                *)
                    append other_args_list "-Xlinker,$arg"
                    ;;
            esac
            if [[ "$arg" = -rpath=* ]]; then
                rp="${arg#-rpath=}"
            elif [[ "$arg" = --rpath=* ]]; then
                rp="${arg#--rpath=}"
            elif [[ "$arg" = -rpath ]] || [[ "$arg" = --rpath ]]; then
                shift; arg="$1"
                if [[ "$arg" != -Xlinker,* ]]; then
                    die "-Xlinker,-rpath was not followed by -Xlinker,*"
                fi
                rp="${arg#-Xlinker,}"
            else
                other_args+=("-Xlinker,$arg")
            fi
            ;;
        -Xlinker)
            if [ "$2" = "-rpath" ]; then
                if [ "$3" != "-Xlinker" ]; then
            if [[ "$2" == "-rpath" ]]; then
                if [[ "$3" != "-Xlinker" ]]; then
                    die "-Xlinker,-rpath was not followed by -Xlinker,*"
                fi
                shift 3;
                rp="$1"
            elif [ "$2" = "$dtags_to_strip" ]; then
            elif [[ "$2" = "$dtags_to_strip" ]] ; then
                shift # We want to explicitly remove this flag
            else
                append other_args_list "$1"
                other_args+=("$1")
            fi
            ;;
        *)
            if [ "$1" = "$dtags_to_strip" ]; then
            if [[ "$1" = "$dtags_to_strip" ]] ; then
                : # We want to explicitly remove this flag
            else
                append other_args_list "$1"
                other_args+=("$1")
            fi
            ;;
    esac
@@ -559,9 +398,9 @@ while [ $# -ne 0 ]; do
    # test rpaths against system directories in one place.
    if [ -n "$rp" ]; then
        if system_dir "$rp"; then
            append system_rpath_dirs_list "$rp"
            system_rpaths+=("$rp")
        else
            append rpath_dirs_list "$rp"
            rpaths+=("$rp")
        fi
    fi
    shift
@@ -574,24 +413,14 @@ done
# See the gmake manual on implicit rules for details:
# https://www.gnu.org/software/make/manual/html_node/Implicit-Variables.html
#
flags_list=""

# Add debug flags
if [ "${SPACK_ADD_DEBUG_FLAGS}" = "true" ]; then
    extend flags_list debug_flags

# If a custom flag is requested, derive from environment
elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
    extend flags_list SPACK_DEBUG_FLAGS
fi
flags=()

# Fortran flags come before CPPFLAGS
case "$mode" in
    cc|ccld)
        case $lang_flags in
            F)
                extend flags_list SPACK_FFLAGS
                ;;
                flags=("${flags[@]}" "${SPACK_FFLAGS[@]}") ;;
        esac
        ;;
esac
@@ -599,8 +428,7 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
    cpp|as|cc|ccld)
        extend flags_list SPACK_CPPFLAGS
        ;;
        flags=("${flags[@]}" "${SPACK_CPPFLAGS[@]}") ;;
esac


@@ -609,67 +437,67 @@ case "$mode" in
    cc|ccld)
        case $lang_flags in
            C)
                extend flags_list SPACK_CFLAGS
                ;;
                flags=("${flags[@]}" "${SPACK_CFLAGS[@]}") ;;
            CXX)
                extend flags_list SPACK_CXXFLAGS
                ;;
                flags=("${flags[@]}" "${SPACK_CXXFLAGS[@]}") ;;
        esac

        # prepend target args
        preextend flags_list SPACK_TARGET_ARGS
        flags=(${SPACK_TARGET_ARGS[@]} "${flags[@]}")
        ;;
esac

# Linker flags
case "$mode" in
    ld|ccld)
        extend flags_list SPACK_LDFLAGS
        ;;
        flags=("${flags[@]}" "${SPACK_LDFLAGS[@]}") ;;
esac

# On macOS insert headerpad_max_install_names linker flag
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
    if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
        case "$mode" in
            ld)
                append flags_list "-headerpad_max_install_names" ;;
            ccld)
                append flags_list "-Wl,-headerpad_max_install_names" ;;
        esac
    fi
if [[ ($mode == ld || $mode == ccld) && "$SPACK_SHORT_SPEC" =~ "darwin" ]];
then
    case "$mode" in
        ld)
            flags=("${flags[@]}" -headerpad_max_install_names) ;;
        ccld)
            flags=("${flags[@]}" "-Wl,-headerpad_max_install_names") ;;
    esac
fi

if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
    if [ "$add_rpaths" != "false" ]; then
IFS=':' read -ra rpath_dirs <<< "$SPACK_RPATH_DIRS"
if [[ $mode == ccld || $mode == ld ]]; then

    if [[ "$add_rpaths" != "false" ]] ; then
        # Append RPATH directories. Note that in the case of the
        # top-level package these directories may not exist yet. For dependencies
        # it is assumed that paths have already been confirmed.
        extend rpath_dirs_list SPACK_RPATH_DIRS
        rpaths=("${rpaths[@]}" "${rpath_dirs[@]}")
    fi

fi

if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
    extend lib_dirs_list SPACK_LINK_DIRS
IFS=':' read -ra link_dirs <<< "$SPACK_LINK_DIRS"
if [[ $mode == ccld || $mode == ld ]]; then
    libdirs=("${libdirs[@]}" "${link_dirs[@]}")
fi

# add RPATHs if we're in any linking mode
case "$mode" in
    ld|ccld)
        # Set extra RPATHs
        extend lib_dirs_list SPACK_COMPILER_EXTRA_RPATHS
        if [ "$add_rpaths" != "false" ]; then
            extend rpath_dirs_list SPACK_COMPILER_EXTRA_RPATHS
        IFS=':' read -ra extra_rpaths <<< "$SPACK_COMPILER_EXTRA_RPATHS"
        libdirs+=("${extra_rpaths[@]}")
        if [[ "$add_rpaths" != "false" ]] ; then
            rpaths+=("${extra_rpaths[@]}")
        fi

        # Set implicit RPATHs
        if [ "$add_rpaths" != "false" ]; then
            extend rpath_dirs_list SPACK_COMPILER_IMPLICIT_RPATHS
        IFS=':' read -ra implicit_rpaths <<< "$SPACK_COMPILER_IMPLICIT_RPATHS"
        if [[ "$add_rpaths" != "false" ]] ; then
            rpaths+=("${implicit_rpaths[@]}")
        fi

        # Add SPACK_LDLIBS to args
        for lib in $SPACK_LDLIBS; do
            append libs_list "${lib#-l}"
        for lib in "${SPACK_LDLIBS[@]}"; do
            libs+=("${lib#-l}")
        done
        ;;
esac
@@ -677,62 +505,63 @@ esac
#
# Finally, reassemble the command line.
#
args_list="$flags_list"

# Includes and system includes first
args=()

# flags assembled earlier
args+=("${flags[@]}")

# Insert include directories just prior to any system include directories
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list include_dirs_list "-I"
extend args_list isystem_include_dirs_list "-isystem${lsep}"

case "$mode" in
    cpp|cc|as|ccld)
        if [ "$isystem_was_used" = "true" ]; then
            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
        else
            extend args_list SPACK_INCLUDE_DIRS "-I"
        fi
        ;;
esac
for dir in "${includes[@]}"; do args+=("-I$dir"); done
for dir in "${isystem_includes[@]}"; do args+=("-isystem" "$dir"); done

extend args_list system_include_dirs_list -I
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
IFS=':' read -ra spack_include_dirs <<< "$SPACK_INCLUDE_DIRS"
if [[ $mode == cpp || $mode == cc || $mode == as || $mode == ccld ]]; then
    if [[ "$isystem_was_used" == "true" ]] ; then
        for dir in "${spack_include_dirs[@]}"; do args+=("-isystem" "$dir"); done
    else
        for dir in "${spack_include_dirs[@]}"; do args+=("-I$dir"); done
    fi
fi

for dir in "${system_includes[@]}"; do args+=("-I$dir"); done
for dir in "${isystem_system_includes[@]}"; do args+=("-isystem" "$dir"); done

# Library search paths
extend args_list lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"
for dir in "${libdirs[@]}"; do args+=("-L$dir"); done
for dir in "${system_libdirs[@]}"; do args+=("-L$dir"); done

# RPATHs arguments
case "$mode" in
    ccld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$linker_arg$dtags_to_add"
        fi
        extend args_list rpath_dirs_list "$rpath"
        extend args_list system_rpath_dirs_list "$rpath"
        if [ -n "$dtags_to_add" ] ; then args+=("$linker_arg$dtags_to_add") ; fi
        for dir in "${rpaths[@]}"; do args+=("$rpath$dir"); done
        for dir in "${system_rpaths[@]}"; do args+=("$rpath$dir"); done
        ;;
    ld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$dtags_to_add"
        fi
        extend args_list rpath_dirs_list "-rpath${lsep}"
        extend args_list system_rpath_dirs_list "-rpath${lsep}"
        if [ -n "$dtags_to_add" ] ; then args+=("$dtags_to_add") ; fi
        for dir in "${rpaths[@]}"; do args+=("-rpath" "$dir"); done
        for dir in "${system_rpaths[@]}"; do args+=("-rpath" "$dir"); done
        ;;
esac

# Other arguments from the input command
extend args_list other_args_list
args+=("${other_args[@]}")

# Inject SPACK_LDLIBS, if supplied
extend args_list libs_list "-l"
for lib in "${libs[@]}"; do
    args+=("-l$lib");
done

full_command_list="$command"
extend full_command_list args_list
full_command=("$command" "${args[@]}")

# prepend the ccache binary if we're using ccache
if [ -n "$SPACK_CCACHE_BINARY" ]; then
    case "$lang_flags" in
        C|CXX) # ccache only supports C languages
            prepend full_command_list "${SPACK_CCACHE_BINARY}"
            full_command=("${SPACK_CCACHE_BINARY}" "${full_command[@]}")
            # workaround for stage being a temp folder
            # see #3761#issuecomment-294352232
            export CCACHE_NOHASHDIR=yes
@@ -741,36 +570,22 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
fi

# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
    case "$SPACK_TEST_COMMAND" in
        dump-args)
            IFS="$lsep"
            for arg in $full_command_list; do
                echo "$arg"
            done
            unset IFS
            exit
            ;;
        dump-env-*)
            var=${SPACK_TEST_COMMAND#dump-env-}
            eval "printf '%s\n' \"\$0: \$var: \$$var\""
            ;;
        *)
            die "Unknown test command: '$SPACK_TEST_COMMAND'"
            ;;
    esac
if [[ $SPACK_TEST_COMMAND == dump-args ]]; then
    IFS="
" && echo "${full_command[*]}"
    exit
elif [[ -n $SPACK_TEST_COMMAND ]]; then
    die "ERROR: Unknown test command"
fi

#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
if [[ $SPACK_DEBUG == TRUE ]]; then
    input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
    output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
    echo "[$mode] $command $input_command" >> "$input_log"
    echo "[$mode] ${full_command_list}" >> "$output_log"
    echo "[$mode] ${full_command[*]}" >> "$output_log"
fi

# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
exec "${full_command[@]}"

1
lib/spack/env/ld.gold
vendored
@@ -1 +0,0 @@
cc
1
lib/spack/env/ld.lld
vendored
@@ -1 +0,0 @@
cc
4
lib/spack/external/__init__.py
vendored
@@ -11,7 +11,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)
* Version: 0.1.2 (commit 26dec9d47e509daf8c970de4c89da200da52ad20)

argparse
--------
@@ -88,8 +88,6 @@
* Usage: Needed by pytest. Library with cross-python path,
  ini-parsing, io, code, and log facilities.
* Version: 1.4.34 (last version supporting Python 2.6)
* Note: This package has been modified:
  * https://github.com/pytest-dev/py/pull/186 was backported

pytest
------

13
lib/spack/external/archspec/README.md
vendored
@@ -49,19 +49,6 @@ $ tox
congratulations :)
```

## Citing Archspec

If you are referencing `archspec` in a publication, please cite the following
paper:

* Massimiliano Culpo, Gregory Becker, Carlos Eduardo Arango Gutierrez, Kenneth
  Hoste, and Todd Gamblin.
  [**`archspec`: A library for detecting, labeling, and reasoning about
  microarchitectures**](https://tgamblin.github.io/pubs/archspec-canopie-hpc-2020.pdf).
  In *2nd International Workshop on Containers and New Orchestration Paradigms
  for Isolated Environments in HPC (CANOPIE-HPC'20)*, Online Event, November
  12, 2020.

## License

Archspec is distributed under the terms of both the MIT license and the

48
lib/spack/external/archspec/cpu/detect.py
vendored
@@ -206,26 +206,11 @@ def host():
    # Get a list of possible candidates for this micro-architecture
    candidates = compatible_microarchitectures(info)

    # Sorting criteria for candidates
    def sorting_fn(item):
        return len(item.ancestors), len(item.features)

    # Get the best generic micro-architecture
    generic_candidates = [c for c in candidates if c.vendor == "generic"]
    best_generic = max(generic_candidates, key=sorting_fn)

    # Filter the candidates to be descendants of the best generic candidate.
    # This is to avoid that the lack of a niche feature that can be disabled
    # from e.g. BIOS prevents detection of a reasonably performant architecture
    candidates = [c for c in candidates if c > best_generic]

    # If we don't have candidates, return the best generic micro-architecture
    if not candidates:
        return best_generic

    # Reverse sort of the depth for the inheritance tree among only targets we
    # can use. This gets the newest target we satisfy.
    return max(candidates, key=sorting_fn)
    return sorted(
        candidates, key=lambda t: (len(t.ancestors), len(t.features)), reverse=True
    )[0]


def compatibility_check(architecture_family):
@@ -260,13 +245,7 @@ def compatibility_check_for_power(info, target):
    """Compatibility check for PPC64 and PPC64LE architectures."""
    basename = platform.machine()
    generation_match = re.search(r"POWER(\d+)", info.get("cpu", ""))
    try:
        generation = int(generation_match.group(1))
    except AttributeError:
        # There might be no match under emulated environments. For instance
        # emulating a ppc64le with QEMU and Docker still reports the host
        # /proc/cpuinfo and not a Power
        generation = 0
    generation = int(generation_match.group(1))

    # We can use a target if it descends from our machine type and our
    # generation (9 for POWER9, etc) is at least its generation.
@@ -306,22 +285,3 @@ def compatibility_check_for_aarch64(info, target):
        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
    )


@compatibility_check(architecture_family="riscv64")
def compatibility_check_for_riscv64(info, target):
    """Compatibility check for riscv64 architectures."""
    basename = "riscv64"
    uarch = info.get("uarch")

    # sifive unmatched board
    if uarch == "sifive,u74-mc":
        uarch = "u74mc"
    # catch-all for unknown uarchs
    else:
        uarch = "riscv64"

    arch_root = TARGETS[basename]
    return (target == arch_root or arch_root in target.ancestors) and (
        target == uarch or target.vendor == "generic"
    )

@@ -173,12 +173,6 @@ def family(self):

        return roots.pop()

    @property
    def generic(self):
        """Returns the best generic architecture that is compatible with self"""
        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
        return max(generics, key=lambda x: len(x.ancestors))
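
    # Usage sketch (illustrative, assuming the archspec TARGETS dict):
    # TARGETS["skylake"].generic walks [self] + self.ancestors and keeps
    # the deepest entry whose vendor is "generic", e.g. plain "x86_64".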

    def to_dict(self, return_list_of_items=False):
        """Returns a dictionary representation of this object.

@@ -1942,12 +1942,6 @@
          "versions": "5:",
          "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
        }
      ],
      "arm" : [
        {
          "versions": "20:",
          "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
        }
      ]
    }
  },
@@ -2017,44 +2011,6 @@
      "features": [],
      "compilers": {
      }
    },
    "riscv64": {
      "from": [],
      "vendor": "generic",
      "features": [],
      "compilers": {
        "gcc": [
          {
            "versions": "7.1:",
            "flags" : "-march=rv64gc"
          }
        ],
        "clang": [
          {
            "versions": "9.0:",
            "flags" : "-march=rv64gc"
          }
        ]
      }
    },
    "u74mc": {
      "from": ["riscv64"],
      "vendor": "SiFive",
      "features": [],
      "compilers": {
        "gcc": [
          {
            "versions": "10.2:",
            "flags" : "-march=rv64gc -mtune=sifive-7-series"
          }
        ],
        "clang" : [
          {
            "versions": "12.0:",
            "flags" : "-march=rv64gc -mtune=sifive-7-series"
          }
        ]
      }
    }
  },
  "feature_aliases": {

91
lib/spack/external/ctest_log_parser.py
vendored
@@ -77,18 +77,52 @@
from six import StringIO
from six import string_types

class prefilter(object):
    """Make regular expressions faster with a simple prefiltering predicate.

    Some regular expressions seem to be much more costly than others. In
    most cases, we can evaluate a simple precondition, e.g.::

        lambda x: "error" in x

    to avoid evaluating expensive regexes on all lines in a file. This
    can reduce parse time for large files by orders of magnitude when
    evaluating lots of expressions.

    A ``prefilter`` object is designed to act like a regex, but
    ``search`` and ``match`` check the precondition before bothering to
    evaluate the regular expression.

    Note that ``match`` and ``search`` just return ``True`` and ``False``
    at the moment. Make them return a ``MatchObject`` or ``None`` if it
    becomes necessary.
    """
    def __init__(self, precondition, *patterns):
        self.patterns = [re.compile(p) for p in patterns]
        self.pre = precondition
        self.pattern = "\n ".join(
            ('MERGED:',) + patterns)

    def search(self, text):
        return self.pre(text) and any(p.search(text) for p in self.patterns)

    def match(self, text):
        return self.pre(text) and any(p.match(text) for p in self.patterns)

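
# Hypothetical usage sketch (not part of the vendored file): the
# precondition runs first, so the regex is only tried on matching lines.
#
#     pf = prefilter(lambda x: 'error' in x, r'([^:]+): error')
#     pf.search('foo.c: error 42')      # True: precondition and regex hit
#     pf.search('all tests passed')     # False: regex never evaluated
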
_error_matches = [
    "^FAIL: ",
    "^FATAL: ",
    "^failed ",
    "FAILED",
    "Failed test",
    prefilter(
        lambda x: any(s in x for s in (
            'Error:', 'error', 'undefined reference', 'multiply defined')),
        "([^:]+): error[ \\t]*[0-9]+[ \\t]*:",
        "([^:]+): (Error:|error|undefined reference|multiply defined)",
        "([^ :]+) ?: (error|fatal error|catastrophic error)",
        "([^:]+)\\(([^\\)]+)\\) ?: (error|fatal error|catastrophic error)"),
    "^FAILED",
    "^[Bb]us [Ee]rror",
    "^[Ss]egmentation [Vv]iolation",
    "^[Ss]egmentation [Ff]ault",
    ":.*[Pp]ermission [Dd]enied",
    "[^ :]:[0-9]+: [^ \\t]",
    "[^:]: error[ \\t]*[0-9]+[ \\t]*:",
    "^Error ([0-9]+):",
    "^Fatal",
    "^[Ee]rror: ",
@@ -98,9 +132,6 @@
    "^cc[^C]*CC: ERROR File = ([^,]+), Line = ([0-9]+)",
    "^ld([^:])*:([ \\t])*ERROR([^:])*:",
    "^ild:([ \\t])*\\(undefined symbol\\)",
    "[^ :] : (error|fatal error|catastrophic error)",
    "[^:]: (Error:|error|undefined reference|multiply defined)",
    "[^:]\\([^\\)]+\\) ?: (error|fatal error|catastrophic error)",
    "^fatal error C[0-9]+:",
    ": syntax error ",
    "^collect2: ld returned 1 exit status",
@@ -109,7 +140,7 @@
    "^Unresolved:",
    "Undefined symbol",
    "^Undefined[ \\t]+first referenced",
    "^CMake Error",
    "^CMake Error.*:",
    ":[ \\t]cannot find",
    ":[ \\t]can't find",
    ": \\*\\*\\* No rule to make target [`'].*\\'. Stop",
@@ -123,7 +154,6 @@
    "ld: 0706-006 Cannot find or open library file: -l ",
    "ild: \\(argument error\\) can't find library argument ::",
    "^could not be found and will not be loaded.",
    "^WARNING: '.*' is missing on your system",
    "s:616 string too big",
    "make: Fatal error: ",
    "ld: 0711-993 Error occurred while writing to the output file:",
@@ -145,40 +175,44 @@
    "instantiated from ",
    "candidates are:",
    ": warning",
    ": WARNING",
    ": \\(Warning\\)",
    ": note",
    " ok",
    "Note:",
    "makefile:",
    "Makefile:",
    ":[ \\t]+Where:",
    "[^ :]:[0-9]+: Warning",
    "([^ :]+):([0-9]+): Warning",
    "------ Build started: .* ------",
]

#: Regexes to match file/line numbers in error/warning messages
_warning_matches = [
    "[^ :]:[0-9]+: warning:",
    "[^ :]:[0-9]+: note:",
    prefilter(
        lambda x: 'warning' in x,
        "([^ :]+):([0-9]+): warning:",
        "([^:]+): warning ([0-9]+):",
        "([^:]+): warning[ \\t]*[0-9]+[ \\t]*:",
        "([^ :]+) : warning",
        "([^:]+): warning"),
    prefilter(
        lambda x: 'note:' in x,
        "^([^ :]+):([0-9]+): note:"),
    prefilter(
        lambda x: any(s in x for s in ('Warning', 'Warnung')),
        "^(Warning|Warnung) ([0-9]+):",
        "^(Warning|Warnung)[ :]",
        "^cxx: Warning:",
        "([^ :]+):([0-9]+): (Warning|Warnung)",
        "^CMake Warning.*:"),
    "file: .* has no symbols",
    "^cc[^C]*CC: WARNING File = ([^,]+), Line = ([0-9]+)",
    "^ld([^:])*:([ \\t])*WARNING([^:])*:",
    "[^:]: warning [0-9]+:",
    "^\"[^\"]+\", line [0-9]+: [Ww](arning|arnung)",
    "[^:]: warning[ \\t]*[0-9]+[ \\t]*:",
    "^(Warning|Warnung) ([0-9]+):",
    "^(Warning|Warnung)[ :]",
    "WARNING: ",
    "[^ :] : warning",
    "[^:]: warning",
    "\", line [0-9]+\\.[0-9]+: [0-9]+-[0-9]+ \\([WI]\\)",
    "^cxx: Warning:",
    "file: .* has no symbols",
    "[^ :]:[0-9]+: (Warning|Warnung)",
    "\\([0-9]*\\): remark #[0-9]*",
    "\".*\", line [0-9]+: remark\\([0-9]*\\):",
    "cc-[0-9]* CC: REMARK File = .*, Line = [0-9]*",
    "^CMake Warning",
    "^\\[WARNING\\]",
]

@@ -309,7 +343,8 @@ def _profile_match(matches, exceptions, line, match_times, exc_times):

def _parse(lines, offset, profile):
    def compile(regex_array):
        return [re.compile(regex) for regex in regex_array]
        return [regex if isinstance(regex, prefilter) else re.compile(regex)
                for regex in regex_array]

    error_matches = compile(_error_matches)
    error_exceptions = compile(_error_exceptions)
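The hunk above changes `_parse`'s `compile()` helper so that `prefilter` objects pass through untouched while plain pattern strings are still compiled. A minimal, self-contained sketch of the idea (editor-added illustration, not part of the diff):

```python
import re

class prefilter(object):
    """Cheap predicate that guards expensive regexes."""
    def __init__(self, precondition, *patterns):
        self.pre = precondition
        self.patterns = [re.compile(p) for p in patterns]

    def search(self, text):
        # the regexes only run when the cheap substring test passes
        return self.pre(text) and any(p.search(text) for p in self.patterns)

matchers = [prefilter(lambda x: 'error' in x, r"([^:]+): error"),
            "^FATAL: "]
# strings still need re.compile; prefilters are already usable
compiled = [m if isinstance(m, prefilter) else re.compile(m)
            for m in matchers]

print(any(m.search("foo.c: error 42") for m in compiled))   # True
print(any(m.search("all tests passed") for m in compiled))  # False
```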
6
lib/spack/external/py/_path/local.py
vendored
@@ -10,7 +10,7 @@
from py._path.common import iswin32, fspath
from stat import S_ISLNK, S_ISDIR, S_ISREG

from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
from os.path import abspath, normcase, normpath, isabs, exists, isdir, isfile, islink, dirname

if sys.version_info > (3,0):
    def map_as_list(func, iter):
@@ -801,10 +801,10 @@ def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
        if rootdir is None:
            rootdir = cls.get_temproot()

        nprefix = prefix.lower()
        nprefix = normcase(prefix)
        def parse_num(path):
            """ parse the number out of a path (if it matches the prefix) """
            nbasename = path.basename.lower()
            nbasename = normcase(path.basename)
            if nbasename.startswith(nprefix):
                try:
                    return int(nbasename[len(nprefix):])
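The swap from `str.lower()` to `os.path.normcase()` above makes the prefix comparison follow the platform's filename case rules instead of unconditionally lower-casing. A quick editor-added illustration:

```python
from os.path import normcase

# normcase folds case only on case-insensitive filesystems (Windows);
# on POSIX it returns its argument unchanged, unlike str.lower()
print(normcase('Session-12'))  # 'session-12' on Windows, 'Session-12' on POSIX
```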
@@ -326,7 +326,7 @@ def end_function(self, prog=None):
    """Returns the syntax needed to end a function definition.

    Parameters:
        prog (str or None): the command name
        prog (str, optional): the command name

    Returns:
        str: the function definition ending
@@ -141,7 +141,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
        file.
    """
    string = kwargs.get('string', False)
    backup = kwargs.get('backup', False)
    backup = kwargs.get('backup', True)
    ignore_absent = kwargs.get('ignore_absent', False)
    stop_at = kwargs.get('stop_at', None)

@@ -444,7 +444,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
        src (str): the directory to copy
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore
        ignore (function): function indicating which files to ignore
        _permissions (bool): for internal use only

    Raises:
@@ -518,7 +518,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
        src (str): the directory to install
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore
        ignore (function): function indicating which files to ignore

    Raises:
        IOError: if *src* does not match any files or directories
@@ -557,12 +557,12 @@ def mkdirp(*paths, **kwargs):
        paths (str): paths to create with mkdirp

    Keyword Arguments:
        mode (permission bits or None): optional permissions to set
        mode (permission bits or None, optional): optional permissions to set
            on the created directory -- use OS default if not provided
        group (group name or None): optional group for permissions of
        group (group name or None, optional): optional group for permissions of
            final created directory -- use OS default if not provided. Only
            used if world write permissions are not set
        default_perms (str or None): one of 'parents' or 'args'. The default permissions
        default_perms ('parents' or 'args', optional): The default permissions
            that are set for directories that are not themselves an argument
            for mkdirp. 'parents' means intermediate directories get the
            permissions of their direct parent directory, 'args' means
@@ -656,12 +656,6 @@ def working_dir(dirname, **kwargs):
        os.chdir(orig_dir)


class CouldNotRestoreDirectoryBackup(RuntimeError):
    def __init__(self, inner_exception, outer_exception):
        self.inner_exception = inner_exception
        self.outer_exception = outer_exception


@contextmanager
def replace_directory_transaction(directory_name, tmp_root=None):
    """Moves a directory to a temporary space. If the operations executed
@@ -689,33 +683,32 @@ def replace_directory_transaction(directory_name, tmp_root=None):
        assert os.path.isabs(tmp_root)

    tmp_dir = tempfile.mkdtemp(dir=tmp_root)
    tty.debug('Temporary directory created [{0}]'.format(tmp_dir))
    tty.debug('TEMPORARY DIRECTORY CREATED [{0}]'.format(tmp_dir))

    shutil.move(src=directory_name, dst=tmp_dir)
    tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, tmp_dir))
    tty.debug('DIRECTORY MOVED [src={0}, dest={1}]'.format(
        directory_name, tmp_dir
    ))

    try:
        yield tmp_dir
    except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
        # Try to recover the original directory, if this fails, raise a
        # composite exception.
        try:
            # Delete what was there, before copying back the original content
            if os.path.exists(directory_name):
                shutil.rmtree(directory_name)
            shutil.move(
                src=os.path.join(tmp_dir, directory_basename),
                dst=os.path.dirname(directory_name)
            )
        except Exception as outer_exception:
            raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)
    except (Exception, KeyboardInterrupt, SystemExit) as e:
        # Delete what was there, before copying back the original content
        if os.path.exists(directory_name):
            shutil.rmtree(directory_name)
        shutil.move(
            src=os.path.join(tmp_dir, directory_basename),
            dst=os.path.dirname(directory_name)
        )
        tty.debug('DIRECTORY RECOVERED [{0}]'.format(directory_name))

        tty.debug('Directory recovered [{0}]'.format(directory_name))
        raise
        msg = 'the transactional move of "{0}" failed.'
        msg += '\n ' + str(e)
        raise RuntimeError(msg.format(directory_name))
    else:
        # Otherwise delete the temporary directory
        shutil.rmtree(tmp_dir, ignore_errors=True)
        tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
        shutil.rmtree(tmp_dir)
        tty.debug('TEMPORARY DIRECTORY DELETED [{0}]'.format(tmp_dir))
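For context, the context manager above is typically used as shown below; `rebuild_prefix` is a hypothetical helper used purely to show the restore-on-failure behavior:

```python
# Hypothetical usage sketch: the directory is moved into a temporary
# backup location before the body runs, and moved back if the body raises.
with replace_directory_transaction('/opt/software/zlib') as backup_dir:
    rebuild_prefix('/opt/software/zlib')  # hypothetical helper; may raise
# success: the backup under backup_dir is discarded
# failure: the original directory contents are restored before re-raising
```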
def hash_directory(directory, ignore=[]):
@@ -873,7 +866,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
    Keyword Arguments:
        order (str): Whether to do pre- or post-order traversal. Accepted
            values are 'pre' and 'post'
        ignore (typing.Callable): function indicating which files to ignore
        ignore (function): function indicating which files to ignore
        follow_nonexisting (bool): Whether to descend into directories in
            ``src`` that do not exist in ``dest``. Default is True
        follow_links (bool): Whether to descend into symlinks in ``src``
@@ -1109,23 +1102,23 @@ def find(root, files, recursive=True):

    Accepts any glob characters accepted by fnmatch:

    ==========  ====================================
    Pattern     Meaning
    ==========  ====================================
    ``*``       matches everything
    ``?``       matches any single character
    ``[seq]``   matches any character in ``seq``
    ``[!seq]``  matches any character not in ``seq``
    ==========  ====================================
    =======  ====================================
    Pattern  Meaning
    =======  ====================================
    *        matches everything
    ?        matches any single character
    [seq]    matches any character in ``seq``
    [!seq]   matches any character not in ``seq``
    =======  ====================================

    Parameters:
        root (str): The root directory to start searching from
        files (str or Sequence): Library name(s) to search for
        recursive (bool): if False search only root folder,
        recurse (bool, optional): if False search only root folder,
            if True descends top-down from the root. Defaults to True.

    Returns:
        list: The files that have been found
        list of strings: The files that have been found
    """
    if isinstance(files, six.string_types):
        files = [files]

@@ -1207,7 +1200,7 @@ def directories(self):
        ['/dir1', '/dir2']

        Returns:
            list: A list of directories
            list of strings: A list of directories
        """
        return list(dedupe(
            os.path.dirname(x) for x in self.files if os.path.dirname(x)
@@ -1225,7 +1218,7 @@ def basenames(self):
        ['a.h', 'b.h']

        Returns:
            list: A list of base-names
            list of strings: A list of base-names
        """
        return list(dedupe(os.path.basename(x) for x in self.files))

@@ -1312,7 +1305,7 @@ def headers(self):
        """Stable de-duplication of the headers.

        Returns:
            list: A list of header files
            list of strings: A list of header files
        """
        return self.files

@@ -1325,7 +1318,7 @@ def names(self):
        ['a', 'b']

        Returns:
            list: A list of files without extensions
            list of strings: A list of files without extensions
        """
        names = []

@@ -1416,9 +1409,9 @@ def find_headers(headers, root, recursive=False):
    =======  ====================================

    Parameters:
        headers (str or list): Header name(s) to search for
        headers (str or list of str): Header name(s) to search for
        root (str): The root directory to start searching from
        recursive (bool): if False search only root folder,
        recursive (bool, optional): if False search only root folder,
            if True descends top-down from the root. Defaults to False.

    Returns:
@@ -1454,7 +1447,7 @@ def find_all_headers(root):
    in the directory passed as argument.

    Args:
        root (str): directory where to look recursively for header files
        root (path): directory where to look recursively for header files

    Returns:
        List of all headers found in ``root`` and subdirectories.
@@ -1474,7 +1467,7 @@ def libraries(self):
        """Stable de-duplication of library files.

        Returns:
            list: A list of library files
            list of strings: A list of library files
        """
        return self.files

@@ -1487,7 +1480,7 @@ def names(self):
        ['a', 'b']

        Returns:
            list: A list of library names
            list of strings: A list of library names
        """
        names = []

@@ -1572,8 +1565,8 @@ def find_system_libraries(libraries, shared=True):
    =======  ====================================

    Parameters:
        libraries (str or list): Library name(s) to search for
        shared (bool): if True searches for shared libraries,
        libraries (str or list of str): Library name(s) to search for
        shared (bool, optional): if True searches for shared libraries,
            otherwise for static. Defaults to True.

    Returns:
@@ -1623,11 +1616,11 @@ def find_libraries(libraries, root, shared=True, recursive=False):
    =======  ====================================

    Parameters:
        libraries (str or list): Library name(s) to search for
        libraries (str or list of str): Library name(s) to search for
        root (str): The root directory to start searching from
        shared (bool): if True searches for shared libraries,
        shared (bool, optional): if True searches for shared libraries,
            otherwise for static. Defaults to True.
        recursive (bool): if False search only root folder,
        recursive (bool, optional): if False search only root folder,
            if True descends top-down from the root. Defaults to False.

    Returns:
@@ -1855,18 +1848,3 @@ def keep_modification_time(*filenames):
    for f, mtime in mtimes.items():
        if os.path.exists(f):
            os.utime(f, (os.path.getatime(f), mtime))


@contextmanager
def temporary_dir(*args, **kwargs):
    """Create a temporary directory and cd's into it. Delete the directory
    on exit.

    Takes the same arguments as tempfile.mkdtemp()
    """
    tmp_dir = tempfile.mkdtemp(*args, **kwargs)
    try:
        with working_dir(tmp_dir):
            yield tmp_dir
    finally:
        remove_directory_contents(tmp_dir)
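A short editor-added usage sketch for `temporary_dir` as defined on the newer (v0.17.2) side of this hunk:

```python
with temporary_dir() as tmp:
    # the working directory is now the fresh temporary directory
    with open('scratch.txt', 'w') as f:
        f.write('throwaway data')
# on exit the previous working directory is restored and the
# temporary directory's contents are removed
```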
@@ -7,6 +7,7 @@

import functools
import inspect
import multiprocessing
import os
import re
import sys
@@ -30,6 +31,23 @@
ignore_modules = [r'^\.#', '~$']


# On macOS, Python 3.8 multiprocessing now defaults to the 'spawn' start
# method. Spack cannot currently handle this, so force the process to start
# using the 'fork' start method.
#
# TODO: This solution is not ideal, as the 'fork' start method can lead to
# crashes of the subprocess. Figure out how to make 'spawn' work.
#
# See:
# * https://github.com/spack/spack/pull/18124
# * https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods  # noqa: E501
# * https://bugs.python.org/issue33725
if sys.version_info >= (3,):  # novm
    fork_context = multiprocessing.get_context('fork')
else:
    fork_context = multiprocessing
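The comment block above pins the start method through a context object rather than changing the process-global default. A minimal sketch of the same pattern, assuming Python 3 on a POSIX system ('fork' is unavailable on Windows, and `get_context()` itself requires Python 3):

```python
import multiprocessing

def work():
    print('hello from the child')

# processes created from this context always use 'fork',
# regardless of the platform default ('spawn' on macOS since 3.8)
fork_context = multiprocessing.get_context('fork')

if __name__ == '__main__':
    p = fork_context.Process(target=work)
    p.start()
    p.join()
```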
def index_by(objects, *funcs):
    """Create a hierarchy of dictionaries by splitting the supplied
    set of objects on unique values of the supplied functions.
@@ -240,47 +258,6 @@ def new_dec(*args, **kwargs):
    return new_dec


def key_ordering(cls):
    """Decorates a class with extra methods that implement rich comparison
    operations and ``__hash__``. The decorator assumes that the class
    implements a function called ``_cmp_key()``. The rich comparison
    operations will compare objects using this key, and the ``__hash__``
    function will return the hash of this key.

    If a class already has ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
    ``__gt__``, or ``__ge__`` defined, this decorator will overwrite them.

    Raises:
        TypeError: If the class does not have a ``_cmp_key`` method
    """
    def setter(name, value):
        value.__name__ = name
        setattr(cls, name, value)

    if not has_method(cls, '_cmp_key'):
        raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)

    setter('__eq__',
           lambda s, o:
           (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
    setter('__lt__',
           lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
    setter('__le__',
           lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())

    setter('__ne__',
           lambda s, o:
           (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
    setter('__gt__',
           lambda s, o: o is None or s._cmp_key() > o._cmp_key())
    setter('__ge__',
           lambda s, o: o is None or s._cmp_key() >= o._cmp_key())

    setter('__hash__', lambda self: hash(self._cmp_key()))

    return cls
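A usage sketch for the decorator being removed above: the class supplies only `_cmp_key()` and the decorator fills in the rich comparisons and `__hash__` (editor-added illustration):

```python
@key_ordering
class SimpleVersion(object):
    def __init__(self, major, minor):
        self.major, self.minor = major, minor

    def _cmp_key(self):
        # all rich comparisons and __hash__ are derived from this tuple
        return (self.major, self.minor)

assert SimpleVersion(1, 2) < SimpleVersion(1, 10)
assert SimpleVersion(1, 2) == SimpleVersion(1, 2)
assert hash(SimpleVersion(1, 2)) == hash(SimpleVersion(1, 2))
```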
#: sentinel for testing that iterators are done in lazy_lexicographic_ordering
done = object()

@@ -596,8 +573,8 @@ def pretty_date(time, now=None):
    """Convert a datetime or timestamp to a pretty, relative date.

    Args:
        time (datetime.datetime or int): date to print prettily
        now (datetime.datetime): datetime for 'now', i.e. the date the pretty date
        time (datetime or int): date to print prettily
        now (datetime): datetime for 'now', i.e. the date the pretty date
            is relative to (default is datetime.now())

    Returns:
@@ -671,7 +648,7 @@ def pretty_string_to_date(date_str, now=None):
    or be a *pretty date* (like ``yesterday`` or ``two months ago``)

    Returns:
        (datetime.datetime): datetime object corresponding to ``date_str``
        (datetime): datetime object corresponding to ``date_str``
    """

    pattern = {}
@@ -915,19 +892,3 @@ class Devnull(object):
    """
    def write(self, *_):
        pass


def elide_list(line_list, max_num=10):
    """Takes a long list and limits it to a smaller number of elements,
    replacing intervening elements with '...'. For example::

        elide_list([1,2,3,4,5,6], 4)

    gives::

        [1, 2, 3, '...', 6]
    """
    if len(line_list) > max_num:
        return line_list[:max_num - 1] + ['...'] + line_list[-1:]
    else:
        return line_list
@@ -9,25 +9,14 @@
import socket
import time
from datetime import datetime
from typing import Dict, Tuple  # novm

import llnl.util.tty as tty

import spack.util.string

__all__ = [
    'Lock',
    'LockDowngradeError',
    'LockUpgradeError',
    'LockTransaction',
    'WriteTransaction',
    'ReadTransaction',
    'LockError',
    'LockTimeoutError',
    'LockPermissionError',
    'LockROFileError',
    'CantCreateLockError'
]
__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
           'LockError', 'LockTimeoutError',
           'LockPermissionError', 'LockROFileError', 'CantCreateLockError']

#: Mapping of supported locks to description
lock_type = {fcntl.LOCK_SH: 'read', fcntl.LOCK_EX: 'write'}
@@ -37,126 +26,6 @@
true_fn = lambda: True


class OpenFile(object):
    """Record for keeping track of open lockfiles (with reference counting).

    There's really only one ``OpenFile`` per inode, per process, but we record the
    filehandle here as it's the thing we end up using in python code. You can get
    the file descriptor from the file handle if needed -- or we could make this track
    file descriptors as well in the future.
    """
    def __init__(self, fh):
        self.fh = fh
        self.refs = 0


class OpenFileTracker(object):
    """Track open lockfiles, to minimize number of open file descriptors.

    The ``fcntl`` locks that Spack uses are associated with an inode and a process.
    This is convenient, because if a process exits, it releases its locks.
    Unfortunately, this also means that if you close a file, *all* locks associated
    with that file's inode are released, regardless of whether the process has any
    other open file descriptors on it.

    Because of this, we need to track open lock files so that we only close them when
    a process no longer needs them. We do this by tracking each lockfile by its
    inode and process id. This has several nice properties:

    1. Tracking by pid ensures that, if we fork, we don't inadvertently track the parent
       process's lockfiles. ``fcntl`` locks are not inherited across forks, so we'll
       just track new lockfiles in the child.
    2. Tracking by inode ensures that references are counted per inode, and that we don't
       inadvertently close a file whose inode still has open locks.
    3. Tracking by both pid and inode ensures that we only open lockfiles the minimum
       number of times necessary for the locks we have.

    Note: as mentioned elsewhere, these locks aren't thread safe -- they're designed to
    work in Python and assume the GIL.
    """

    def __init__(self):
        """Create a new ``OpenFileTracker``."""
        self._descriptors = {}  # type: Dict[Tuple[int, int], OpenFile]

    def get_fh(self, path):
        """Get a filehandle for a lockfile.

        This routine will open writable files for read/write even if you're asking
        for a shared (read-only) lock. This is so that we can upgrade to an exclusive
        (write) lock later if requested.

        Arguments:
            path (str): path to lock file we want a filehandle for
        """
        # Open writable files as 'r+' so we can upgrade to write later
        os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), 'r+'

        pid = os.getpid()
        open_file = None  # OpenFile object, if there is one
        stat = None       # stat result for the lockfile, if it exists

        try:
            # see whether we've seen this inode/pid before
            stat = os.stat(path)
            key = (stat.st_ino, pid)
            open_file = self._descriptors.get(key)

        except OSError as e:
            if e.errno != errno.ENOENT:  # only handle file not found
                raise

            # path does not exist -- fail if we won't be able to create it
            parent = os.path.dirname(path) or '.'
            if not os.access(parent, os.W_OK):
                raise CantCreateLockError(path)

        # if there was no already open file, we'll need to open one
        if not open_file:
            if stat and not os.access(path, os.W_OK):
                # we know path exists but not if it's writable. If it's read-only,
                # only open the file for reading (and fail if we're trying to get
                # an exclusive (write) lock on it)
                os_mode, fh_mode = os.O_RDONLY, 'r'

            fd = os.open(path, os_mode)
            fh = os.fdopen(fd, fh_mode)
            open_file = OpenFile(fh)

            # if we just created the file, we'll need to get its inode here
            if not stat:
                inode = os.fstat(fd).st_ino
                key = (inode, pid)

            self._descriptors[key] = open_file

        open_file.refs += 1
        return open_file.fh

    def release_fh(self, path):
        """Release a filehandle, only closing it if there are no more references."""
        try:
            inode = os.stat(path).st_ino
        except OSError as e:
            if e.errno != errno.ENOENT:  # only handle file not found
                raise
            inode = None  # this will not be in self._descriptors

        key = (inode, os.getpid())
        open_file = self._descriptors.get(key)
        assert open_file, "Attempted to close non-existing lock path: %s" % path

        open_file.refs -= 1
        if not open_file.refs:
            del self._descriptors[key]
            open_file.fh.close()
#: Open file descriptors for locks in this process. Used to prevent one process
#: from opening the same file many times for different byte range locks
file_tracker = OpenFileTracker()


def _attempts_str(wait_time, nattempts):
    # Don't print anything if we succeeded on the first try
    if nattempts <= 1:
@@ -177,8 +46,7 @@ class Lock(object):
    Note that this is for managing contention over resources *between*
    processes and not for managing contention between threads in a process: the
    functions of this object are not thread-safe. A process also must not
    maintain multiple locks on the same file (or, more specifically, on
    overlapping byte ranges in the same file).
    maintain multiple locks on the same file.
    """

    def __init__(self, path, start=0, length=0, default_timeout=None,
@@ -283,10 +151,25 @@ def _lock(self, op, timeout=None):

        # Create file and parent directories if they don't exist.
        if self._file is None:
            self._ensure_parent_directory()
            self._file = file_tracker.get_fh(self.path)
            parent = self._ensure_parent_directory()

        if op == fcntl.LOCK_EX and self._file.mode == 'r':
            # Open writable files as 'r+' so we can upgrade to write later
            os_mode, fd_mode = (os.O_RDWR | os.O_CREAT), 'r+'
            if os.path.exists(self.path):
                if not os.access(self.path, os.W_OK):
                    if op == fcntl.LOCK_SH:
                        # can still lock read-only files if we open 'r'
                        os_mode, fd_mode = os.O_RDONLY, 'r'
                    else:
                        raise LockROFileError(self.path)

            elif not os.access(parent, os.W_OK):
                raise CantCreateLockError(self.path)

            fd = os.open(self.path, os_mode)
            self._file = os.fdopen(fd, fd_mode)

        elif op == fcntl.LOCK_EX and self._file.mode == 'r':
            # Attempt to upgrade to write lock w/a read-only file.
            # If the file were writable, we'd have opened it 'r+'
            raise LockROFileError(self.path)
@@ -399,8 +282,7 @@ def _unlock(self):
        """
        fcntl.lockf(self._file, fcntl.LOCK_UN,
                    self._length, self._start, os.SEEK_SET)

        file_tracker.release_fh(self.path)
        self._file.close()
        self._file = None
        self._reads = 0
        self._writes = 0
@@ -519,7 +401,7 @@ def release_read(self, release_fn=None):
        """Releases a read lock.

        Arguments:
            release_fn (typing.Callable): function to call *before* the last recursive
            release_fn (callable): function to call *before* the last recursive
                lock (read or write) is released.

        If the last recursive lock will be released, then this will call
@@ -555,7 +437,7 @@ def release_write(self, release_fn=None):
        """Releases a write lock.

        Arguments:
            release_fn (typing.Callable): function to call before the last recursive
            release_fn (callable): function to call before the last recursive
                write is released.

        If the last recursive *write* lock will be released, then this
@@ -651,10 +533,10 @@ class LockTransaction(object):
    Arguments:
        lock (Lock): underlying lock for this transaction to be acquired on
            enter and released on exit
        acquire (typing.Callable or contextlib.contextmanager): function to be called
            after lock is acquired, or contextmanager to enter after acquire and leave
        acquire (callable or contextmanager): function to be called after lock
            is acquired, or contextmanager to enter after acquire and leave
            before release.
        release (typing.Callable): function to be called before release. If
        release (callable): function to be called before release. If
            ``acquire`` is a contextmanager, this will be called *after*
            exiting the nested context and before the lock is released.
        timeout (float): number of seconds to set for the timeout when
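To make the inode/pid bookkeeping above concrete, here is how the module-level `file_tracker` (present only on the v0.17.2 side of this diff) behaves; illustrative only:

```python
lock_path = '/tmp/example.lock'  # hypothetical path

fh1 = file_tracker.get_fh(lock_path)
fh2 = file_tracker.get_fh(lock_path)
assert fh1 is fh2  # one shared handle per (inode, pid)

file_tracker.release_fh(lock_path)  # refcount 2 -> 1: handle stays open
file_tracker.release_fh(lock_path)  # refcount 1 -> 0: handle is closed
```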
@@ -5,7 +5,6 @@

from __future__ import unicode_literals

import contextlib
import fcntl
import os
import struct
@@ -29,7 +28,6 @@
_msg_enabled = True
_warn_enabled = True
_error_enabled = True
_output_filter = lambda s: s
indent = "  "


@@ -92,18 +90,6 @@ def error_enabled():
    return _error_enabled


@contextlib.contextmanager
def output_filter(filter_fn):
    """Context manager that applies a filter to all output."""
    global _output_filter
    saved_filter = _output_filter
    try:
        _output_filter = filter_fn
        yield
    finally:
        _output_filter = saved_filter


class SuppressOutput:
    """Class for disabling output in a scope using 'with' keyword"""

@@ -180,23 +166,13 @@ def msg(message, *args, **kwargs):
    if _stacktrace:
        st_text = process_stacktrace(2)
    if newline:
        cprint(
            "@*b{%s==>} %s%s" % (
                st_text,
                get_timestamp(),
                cescape(_output_filter(message))
            )
        )
        cprint("@*b{%s==>} %s%s" % (
            st_text, get_timestamp(), cescape(message)))
    else:
        cwrite(
            "@*b{%s==>} %s%s" % (
                st_text,
                get_timestamp(),
                cescape(_output_filter(message))
            )
        )
        cwrite("@*b{%s==>} %s%s" % (
            st_text, get_timestamp(), cescape(message)))
    for arg in args:
        print(indent + _output_filter(six.text_type(arg)))
        print(indent + six.text_type(arg))


def info(message, *args, **kwargs):
@@ -212,29 +188,18 @@ def info(message, *args, **kwargs):
    st_text = ""
    if _stacktrace:
        st_text = process_stacktrace(st_countback)
    cprint(
        "@%s{%s==>} %s%s" % (
            format,
            st_text,
            get_timestamp(),
            cescape(_output_filter(six.text_type(message)))
        ),
        stream=stream
    )
    cprint("@%s{%s==>} %s%s" % (
        format, st_text, get_timestamp(), cescape(six.text_type(message))
    ), stream=stream)
    for arg in args:
        if wrap:
            lines = textwrap.wrap(
                _output_filter(six.text_type(arg)),
                initial_indent=indent,
                subsequent_indent=indent,
                break_long_words=break_long_words
            )
                six.text_type(arg), initial_indent=indent,
                subsequent_indent=indent, break_long_words=break_long_words)
            for line in lines:
                stream.write(line + '\n')
        else:
            stream.write(
                indent + _output_filter(six.text_type(arg)) + '\n'
            )
            stream.write(indent + six.text_type(arg) + '\n')


def verbose(message, *args, **kwargs):
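The `output_filter` context manager deleted above (present only on the v0.17.2 side) rewrites every line that passes through tty output while the block is active. A typical use, sketched by the editor:

```python
import llnl.util.tty as tty

# every tty message inside the block is passed through the filter
with tty.output_filter(lambda s: s.replace('/home/user', '~')):
    tty.msg('installing to /home/user/spack')  # printed with '~' substituted
```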
@@ -109,17 +109,19 @@ def colify(elts, **options):
    using ``str()``.

    Keyword Arguments:
        output (typing.IO): A file object to write to. Default is ``sys.stdout``
        indent (int): Optionally indent all columns by some number of spaces
        padding (int): Spaces between columns. Default is 2
        width (int): Width of the output. Default is 80 if tty not detected
        cols (int): Force number of columns. Default is to size to terminal, or
            single-column if no tty
        tty (bool): Whether to attempt to write to a tty. Default is to autodetect a
            tty. Set to False to force single-column output
        method (str): Method to use to fit columns. Options are variable or uniform.
            Variable-width columns are tighter, uniform columns are all the same width
            and fit less data on the screen
        output (stream): A file object to write to. Default is ``sys.stdout``
        indent (int): Optionally indent all columns by some number of spaces
        padding (int): Spaces between columns. Default is 2
        width (int): Width of the output. Default is 80 if tty not detected
        cols (int): Force number of columns. Default is to size to
            terminal, or single-column if no tty
        tty (bool): Whether to attempt to write to a tty. Default is to
            autodetect a tty. Set to False to force single-column
            output
        method (str): Method to use to fit columns. Options are variable or
            uniform. Variable-width columns are tighter, uniform
            columns are all the same width and fit less data on
            the screen
    """
    # Get keyword arguments or set defaults
    cols = options.pop("cols", 0)
@@ -33,7 +33,7 @@


# Use this to strip escape sequences
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07')
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h')

# control characters for enabling/disabling echo
#
@@ -323,7 +323,7 @@ def unwrap(self):
            if sys.version_info < (3,):
                self.file = open(self.file_like, 'w')
            else:
                self.file = open(self.file_like, 'w', encoding='utf-8')  # novm
                self.file = open(self.file_like, 'w', encoding='utf-8')
        else:
            self.file = StringIO()
        return self.file
@@ -436,7 +436,7 @@ class log_output(object):
    """

    def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
                 env=None, filter_fn=None):
                 env=None):
        """Create a new output log context manager.

        Args:
@@ -446,8 +446,6 @@ def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
            debug (int): positive to enable tty debug mode during logging
            buffer (bool): pass buffer=True to skip unbuffering output; note
                this doesn't set up any *new* buffering
            filter_fn (callable, optional): Callable[str] -> str to filter each
                line of output

        log_output can take either a file object or a filename. If a
        filename is passed, the file will be opened and closed entirely
@@ -467,7 +465,6 @@ def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
        self.debug = debug
        self.buffer = buffer
        self.env = env  # the environment to use for _writer_daemon
        self.filter_fn = filter_fn

        self._active = False  # used to prevent re-entry

@@ -533,22 +530,20 @@ def __enter__(self):
        # Sets a daemon that writes to file what it reads from a pipe
        try:
            # need to pass this b/c multiprocessing closes stdin in child.
            input_multiprocess_fd = None
            try:
                if sys.stdin.isatty():
                    input_multiprocess_fd = MultiProcessFd(
                        os.dup(sys.stdin.fileno())
                    )
                input_multiprocess_fd = MultiProcessFd(
                    os.dup(sys.stdin.fileno())
                )
            except BaseException:
                # just don't forward input if this fails
                pass
                input_multiprocess_fd = None

            with replace_environment(self.env):
                self.process = multiprocessing.Process(
                    target=_writer_daemon,
                    args=(
                        input_multiprocess_fd, read_multiprocess_fd, write_fd,
                        self.echo, self.log_file, child_pipe, self.filter_fn
                        self.echo, self.log_file, child_pipe
                    )
                )
                self.process.daemon = True  # must set before start()
@@ -672,7 +667,7 @@ def force_echo(self):


def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
                   log_file_wrapper, control_pipe, filter_fn):
                   log_file_wrapper, control_pipe):
    """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

    The daemon receives output from the parent process and writes it both
@@ -717,7 +712,6 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
        log_file_wrapper (FileWrapper): file to log all output
        control_pipe (Pipe): multiprocessing pipe on which to send control
            information to the parent
        filter_fn (callable, optional): function to filter each line of output

    """
    # If this process was forked, then it will inherit file descriptors from
@@ -780,12 +774,7 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
    try:
        while line_count < 100:
            # Handle output from the calling process.
            try:
                line = _retry(in_pipe.readline)()
            except UnicodeDecodeError:
                # installs like --test=root gpgme produce non-UTF8 logs
                line = '<line lost: output was not encoded as UTF-8>\n'

            line = _retry(in_pipe.readline)()
            if not line:
                return
            line_count += 1
@@ -795,10 +784,7 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,

            # Echo to stdout if requested or forced.
            if echo or force_echo:
                output_line = clean_line
                if filter_fn:
                    output_line = filter_fn(clean_line)
                sys.stdout.write(output_line)
                sys.stdout.write(clean_line)

            # Stripped output to log file.
            log_file.write(_strip(clean_line))
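Putting the removed `filter_fn` plumbing together: on the v0.17.2 side, `log_output` forwards the callable to `_writer_daemon`, which applies it only to echoed lines (the log file still receives the unfiltered, color-stripped line). An editor-added sketch:

```python
from llnl.util.tty.log import log_output

with log_output('build.log', echo=True,
                filter_fn=lambda line: line.replace('secret', '***')):
    print('token=secret')
# terminal shows 'token=***'; build.log keeps the original line
```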
@@ -3,8 +3,9 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 17, 2)
spack_version_info = (0, 16, 1)

#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)
@@ -10,8 +10,6 @@

import os

import llnl.util.tty as tty

from spack.util.environment import EnvironmentModifications

from .analyzer_base import AnalyzerBase

@@ -45,7 +43,6 @@ def _read_environment_file(self, filename):
        to remove path prefixes specific to user systems.
        """
        if not os.path.exists(filename):
            tty.warn("No environment file available")
            return

        mods = EnvironmentModifications.from_sourcing_file(filename)
609
lib/spack/spack/architecture.py
Normal file
@@ -0,0 +1,609 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""
This module contains all the elements that are required to create an
architecture object. These include the target processor, the operating system,
and the architecture platform (i.e. cray, darwin, linux, etc) classes.

On a multiple architecture machine, the architecture spec field can be set to
build a package against any target and operating system that is present on the
platform. On Cray platforms or any other architecture that has different front
and back end environments, the operating system will determine the method of
compiler detection.

There are two different types of compiler detection:
    1. Through the $PATH env variable (front-end detection)
    2. Through the tcl module system. (back-end detection)

Depending on which operating system is specified, the compiler will be detected
using one of those methods.

For platforms such as linux and darwin, the operating system is autodetected
and the target is set to be x86_64.

The command line syntax for specifying an architecture is as follows:

    target=<Target name> os=<OperatingSystem name>

If the user wishes to use the defaults, either target or os can be left out of
the command line and Spack will concretize using the default. These defaults
are set in the 'platforms/' directory which contains the different subclasses
for platforms. If the machine has multiple architectures, the user can
also enter front-end, or fe or back-end or be. These settings will concretize
to their respective front-end and back-end targets and operating systems.
Additional platforms can be added by creating a subclass of Platform
and adding it inside the platform directory.

Platforms are an abstract class that are extended by subclasses. If the user
wants to add a new type of platform (such as cray_xe), they can create a
subclass and set all the class attributes such as priority, front_target,
back_target, front_os, back_os. Platforms also contain a priority class
attribute. A lower number signifies higher priority. These numbers are
arbitrarily set and can be changed though often there isn't much need unless a
new platform is added and the user wants that to be detected first.

Targets are created inside the platform subclasses. Most architectures
(like linux and darwin) will have only one target (x86_64) but in the case of
Cray machines, there is both a frontend and backend processor. The user can
specify which targets are present on front-end and back-end architecture.

Depending on the platform, operating systems are either auto-detected or are
set. The user can set the front-end and back-end operating setting by the class
attributes front_os and back_os. The operating system, as described earlier,
will be responsible for compiler detection.
"""
import contextlib
import functools
import warnings

import six

import archspec.cpu

import llnl.util.lang as lang
import llnl.util.tty as tty

import spack.compiler
import spack.compilers
import spack.config
import spack.error as serr
import spack.paths
import spack.util.classes
import spack.util.executable
import spack.version
from spack.util.spack_yaml import syaml_dict
class NoPlatformError(serr.SpackError):
    def __init__(self):
        super(NoPlatformError, self).__init__(
            "Could not determine a platform for this machine.")


def _ensure_other_is_target(method):
    """Decorator to be used in dunder methods taking a single argument to
    ensure that the argument is an instance of ``Target`` too.
    """
    @functools.wraps(method)
    def _impl(self, other):
        if isinstance(other, six.string_types):
            other = Target(other)

        if not isinstance(other, Target):
            return NotImplemented

        return method(self, other)

    return _impl
class Target(object):
    def __init__(self, name, module_name=None):
        """Target models microarchitectures and their compatibility.

        Args:
            name (str or Microarchitecture): micro-architecture of the
                target
            module_name (str): optional module name to get access to the
                current target. This is typically used on machines
                like Cray (e.g. craype-compiler)
        """
        if not isinstance(name, archspec.cpu.Microarchitecture):
            name = archspec.cpu.TARGETS.get(
                name, archspec.cpu.generic_microarchitecture(name)
            )
        self.microarchitecture = name
        self.module_name = module_name

    @property
    def name(self):
        return self.microarchitecture.name

    @_ensure_other_is_target
    def __eq__(self, other):
        return self.microarchitecture == other.microarchitecture and \
            self.module_name == other.module_name

    def __ne__(self, other):
        # This method is necessary as long as we support Python 2. In Python 3
        # __ne__ defaults to the implementation below
        return not self == other

    @_ensure_other_is_target
    def __lt__(self, other):
        # TODO: In the future it would be convenient to say
        # TODO: `spec.architecture.target < other.architecture.target`
        # TODO: and change the semantic of the comparison operators

        # This is needed to sort deterministically specs in a list.
        # It doesn't implement a total ordering semantic.
        return self.microarchitecture.name < other.microarchitecture.name

    def __hash__(self):
        return hash((self.name, self.module_name))

    @staticmethod
    def from_dict_or_value(dict_or_value):
        # A string here represents a generic target (like x86_64 or ppc64) or
        # a custom micro-architecture
        if isinstance(dict_or_value, six.string_types):
            return Target(dict_or_value)

        # TODO: From a dict we actually retrieve much more information than
        # TODO: just the name. We can use that information to reconstruct an
        # TODO: "old" micro-architecture or check the current definition.
        target_info = dict_or_value
        return Target(target_info['name'])

    def to_dict_or_value(self):
        """Returns a dict or a value representing the current target.

        String values are used to keep backward compatibility with generic
        targets, like e.g. x86_64 or ppc64. More specific micro-architectures
        will return a dictionary which contains information on the name,
        features, vendor, generation and parents of the current target.
        """
        # Generic targets represent either an architecture
        # family (like x86_64) or a custom micro-architecture
        if self.microarchitecture.vendor == 'generic':
            return str(self)

        return syaml_dict(
            self.microarchitecture.to_dict(return_list_of_items=True)
        )

    def __repr__(self):
        cls_name = self.__class__.__name__
        fmt = cls_name + '({0}, {1})'
        return fmt.format(repr(self.microarchitecture),
                          repr(self.module_name))

    def __str__(self):
        return str(self.microarchitecture)

    def __contains__(self, cpu_flag):
        return cpu_flag in self.microarchitecture

    def optimization_flags(self, compiler):
        """Returns the flags needed to optimize for this target using
        the compiler passed as argument.

        Args:
            compiler (CompilerSpec or Compiler): object that contains both the
                name and the version of the compiler we want to use
        """
        # Mixed toolchains are not supported yet
        import spack.compilers
        if isinstance(compiler, spack.compiler.Compiler):
            if spack.compilers.is_mixed_toolchain(compiler):
                msg = ('microarchitecture specific optimizations are not '
                       'supported yet on mixed compiler toolchains [check'
                       ' {0.name}@{0.version} for further details]')
                warnings.warn(msg.format(compiler))
                return ''

        # Try to check if the current compiler comes with a version number or
        # has an unexpected suffix. If so, treat it as a compiler with a
        # custom spec.
        compiler_version = compiler.version
        version_number, suffix = archspec.cpu.version_components(
            compiler.version
        )
        if not version_number or suffix not in ('', 'apple'):
            # Try to deduce the underlying version of the compiler, regardless
            # of its name in compilers.yaml. Depending on where this function
            # is called we might get either a CompilerSpec or a fully fledged
            # compiler object.
            import spack.spec
            if isinstance(compiler, spack.spec.CompilerSpec):
                compiler = spack.compilers.compilers_for_spec(compiler).pop()
            try:
                compiler_version = compiler.real_version
            except spack.util.executable.ProcessError as e:
                # log this and just return compiler.version instead
                tty.debug(str(e))

        return self.microarchitecture.optimization_flags(
            compiler.name, str(compiler_version)
        )
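An illustrative use of `Target.optimization_flags`; the exact flags come from archspec's data files, so the output shown in the comment is indicative only:

```python
import spack.spec

target = Target('broadwell')
flags = target.optimization_flags(spack.spec.CompilerSpec('gcc@9.3.0'))
print(flags)  # e.g. '-march=broadwell -mtune=broadwell' (archspec-dependent)
```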
@lang.lazy_lexicographic_ordering
class Platform(object):
    """Abstract class that each type of Platform will subclass.
    An instance of the matching subclass is returned on detection.
    """

    # Subclass sets number. Controls detection order
    priority = None  # type: int

    #: binary formats used on this platform; used by relocation logic
    binary_formats = ['elf']

    front_end = None  # type: str
    back_end = None  # type: str
    default = None  # type: str  # The default back end target.

    front_os = None  # type: str
    back_os = None  # type: str
    default_os = None  # type: str

    reserved_targets = ['default_target', 'frontend', 'fe', 'backend', 'be']
    reserved_oss = ['default_os', 'frontend', 'fe', 'backend', 'be']

    def __init__(self, name):
        self.targets = {}
        self.operating_sys = {}
        self.name = name

    def add_target(self, name, target):
        """Used by the platform specific subclass to list available targets.
        Raises an error if the platform specifies a name
        that is reserved by spack as an alias.
        """
        if name in Platform.reserved_targets:
            raise ValueError(
                "%s is a spack reserved alias "
                "and cannot be the name of a target" % name)
        self.targets[name] = target

    def target(self, name):
        """This is a getter method for the target dictionary
        that handles defaulting based on the values provided by default,
        front-end, and back-end. This can be overwritten
        by a subclass for which we want to provide further aliasing options.
        """
        # TODO: Check if we can avoid using strings here
        name = str(name)
        if name == 'default_target':
            name = self.default
        elif name == 'frontend' or name == 'fe':
            name = self.front_end
        elif name == 'backend' or name == 'be':
            name = self.back_end

        return self.targets.get(name, None)

    def add_operating_system(self, name, os_class):
        """Add the operating_system class object into the
        platform.operating_sys dictionary.
        """
        if name in Platform.reserved_oss:
            raise ValueError(
                "%s is a spack reserved alias "
                "and cannot be the name of an OS" % name)
        self.operating_sys[name] = os_class

    def operating_system(self, name):
        if name == 'default_os':
            name = self.default_os
        if name == 'frontend' or name == "fe":
            name = self.front_os
        if name == 'backend' or name == 'be':
            name = self.back_os

        return self.operating_sys.get(name, None)

    @classmethod
    def setup_platform_environment(cls, pkg, env):
        """Subclass can override this method if it requires any
        platform-specific build environment modifications.
        """

    @classmethod
    def detect(cls):
        """Subclass is responsible for implementing this method.
        Returns True if the Platform class detects that
        it is the current platform
        and False if it's not.
        """
        raise NotImplementedError()

    def __repr__(self):
        return self.__str__()

    def __str__(self):
        return self.name

    def _cmp_iter(self):
        yield self.name
        yield self.default
        yield self.front_end
        yield self.back_end
        yield self.default_os
        yield self.front_os
        yield self.back_os

        def targets():
            for t in sorted(self.targets.values()):
                yield t._cmp_iter
        yield targets

        def oses():
            for o in sorted(self.operating_sys.values()):
                yield o._cmp_iter
        yield oses
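Following the module docstring, a new platform is added by subclassing `Platform`; the class below is a hedged, hypothetical example (not an actual Spack platform definition):

```python
import os.path

class MyCluster(Platform):
    priority = 20              # lower numbers are detected first
    front_end = 'x86_64'
    back_end = 'zen2'
    default = 'zen2'           # default back-end target
    front_os = 'rhel8'
    back_os = 'rhel8'
    default_os = 'rhel8'

    def __init__(self):
        super(MyCluster, self).__init__('mycluster')
        self.add_target('zen2', Target('zen2'))
        self.add_operating_system('rhel8', OperatingSystem('rhel', 8))

    @classmethod
    def detect(cls):
        return os.path.exists('/etc/mycluster-release')  # hypothetical probe
```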
@lang.lazy_lexicographic_ordering
class OperatingSystem(object):
    """Operating System is a class, similar to Platform, that is extended
    by subclasses for the specifics. Operating System contains the
    compiler finding logic: instead of calling two separate methods to
    find compilers, we call the find_compilers method for each operating
    system.
    """

    def __init__(self, name, version):
        self.name = name.replace('-', '_')
        self.version = str(version).replace('-', '_')

    def __str__(self):
        return "%s%s" % (self.name, self.version)

    def __repr__(self):
        return self.__str__()

    def _cmp_iter(self):
        yield self.name
        yield self.version

    def to_dict(self):
        return syaml_dict([
            ('name', self.name),
            ('version', self.version)
        ])
@lang.lazy_lexicographic_ordering
class Arch(object):
    """Architecture is now a class to help with setting attributes.

    TODO: refactor so that we don't need this class.
    """

    def __init__(self, plat=None, os=None, target=None):
        self.platform = plat
        if plat and os:
            os = self.platform.operating_system(os)
        self.os = os
        if plat and target:
            target = self.platform.target(target)
        self.target = target

        # Hooks for parser to use when platform is set after target or os
        self.target_string = None
        self.os_string = None

    @property
    def concrete(self):
        return all((self.platform is not None,
                    isinstance(self.platform, Platform),
                    self.os is not None,
                    isinstance(self.os, OperatingSystem),
                    self.target is not None, isinstance(self.target, Target)))

    def __str__(self):
        if self.platform or self.os or self.target:
            if self.platform.name == 'darwin':
                os_name = self.os.name if self.os else "None"
            else:
                os_name = str(self.os)

            return (str(self.platform) + "-" +
                    os_name + "-" + str(self.target))
        else:
            return ''

    def __contains__(self, string):
        return string in str(self)

    # TODO: make this unnecessary: don't include an empty arch on *every* spec.
    def __nonzero__(self):
        return (self.platform is not None or
                self.os is not None or
                self.target is not None)
    __bool__ = __nonzero__

    def _cmp_iter(self):
        if isinstance(self.platform, Platform):
            yield self.platform.name
        else:
            yield self.platform

        if isinstance(self.os, OperatingSystem):
            yield self.os.name
        else:
            yield self.os

        if isinstance(self.target, Target):
            yield self.target.microarchitecture
        else:
            yield self.target

    def to_dict(self):
        str_or_none = lambda v: str(v) if v else None
        d = syaml_dict([
            ('platform', str_or_none(self.platform)),
            ('platform_os', str_or_none(self.os)),
            ('target', self.target.to_dict_or_value())])
        return syaml_dict([('arch', d)])

    def to_spec(self):
        """Convert this Arch to an anonymous Spec with architecture defined."""
        spec = spack.spec.Spec()
        spec.architecture = spack.spec.ArchSpec(str(self))
        return spec

    @staticmethod
    def from_dict(d):
        spec = spack.spec.ArchSpec.from_dict(d)
        return arch_for_spec(spec)
@lang.memoized
def get_platform(platform_name):
    """Returns a platform object that corresponds to the given name."""
    platform_list = all_platforms()
    for p in platform_list:
        if platform_name.replace("_", "").lower() == p.__name__.lower():
            return p()


def verify_platform(platform_name):
    """Determines whether or not the platform with the given name is supported
    in Spack. For more information, see the 'spack.platforms' submodule.
    """
    platform_name = platform_name.replace("_", "").lower()
    platform_names = [p.__name__.lower() for p in all_platforms()]

    if platform_name not in platform_names:
        tty.die("%s is not a supported platform; supported platforms are %s" %
                (platform_name, platform_names))


def arch_for_spec(arch_spec):
    """Transforms the given architecture spec into an architecture object."""
    arch_spec = spack.spec.ArchSpec(arch_spec)
    assert arch_spec.concrete

    arch_plat = get_platform(arch_spec.platform)
    if not (arch_plat.operating_system(arch_spec.os) and
            arch_plat.target(arch_spec.target)):
        raise ValueError(
            "Can't recreate arch for spec %s on current arch %s; "
            "spec architecture is too different" % (arch_spec, sys_type()))

    return Arch(arch_plat, arch_spec.os, arch_spec.target)


@lang.memoized
def _all_platforms():
    mod_path = spack.paths.platform_path
    return spack.util.classes.list_classes("spack.platforms", mod_path)


@lang.memoized
def _platform():
    """Detects the platform for this machine.

    Gathers a list of all available subclasses of platforms.
    Sorts the list according to their priority. Priority is
    an arbitrarily set number. Detects platform either using uname or
    a file path (/opt/cray...)
    """
    # Try to create a Platform object using the config file FIRST
    platform_list = _all_platforms()
    platform_list.sort(key=lambda a: a.priority)

    for platform_cls in platform_list:
        if platform_cls.detect():
            return platform_cls()


#: The "real" platform of the host running Spack. This should not be changed
#: by any method and is here as a convenient way to refer to the host platform.
real_platform = _platform

#: The current platform used by Spack. May be swapped by the use_platform
#: context manager.
platform = _platform

#: The list of all platform classes. May be swapped by the use_platform
#: context manager.
all_platforms = _all_platforms


@lang.memoized
def default_arch():
    """Default ``Arch`` object for this machine.

    See ``sys_type()``.
    """
    return Arch(platform(), 'default_os', 'default_target')


def sys_type():
    """Print out the "default" platform-os-target tuple for this machine.

    On machines with only one target OS/target, prints out the
    platform-os-target for the frontend. For machines with a frontend
    and a backend, prints the default backend.

    TODO: replace with use of more explicit methods to get *all* the
    backends, as client code should really be aware of cross-compiled
    architectures.
    """
    return str(default_arch())


@lang.memoized
def compatible_sys_types():
    """Returns a list of all the systypes compatible with the current host."""
    compatible_archs = []
    current_host = archspec.cpu.host()
    compatible_targets = [current_host] + current_host.ancestors
    for target in compatible_targets:
        arch = Arch(platform(), 'default_os', target)
        compatible_archs.append(str(arch))
    return compatible_archs


class _PickleableCallable(object):
    """Class used to pickle a callable that may substitute either
|
||||
_platform or _all_platforms. Lambda or nested functions are
|
||||
not pickleable.
|
||||
"""
|
||||
def __init__(self, return_value):
|
||||
self.return_value = return_value
|
||||
|
||||
def __call__(self):
|
||||
return self.return_value
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def use_platform(new_platform):
|
||||
global platform, all_platforms
|
||||
|
||||
msg = '"{0}" must be an instance of Platform'
|
||||
assert isinstance(new_platform, Platform), msg.format(new_platform)
|
||||
|
||||
original_platform_fn, original_all_platforms_fn = platform, all_platforms
|
||||
platform = _PickleableCallable(new_platform)
|
||||
all_platforms = _PickleableCallable([type(new_platform)])
|
||||
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.compilers._cache_config_files = []
|
||||
|
||||
yield new_platform
|
||||
|
||||
platform, all_platforms = original_platform_fn, original_all_platforms_fn
|
||||
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.compilers._cache_config_files = []
|
||||
@@ -37,16 +37,12 @@ def _search_duplicate_compilers(error_cls):
|
||||
"""
|
||||
import collections
|
||||
import itertools
|
||||
import re
|
||||
|
||||
from six.moves.urllib.request import urlopen
|
||||
|
||||
try:
|
||||
from collections.abc import Sequence # novm
|
||||
except ImportError:
|
||||
from collections import Sequence
|
||||
|
||||
|
||||
#: Map an audit tag to a list of callables implementing checks
|
||||
CALLBACKS = {}
|
||||
|
||||
@@ -265,45 +261,6 @@ def _search_duplicate_specs_in_externals(error_cls):
|
||||
kwargs=('pkgs',)
|
||||
)
|
||||
|
||||
#: Sanity checks on linting
|
||||
# This can take some time, so it's run separately from packages
|
||||
package_https_directives = AuditClass(
|
||||
group='packages-https',
|
||||
tag='PKG-HTTPS-DIRECTIVES',
|
||||
description='Sanity checks on https checks of package urls, etc.',
|
||||
kwargs=('pkgs',)
|
||||
)
|
||||
|
||||
|
||||
@package_https_directives
|
||||
def _linting_package_file(pkgs, error_cls):
|
||||
"""Check for correctness of links
|
||||
"""
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg = spack.repo.get(pkg_name)
|
||||
|
||||
# Does the homepage have http, and if so, does https work?
|
||||
if pkg.homepage.startswith('http://'):
|
||||
https = re.sub("http", "https", pkg.homepage, 1)
|
||||
try:
|
||||
response = urlopen(https)
|
||||
except Exception as e:
|
||||
msg = 'Error with attempting https for "{0}": '
|
||||
errors.append(error_cls(msg.format(pkg.name), [str(e)]))
|
||||
continue
|
||||
|
||||
if response.getcode() == 200:
|
||||
msg = 'Package "{0}" uses http but has a valid https endpoint.'
|
||||
errors.append(msg.format(pkg.name))
|
||||
|
||||
return llnl.util.lang.dedupe(errors)
|
||||
|
||||
|
||||
@package_directives
|
||||
def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
@@ -389,8 +346,9 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
|
||||
dependency_variants = dependency_edge.spec.variants
|
||||
for name, value in dependency_variants.items():
|
||||
try:
|
||||
v, _ = dependency_pkg.variants[name]
|
||||
v.validate_or_raise(value, pkg=dependency_pkg)
|
||||
dependency_pkg.variants[name].validate_or_raise(
|
||||
value, pkg=dependency_pkg
|
||||
)
|
||||
except Exception as e:
|
||||
summary = (pkg_name + ": wrong variant used for a "
|
||||
"dependency in a 'depends_on' directive")
|
||||
@@ -418,8 +376,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
|
||||
errors = []
|
||||
for name, v in constraint.variants.items():
|
||||
try:
|
||||
variant, _ = pkg.variants[name]
|
||||
variant.validate_or_raise(v, pkg=pkg)
|
||||
pkg.variants[name].validate_or_raise(v, pkg=pkg)
|
||||
except variant_exceptions as e:
|
||||
summary = pkg.name + ': wrong variant in "{0}" directive'
|
||||
summary = summary.format(directive)
|
||||
|
||||
@@ -4,14 +4,15 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import codecs
|
||||
import glob
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tarfile
|
||||
import tempfile
|
||||
import traceback
|
||||
from contextlib import closing
|
||||
|
||||
import ruamel.yaml as yaml
|
||||
@@ -26,10 +27,7 @@
|
||||
import spack.config as config
|
||||
import spack.database as spack_db
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.hash_types as ht
|
||||
import spack.hooks.sbang
|
||||
import spack.mirror
|
||||
import spack.platforms
|
||||
import spack.relocate as relocate
|
||||
import spack.util.file_cache as file_cache
|
||||
import spack.util.gpg
|
||||
@@ -45,25 +43,6 @@
|
||||
_build_cache_keys_relative_path = '_pgp'
|
||||
|
||||
|
||||
class FetchCacheError(Exception):
|
||||
"""Error thrown when fetching the cache failed, usually a composite error list."""
|
||||
def __init__(self, errors):
|
||||
if not isinstance(errors, list):
|
||||
raise TypeError("Expected a list of errors")
|
||||
self.errors = errors
|
||||
if len(errors) > 1:
|
||||
msg = " Error {0}: {1}: {2}"
|
||||
self.message = "Multiple errors during fetching:\n"
|
||||
self.message += "\n".join((
|
||||
msg.format(i + 1, err.__class__.__name__, str(err))
|
||||
for (i, err) in enumerate(errors)
|
||||
))
|
||||
else:
|
||||
err = errors[0]
|
||||
self.message = "{0}: {1}".format(err.__class__.__name__, str(err))
|
||||
super(FetchCacheError, self).__init__(self.message)
|
||||
|
||||
|
||||
class BinaryCacheIndex(object):
|
||||
"""
|
||||
The BinaryCacheIndex tracks what specs are available on (usually remote)
|
||||
@@ -225,7 +204,7 @@ def find_built_spec(self, spec):
|
||||
The cache can be updated by calling ``update()`` on the cache.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): Concrete spec to find
|
||||
spec (Spec): Concrete spec to find
|
||||
|
||||
Returns:
|
||||
An list of objects containing the found specs and mirror url where
|
||||
@@ -241,16 +220,11 @@ def find_built_spec(self, spec):
|
||||
]
|
||||
"""
|
||||
self.regenerate_spec_cache()
|
||||
return self.find_by_hash(spec.dag_hash())
|
||||
|
||||
def find_by_hash(self, find_hash):
|
||||
"""Same as find_built_spec but uses the hash of a spec.
|
||||
|
||||
Args:
|
||||
find_hash (str): hash of the spec to search
|
||||
"""
|
||||
find_hash = spec.dag_hash()
|
||||
if find_hash not in self._mirrors_for_spec:
|
||||
return None
|
||||
|
||||
return self._mirrors_for_spec[find_hash]
|
||||
|
||||
def update_spec(self, spec, found_list):
|
||||
@@ -315,22 +289,14 @@ def update(self):
|
||||
# Otherwise the concrete spec cache should not need to be updated at
|
||||
# all.
|
||||
|
||||
fetch_errors = []
|
||||
all_methods_failed = True
|
||||
|
||||
for cached_mirror_url in self._local_index_cache:
|
||||
cache_entry = self._local_index_cache[cached_mirror_url]
|
||||
cached_index_hash = cache_entry['index_hash']
|
||||
cached_index_path = cache_entry['index_path']
|
||||
if cached_mirror_url in configured_mirror_urls:
|
||||
# May need to fetch the index and update the local caches
|
||||
try:
|
||||
needs_regen = self._fetch_and_cache_index(
|
||||
cached_mirror_url, expect_hash=cached_index_hash)
|
||||
all_methods_failed = False
|
||||
except FetchCacheError as fetch_error:
|
||||
needs_regen = False
|
||||
fetch_errors.extend(fetch_error.errors)
|
||||
needs_regen = self._fetch_and_cache_index(
|
||||
cached_mirror_url, expect_hash=cached_index_hash)
|
||||
# The need to regenerate implies a need to clear as well.
|
||||
spec_cache_clear_needed |= needs_regen
|
||||
spec_cache_regenerate_needed |= needs_regen
|
||||
@@ -357,12 +323,7 @@ def update(self):
|
||||
for mirror_url in configured_mirror_urls:
|
||||
if mirror_url not in self._local_index_cache:
|
||||
# Need to fetch the index and update the local caches
|
||||
try:
|
||||
needs_regen = self._fetch_and_cache_index(mirror_url)
|
||||
all_methods_failed = False
|
||||
except FetchCacheError as fetch_error:
|
||||
fetch_errors.extend(fetch_error.errors)
|
||||
needs_regen = False
|
||||
needs_regen = self._fetch_and_cache_index(mirror_url)
|
||||
# Generally speaking, a new mirror wouldn't imply the need to
|
||||
# clear the spec cache, so leave it as is.
|
||||
if needs_regen:
|
||||
@@ -370,9 +331,7 @@ def update(self):
|
||||
|
||||
self._write_local_index_cache()
|
||||
|
||||
if all_methods_failed:
|
||||
raise FetchCacheError(fetch_errors)
|
||||
elif spec_cache_regenerate_needed:
|
||||
if spec_cache_regenerate_needed:
|
||||
self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed)
|
||||
|
||||
def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
|
||||
@@ -391,8 +350,6 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
|
||||
True if this function thinks the concrete spec cache,
|
||||
``_mirrors_for_spec``, should be regenerated. Returns False
|
||||
otherwise.
|
||||
Throws:
|
||||
FetchCacheError: a composite exception.
|
||||
"""
|
||||
index_fetch_url = url_util.join(
|
||||
mirror_url, _build_cache_relative_path, 'index.json')
|
||||
@@ -402,19 +359,14 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
|
||||
old_cache_key = None
|
||||
fetched_hash = None
|
||||
|
||||
errors = []
|
||||
|
||||
# Fetch the hash first so we can check if we actually need to fetch
|
||||
# the index itself.
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(hash_fetch_url)
|
||||
fetched_hash = codecs.getreader('utf-8')(fs).read()
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
errors.append(
|
||||
RuntimeError("Unable to read index hash {0} due to {1}: {2}".format(
|
||||
hash_fetch_url, url_err.__class__.__name__, str(url_err)
|
||||
))
|
||||
)
|
||||
tty.debug('Unable to read index hash {0}'.format(
|
||||
hash_fetch_url), url_err, 1)
|
||||
|
||||
# The only case where we'll skip attempting to fetch the buildcache
|
||||
# index from the mirror is when we already have a hash for this
|
||||
@@ -441,23 +393,24 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
|
||||
_, _, fs = web_util.read_from_url(index_fetch_url)
|
||||
index_object_str = codecs.getreader('utf-8')(fs).read()
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
errors.append(
|
||||
RuntimeError("Unable to read index {0} due to {1}: {2}".format(
|
||||
index_fetch_url, url_err.__class__.__name__, str(url_err)
|
||||
))
|
||||
)
|
||||
raise FetchCacheError(errors)
|
||||
tty.debug('Unable to read index {0}'.format(index_fetch_url),
|
||||
url_err, 1)
|
||||
# We failed to fetch the index, even though we decided it was
|
||||
# necessary. However, regenerating the spec cache won't produce
|
||||
# anything different than what it has already, so return False.
|
||||
return False
|
||||
|
||||
locally_computed_hash = compute_hash(index_object_str)
|
||||
|
||||
if fetched_hash is not None and locally_computed_hash != fetched_hash:
|
||||
msg = ('Computed hash ({0}) did not match remote ({1}), '
|
||||
'indicating error in index transmission').format(
|
||||
locally_computed_hash, expect_hash)
|
||||
errors.append(RuntimeError(msg))
|
||||
msg_tmpl = ('Computed hash ({0}) did not match remote ({1}), '
|
||||
'indicating error in index transmission')
|
||||
tty.error(msg_tmpl.format(locally_computed_hash, expect_hash))
|
||||
# We somehow got an index that doesn't match the remote one, maybe
|
||||
# the next time we try we'll be successful.
|
||||
raise FetchCacheError(errors)
|
||||
# the next time we try we'll be successful. Regardless, we're not
|
||||
# updating our index cache with this, so don't regenerate the spec
|
||||
# cache either.
|
||||
return False
|
||||
|
||||
url_hash = compute_hash(mirror_url)
|
||||
|
||||
@@ -613,16 +566,6 @@ def get_buildfile_manifest(spec):
|
||||
# Used by make_package_relative to determine binaries to change.
|
||||
for root, dirs, files in os.walk(spec.prefix, topdown=True):
|
||||
dirs[:] = [d for d in dirs if d not in blacklist]
|
||||
|
||||
# Directories may need to be relocated too.
|
||||
for directory in dirs:
|
||||
dir_path_name = os.path.join(root, directory)
|
||||
rel_path_name = os.path.relpath(dir_path_name, spec.prefix)
|
||||
if os.path.islink(dir_path_name):
|
||||
link = os.readlink(dir_path_name)
|
||||
if os.path.isabs(link) and link.startswith(spack.store.layout.root):
|
||||
data['link_to_relocate'].append(rel_path_name)
|
||||
|
||||
for filename in files:
|
||||
path_name = os.path.join(root, filename)
|
||||
m_type, m_subtype = relocate.mime_type(path_name)
|
||||
@@ -638,7 +581,7 @@ def get_buildfile_manifest(spec):
|
||||
added = True
|
||||
|
||||
if relocate.needs_binary_relocation(m_type, m_subtype):
|
||||
if ((m_subtype in ('x-executable', 'x-sharedlib', 'x-pie-executable')
|
||||
if ((m_subtype in ('x-executable', 'x-sharedlib')
|
||||
and sys.platform != 'darwin') or
|
||||
(m_subtype in ('x-mach-binary')
|
||||
and sys.platform == 'darwin') or
|
||||
@@ -670,8 +613,9 @@ def write_buildinfo_file(spec, workdir, rel=False):
|
||||
prefix_to_hash[str(d.prefix)] = d.dag_hash()
|
||||
|
||||
# Create buildinfo data and write it to disk
|
||||
import spack.hooks.sbang as sbang
|
||||
buildinfo = {}
|
||||
buildinfo['sbang_install_path'] = spack.hooks.sbang.sbang_install_path()
|
||||
buildinfo['sbang_install_path'] = sbang.sbang_install_path()
|
||||
buildinfo['relative_rpaths'] = rel
|
||||
buildinfo['buildpath'] = spack.store.layout.root
|
||||
buildinfo['spackprefix'] = spack.paths.prefix
|
||||
@@ -762,14 +706,20 @@ def generate_package_index(cache_prefix):
|
||||
"""Create the build cache index page.
|
||||
|
||||
Creates (or replaces) the "index.json" page at the location given in
|
||||
cache_prefix. This page contains a link for each binary package (.yaml or
|
||||
.json) under cache_prefix.
|
||||
cache_prefix. This page contains a link for each binary package (.yaml)
|
||||
under cache_prefix.
|
||||
"""
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
db_root_dir = os.path.join(tmpdir, 'db_root')
|
||||
db = spack_db.Database(None, db_dir=db_root_dir,
|
||||
enable_transaction_locking=False,
|
||||
record_fields=['spec', 'ref_count', 'in_buildcache'])
|
||||
|
||||
try:
|
||||
file_list = (
|
||||
entry
|
||||
for entry in web_util.list_url(cache_prefix)
|
||||
if entry.endswith('.yaml') or entry.endswith('spec.json'))
|
||||
if entry.endswith('.yaml'))
|
||||
except KeyError as inst:
|
||||
msg = 'No packages at {0}: {1}'.format(cache_prefix, inst)
|
||||
tty.warn(msg)
|
||||
@@ -783,97 +733,24 @@ def generate_package_index(cache_prefix):
|
||||
tty.warn(msg)
|
||||
return
|
||||
|
||||
tty.debug('Retrieving spec descriptor files from {0} to build index'.format(
|
||||
tty.debug('Retrieving spec.yaml files from {0} to build index'.format(
|
||||
cache_prefix))
|
||||
|
||||
all_mirror_specs = {}
|
||||
|
||||
for file_path in file_list:
|
||||
try:
|
||||
spec_url = url_util.join(cache_prefix, file_path)
|
||||
tty.debug('fetching {0}'.format(spec_url))
|
||||
_, _, spec_file = web_util.read_from_url(spec_url)
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
# Need full spec.json name or this gets confused with index.json.
|
||||
if spec_url.endswith('.json'):
|
||||
spec_dict = sjson.load(spec_file_contents)
|
||||
s = Spec.from_json(spec_file_contents)
|
||||
elif spec_url.endswith('.yaml'):
|
||||
spec_dict = syaml.load(spec_file_contents)
|
||||
s = Spec.from_yaml(spec_file_contents)
|
||||
all_mirror_specs[s.dag_hash()] = {
|
||||
'spec_url': spec_url,
|
||||
'spec': s,
|
||||
'num_deps': len(list(s.traverse(root=False))),
|
||||
'binary_cache_checksum': spec_dict['binary_cache_checksum'],
|
||||
'buildinfo': spec_dict['buildinfo'],
|
||||
}
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error('Error reading specfile: {0}'.format(file_path))
|
||||
tty.error(url_err)
|
||||
|
||||
sorted_specs = sorted(all_mirror_specs.keys(),
|
||||
key=lambda k: all_mirror_specs[k]['num_deps'])
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
db_root_dir = os.path.join(tmpdir, 'db_root')
|
||||
db = spack_db.Database(None, db_dir=db_root_dir,
|
||||
enable_transaction_locking=False,
|
||||
record_fields=['spec', 'ref_count', 'in_buildcache'])
|
||||
|
||||
try:
|
||||
tty.debug('Specs sorted by number of dependencies:')
|
||||
for dag_hash in sorted_specs:
|
||||
spec_record = all_mirror_specs[dag_hash]
|
||||
s = spec_record['spec']
|
||||
num_deps = spec_record['num_deps']
|
||||
tty.debug(' {0}/{1} -> {2}'.format(
|
||||
s.name, dag_hash[:7], num_deps))
|
||||
if num_deps > 0:
|
||||
# Check each of this spec's dependencies (which we have already
|
||||
# processed), as they are the source of truth for their own
|
||||
# full hash. If the full hash we have for any deps does not
|
||||
# match what those deps have themselves, then we need to splice
|
||||
# this spec with those deps, and push this spliced spec
|
||||
# (spec.json file) back to the mirror, as well as update the
|
||||
# all_mirror_specs dictionary with this spliced spec.
|
||||
to_splice = []
|
||||
for dep in s.dependencies():
|
||||
dep_dag_hash = dep.dag_hash()
|
||||
if dep_dag_hash in all_mirror_specs:
|
||||
true_dep = all_mirror_specs[dep_dag_hash]['spec']
|
||||
if true_dep.full_hash() != dep.full_hash():
|
||||
to_splice.append(true_dep)
|
||||
|
||||
if to_splice:
|
||||
tty.debug(' needs the following deps spliced:')
|
||||
for true_dep in to_splice:
|
||||
tty.debug(' {0}/{1}'.format(
|
||||
true_dep.name, true_dep.dag_hash()[:7]))
|
||||
s = s.splice(true_dep, True)
|
||||
|
||||
# Push this spliced spec back to the mirror
|
||||
spliced_spec_dict = s.to_dict(hash=ht.full_hash)
|
||||
for key in ['binary_cache_checksum', 'buildinfo']:
|
||||
spliced_spec_dict[key] = spec_record[key]
|
||||
|
||||
temp_json_path = os.path.join(tmpdir, 'spliced.spec.json')
|
||||
with open(temp_json_path, 'w') as fd:
|
||||
fd.write(sjson.dump(spliced_spec_dict))
|
||||
|
||||
spliced_spec_url = spec_record['spec_url']
|
||||
web_util.push_to_url(
|
||||
temp_json_path, spliced_spec_url, keep_original=False)
|
||||
tty.debug(' spliced and wrote {0}'.format(
|
||||
spliced_spec_url))
|
||||
spec_record['spec'] = s
|
||||
|
||||
yaml_url = url_util.join(cache_prefix, file_path)
|
||||
tty.debug('fetching {0}'.format(yaml_url))
|
||||
_, _, yaml_file = web_util.read_from_url(yaml_url)
|
||||
yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
|
||||
# yaml_obj = syaml.load(yaml_contents)
|
||||
# s = Spec.from_yaml(yaml_obj)
|
||||
s = Spec.from_yaml(yaml_contents)
|
||||
db.add(s, None)
|
||||
db.mark(s, 'in_buildcache', True)
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error('Error reading spec.yaml: {0}'.format(file_path))
|
||||
tty.error(url_err)
|
||||
|
||||
# Now that we have fixed any old specfiles that might have had the wrong
|
||||
# full hash for their dependencies, we can generate the index, compute
|
||||
# the hash, and push those files to the mirror.
|
||||
try:
|
||||
index_json_path = os.path.join(db_root_dir, 'index.json')
|
||||
with open(index_json_path, 'w') as f:
|
||||
db._write_to_file(f)
|
||||
@@ -905,7 +782,6 @@ def generate_package_index(cache_prefix):
|
||||
msg = 'Encountered problem pushing package index to {0}: {1}'.format(
|
||||
cache_prefix, err)
|
||||
tty.warn(msg)
|
||||
tty.debug('\n' + traceback.format_exc())
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
@@ -1007,27 +883,19 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
|
||||
# need to copy the spec file so the build cache can be downloaded
|
||||
# without concretizing with the current spack packages
|
||||
# and preferences
|
||||
|
||||
spec_file = spack.store.layout.spec_file_path(spec)
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
|
||||
deprecated_specfile_path = specfile_path.replace('.spec.json', '.spec.yaml')
|
||||
spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
|
||||
specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.realpath(
|
||||
os.path.join(cache_prefix, specfile_name))
|
||||
|
||||
remote_specfile_path = url_util.join(
|
||||
outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))
|
||||
remote_specfile_path_deprecated = url_util.join(
|
||||
outdir, os.path.relpath(deprecated_specfile_path,
|
||||
os.path.realpath(tmpdir)))
|
||||
|
||||
# If force and exists, overwrite. Otherwise raise exception on collision.
|
||||
if force:
|
||||
if web_util.url_exists(remote_specfile_path):
|
||||
if web_util.url_exists(remote_specfile_path):
|
||||
if force:
|
||||
web_util.remove_url(remote_specfile_path)
|
||||
if web_util.url_exists(remote_specfile_path_deprecated):
|
||||
web_util.remove_url(remote_specfile_path_deprecated)
|
||||
elif (web_util.url_exists(remote_specfile_path) or
|
||||
web_util.url_exists(remote_specfile_path_deprecated)):
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
else:
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
|
||||
# make a copy of the install directory to work with
|
||||
workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
|
||||
@@ -1075,23 +943,15 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
|
||||
# get the sha256 checksum of the tarball
|
||||
checksum = checksum_tarball(tarfile_path)
|
||||
|
||||
# add sha256 checksum to spec.json
|
||||
|
||||
# add sha256 checksum to spec.yaml
|
||||
with open(spec_file, 'r') as inputfile:
|
||||
content = inputfile.read()
|
||||
if spec_file.endswith('.yaml'):
|
||||
spec_dict = yaml.load(content)
|
||||
elif spec_file.endswith('.json'):
|
||||
spec_dict = sjson.load(content)
|
||||
else:
|
||||
raise ValueError(
|
||||
'{0} not a valid spec file type (json or yaml)'.format(
|
||||
spec_file))
|
||||
spec_dict = yaml.load(content)
|
||||
bchecksum = {}
|
||||
bchecksum['hash_algorithm'] = 'sha256'
|
||||
bchecksum['hash'] = checksum
|
||||
spec_dict['binary_cache_checksum'] = bchecksum
|
||||
# Add original install prefix relative to layout root to spec.json.
|
||||
# Add original install prefix relative to layout root to spec.yaml.
|
||||
# This will be used to determine is the directory layout has changed.
|
||||
buildinfo = {}
|
||||
buildinfo['relative_prefix'] = os.path.relpath(
|
||||
@@ -1100,7 +960,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
|
||||
spec_dict['buildinfo'] = buildinfo
|
||||
|
||||
with open(specfile_path, 'w') as outfile:
|
||||
outfile.write(sjson.dump(spec_dict))
|
||||
outfile.write(syaml.dump(spec_dict))
|
||||
|
||||
# sign the tarball and spec file with gpg
|
||||
if not unsigned:
|
||||
@@ -1154,14 +1014,14 @@ def download_tarball(spec, preferred_mirrors=None):
|
||||
path to downloaded tarball if successful, None otherwise.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): Concrete spec
|
||||
spec (Spec): Concrete spec
|
||||
preferred_mirrors (list): If provided, this is a list of preferred
|
||||
mirror urls. Other configured mirrors will only be used if the
|
||||
tarball can't be retrieved from one of these.
|
||||
mirror urls. Other configured mirrors will only be used if the
|
||||
tarball can't be retrieved from one of these.
|
||||
|
||||
Returns:
|
||||
Path to the downloaded tarball, or ``None`` if the tarball could not
|
||||
be downloaded from any configured mirrors.
|
||||
be downloaded from any configured mirrors.
|
||||
"""
|
||||
if not spack.mirror.MirrorCollection():
|
||||
tty.die("Please add a spack mirror to allow " +
|
||||
@@ -1210,7 +1070,7 @@ def make_package_relative(workdir, spec, allow_root):
|
||||
orig_path_names.append(os.path.join(prefix, filename))
|
||||
cur_path_names.append(os.path.join(workdir, filename))
|
||||
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
platform = spack.architecture.get_platform(spec.platform)
|
||||
if 'macho' in platform.binary_formats:
|
||||
relocate.make_macho_binaries_relative(
|
||||
cur_path_names, orig_path_names, old_layout_root)
|
||||
@@ -1244,6 +1104,8 @@ def relocate_package(spec, allow_root):
|
||||
"""
|
||||
Relocate the given package
|
||||
"""
|
||||
import spack.hooks.sbang as sbang
|
||||
|
||||
workdir = str(spec.prefix)
|
||||
buildinfo = read_buildinfo_file(workdir)
|
||||
new_layout_root = str(spack.store.layout.root)
|
||||
@@ -1282,8 +1144,7 @@ def relocate_package(spec, allow_root):
|
||||
prefix_to_prefix_bin = OrderedDict({})
|
||||
|
||||
if old_sbang_install_path:
|
||||
install_path = spack.hooks.sbang.sbang_install_path()
|
||||
prefix_to_prefix_text[old_sbang_install_path] = install_path
|
||||
prefix_to_prefix_text[old_sbang_install_path] = sbang.sbang_install_path()
|
||||
|
||||
prefix_to_prefix_text[old_prefix] = new_prefix
|
||||
prefix_to_prefix_bin[old_prefix] = new_prefix
|
||||
@@ -1297,7 +1158,7 @@ def relocate_package(spec, allow_root):
|
||||
# now a POSIX script that lives in the install prefix. Old packages
|
||||
# will have the old sbang location in their shebangs.
|
||||
orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(old_spack_prefix)
|
||||
new_sbang = spack.hooks.sbang.sbang_shebang_line()
|
||||
new_sbang = sbang.sbang_shebang_line()
|
||||
prefix_to_prefix_text[orig_sbang] = new_sbang
|
||||
|
||||
tty.debug("Relocating package from",
|
||||
@@ -1321,7 +1182,7 @@ def is_backup_file(file):
|
||||
]
|
||||
# If the buildcache was not created with relativized rpaths
|
||||
# do the relocation of path in binaries
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
platform = spack.architecture.get_platform(spec.platform)
|
||||
if 'macho' in platform.binary_formats:
|
||||
relocate.relocate_macho_binaries(files_to_relocate,
|
||||
old_layout_root,
|
||||
@@ -1380,26 +1241,15 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
|
||||
spackfile_path = os.path.join(stagepath, spackfile_name)
|
||||
tarfile_name = tarball_name(spec, '.tar.gz')
|
||||
tarfile_path = os.path.join(tmpdir, tarfile_name)
|
||||
specfile_is_json = True
|
||||
deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
|
||||
deprecated_yaml_path = os.path.join(tmpdir, deprecated_yaml_name)
|
||||
json_name = tarball_name(spec, '.spec.json')
|
||||
json_path = os.path.join(tmpdir, json_name)
|
||||
specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.join(tmpdir, specfile_name)
|
||||
|
||||
with closing(tarfile.open(spackfile_path, 'r')) as tar:
|
||||
tar.extractall(tmpdir)
|
||||
# some buildcache tarfiles use bzip2 compression
|
||||
if not os.path.exists(tarfile_path):
|
||||
tarfile_name = tarball_name(spec, '.tar.bz2')
|
||||
tarfile_path = os.path.join(tmpdir, tarfile_name)
|
||||
|
||||
if os.path.exists(json_path):
|
||||
specfile_path = json_path
|
||||
elif os.path.exists(deprecated_yaml_path):
|
||||
specfile_is_json = False
|
||||
specfile_path = deprecated_yaml_path
|
||||
else:
|
||||
raise ValueError('Cannot find spec file for {0}.'.format(tmpdir))
|
||||
|
||||
if not unsigned:
|
||||
if os.path.exists('%s.asc' % specfile_path):
|
||||
try:
|
||||
@@ -1422,10 +1272,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
|
||||
spec_dict = {}
|
||||
with open(specfile_path, 'r') as inputfile:
|
||||
content = inputfile.read()
|
||||
if specfile_is_json:
|
||||
spec_dict = sjson.load(content)
|
||||
else:
|
||||
spec_dict = syaml.load(content)
|
||||
spec_dict = syaml.load(content)
|
||||
bchecksum = spec_dict['binary_cache_checksum']
|
||||
|
||||
# if the checksums don't match don't install
|
||||
@@ -1441,30 +1288,42 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
|
||||
buildinfo = spec_dict.get('buildinfo', {})
|
||||
old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
|
||||
rel = buildinfo.get('relative_rpaths')
|
||||
# if the original relative prefix and new relative prefix differ the
|
||||
# directory layout has changed and the buildcache cannot be installed
|
||||
# if it was created with relative rpaths
|
||||
info = 'old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s'
|
||||
tty.debug(info %
|
||||
(old_relative_prefix, new_relative_prefix, rel))
|
||||
# if (old_relative_prefix != new_relative_prefix and (rel)):
|
||||
# shutil.rmtree(tmpdir)
|
||||
# msg = "Package tarball was created from an install "
|
||||
# msg += "prefix with a different directory layout. "
|
||||
# msg += "It cannot be relocated because it "
|
||||
# msg += "uses relative rpaths."
|
||||
# raise NewLayoutException(msg)
|
||||
|
||||
# Extract the tarball into the store root, presumably on the same filesystem.
|
||||
# The directory created is the base directory name of the old prefix.
|
||||
# Moving the old prefix name to the new prefix location should preserve
|
||||
# hard links and symbolic links.
|
||||
extract_tmp = os.path.join(spack.store.layout.root, '.tmp')
|
||||
mkdirp(extract_tmp)
|
||||
extracted_dir = os.path.join(extract_tmp,
|
||||
old_relative_prefix.split(os.path.sep)[-1])
|
||||
|
||||
# extract the tarball in a temp directory
|
||||
with closing(tarfile.open(tarfile_path, 'r')) as tar:
|
||||
try:
|
||||
tar.extractall(path=extract_tmp)
|
||||
except Exception as e:
|
||||
shutil.rmtree(extracted_dir)
|
||||
raise e
|
||||
try:
|
||||
shutil.move(extracted_dir, spec.prefix)
|
||||
except Exception as e:
|
||||
shutil.rmtree(extracted_dir)
|
||||
raise e
|
||||
tar.extractall(path=tmpdir)
|
||||
# get the parent directory of the file .spack/binary_distribution
|
||||
# this should the directory unpacked from the tarball whose
|
||||
# name is unknown because the prefix naming is unknown
|
||||
bindist_file = glob.glob('%s/*/.spack/binary_distribution' % tmpdir)[0]
|
||||
workdir = re.sub('/.spack/binary_distribution$', '', bindist_file)
|
||||
tty.debug('workdir %s' % workdir)
|
||||
# install_tree copies hardlinks
|
||||
# create a temporary tarfile from prefix and exract it to workdir
|
||||
# tarfile preserves hardlinks
|
||||
temp_tarfile_name = tarball_name(spec, '.tar')
|
||||
temp_tarfile_path = os.path.join(tmpdir, temp_tarfile_name)
|
||||
with closing(tarfile.open(temp_tarfile_path, 'w')) as tar:
|
||||
tar.add(name='%s' % workdir,
|
||||
arcname='.')
|
||||
with closing(tarfile.open(temp_tarfile_path, 'r')) as tar:
|
||||
tar.extractall(spec.prefix)
|
||||
os.remove(temp_tarfile_path)
|
||||
|
||||
# cleanup
|
||||
os.remove(tarfile_path)
|
||||
os.remove(specfile_path)
|
||||
|
||||
@@ -1490,39 +1349,27 @@ def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
|
||||
"""
|
||||
Try to find the spec directly on the configured mirrors
|
||||
"""
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
specfile_is_json = True
|
||||
specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
lenient = not full_hash_match
|
||||
found_specs = []
|
||||
spec_full_hash = spec.full_hash()
|
||||
|
||||
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
|
||||
buildcache_fetch_url_yaml = url_util.join(
|
||||
mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name)
|
||||
buildcache_fetch_url_json = url_util.join(
|
||||
buildcache_fetch_url = url_util.join(
|
||||
mirror.fetch_url, _build_cache_relative_path, specfile_name)
|
||||
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url)
|
||||
fetched_spec_yaml = codecs.getreader('utf-8')(fs).read()
|
||||
except (URLError, web_util.SpackWebError, HTTPError) as url_err:
|
||||
try:
|
||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
|
||||
specfile_is_json = False
|
||||
except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
|
||||
tty.debug('Did not find {0} on {1}'.format(
|
||||
specfile_name, buildcache_fetch_url_json), url_err)
|
||||
tty.debug('Did not find {0} on {1}'.format(
|
||||
specfile_name, buildcache_fetch_url_yaml), url_err_y)
|
||||
continue
|
||||
specfile_contents = codecs.getreader('utf-8')(fs).read()
|
||||
tty.debug('Did not find {0} on {1}'.format(
|
||||
specfile_name, buildcache_fetch_url), url_err)
|
||||
continue
|
||||
|
||||
# read the spec from the build cache file. All specs in build caches
|
||||
# are concrete (as they are built) so we need to mark this spec
|
||||
# concrete on read-in.
|
||||
if specfile_is_json:
|
||||
fetched_spec = Spec.from_json(specfile_contents)
|
||||
else:
|
||||
fetched_spec = Spec.from_yaml(specfile_contents)
|
||||
fetched_spec = Spec.from_yaml(fetched_spec_yaml)
|
||||
fetched_spec._mark_concrete()
|
||||
|
||||
# Do not recompute the full hash for the fetched spec, instead just
|
||||
@@ -1543,14 +1390,14 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
|
||||
indicating the mirrors on which it can be found
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): The spec to look for in binary mirrors
|
||||
spec (Spec): The spec to look for in binary mirrors
|
||||
full_hash_match (bool): If True, only includes mirrors where the spec
|
||||
full hash matches the locally computed full hash of the ``spec``
|
||||
argument. If False, any mirror which has a matching DAG hash
|
||||
is included in the results.
|
||||
mirrors_to_check (dict): Optionally override the configured mirrors
|
||||
with the mirrors in this dictionary.
|
||||
index_only (bool): Do not attempt direct fetching of ``spec.json``
|
||||
index_only (bool): Do not attempt direct fetching of ``spec.yaml``
|
||||
files from remote mirrors, only consider the indices.
|
||||
|
||||
Return:
|
||||
@@ -1600,9 +1447,6 @@ def update_cache_and_get_specs():
|
||||
possible, so this method will also attempt to initialize and update the
|
||||
local index cache (essentially a no-op if it has been done already and
|
||||
nothing has changed on the configured mirrors.)
|
||||
|
||||
Throws:
|
||||
FetchCacheError
|
||||
"""
|
||||
binary_index.update()
|
||||
return binary_index.get_all_built_specs()
|
||||
@@ -1750,91 +1594,57 @@ def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
|
||||
pkg_name, pkg_version, pkg_hash, pkg_full_hash))
|
||||
tty.debug(spec.tree())
|
||||
|
||||
# Try to retrieve the specfile directly, based on the known
|
||||
# Try to retrieve the .spec.yaml directly, based on the known
|
||||
# format of the name, in order to determine if the package
|
||||
# needs to be rebuilt.
|
||||
cache_prefix = build_cache_prefix(mirror_url)
|
||||
specfile_is_json = True
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.join(cache_prefix, specfile_name)
|
||||
deprecated_specfile_path = os.path.join(cache_prefix,
|
||||
deprecated_specfile_name)
|
||||
spec_yaml_file_name = tarball_name(spec, '.spec.yaml')
|
||||
file_path = os.path.join(cache_prefix, spec_yaml_file_name)
|
||||
|
||||
result_of_error = 'Package ({0}) will {1}be rebuilt'.format(
|
||||
spec.short_spec, '' if rebuild_on_errors else 'not ')
|
||||
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(specfile_path)
|
||||
_, _, yaml_file = web_util.read_from_url(file_path)
|
||||
yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(deprecated_specfile_path)
|
||||
specfile_is_json = False
|
||||
except (URLError, web_util.SpackWebError) as url_err_y:
|
||||
err_msg = [
|
||||
'Unable to determine whether {0} needs rebuilding,',
|
||||
' caught exception attempting to read from {1} or {2}.',
|
||||
]
|
||||
tty.error(''.join(err_msg).format(
|
||||
spec.short_spec,
|
||||
specfile_path,
|
||||
deprecated_specfile_path))
|
||||
tty.debug(url_err)
|
||||
tty.debug(url_err_y)
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
if not spec_file_contents:
|
||||
tty.error('Reading {0} returned nothing'.format(
|
||||
specfile_path if specfile_is_json else deprecated_specfile_path))
|
||||
err_msg = [
|
||||
'Unable to determine whether {0} needs rebuilding,',
|
||||
' caught exception attempting to read from {1}.',
|
||||
]
|
||||
tty.error(''.join(err_msg).format(spec.short_spec, file_path))
|
||||
tty.debug(url_err)
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
spec_dict = (sjson.load(spec_file_contents)
|
||||
if specfile_is_json else syaml.load(spec_file_contents))
|
||||
if not yaml_contents:
|
||||
tty.error('Reading {0} returned nothing'.format(file_path))
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
try:
|
||||
nodes = spec_dict['spec']['nodes']
|
||||
except KeyError:
|
||||
# Prior node dict format omitted 'nodes' key
|
||||
nodes = spec_dict['spec']
|
||||
spec_yaml = syaml.load(yaml_contents)
|
||||
|
||||
yaml_spec = spec_yaml['spec']
|
||||
name = spec.name
|
||||
|
||||
# In the old format:
|
||||
# The "spec" key represents a list of objects, each with a single
|
||||
# The "spec" key in the yaml is a list of objects, each with a single
|
||||
# key that is the package name. While the list usually just contains
|
||||
# a single object, we iterate over the list looking for the object
|
||||
# with the name of this concrete spec as a key, out of an abundance
|
||||
# of caution.
|
||||
# In format version 2:
|
||||
# ['spec']['nodes'] is still a list of objects, but with a
|
||||
# multitude of keys. The list will commonly contain many objects, and in the
|
||||
# case of build specs, it is highly likely that the same name will occur
|
||||
# once as the actual package, and then again as the build provenance of that
|
||||
# same package. Hence format version 2 matches on the dag hash, not name.
|
||||
if nodes and 'name' not in nodes[0]:
|
||||
# old style
|
||||
cached_pkg_specs = [item[name] for item in nodes if name in item]
|
||||
elif nodes and spec_dict['spec']['_meta']['version'] == 2:
|
||||
cached_pkg_specs = [item for item in nodes
|
||||
if item[ht.dag_hash.name] == spec.dag_hash()]
|
||||
cached_pkg_specs = [item[name] for item in yaml_spec if name in item]
|
||||
cached_target = cached_pkg_specs[0] if cached_pkg_specs else None
|
||||
|
||||
# If either the full_hash didn't exist in the specfile, or it
|
||||
# If either the full_hash didn't exist in the .spec.yaml file, or it
|
||||
# did, but didn't match the one we computed locally, then we should
|
||||
# just rebuild. This can be simplified once the dag_hash and the
|
||||
# full_hash become the same thing.
|
||||
rebuild = False
|
||||
|
||||
if not cached_target:
|
||||
reason = 'did not find spec in specfile contents'
|
||||
rebuild = True
|
||||
elif ht.full_hash.name not in cached_target:
|
||||
reason = 'full_hash was missing from remote specfile'
|
||||
if not cached_target or 'full_hash' not in cached_target:
|
||||
reason = 'full_hash was missing from remote spec.yaml'
|
||||
rebuild = True
|
||||
else:
|
||||
full_hash = cached_target[ht.full_hash.name]
|
||||
full_hash = cached_target['full_hash']
|
||||
if full_hash != pkg_full_hash:
|
||||
reason = 'hash mismatch, remote = {0}, local = {1}'.format(
|
||||
full_hash, pkg_full_hash)
|
||||
@@ -1857,11 +1667,11 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
|
||||
Arguments:
|
||||
mirrors (dict): Mirrors to check against
|
||||
specs (typing.Iterable): Specs to check against mirrors
|
||||
output_file (str): Path to output file to be written. If provided,
|
||||
specs (iterable): Specs to check against mirrors
|
||||
output_file (string): Path to output file to be written. If provided,
|
||||
mirrors with missing or out-of-date specs will be formatted as a
|
||||
JSON object and written to this file.
|
||||
rebuild_on_errors (bool): Treat any errors encountered while
|
||||
rebuild_on_errors (boolean): Treat any errors encountered while
|
||||
checking specs as a signal to rebuild package.
|
||||
|
||||
Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.
|
||||
@@ -1896,23 +1706,24 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
|
||||
def _download_buildcache_entry(mirror_root, descriptions):
|
||||
for description in descriptions:
|
||||
description_url = os.path.join(mirror_root, description['url'])
|
||||
path = description['path']
|
||||
mkdirp(path)
|
||||
fail_if_missing = description['required']
|
||||
for url in description['url']:
|
||||
description_url = os.path.join(mirror_root, url)
|
||||
stage = Stage(
|
||||
description_url, name="build_cache", path=path, keep=True)
|
||||
try:
|
||||
stage.fetch()
|
||||
break
|
||||
except fs.FetchError as e:
|
||||
tty.debug(e)
|
||||
else:
|
||||
|
||||
mkdirp(path)
|
||||
|
||||
stage = Stage(
|
||||
description_url, name="build_cache", path=path, keep=True)
|
||||
|
||||
try:
|
||||
stage.fetch()
|
||||
except fs.FetchError as e:
|
||||
tty.debug(e)
|
||||
if fail_if_missing:
|
||||
tty.error('Failed to download required url {0}'.format(
|
||||
description_url))
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -2,15 +2,8 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import fnmatch
|
||||
import functools
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
|
||||
try:
|
||||
@@ -24,403 +17,16 @@
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.architecture
|
||||
import spack.config
|
||||
import spack.detection
|
||||
import spack.environment
|
||||
import spack.main
|
||||
import spack.modules
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.user_environment
|
||||
import spack.user_environment as uenv
|
||||
import spack.util.executable
|
||||
import spack.util.path
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
|
||||
#: "spack buildcache" command, initialized lazily
|
||||
_buildcache_cmd = None
|
||||
|
||||
#: Map a bootstrapper type to the corresponding class
|
||||
_bootstrap_methods = {}
|
||||
|
||||
|
||||
def _bootstrapper(type):
|
||||
"""Decorator to register classes implementing bootstrapping
|
||||
methods.
|
||||
|
||||
Args:
|
||||
type (str): string identifying the class
|
||||
"""
|
||||
def _register(cls):
|
||||
_bootstrap_methods[type] = cls
|
||||
return cls
|
||||
return _register
|
||||
|
||||
|
||||
def _try_import_from_store(module, abstract_spec_str):
|
||||
"""Return True if the module can be imported from an already
|
||||
installed spec, False otherwise.
|
||||
|
||||
Args:
|
||||
module: Python module to be imported
|
||||
abstract_spec_str: abstract spec that may provide the module
|
||||
"""
|
||||
bincache_platform = spack.platforms.real_host()
|
||||
if str(bincache_platform) == 'cray':
|
||||
bincache_platform = spack.platforms.linux.Linux()
|
||||
with spack.platforms.use_platform(bincache_platform):
|
||||
abstract_spec_str = str(spack.spec.Spec(abstract_spec_str))
|
||||
|
||||
# We have to run as part of this python interpreter
|
||||
abstract_spec_str += ' ^' + spec_for_current_python()
|
||||
|
||||
installed_specs = spack.store.db.query(abstract_spec_str, installed=True)
|
||||
|
||||
for candidate_spec in installed_specs:
|
||||
lib_spd = candidate_spec['python'].package.default_site_packages_dir
|
||||
lib64_spd = lib_spd.replace('lib/', 'lib64/')
|
||||
module_paths = [
|
||||
os.path.join(candidate_spec.prefix, lib_spd),
|
||||
os.path.join(candidate_spec.prefix, lib64_spd)
|
||||
]
|
||||
sys.path.extend(module_paths)
|
||||
|
||||
try:
|
||||
_fix_ext_suffix(candidate_spec)
|
||||
if _python_import(module):
|
||||
msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
|
||||
'provides the "{0}" Python module').format(
|
||||
module, abstract_spec_str, candidate_spec.dag_hash()
|
||||
)
|
||||
tty.debug(msg)
|
||||
return True
|
||||
except Exception as e:
|
||||
msg = ('unexpected error while trying to import module '
|
||||
'"{0}" from spec "{1}" [error="{2}"]')
|
||||
tty.warn(msg.format(module, candidate_spec, str(e)))
|
||||
else:
|
||||
msg = "Spec {0} did not provide module {1}"
|
||||
tty.warn(msg.format(candidate_spec, module))
|
||||
|
||||
sys.path = sys.path[:-2]
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _fix_ext_suffix(candidate_spec):
|
||||
"""Fix the external suffixes of Python extensions on the fly for
|
||||
platforms that may need it
|
||||
|
||||
Args:
|
||||
candidate_spec (Spec): installed spec with a Python module
|
||||
to be checked.
|
||||
"""
|
||||
# Here we map target families to the patterns expected
|
||||
# by pristine CPython. Only architectures with known issues
|
||||
# are included. Known issues:
|
||||
#
|
||||
# [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734
|
||||
#
|
||||
_suffix_to_be_checked = {
|
||||
'ppc64le': {
|
||||
'glob': '*.cpython-*-powerpc64le-linux-gnu.so',
|
||||
're': r'.cpython-[\w]*-powerpc64le-linux-gnu.so',
|
||||
'fmt': r'{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so'
|
||||
}
|
||||
}
|
||||
|
||||
# If the current architecture is not problematic return
|
||||
generic_target = archspec.cpu.host().family
|
||||
if str(generic_target) not in _suffix_to_be_checked:
|
||||
return
|
||||
|
||||
# If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches
|
||||
# the expectations, return since the package is surely good
|
||||
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
|
||||
if ext_suffix is None:
|
||||
return
|
||||
|
||||
expected = _suffix_to_be_checked[str(generic_target)]
|
||||
if fnmatch.fnmatch(ext_suffix, expected['glob']):
|
||||
return
|
||||
|
||||
# If we are here it means the current interpreter expects different names
|
||||
# than pristine CPython. So:
|
||||
# 1. Find what we have installed
|
||||
# 2. Create symbolic links for the other names, it they're not there already
|
||||
|
||||
# Check if standard names are installed and if we have to create
|
||||
# link for this interpreter
|
||||
standard_extensions = fs.find(candidate_spec.prefix, expected['glob'])
|
||||
link_names = [re.sub(expected['re'], ext_suffix, s) for s in standard_extensions]
|
||||
for file_name, link_name in zip(standard_extensions, link_names):
|
||||
if os.path.exists(link_name):
|
||||
continue
|
||||
os.symlink(file_name, link_name)
|
||||
|
||||
# Check if this interpreter installed something and we have to create
|
||||
# links for a standard CPython interpreter
|
||||
non_standard_extensions = fs.find(candidate_spec.prefix, '*' + ext_suffix)
|
||||
for abs_path in non_standard_extensions:
|
||||
directory, filename = os.path.split(abs_path)
|
||||
module = filename.split('.')[0]
|
||||
link_name = os.path.join(directory, expected['fmt'].format(
|
||||
module=module, major=sys.version_info[0], minor=sys.version_info[1])
|
||||
)
|
||||
if os.path.exists(link_name):
|
||||
continue
|
||||
os.symlink(abs_path, link_name)
|
||||
|
||||
|
||||
def _executables_in_store(executables, abstract_spec_str):
|
||||
"""Return True if at least one of the executables can be retrieved from
|
||||
a spec in store, False otherwise.
|
||||
|
||||
The different executables must provide the same functionality and are
|
||||
"alternate" to each other, i.e. the function will exit True on the first
|
||||
executable found.
|
||||
|
||||
Args:
|
||||
executables: list of executables to be searched
|
||||
abstract_spec_str: abstract spec that may provide the executable
|
||||
"""
|
||||
executables_str = ', '.join(executables)
|
||||
msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
|
||||
tty.debug(msg.format(executables_str, abstract_spec_str))
|
||||
installed_specs = spack.store.db.query(abstract_spec_str, installed=True)
|
||||
if installed_specs:
|
||||
for concrete_spec in installed_specs:
|
||||
bin_dir = concrete_spec.prefix.bin
|
||||
# IF we have a "bin" directory and it contains
|
||||
# the executables we are looking for
|
||||
if (os.path.exists(bin_dir) and os.path.isdir(bin_dir) and
|
||||
spack.util.executable.which_string(*executables, path=bin_dir)):
|
||||
spack.util.environment.path_put_first('PATH', [bin_dir])
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
@_bootstrapper(type='buildcache')
|
||||
class _BuildcacheBootstrapper(object):
|
||||
"""Install the software needed during bootstrapping from a buildcache."""
|
||||
def __init__(self, conf):
|
||||
self.name = conf['name']
|
||||
self.url = conf['info']['url']
|
||||
|
||||
@staticmethod
|
||||
def _spec_and_platform(abstract_spec_str):
|
||||
"""Return the spec object and platform we need to use when
|
||||
querying the buildcache.
|
||||
|
||||
Args:
|
||||
abstract_spec_str: abstract spec string we are looking for
|
||||
"""
|
||||
# This import is local since it is needed only on Cray
|
||||
import spack.platforms.linux
|
||||
|
||||
# Try to install from an unsigned binary cache
|
||||
abstract_spec = spack.spec.Spec(abstract_spec_str)
|
||||
# On Cray we want to use Linux binaries if available from mirrors
|
||||
bincache_platform = spack.platforms.real_host()
|
||||
if str(bincache_platform) == 'cray':
|
||||
bincache_platform = spack.platforms.Linux()
|
||||
with spack.platforms.use_platform(bincache_platform):
|
||||
abstract_spec = spack.spec.Spec(abstract_spec_str)
|
||||
return abstract_spec, bincache_platform
|
||||
|
||||
def _read_metadata(self, package_name):
|
||||
"""Return metadata about the given package."""
|
||||
json_filename = '{0}.json'.format(package_name)
|
||||
json_path = os.path.join(
|
||||
spack.paths.share_path, 'bootstrap', self.name, json_filename
|
||||
)
|
||||
with open(json_path) as f:
|
||||
data = json.load(f)
|
||||
return data

    def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
        global _buildcache_cmd

        if _buildcache_cmd is None:
            _buildcache_cmd = spack.main.SpackCommand('buildcache')

        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
        # Reconstruct the compiler that we need to use for bootstrapping
        compiler_entry = {
            "modules": [],
            "operating_system": str(index_spec.os),
            "paths": {
                "cc": "/dev/null",
                "cxx": "/dev/null",
                "f77": "/dev/null",
                "fc": "/dev/null"
            },
            "spec": str(index_spec.compiler),
            "target": str(index_spec.target.family)
        }
        with spack.platforms.use_platform(bincache_platform):
            with spack.config.override(
                'compilers', [{'compiler': compiler_entry}]
            ):
                spec_str = '/' + pkg_hash
                install_args = [
                    'install',
                    '--sha256', pkg_sha256,
                    '--only-root',
                    '-a', '-u', '-o', '-f', spec_str
                ]
                _buildcache_cmd(*install_args, fail_on_error=False)

    def _install_and_test(
            self, abstract_spec, bincache_platform, bincache_data, test_fn
    ):
        # Ensure we see only the buildcache being used to bootstrap
        with spack.config.override(self.mirror_scope):
            # This index is currently needed to get the compiler used to build
            # some specs that we know by dag hash.
            spack.binary_distribution.binary_index.regenerate_spec_cache()
            index = spack.binary_distribution.update_cache_and_get_specs()

            if not index:
                raise RuntimeError("The binary index is empty")

            for item in bincache_data['verified']:
                candidate_spec = item['spec']
                # This will be None for things that don't depend on python
                python_spec = item.get('python', None)
                # Skip specs which are not compatible
                if not abstract_spec.satisfies(candidate_spec):
                    continue

                if python_spec is not None and python_spec not in abstract_spec:
                    continue

                for pkg_name, pkg_hash, pkg_sha256 in item['binaries']:
                    # TODO: undo installations that didn't complete?
                    self._install_by_hash(
                        pkg_hash, pkg_sha256, index, bincache_platform
                    )

                if test_fn():
                    return True
        return False

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            'bootstrap_buildcache', {'mirrors:': {self.name: self.url}}
        )

    def try_import(self, module, abstract_spec_str):
        test_fn = functools.partial(_try_import_from_store, module, abstract_spec_str)
        if test_fn():
            return True

        tty.info("Bootstrapping {0} from pre-built binaries".format(module))
        abstract_spec, bincache_platform = self._spec_and_platform(
            abstract_spec_str + ' ^' + spec_for_current_python()
        )
        data = self._read_metadata(module)
        return self._install_and_test(
            abstract_spec, bincache_platform, data, test_fn
        )

    def try_search_path(self, executables, abstract_spec_str):
        test_fn = functools.partial(
            _executables_in_store, executables, abstract_spec_str
        )
        if test_fn():
            return True

        abstract_spec, bincache_platform = self._spec_and_platform(
            abstract_spec_str
        )
        tty.info("Bootstrapping {0} from pre-built binaries".format(abstract_spec.name))
        data = self._read_metadata(abstract_spec.name)
        return self._install_and_test(
            abstract_spec, bincache_platform, data, test_fn
        )


@_bootstrapper(type='install')
class _SourceBootstrapper(object):
    """Install the software needed during bootstrapping from sources."""
    def __init__(self, conf):
        self.conf = conf

    @staticmethod
    def try_import(module, abstract_spec_str):
        if _try_import_from_store(module, abstract_spec_str):
            return True

        tty.info("Bootstrapping {0} from sources".format(module))

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            if str(spack.platforms.host()) == 'cray':
                abstract_spec_str += ' os=fe'

            concrete_spec = spack.spec.Spec(
                abstract_spec_str + ' ^' + spec_for_current_python()
            )

            if module == 'clingo':
                # TODO: remove when the old concretizer is deprecated
                concrete_spec._old_concretize(deprecation_warning=False)
            else:
                concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        concrete_spec.package.do_install(fail_fast=True)

        return _try_import_from_store(module, abstract_spec_str=abstract_spec_str)

    def try_search_path(self, executables, abstract_spec_str):
        if _executables_in_store(executables, abstract_spec_str):
            return True

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        # Add hint to use frontend operating system on Cray
        if str(spack.platforms.host()) == 'cray':
            abstract_spec_str += ' os=fe'

        concrete_spec = spack.spec.Spec(abstract_spec_str)
        concrete_spec.concretize()

        msg = "[BOOTSTRAP GnuPG] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        concrete_spec.package.do_install()
        return _executables_in_store(executables, abstract_spec_str)


def _make_bootstrapper(conf):
    """Return a bootstrap object built according to the
    configuration argument
    """
    btype = conf['type']
    return _bootstrap_methods[btype](conf)
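# Illustrative sketch (not part of the original diff): _bootstrap_methods is
# assumed to be the registry filled in by the _bootstrapper decorator applied
# to the classes above, roughly:
#
#   _bootstrap_methods = {}
#
#   def _bootstrapper(type):
#       def _register(cls):
#           _bootstrap_methods[type] = cls
#           return cls
#       return _register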


def _source_is_trusted(conf):
    trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
    if name not in trusted:
        return False
    return trusted[name]
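# Illustrative sketch (not part of the original diff): the configuration
# consulted above would look roughly like the bootstrap.yaml below; the names
# and URL are assumptions for illustration only:
#
#   bootstrap:
#     sources:
#     - name: github-actions
#       type: buildcache
#       url: https://mirror.example.com/bootstrap
#     - name: spack-install
#       type: install
#     trusted:
#       github-actions: true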


def spec_for_current_python():
    """For bootstrapping purposes we are just interested in the Python
@@ -447,7 +53,7 @@ def spack_python_interpreter():
    which Spack is currently running as the only Python external spec
    available.
    """
    python_prefix = sys.exec_prefix
    python_prefix = os.path.dirname(os.path.dirname(sys.executable))
    external_python = spec_for_current_python()

    entry = {
@@ -461,106 +67,69 @@ def spack_python_interpreter():
    yield


def ensure_module_importable_or_raise(module, abstract_spec=None):
    """Make the requested module available for import, or raise.

    This function tries to import a Python module in the current interpreter
    using, in order, the methods configured in bootstrap.yaml.

    If none of the methods succeed, an exception is raised. The function exits
    on first success.

    Args:
        module (str): module to be imported in the current interpreter
        abstract_spec (str): abstract spec that might provide the module. If not
            given it defaults to "module"

    Raises:
        ImportError: if the module couldn't be imported
    """
    # If we can import it already, that's great
    tty.debug("[BOOTSTRAP MODULE {0}] Try importing from Python".format(module))
    if _python_import(module):
        return

    abstract_spec = abstract_spec or module
    source_configs = spack.config.get('bootstrap:sources', [])

    errors = {}

    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
                   'not trusted').format(module, current_config['name'])
            tty.debug(msg)
            continue

        b = _make_bootstrapper(current_config)
        try:
            if b.try_import(module, abstract_spec):
                return
        except Exception as e:
            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
            tty.debug(msg.format(module, str(e)))
            errors[current_config['name']] = e

    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
    if abstract_spec:
        msg += ' from spec "{0}"'.format(abstract_spec)
    msg += ' due to the following failures:\n'
    for method in errors:
        err = errors[method]
        msg += "    '{0}' raised {1}: {2}\n".format(
            method, err.__class__.__name__, str(err))
    msg += ' Please run `spack -d spec zlib` for more verbose error messages'
    raise ImportError(msg)
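# Illustrative usage (not part of the original diff): a caller that needs the
# clingo module could combine this entry point with the bootstrap environment
# set up further down in this file; the spec string is the one built by
# clingo_root_spec():
#
#   with ensure_bootstrap_configuration():
#       ensure_module_importable_or_raise(
#           module='clingo', abstract_spec='clingo-bootstrap@spack+python')
#   import clingo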


def ensure_executables_in_path_or_raise(executables, abstract_spec):
    """Ensure that some executables are in path or raise.

    Args:
        executables (list): list of executables to be searched in the PATH,
            in order. The function exits on the first one found.
        abstract_spec (str): abstract spec that provides the executables

    Raises:
        RuntimeError: if the executables cannot be ensured to be in PATH
    """
    if spack.util.executable.which_string(*executables):
        return

    executables_str = ', '.join(executables)
    source_configs = spack.config.get('bootstrap:sources', [])
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
                   'not trusted').format(executables_str, current_config['name'])
            tty.debug(msg)
            continue

        b = _make_bootstrapper(current_config)
        try:
            if b.try_search_path(executables, abstract_spec):
                return
        except Exception as e:
            msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
            tty.debug(msg.format(executables_str, str(e)))

    # We couldn't find the executables in any way, so raise a runtime error
    msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
    if abstract_spec:
        msg += ' from spec "{0}"'.format(abstract_spec)
    raise RuntimeError(msg)
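# Illustrative usage (not part of the original diff): this is exactly how
# ensure_gpg_in_path_or_raise() further down drives this function:
#
#   ensure_executables_in_path_or_raise(
#       executables=['gpg2', 'gpg'], abstract_spec=gnupg_root_spec())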


def _python_import(module):
def make_module_available(module, spec=None, install=False):
    """Ensure module is importable"""
    # If we already can import it, that's great
    try:
        __import__(module)
        return
    except ImportError:
        return False
    return True
        pass

    # If it's already installed, use it
    # Search by spec
    spec = spack.spec.Spec(spec or module)

    # We have to run as part of this python
    # We can constrain by a shortened version in place of a version range
    # because this spec is only used for querying or as a placeholder to be
    # replaced by an external that already has a concrete version. This syntax
    # is not sufficient when concretizing without an external, as it will
    # concretize to python@X.Y instead of python@X.Y.Z
    python_requirement = '^' + spec_for_current_python()
    spec.constrain(python_requirement)
    installed_specs = spack.store.db.query(spec, installed=True)

    for ispec in installed_specs:
        # TODO: make sure run-environment is appropriate
        module_path = os.path.join(ispec.prefix,
                                   ispec['python'].package.site_packages_dir)
        module_path_64 = module_path.replace('/lib/', '/lib64/')
        try:
            sys.path.append(module_path)
            sys.path.append(module_path_64)
            __import__(module)
            return
        except ImportError:
            tty.warn("Spec %s did not provide module %s" % (ispec, module))
            sys.path = sys.path[:-2]

    def _raise_error(module_name, module_spec):
        error_msg = 'cannot import module "{0}"'.format(module_name)
        if module_spec:
            error_msg += ' from spec "{0}"'.format(module_spec)
        raise ImportError(error_msg)

    if not install:
        _raise_error(module, spec)

    with spack_python_interpreter():
        # We will install for ourselves, using this python if needed
        # Concretize the spec
        spec.concretize()
    spec.package.do_install()

    module_path = os.path.join(spec.prefix,
                               spec['python'].package.site_packages_dir)
    module_path_64 = module_path.replace('/lib/', '/lib64/')
    try:
        sys.path.append(module_path)
        sys.path.append(module_path_64)
        __import__(module)
        return
    except ImportError:
        sys.path = sys.path[:-2]
        _raise_error(module, spec)


def get_executable(exe, spec=None, install=False):
@@ -568,14 +137,13 @@ def get_executable(exe, spec=None, install=False):

    Args:
        exe (str): needed executable name
        spec (spack.spec.Spec or str): spec to search for exe in (default exe)
        spec (Spec or str): spec to search for exe in (default exe)
        install (bool): install spec if not available

    When ``install`` is True, Spack will use the python used to run Spack as an
    external. The ``install`` option should only be used with packages that
    install quickly (when using external python) or are guaranteed by Spack
    organization to be in a binary mirror (clingo).
    """
    organization to be in a binary mirror (clingo)."""
    # Search the system first
    runner = spack.util.executable.which(exe)
    if runner:
@@ -592,9 +160,7 @@ def get_executable(exe, spec=None, install=False):
            ret = spack.util.executable.Executable(exe_path[0])
            envmod = EnvironmentModifications()
            for dep in ispec.traverse(root=True, order='post'):
                envmod.extend(
                    spack.user_environment.environment_modifications_for_spec(dep)
                )
                envmod.extend(uenv.environment_modifications_for_spec(dep))
            ret.add_default_envmod(envmod)
            return ret
        else:
@@ -623,9 +189,7 @@ def _raise_error(executable, exe_spec):
        ret = spack.util.executable.Executable(exe_path[0])
        envmod = EnvironmentModifications()
        for dep in spec.traverse(root=True, order='post'):
            envmod.extend(
                spack.user_environment.environment_modifications_for_spec(dep)
            )
            envmod.extend(uenv.environment_modifications_for_spec(dep))
        ret.add_default_envmod(envmod)
        return ret

@@ -637,12 +201,8 @@ def _bootstrap_config_scopes():
    config_scopes = [
        spack.config.InternalConfigScope('_builtin', spack.config.config_defaults)
    ]
    configuration_paths = (
        spack.config.configuration_defaults_path,
        ('bootstrap', _config_path())
    )
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
    for name, path in spack.config.configuration_paths:
        platform = spack.architecture.platform().name
        platform_scope = spack.config.ConfigScope(
            '/'.join([name, platform]), os.path.join(path, platform)
        )
@@ -654,135 +214,38 @@ def _bootstrap_config_scopes():
    return config_scopes


def _add_compilers_if_missing():
    arch = spack.spec.ArchSpec.frontend_arch()
    if not spack.compilers.compilers_for_arch(arch):
        new_compilers = spack.compilers.find_new_compilers()
        if new_compilers:
            spack.compilers.add_compilers_to_config(new_compilers, init_config=False)


def _add_externals_if_missing():
    search_list = [
        # clingo
        spack.repo.path.get('cmake'),
        spack.repo.path.get('bison'),
        # GnuPG
        spack.repo.path.get('gawk')
    ]
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope='bootstrap')


@contextlib.contextmanager
def ensure_bootstrap_configuration():
    bootstrap_store_path = store_path()
    user_configuration = _read_and_sanitize_configuration()
    with spack.environment.no_active_environment():
        with spack.platforms.use_platform(spack.platforms.real_host()):
            with spack.repo.use_repositories(spack.paths.packages_path):
                with spack.store.use_store(bootstrap_store_path):
                    # Default configuration scopes excluding command line
                    # and builtin but accounting for platform specific scopes
                    config_scopes = _bootstrap_config_scopes()
                    with spack.config.use_configuration(*config_scopes):
                        # We may need to compile code from sources, so ensure we have
                        # compilers for the current platform before switching parts.
                        _add_compilers_if_missing()
                        spack.config.set('bootstrap', user_configuration['bootstrap'])
                        spack.config.set('config', user_configuration['config'])
                        with spack.modules.disable_modules():
                            with spack_python_interpreter():
                                yield
    with spack.architecture.use_platform(spack.architecture.real_platform()):
        with spack.repo.use_repositories(spack.paths.packages_path):
            with spack.store.use_store(spack.paths.user_bootstrap_store):
                # Default configuration scopes excluding command line
                # and builtin but accounting for platform specific scopes
                config_scopes = _bootstrap_config_scopes()
                with spack.config.use_configuration(*config_scopes):
                    with spack_python_interpreter():
                        yield


def _read_and_sanitize_configuration():
    """Read the user configuration that needs to be reused for bootstrapping
    and remove the entries that should not be copied over.
    """
    # Read the "config" section but pop the install tree (the entry will not be
    # considered due to the use_store context manager, so it would be confusing
    # to have it in the configuration).
    config_yaml = spack.config.get('config')
    config_yaml.pop('install_tree', None)
    user_configuration = {
        'bootstrap': spack.config.get('bootstrap'),
        'config': config_yaml
    }
    return user_configuration
def clingo_root_spec():
    # Construct the root spec that will be used to bootstrap clingo
    spec_str = 'clingo-bootstrap@spack+python'


def store_path():
    """Path to the store used for bootstrapped software"""
    enabled = spack.config.get('bootstrap:enable', True)
    if not enabled:
        msg = ('bootstrapping is currently disabled. '
               'Use "spack bootstrap enable" to enable it')
        raise RuntimeError(msg)

    return _store_path()


def _root_path():
    """Root of all the bootstrap related folders"""
    return spack.config.get(
        'bootstrap:root', spack.paths.default_user_bootstrap_path
    )


def _store_path():
    bootstrap_root_path = _root_path()
    return spack.util.path.canonicalize_path(
        os.path.join(bootstrap_root_path, 'store')
    )


def _config_path():
    bootstrap_root_path = _root_path()
    return spack.util.path.canonicalize_path(
        os.path.join(bootstrap_root_path, 'config')
    )


def _root_spec(spec_str):
    """Add a proper compiler and target to a spec used during bootstrapping.

    Args:
        spec_str (str): spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but MacOS.
    if str(spack.platforms.host()) == 'darwin':
    if str(spack.architecture.platform()) == 'darwin':
        spec_str += ' %apple-clang'
    else:
        spec_str += ' %gcc'

    target = archspec.cpu.host().family
    spec_str += ' target={0}'.format(target)
    # Add hint to use frontend operating system on Cray
    if str(spack.architecture.platform()) == 'cray':
        spec_str += ' os=fe'

    tty.debug('[BOOTSTRAP ROOT SPEC] {0}'.format(spec_str))
    return spec_str
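# Illustrative results (not part of the original diff): on an x86_64 Linux
# host the function above would return, e.g.,
#
#   _root_spec('clingo-bootstrap@spack+python')
#   ->  'clingo-bootstrap@spack+python %gcc target=x86_64'
#
# while on macOS the compiler hint would be %apple-clang instead; the exact
# target family comes from archspec and depends on the machine.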
    # Add the generic target
    generic_target = archspec.cpu.host().family
    spec_str += ' target={0}'.format(str(generic_target))

    tty.debug('[BOOTSTRAP ROOT SPEC] clingo: {0}'.format(spec_str))


def clingo_root_spec():
    """Return the root spec used to bootstrap clingo"""
    return _root_spec('clingo-bootstrap@spack+python')
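# Illustrative note (not part of the original diff): '@2.3:' in
# gnupg_root_spec() below is an open-ended version range ("2.3 or newer"), so,
# assuming standard spec semantics:
#
#   spack.spec.Spec('gnupg@2.4.1').satisfies('gnupg@2.3:')   # True
#   spack.spec.Spec('gnupg@2.2.0').satisfies('gnupg@2.3:')   # False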


def ensure_clingo_importable_or_raise():
    """Ensure that the clingo module is available for import."""
    ensure_module_importable_or_raise(
        module='clingo', abstract_spec=clingo_root_spec()
    )


def gnupg_root_spec():
    """Return the root spec used to bootstrap GnuPG"""
    return _root_spec('gnupg@2.3:')


def ensure_gpg_in_path_or_raise():
    """Ensure gpg or gpg2 are in the PATH or raise."""
    ensure_executables_in_path_or_raise(
        executables=['gpg2', 'gpg'], abstract_spec=gnupg_root_spec(),
    )
    return spack.spec.Spec(spec_str)

@@ -49,6 +49,7 @@
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd

import spack.architecture as arch
import spack.build_systems.cmake
import spack.build_systems.meson
import spack.config
@@ -56,12 +57,10 @@
import spack.main
import spack.package
import spack.paths
import spack.platforms
import spack.repo
import spack.schema.environment
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.path
from spack.error import NoHeadersError, NoLibrariesError
from spack.util.cpus import cpus_available
@@ -70,8 +69,8 @@
    env_flag,
    filter_system_paths,
    get_path,
    inspect_path,
    is_system_path,
    preserve_environment,
    system_dirs,
    validate,
)
@@ -147,14 +146,6 @@ def __call__(self, *args, **kwargs):
        return super(MakeExecutable, self).__call__(*args, **kwargs)


def _on_cray():
    host_platform = spack.platforms.host()
    host_os = host_platform.operating_system('default_os')
    on_cray = str(host_platform) == 'cray'
    using_cnl = re.match(r'cnl\d+', str(host_os))
    return on_cray, using_cnl


def clean_environment():
    # Stuff in here sanitizes the build environment to eliminate
    # anything the user has set that may interfere. We apply it immediately
@@ -178,9 +169,6 @@ def clean_environment():

    env.unset('CMAKE_PREFIX_PATH')

    # Affects GNU make, can e.g. indirectly inhibit enabling parallel build
    env.unset('MAKEFLAGS')

    # Avoid that libraries of build dependencies get hijacked.
    env.unset('LD_PRELOAD')
    env.unset('DYLD_INSERT_LIBRARIES')
@@ -189,7 +177,9 @@ def clean_environment():
    # interference with Spack dependencies.
    # CNL requires these variables to be set (or at least some of them,
    # depending on the CNL version).
    on_cray, using_cnl = _on_cray()
    hostarch = arch.Arch(arch.platform(), 'default_os', 'default_target')
    on_cray = str(hostarch.platform) == 'cray'
    using_cnl = re.match(r'cnl\d+', str(hostarch.os))
    if on_cray and not using_cnl:
        env.unset('CRAY_LD_LIBRARY_PATH')
        for varname in os.environ.keys():
@@ -232,7 +222,7 @@ def clean_environment():
            if '/macports/' in p:
                env.remove_path('PATH', p)

    return env
    env.apply_modifications()


def set_compiler_environment_variables(pkg, env):
@@ -465,11 +455,11 @@ def determine_number_of_jobs(
    cap to the number of CPUs available to avoid oversubscription.

    Parameters:
        parallel (bool or None): true when package supports parallel builds
        command_line (int or None): command line override
        config_default (int or None): config default number of jobs
        max_cpus (int or None): maximum number of CPUs available. When None, this
            value is automatically determined.
        parallel (bool): true when package supports parallel builds
        command_line (int/None): command line override
        config_default (int/None): config default number of jobs
        max_cpus (int/None): maximum number of CPUs available. When None, this
            value is automatically determined.
    """
    if not parallel:
        return 1
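    # Illustrative precedence sketch (not part of the original diff; the rest
    # of the function lies outside this hunk): an explicit command line value
    # is assumed to win over the config default, with the result capped to the
    # available CPUs to avoid oversubscription:
    #
    #   jobs = command_line or config_default or max_cpus
    #   return min(jobs, max_cpus)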
@@ -695,14 +685,14 @@ def get_std_cmake_args(pkg):
    """List of standard arguments used if a package is a CMakePackage.

    Returns:
        list: standard arguments that would be used if this
        list of str: standard arguments that would be used if this
            package were a CMakePackage instance.

    Args:
        pkg (spack.package.PackageBase): package under consideration
        pkg (PackageBase): package under consideration

    Returns:
        list: arguments for cmake
        list of str: arguments for cmake
    """
    return spack.build_systems.cmake.CMakePackage._std_args(pkg)

@@ -711,14 +701,14 @@ def get_std_meson_args(pkg):
    """List of standard arguments used if a package is a MesonPackage.

    Returns:
        list: standard arguments that would be used if this
        list of str: standard arguments that would be used if this
            package were a MesonPackage instance.

    Args:
        pkg (spack.package.PackageBase): package under consideration
        pkg (PackageBase): package under consideration

    Returns:
        list: arguments for meson
        list of str: arguments for meson
    """
    return spack.build_systems.meson.MesonPackage._std_args(pkg)

@@ -748,7 +738,7 @@ def load_external_modules(pkg):
    associated with them.

    Args:
        pkg (spack.package.PackageBase): package to load deps for
        pkg (PackageBase): package to load deps for
    """
    for dep in list(pkg.spec.traverse()):
        external_modules = dep.external_modules or []
@@ -765,77 +755,72 @@ def setup_package(pkg, dirty, context='build'):

    set_module_variables_for_package(pkg)

    # Keep track of env changes from packages separately, since we want to
    # issue warnings when packages make "suspicious" modifications.
    env_base = EnvironmentModifications() if dirty else clean_environment()
    env_mods = EnvironmentModifications()
    env = EnvironmentModifications()

    if not dirty:
        clean_environment()

    # setup compilers for build contexts
    need_compiler = context == 'build' or (context == 'test' and
                                           pkg.test_requires_compiler)
    if need_compiler:
        set_compiler_environment_variables(pkg, env_mods)
        set_wrapper_variables(pkg, env_mods)
        set_compiler_environment_variables(pkg, env)
        set_wrapper_variables(pkg, env)

    env_mods.extend(modifications_from_dependencies(
    env.extend(modifications_from_dependencies(
        pkg.spec, context, custom_mods_only=False))

    # architecture specific setup
    platform = spack.platforms.by_name(pkg.spec.architecture.platform)
    target = platform.target(pkg.spec.architecture.target)
    platform.setup_platform_environment(pkg, env_mods)
    pkg.architecture.platform.setup_platform_environment(pkg, env)

    if context == 'build':
        pkg.setup_build_environment(env_mods)
        pkg.setup_build_environment(env)

        if (not dirty) and (not env_mods.is_unset('CPATH')):
        if (not dirty) and (not env.is_unset('CPATH')):
            tty.debug("A dependency has updated CPATH, this may lead pkg-"
                      "config to assume that the package is part of the system"
                      " includes and omit it when invoked with '--cflags'.")
    elif context == 'test':
        env_mods.extend(
            inspect_path(
                pkg.spec.prefix,
                spack.user_environment.prefix_inspections(pkg.spec.platform),
                exclude=is_system_path
            )
        )
        pkg.setup_run_environment(env_mods)
        env_mods.prepend_path('PATH', '.')
        pkg.setup_run_environment(env)
        env.prepend_path('PATH', '.')

    # First apply the clean environment changes
    env_base.apply_modifications()
    # Loading modules, in particular if they are meant to be used outside
    # of Spack, can change environment variables that are relevant to the
    # build of packages. To avoid a polluted environment, preserve the
    # value of a few, selected, environment variables
    # With the current ordering of environment modifications, this is strictly
    # unnecessary. Modules affecting these variables will be overwritten anyway
    with preserve_environment('CC', 'CXX', 'FC', 'F77'):
        # All module loads that otherwise would belong in previous
        # functions have to occur after the env object has its
        # modifications applied. Otherwise the environment modifications
        # could undo module changes, such as unsetting LD_LIBRARY_PATH
        # after a module changes it.
        if need_compiler:
            for mod in pkg.compiler.modules:
                # Fixes issue https://github.com/spack/spack/issues/3153
                if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
                    load_module("cce")
                load_module(mod)

    # Load modules on an already clean environment, just before applying Spack's
    # own environment modifications. This ensures Spack controls CC/CXX/... variables.
    if need_compiler:
        for mod in pkg.compiler.modules:
            load_module(mod)

        # kludge to handle cray libsci being automatically loaded by PrgEnv
        # modules on cray platform. Module unload does no damage when
        # unnecessary
        on_cray, _ = _on_cray()
        if on_cray:
            # kludge to handle cray libsci being automatically loaded by PrgEnv
            # modules on cray platform. Module unload does no damage when
            # unnecessary
            module('unload', 'cray-libsci')

        if target.module_name:
            load_module(target.module_name)
        if pkg.architecture.target.module_name:
            load_module(pkg.architecture.target.module_name)

        load_external_modules(pkg)
    load_external_modules(pkg)

    implicit_rpaths = pkg.compiler.implicit_rpaths()
    if implicit_rpaths:
        env_mods.set('SPACK_COMPILER_IMPLICIT_RPATHS',
                     ':'.join(implicit_rpaths))
        env.set('SPACK_COMPILER_IMPLICIT_RPATHS',
                ':'.join(implicit_rpaths))

    # Make sure nothing's strange about the Spack environment.
    validate(env_mods, tty.warn)
    env_mods.apply_modifications()

    # Return all env modifications we controlled (excluding module related ones)
    env_base.extend(env_mods)
    return env_base
    validate(env, tty.warn)
    env.apply_modifications()


def _make_runnable(pkg, env):
@@ -879,7 +864,7 @@ def modifications_from_dependencies(spec, context, custom_mods_only=True):
    CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).

    Args:
        spec (spack.spec.Spec): spec for which we want the modifications
        spec (Spec): spec for which we want the modifications
        context (str): either 'build' for build-time modifications or 'run'
            for run-time modifications
    """
@@ -1023,8 +1008,8 @@ def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,

        if not kwargs.get('fake', False):
            kwargs['unmodified_env'] = os.environ.copy()
            kwargs['env_modifications'] = setup_package(
                pkg, dirty=kwargs.get('dirty', False), context=context)
            setup_package(pkg, dirty=kwargs.get('dirty', False),
                          context=context)
        return_value = function(pkg, kwargs)
        child_pipe.send(return_value)

@@ -1077,9 +1062,9 @@ def start_build_process(pkg, function, kwargs):

    Args:

        pkg (spack.package.PackageBase): package whose environment we should set up the
        pkg (PackageBase): package whose environment we should set up the
            child process for.
        function (typing.Callable): argless function to run in the child
        function (callable): argless function to run in the child
            process.

    Usage::
@@ -1164,7 +1149,7 @@ def get_package_context(traceback, context=3):
    """Return some context for an error message when the build fails.

    Args:
        traceback: A traceback from some exception raised during
        traceback (traceback): A traceback from some exception raised during
            install

        context (int): Lines of context to show before and after the line

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import itertools
import os
import os.path
import stat
@@ -13,8 +14,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import force_remove, working_dir

from spack.build_environment import InstallError
from spack.directives import depends_on
from spack.package import PackageBase, run_after, run_before
from spack.util.executable import Executable

@@ -31,7 +30,7 @@ class AutotoolsPackage(PackageBase):

    They all have sensible defaults and for many packages the only thing
    necessary will be to override the helper method
    :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`.
    :py:meth:`~.AutotoolsPackage.configure_args`.
    For a finer tuning you may also override:

    +-----------------------------------------------+--------------------+
@@ -55,24 +54,9 @@ class AutotoolsPackage(PackageBase):
    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'AutotoolsPackage'

    @property
    def patch_config_files(self):
        """
        Whether or not to update old ``config.guess`` and ``config.sub`` files
        distributed with the tarball. This currently only applies to
        ``ppc64le:``, ``aarch64:``, and ``riscv64`` target architectures. The
        substitutes are taken from the ``gnuconfig`` package, which is
        automatically added as a build dependency for these architectures. In
        case system versions of these config files are required, the
        ``gnuconfig`` package can be marked external with a prefix pointing to
        the directory containing the system ``config.guess`` and ``config.sub``
        files.
        """
        return (self.spec.satisfies('target=ppc64le:')
                or self.spec.satisfies('target=aarch64:')
                or self.spec.satisfies('target=riscv64:'))
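    # Illustrative configuration (not part of the original diff): marking
    # gnuconfig external, as the docstring above describes; the version and
    # prefix below are assumptions for illustration:
    #
    #   packages:
    #     gnuconfig:
    #       buildable: false
    #       externals:
    #       - spec: gnuconfig@master
    #         prefix: /usr/share/automake-1.16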

    #: Whether or not to update ``config.guess`` and ``config.sub`` on old
    #: architectures
    patch_config_files = True
    #: Whether or not to update ``libtool``
    #: (currently only for Arm/Clang/Fujitsu compilers)
    patch_libtool = True
@@ -99,10 +83,6 @@ def patch_config_files(self):
    #: after the installation. If True instead it installs them.
    install_libtool_archives = False

    depends_on('gnuconfig', type='build', when='target=ppc64le:')
    depends_on('gnuconfig', type='build', when='target=aarch64:')
    depends_on('gnuconfig', type='build', when='target=riscv64:')

    @property
    def _removed_la_files_log(self):
        """File containing the list of removed libtool archives"""
@@ -124,10 +104,12 @@ def _do_patch_config_files(self):
        """Some packages ship with older config.guess/config.sub files and
        need to have these updated when installed on a newer architecture.
        In particular, config.guess fails for PPC64LE for versions prior
        to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64) and
        RISC-V (riscv64).
        to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64).
        """
        if not self.patch_config_files:
        if not self.patch_config_files or (
                not self.spec.satisfies('target=ppc64le:') and
                not self.spec.satisfies('target=aarch64:')
        ):
            return

        # TODO: Expand this to select the 'config.sub'-compatible architecture
@@ -137,8 +119,6 @@ def _do_patch_config_files(self):
            config_arch = 'ppc64le'
        elif self.spec.satisfies('target=aarch64:'):
            config_arch = 'aarch64'
        elif self.spec.satisfies('target=riscv64:'):
            config_arch = 'riscv64'
        else:
            config_arch = 'local'

@@ -158,69 +138,39 @@ def runs_ok(script_abs_path):

            return True

        # Get the list of files that need to be patched
        to_be_patched = fs.find(self.stage.path, files=['config.sub', 'config.guess'])
        # Compute the list of files that need to be patched
        search_dir = self.stage.path
        to_be_patched = fs.find(
            search_dir, files=['config.sub', 'config.guess'], recursive=True
        )
        to_be_patched = [f for f in to_be_patched if not runs_ok(f)]

        # If there are no files to be patched, return early
        if not to_be_patched:
            return

        # Otherwise, require `gnuconfig` to be a build dependency
        self._require_build_deps(
            pkgs=['gnuconfig'],
            spec=self.spec,
            err="Cannot patch config files")
        # Directories in which to search for files to be copied
        # over the failing ones
        good_file_dirs = ['/usr/share']
        if 'automake' in self.spec:
            good_file_dirs.insert(0, self.spec['automake'].prefix)

        # Get the config files we need to patch (config.sub / config.guess).
        # List of files to be found in the directories above
        to_be_found = list(set(os.path.basename(f) for f in to_be_patched))
        gnuconfig = self.spec['gnuconfig']
        gnuconfig_dir = gnuconfig.prefix

        # An external gnuconfig may not have a prefix.
        if gnuconfig_dir is None:
            raise InstallError("Spack could not find substitutes for GNU config "
                               "files because no prefix is available for the "
                               "`gnuconfig` package. Make sure you set a prefix "
                               "path instead of modules for external `gnuconfig`.")

        candidates = fs.find(gnuconfig_dir, files=to_be_found, recursive=False)

        # For external packages the user may have specified an incorrect prefix;
        # otherwise the installation is just corrupt.
        if not candidates:
            msg = ("Spack could not find `config.guess` and `config.sub` "
                   "files in the `gnuconfig` prefix `{0}`. This means the "
                   "`gnuconfig` package is broken").format(gnuconfig_dir)
            if gnuconfig.external:
                msg += (" or the `gnuconfig` package prefix is misconfigured as"
                        " an external package")
            raise InstallError(msg)

        # Filter working substitutes
        candidates = [f for f in candidates if runs_ok(f)]
        substitutes = {}
        for candidate in candidates:
            config_file = os.path.basename(candidate)
            substitutes[config_file] = candidate
            to_be_found.remove(config_file)
        for directory in good_file_dirs:
            candidates = fs.find(directory, files=to_be_found, recursive=True)
            candidates = [f for f in candidates if runs_ok(f)]
            for name, good_files in itertools.groupby(
                    candidates, key=os.path.basename
            ):
                substitutes[name] = next(good_files)
                to_be_found.remove(name)

        # Check that we found everything we needed
        if to_be_found:
            msg = """\
Spack could not find working replacements for the following autotools config
files: {0}.

To resolve this problem, please try the following:
1. Try to rebuild with `patch_config_files = False` in the package `{1}`, to
   rule out that Spack tries to replace config files not used by the build.
2. Verify that the `gnuconfig` package is up-to-date.
3. On some systems you need to use system-provided `config.guess` and `config.sub`
   files. In this case, mark `gnuconfig` as a non-buildable external package,
   and set the prefix to the directory containing the `config.guess` and
   `config.sub` files.
"""
            raise InstallError(msg.format(', '.join(to_be_found), self.name))
            msg = 'Failed to find suitable substitutes for {0}'
            raise RuntimeError(msg.format(', '.join(to_be_found)))

        # Copy the good files over the bad ones
        for abs_path in to_be_patched:
@@ -302,41 +252,17 @@ def delete_configure_to_force_update(self):
        if self.force_autoreconf:
            force_remove(self.configure_abs_path)

    def _require_build_deps(self, pkgs, spec, err):
        """Require `pkgs` to be direct build dependencies of `spec`. Raises a
        RuntimeError with a helpful error message when any dep is missing."""

        build_deps = [d.name for d in spec.dependencies(deptype='build')]
        missing_deps = [x for x in pkgs if x not in build_deps]

        if not missing_deps:
            return

        # Raise an exception on missing deps.
        msg = ("{0}: missing dependencies: {1}.\n\nPlease add "
               "the following lines to the package:\n\n"
               .format(err, ", ".join(missing_deps)))

        for dep in missing_deps:
            msg += ("    depends_on('{0}', type='build', when='@{1}')\n"
                    .format(dep, spec.version))

        msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
        raise RuntimeError(msg)

    def autoreconf(self, spec, prefix):
        """Not needed usually, configure should already be there"""

        # If configure exists nothing needs to be done
        if os.path.exists(self.configure_abs_path):
            return

        # Else try to regenerate it, which requires a few build dependencies
        self._require_build_deps(
            pkgs=['autoconf', 'automake', 'libtool'],
            spec=spec,
            err="Cannot generate configure")

        # Else try to regenerate it
        autotools = ['m4', 'autoconf', 'automake', 'libtool']
        missing = [x for x in autotools if x not in spec]
        if missing:
            msg = 'Cannot generate configure: missing dependencies {0}'
            raise RuntimeError(msg.format(missing))
        tty.msg('Configure script not found: trying to generate it')
        tty.warn('*********************************************************')
        tty.warn('* If the default procedure fails, consider implementing *')
@@ -405,7 +331,7 @@ def flags_to_build_system_args(self, flags):

    def configure(self, spec, prefix):
        """Runs configure with the arguments specified in
        :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
        :py:meth:`~.AutotoolsPackage.configure_args`
        and an appropriately set prefix.
        """
        options = getattr(self, 'configure_flag_args', [])
@@ -447,28 +373,25 @@ def _activate_or_not(
            name,
            activation_word,
            deactivation_word,
            activation_value=None,
            variant=None
            activation_value=None
    ):
        """This function contains the current implementation details of
        :meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
        :meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`.
        :py:meth:`~.AutotoolsPackage.with_or_without` and
        :py:meth:`~.AutotoolsPackage.enable_or_disable`.

        Args:
            name (str): name of the option that is being activated or not
            name (str): name of the variant that is being processed
            activation_word (str): the default activation word ('with' in the
                case of ``with_or_without``)
            deactivation_word (str): the default deactivation word ('without'
                in the case of ``with_or_without``)
            activation_value (typing.Callable): callable that accepts a single
            activation_value (callable): callable that accepts a single
                value. This value is either one of the allowed values for a
                multi-valued variant or the name of a bool-valued variant.
                Returns the parameter to be used when the value is activated.

                The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.
            variant (str): name of the variant that is being processed
                (if different from option name)

        Examples:

@@ -478,7 +401,6 @@ def _activate_or_not(

                variant('foo', values=('x', 'y'), description='')
                variant('bar', default=True, description='')
                variant('ba_z', default=True, description='')

            calling this function like:

@@ -488,18 +410,17 @@ def _activate_or_not(
                    'foo', 'with', 'without', activation_value='prefix'
                )
                _activate_or_not('bar', 'with', 'without')
                _activate_or_not('ba-z', 'with', 'without', variant='ba_z')

            will generate the following configuration options:

            .. code-block:: console

                --with-x=<prefix-to-x> --without-y --with-bar --with-ba-z
                --with-x=<prefix-to-x> --without-y --with-bar

            for ``<spec-name> foo=x +bar``

        Returns:
            list: list of strings that corresponds to the activation/deactivation
            list of strings that corresponds to the activation/deactivation
            of the variant that has been processed

        Raises:
@@ -511,37 +432,32 @@ def _activate_or_not(
        if activation_value == 'prefix':
            activation_value = lambda x: spec[x].prefix

        variant = variant or name

        # Defensively check that the name passed as an argument is among
        # the variants
        if variant not in self.variants:
        if name not in self.variants:
            msg = '"{0}" is not a variant of "{1}"'
            raise KeyError(msg.format(variant, self.name))
            raise KeyError(msg.format(name, self.name))

        # Create a list of pairs. Each pair includes a configuration
        # option and whether or not that option is activated
        variant_desc, _ = self.variants[variant]
        if set(variant_desc.values) == set((True, False)):
        if set(self.variants[name].values) == set((True, False)):
            # A BoolValuedVariant carries information about a single option.
            # Nonetheless, for uniformity of treatment we'll package them
            # in an iterable of one element.
            condition = '+{name}'.format(name=variant)
            condition = '+{name}'.format(name=name)
            options = [(name, condition in spec)]
        else:
            condition = '{variant}={value}'
            condition = '{name}={value}'
            # "feature_values" is used to track values which correspond to
            # features which can be enabled or disabled as understood by the
            # package's build system. It excludes values which have special
            # meanings and do not correspond to features (e.g. "none")
            feature_values = getattr(
                variant_desc.values, 'feature_values', None
            ) or variant_desc.values
                self.variants[name].values, 'feature_values', None
            ) or self.variants[name].values

            options = [
                (value,
                 condition.format(variant=variant,
                                  value=value) in spec)
                (value, condition.format(name=name, value=value) in spec)
                for value in feature_values
            ]

@@ -569,7 +485,7 @@ def _default_generator(is_activated):
            args.append(line_generator(activated))
        return args

    def with_or_without(self, name, activation_value=None, variant=None):
    def with_or_without(self, name, activation_value=None):
        """Inspects a variant and returns the arguments that activate
        or deactivate the selected feature(s) for the configure options.

@@ -585,7 +501,7 @@ def with_or_without(self, name, activation_value=None, variant=None):

        Args:
            name (str): name of a valid multi-valued variant
            activation_value (typing.Callable): callable that accepts a single
            activation_value (callable): callable that accepts a single
                value and returns the parameter to be used leading to an entry
                of the type ``--with-{name}={parameter}``.

@@ -595,17 +511,15 @@ def with_or_without(self, name, activation_value=None, variant=None):
        Returns:
            list of arguments to configure
        """
        return self._activate_or_not(name, 'with', 'without', activation_value,
                                     variant)
        return self._activate_or_not(name, 'with', 'without', activation_value)
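    # Illustrative usage (not part of the original diff): a package declaring
    # `variant('mpi', default=False)` could build its configure arguments like
    # this; the package and variant names are assumptions:
    #
    #   def configure_args(self):
    #       args = []
    #       # yields ['--with-mpi'] or ['--without-mpi']
    #       args.extend(self.with_or_without('mpi'))
    #       return args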
|
||||
|
||||
def enable_or_disable(self, name, activation_value=None, variant=None):
|
||||
"""Same as
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`
|
||||
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
|
||||
def enable_or_disable(self, name, activation_value=None):
|
||||
"""Same as :py:meth:`~.AutotoolsPackage.with_or_without` but substitute
|
||||
``with`` with ``enable`` and ``without`` with ``disable``.
|
||||
|
||||
Args:
|
||||
name (str): name of a valid multi-valued variant
|
||||
activation_value (typing.Callable): if present accepts a single value
|
||||
activation_value (callable): if present accepts a single value
|
||||
and returns the parameter to be used leading to an entry of the
|
||||
type ``--enable-{name}={parameter}``
|
||||
|
||||
@@ -616,7 +530,7 @@ def enable_or_disable(self, name, activation_value=None, variant=None):
|
||||
list of arguments to configure
|
||||
"""
|
||||
return self._activate_or_not(
|
||||
name, 'enable', 'disable', activation_value, variant
|
||||
name, 'enable', 'disable', activation_value
|
||||
)
|
||||
|
||||
run_after('install')(PackageBase._run_default_install_time_test_callbacks)
|
||||
@@ -646,6 +560,3 @@ def remove_libtool_archives(self):
|
||||
fs.mkdirp(os.path.dirname(self._removed_la_files_log))
|
||||
with open(self._removed_la_files_log, mode='w') as f:
|
||||
f.write('\n'.join(libtool_files))
|
||||
|
||||
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
|
||||
run_after('install')(PackageBase.apply_macos_rpath_fixups)
|
||||
|
||||
@@ -108,6 +108,21 @@ def initconfig_compiler_entries(self):
|
||||
if fflags:
|
||||
entries.append(cmake_cache_string("CMAKE_Fortran_FLAGS", fflags))
|
||||
|
||||
# Override XL compiler family
|
||||
familymsg = ("Override to proper compiler family for XL")
|
||||
if "xlf" in (self.compiler.fc or ''): # noqa: F821
|
||||
entries.append(cmake_cache_string(
|
||||
"CMAKE_Fortran_COMPILER_ID", "XL",
|
||||
familymsg))
|
||||
if "xlc" in self.compiler.cc: # noqa: F821
|
||||
entries.append(cmake_cache_string(
|
||||
"CMAKE_C_COMPILER_ID", "XL",
|
||||
familymsg))
|
||||
if "xlC" in self.compiler.cxx: # noqa: F821
|
||||
entries.append(cmake_cache_string(
|
||||
"CMAKE_CXX_COMPILER_ID", "XL",
|
||||
familymsg))
|
||||
|
||||
return entries
|
||||
|
||||
def initconfig_mpi_entries(self):
|
||||
|
||||
@@ -236,7 +236,7 @@ def define_from_variant(self, cmake_var, variant=None):
|
||||
of ``cmake_var``.
|
||||
|
||||
This utility function is similar to
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`.
|
||||
:py:meth:`~.AutotoolsPackage.with_or_without`.
|
||||
|
||||
Examples:
|
||||
|
||||
@@ -254,9 +254,9 @@ def define_from_variant(self, cmake_var, variant=None):
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
|
||||
self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
|
||||
self.define_from_variant('SWR')]
|
||||
[define_from_variant('BUILD_SHARED_LIBS', 'shared'),
|
||||
define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
|
||||
define_from_variant('SWR')]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ class CudaPackage(PackageBase):
|
||||
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
|
||||
and is meant to unify and facilitate its usage.
|
||||
|
||||
Maintainers: ax3l, Rombur, davidbeckingsale
|
||||
Maintainers: ax3l, Rombur
|
||||
"""
|
||||
|
||||
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
|
||||
@@ -87,45 +87,28 @@ def cuda_flags(arch_list):
|
||||
|
||||
# Linux x86_64 compiler conflicts from here:
|
||||
# https://gist.github.com/ax3l/9489132
|
||||
|
||||
# GCC
|
||||
# According to
|
||||
# https://github.com/spack/spack/pull/25054#issuecomment-886531664
|
||||
# these conflicts are valid independently from the architecture
|
||||
|
||||
# minimum supported versions
|
||||
conflicts('%gcc@:4', when='+cuda ^cuda@11.0:')
|
||||
conflicts('%gcc@:5', when='+cuda ^cuda@11.4:')
|
||||
|
||||
# maximum supported version
|
||||
# NOTE:
|
||||
# in order to not constrain future cuda version to old gcc versions,
|
||||
# it has been decided to use an upper bound for the latest version.
|
||||
# This implies that the last one in the list has to be updated at
|
||||
# each release of a new cuda minor version.
|
||||
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
|
||||
conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
|
||||
conflicts('%gcc@12:', when='+cuda ^cuda@:11.5.0')
|
||||
conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
|
||||
conflicts('%clang@13:', when='+cuda ^cuda@:11.5.0')
|
||||
|
||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||
conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
|
||||
conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
|
||||
conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
|
||||
conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
|
||||
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
|
||||
conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
|
||||
conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=x86_64:')
|
||||
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
|
||||
conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')
|
||||
conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
|
||||
conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5 target=x86_64:')
|
||||
conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
|
||||
conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1 target=x86_64:')
|
||||
conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10 target=x86_64:')
|
||||
conflicts('%pgi@:17,20:', when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
|
||||
conflicts('%pgi@:17,21:', when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
|
||||
conflicts('%pgi@:17,20:',
|
||||
when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
|
||||
conflicts('%pgi@:17,21:',
|
||||
when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
|
||||
conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
|
||||
conflicts('%clang@:3.7,4:', when='+cuda ^cuda@8.0:9.0 target=x86_64:')
|
||||
conflicts('%clang@:3.7,4.1:', when='+cuda ^cuda@9.1 target=x86_64:')
|
||||
conflicts('%clang@:3.7,4:',
|
||||
when='+cuda ^cuda@8.0:9.0 target=x86_64:')
|
||||
conflicts('%clang@:3.7,4.1:',
|
||||
when='+cuda ^cuda@9.1 target=x86_64:')
|
||||
conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2 target=x86_64:')
|
||||
conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130 target=x86_64:')
|
||||
conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105 target=x86_64:')
|
||||
@@ -149,6 +132,9 @@ def cuda_flags(arch_list):
|
||||
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
|
||||
conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
|
||||
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
|
||||
conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=ppc64le:')
|
||||
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.3 target=ppc64le:')
|
||||
conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
|
||||
conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
|
||||
conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
|
||||
conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
|
||||
@@ -159,7 +145,7 @@ def cuda_flags(arch_list):
|
||||
conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
|
||||
conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
|
||||
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=ppc64le:')
|
||||
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.2 target=ppc64le:')
|
||||
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=ppc64le:')
|
||||
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
|
||||
|
||||
# Intel is mostly relevant for x86_64 Linux, even though it also
|
||||
@@ -184,7 +170,7 @@ def cuda_flags(arch_list):
|
||||
|
||||
# Darwin.
|
||||
# TODO: add missing conflicts for %apple-clang cuda@:10
|
||||
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2: ')
|
||||
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2:')
|
||||
|
||||
# Make sure cuda_arch can not be used without +cuda
|
||||
for value in cuda_arch_values:
|
||||
|
||||
@@ -116,9 +116,9 @@ class IntelPackage(PackageBase):
    # that satisfies self.spec will be used.
    version_years = {
        # intel-daal is versioned 2016 and later, no divining is needed
        'intel-ipp@9.0:9': 2016,
        'intel-mkl@11.3.0:11.3': 2016,
        'intel-mpi@5.1:5': 2016,
        'intel-ipp@9.0:9.99': 2016,
        'intel-mkl@11.3.0:11.3.999': 2016,
        'intel-mpi@5.1:5.99': 2016,
    }

    # Below is the list of possible values for setting auto dispatch functions
@@ -368,7 +368,7 @@ def normalize_suite_dir(self, suite_dir_name, version_globs=['*.*.*']):
                toplevel psxevars.sh or equivalent file to source (and thus by
                the modulefiles that Spack produces).

            version_globs (list): Suffix glob patterns (most specific
            version_globs (list of str): Suffix glob patterns (most specific
                first) expected to qualify suite_dir_name to its fully
                version-specific install directory (as opposed to a
                compatibility directory or symlink).
@@ -685,9 +685,9 @@ def openmp_libs(self):
        # packages.yaml), specifically to provide the 'iomp5' libs.

        elif '%gcc' in self.spec:
            with self.compiler.compiler_environment():
                omp_lib_path = Executable(self.compiler.cc)(
                    '--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
            gcc = Executable(self.compiler.cc)
            omp_lib_path = gcc(
                '--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
            omp_libs = LibraryList(omp_lib_path.strip())

        if len(omp_libs) < 1:
@@ -728,9 +728,8 @@ def tbb_libs(self):

        # TODO: clang(?)
        gcc = self._gcc_executable  # must be gcc, not self.compiler.cc
        with self.compiler.compiler_environment():
            cxx_lib_path = gcc(
                '--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
        cxx_lib_path = gcc(
            '--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)

        libs = tbb_lib + LibraryList(cxx_lib_path.rstrip())
        debug_print(libs)
@@ -740,9 +739,8 @@ def tbb_libs(self):
    def _tbb_abi(self):
        '''Select the ABI needed for linking TBB'''
        gcc = self._gcc_executable
        with self.compiler.compiler_environment():
            matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
                                gcc('--version', output=str), re.I | re.M)
        matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
                            gcc('--version', output=str), re.I | re.M)
        abi = ''
        if sys.platform == 'darwin':
            pass
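The `_tbb_abi` hunk above sniffs the compiler version out of the `--version` banner with a single regex. A standalone check of that pattern (the banner string is invented for illustration):

```python
import re

# Invented `gcc --version` banner, for illustration only.
banner = 'gcc (GCC) 9.4.0\nCopyright (C) 2019 Free Software Foundation, Inc.'

matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
                    banner, re.I | re.M)
if matches:
    print(matches.group(1), matches.group(2))  # -> gcc 9.4.0
```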
@@ -110,6 +110,3 @@ def installcheck(self):

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    run_after('install')(PackageBase.apply_macos_rpath_fixups)

@@ -69,9 +69,6 @@ def install(self, spec, prefix, installer_path=None):

        # Installer writes files in ~/intel; set HOME so it goes to prefix
        bash.add_default_env('HOME', prefix)
        # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
        bash.add_default_env('XDG_RUNTIME_DIR',
                             join_path(self.stage.path, 'runtime'))

        bash(installer_path,
             '-s', '-a', '-s', '--action', 'install',
@@ -127,25 +127,24 @@ def import_modules(self):
            list: list of strings of module names
        """
        modules = []
        root = os.path.join(
            self.prefix,
            self.spec['python'].package.config_vars['python_lib']['true']['false'],
        )

        # Some Python libraries are packages: collections of modules
        # distributed in directories containing __init__.py files
        for path in find(root, '__init__.py', recursive=True):
            modules.append(path.replace(root + os.sep, '', 1).replace(
                os.sep + '__init__.py', '').replace('/', '.'))

        # Some Python libraries are modules: individual *.py files
        # found in the site-packages directory
        for path in find(root, '*.py', recursive=False):
            modules.append(path.replace(root + os.sep, '', 1).replace(
                '.py', '').replace('/', '.'))
        # Python libraries may be installed in lib or lib64
        # See issues #18520 and #17126
        for lib in ['lib', 'lib64']:
            root = os.path.join(self.prefix, lib, 'python{0}'.format(
                self.spec['python'].version.up_to(2)), 'site-packages')
            # Some Python libraries are packages: collections of modules
            # distributed in directories containing __init__.py files
            for path in find(root, '__init__.py', recursive=True):
                modules.append(path.replace(root + os.sep, '', 1).replace(
                    os.sep + '__init__.py', '').replace('/', '.'))
            # Some Python libraries are modules: individual *.py files
            # found in the site-packages directory
            for path in find(root, '*.py', recursive=False):
                modules.append(path.replace(root + os.sep, '', 1).replace(
                    '.py', '').replace('/', '.'))

        tty.debug('Detected the following modules: {0}'.format(modules))

        return modules

    def setup_file(self):
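Both variants of `import_modules` above map files under site-packages to dotted module names: packages via their `__init__.py`, single-file modules via their `.py` path. A self-contained sketch of that mapping (the paths are invented):

```python
import os


def path_to_module(root, path):
    """Turn a path under ``root`` into a dotted module name."""
    rel = path.replace(root + os.sep, '', 1)
    if rel.endswith('__init__.py'):
        rel = rel.replace(os.sep + '__init__.py', '')  # package directory
    else:
        rel = rel.replace('.py', '')                   # single-file module
    return rel.replace(os.sep, '.')


root = '/prefix/lib/python3.8/site-packages'
print(path_to_module(root, root + '/numpy/core/__init__.py'))  # numpy.core
print(path_to_module(root, root + '/six.py'))                  # six
```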
@@ -255,11 +254,15 @@ def install_args(self, spec, prefix):
        # Get all relative paths since we set the root to `prefix`
        # We query the python with which these will be used for the lib and inc
        # directories. This ensures we use `lib`/`lib64` as expected by python.
        pure_site_packages_dir = spec['python'].package.config_vars[
            'python_lib']['false']['false']
        plat_site_packages_dir = spec['python'].package.config_vars[
            'python_lib']['true']['false']
        inc_dir = spec['python'].package.config_vars['python_inc']['true']
        python = spec['python'].package.command
        command_start = 'print(distutils.sysconfig.'
        commands = ';'.join([
            'import distutils.sysconfig',
            command_start + 'get_python_lib(plat_specific=False, prefix=""))',
            command_start + 'get_python_lib(plat_specific=True, prefix=""))',
            command_start + 'get_python_inc(plat_specific=True, prefix=""))'])
        pure_site_packages_dir, plat_site_packages_dir, inc_dir = python(
            '-c', commands, output=str, error=str).strip().split('\n')

        args += ['--root=%s' % prefix,
                 '--install-purelib=%s' % pure_site_packages_dir,
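One side of this hunk shells out to the build-time Python itself and parses three lines back, so `lib` vs `lib64` matches what that interpreter actually reports. Roughly the same query, runnable on any Python that still ships `distutils`:

```python
import subprocess

commands = ';'.join([
    'import distutils.sysconfig',
    'print(distutils.sysconfig.get_python_lib(plat_specific=False, prefix=""))',
    'print(distutils.sysconfig.get_python_lib(plat_specific=True, prefix=""))',
    'print(distutils.sysconfig.get_python_inc(plat_specific=True, prefix=""))'])

# Ask the target interpreter for its purelib, platlib, and include dirs.
out = subprocess.check_output(['python3', '-c', commands], text=True)
purelib, platlib, inc = out.strip().split('\n')
print(purelib, platlib, inc, sep='\n')
```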
@@ -393,15 +396,11 @@ def remove_files_from_view(self, view, merge_map):
                self.spec
            )
        )

        to_remove = []
        for src, dst in merge_map.items():
            if ignore_namespace and namespace_init(dst):
                continue

            if global_view or not path_contains_subdirectory(src, bin_dir):
                to_remove.append(dst)
                view.remove_file(src, dst)
            else:
                os.remove(dst)

        view.remove_files(to_remove)
@@ -18,9 +18,6 @@ class RubyPackage(PackageBase):
    #. :py:meth:`~.RubyPackage.build`
    #. :py:meth:`~.RubyPackage.install`
    """

    maintainers = ['Kerilk']

    #: Phases of a Ruby package
    phases = ['build', 'install']

@@ -53,12 +50,8 @@ def install(self, spec, prefix):

        gems = glob.glob('*.gem')
        if gems:
            # if --install-dir is not used, GEM_PATH is deleted from the
            # environment, and gems required to build native extensions will
            # not be found. Those extensions are built during `gem install`.
            inspect.getmodule(self).gem(
                'install', '--norc', '--ignore-dependencies',
                '--install-dir', prefix, gems[0])
                'install', '--norc', '--ignore-dependencies', gems[0])

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
@@ -64,25 +64,24 @@ def import_modules(self):
            list: list of strings of module names
        """
        modules = []
        root = os.path.join(
            self.prefix,
            self.spec['python'].package.config_vars['python_lib']['true']['false'],
        )

        # Some Python libraries are packages: collections of modules
        # distributed in directories containing __init__.py files
        for path in find(root, '__init__.py', recursive=True):
            modules.append(path.replace(root + os.sep, '', 1).replace(
                os.sep + '__init__.py', '').replace('/', '.'))

        # Some Python libraries are modules: individual *.py files
        # found in the site-packages directory
        for path in find(root, '*.py', recursive=False):
            modules.append(path.replace(root + os.sep, '', 1).replace(
                '.py', '').replace('/', '.'))
        # Python libraries may be installed in lib or lib64
        # See issues #18520 and #17126
        for lib in ['lib', 'lib64']:
            root = os.path.join(self.prefix, lib, 'python{0}'.format(
                self.spec['python'].version.up_to(2)), 'site-packages')
            # Some Python libraries are packages: collections of modules
            # distributed in directories containing __init__.py files
            for path in find(root, '__init__.py', recursive=True):
                modules.append(path.replace(root + os.sep, '', 1).replace(
                    os.sep + '__init__.py', '').replace('/', '.'))
            # Some Python libraries are modules: individual *.py files
            # found in the site-packages directory
            for path in find(root, '*.py', recursive=False):
                modules.append(path.replace(root + os.sep, '', 1).replace(
                    '.py', '').replace('/', '.'))

        tty.debug('Detected the following modules: {0}'.format(modules))

        return modules

    def python(self, *args, **kwargs):
@@ -99,9 +98,7 @@ def configure(self, spec, prefix):

        args = self.configure_args()

        python_include_dir = os.path.basename(
            inspect.getmodule(self).python_include_dir
        )
        python_include_dir = 'python' + str(spec['python'].version.up_to(2))

        args.extend([
            '--verbose',
@@ -23,8 +23,11 @@ def misc_cache_location():
    Currently the ``misc_cache`` stores indexes for virtual dependency
    providers and for which packages provide which tags.
    """
    path = spack.config.get('config:misc_cache', spack.paths.default_misc_cache_path)
    return spack.util.path.canonicalize_path(path)
    path = spack.config.get('config:misc_cache')
    if not path:
        path = os.path.join(spack.paths.user_config_path, 'cache')
    path = spack.util.path.canonicalize_path(path)
    return path


def _misc_cache():
@@ -44,7 +47,7 @@ def fetch_cache_location():
    """
    path = spack.config.get('config:source_cache')
    if not path:
        path = spack.paths.default_fetch_cache_path
        path = os.path.join(spack.paths.var_path, "cache")
    path = spack.util.path.canonicalize_path(path)
    return path
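Both cache locators follow the same pattern: read a config key, fall back to a default, then canonicalize. A generic sketch of that lookup (the `get_config` parameter is a stand-in for `spack.config.get`):

```python
import os


def cache_location(get_config, key, default_path):
    """Resolve a cache path from config, falling back to a default."""
    path = get_config(key)
    if not path:
        path = default_path
    # A real implementation would also expand Spack-specific placeholders.
    return os.path.expanduser(os.path.expandvars(path))


# Hypothetical usage with an empty configuration:
print(cache_location(lambda key: None, 'config:misc_cache',
                     '~/.spack/cache'))
```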
@@ -45,8 +45,6 @@
]

SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
                                           'shared_pr_mirror')
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'

spack_gpg = spack.main.SpackCommand('gpg')
@@ -396,6 +394,9 @@ def append_dep(s, d):
    })

    for spec in spec_list:
        spec.concretize()

        # root_spec = get_spec_string(spec)
        root_spec = spec

        for s in spec.traverse(deptype=all):
@@ -611,14 +612,11 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
            'strip-compilers': False,
        })

    # Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
    # be up to date in one of those and thus not need to be rebuilt.
    # Add this mirror if it's enabled, as some specs might be up to date
    # here and thus not need to be rebuilt.
    if pr_mirror_url:
        spack.mirror.add(
            'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
        spack.mirror.add('ci_shared_pr_mirror',
                         SPACK_SHARED_PR_MIRROR_URL,
                         cfg.default_modify_scope())

    pipeline_artifacts_dir = artifacts_root
    if not pipeline_artifacts_dir:
@@ -665,35 +663,16 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,

    # Speed up staging by first fetching binary indices from all mirrors
    # (including the per-PR mirror we may have just added above).
    try:
        bindist.binary_index.update()
    except bindist.FetchCacheError as e:
        tty.error(e)
    bindist.binary_index.update()

    staged_phases = {}
    try:
        for phase in phases:
            phase_name = phase['name']
            if phase_name == 'specs':
                # Anything in the "specs" of the environment is already
                # concretized by the block at the top of this method, so we
                # only need to find the concrete versions, and then avoid
                # re-concretizing them needlessly later on.
                concrete_phase_specs = [
                    concrete for abstract, concrete in env.concretized_specs()
                    if abstract in env.spec_lists[phase_name]
                ]
            else:
                # Any specs lists in other definitions (but not in the
                # "specs") of the environment are not yet concretized so we
                # have to concretize them explicitly here.
                concrete_phase_specs = env.spec_lists[phase_name]
                with spack.concretize.disable_compiler_existence_check():
                    for phase_spec in concrete_phase_specs:
                        phase_spec.concretize()
            staged_phases[phase_name] = stage_spec_jobs(
                concrete_phase_specs,
                check_index_only=check_index_only)
            with spack.concretize.disable_compiler_existence_check():
                staged_phases[phase_name] = stage_spec_jobs(
                    env.spec_lists[phase_name],
                    check_index_only=check_index_only)
    finally:
        # Clean up PR mirror if enabled
        if pr_mirror_url:
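The try/except in the hunk above lets a mirror with a missing or corrupt index degrade to a logged error instead of aborting pipeline generation; staging just loses the "already built" fast path. A minimal sketch of that tolerance pattern (the exception and updater names mirror the ones above, but the wiring is illustrative):

```python
class FetchCacheError(Exception):
    """Raised when a mirror's binary index cannot be fetched."""


def update_index_tolerantly(update, log_error):
    # Staging can proceed without index data, so log and continue.
    try:
        update()
    except FetchCacheError as e:
        log_error(e)


def flaky_update():
    raise FetchCacheError('index fetch failed for s3://example-mirror')


update_index_tolerantly(flaky_update, print)  # prints, does not raise
```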
@@ -709,17 +688,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
    max_length_needs = 0
    max_needs_job = ''

    # If this is configured, spack will fail "spack ci generate" if it
    # generates any full hash which exists under the broken specs url.
    broken_spec_urls = None
    if broken_specs_url:
        if broken_specs_url.startswith('http'):
            # To make checking each spec against the list faster, we require
            # a url protocol that allows us to iterate the url in advance.
            tty.msg('Cannot use an http(s) url for broken specs, ignoring')
        else:
            broken_spec_urls = web_util.list_url(broken_specs_url)

    before_script, after_script = None, None
    for phase in phases:
        phase_name = phase['name']
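The block above trades a per-spec existence check for one up-front listing of the broken-specs prefix, which is why http(s) URLs, which cannot be enumerated, are rejected. A minimal sketch of the idea (the listing backend is a stand-in):

```python
def load_broken_hashes(list_url, broken_specs_url):
    """List the broken-specs prefix once; later checks become set lookups."""
    if not broken_specs_url or broken_specs_url.startswith('http'):
        return None  # http(s) prefixes cannot be enumerated in advance
    return set(list_url(broken_specs_url))


# Stand-in listing backend returning the hashes stored at the prefix.
hashes = load_broken_hashes(lambda url: ['abc123', 'def456'],
                            's3://example-bucket/broken-specs')
print('abc123' in hashes)  # True, with no per-spec network round trip
```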
@@ -903,13 +871,16 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                tty.debug(debug_msg)

            if prune_dag and not rebuild_spec:
                tty.debug('Pruning spec that does not need to be rebuilt.')
                continue

            if (broken_spec_urls is not None and
                    release_spec_full_hash in broken_spec_urls):
                known_broken_specs_encountered.append('{0} ({1})'.format(
                    release_spec, release_spec_full_hash))
            # Check if this spec is in our list of known failures, now that
            # we know this spec needs a rebuild
            if broken_specs_url:
                broken_spec_path = url_util.join(
                    broken_specs_url, release_spec_full_hash)
                if web_util.url_exists(broken_spec_path):
                    known_broken_specs_encountered.append('{0} ({1})'.format(
                        release_spec, release_spec_full_hash))

            if artifacts_root:
                job_dependencies.append({
@@ -946,7 +917,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                bc_root = os.path.join(
                    local_mirror_dir, 'build_cache')
                artifact_paths.extend([os.path.join(bc_root, p) for p in [
                    bindist.tarball_name(release_spec, '.spec.json'),
                    bindist.tarball_name(release_spec, '.spec.yaml'),
                    bindist.tarball_name(release_spec, '.cdashid'),
                    bindist.tarball_directory_name(release_spec),
                ]])
@@ -1027,14 +998,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        'after_script',
    ]

    service_job_retries = {
        'max': 2,
        'when': [
            'runner_system_failure',
            'stuck_or_timeout_failure'
        ]
    }

    if job_id > 0:
        if temp_storage_url_prefix:
            # There were some rebuild jobs scheduled, so we will need to
@@ -1054,7 +1017,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                    temp_storage_url_prefix)
            ]
            cleanup_job['when'] = 'always'
            cleanup_job['retry'] = service_job_retries

            output_object['cleanup'] = cleanup_job

@@ -1078,7 +1040,11 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                index_target_mirror)
        ]
        final_job['when'] = 'always'
        final_job['retry'] = service_job_retries

        if artifacts_root:
            final_job['variables'] = {
                'SPACK_CONCRETE_ENV_DIR': concrete_env_dir
            }

        output_object['rebuild-index'] = final_job

@@ -1142,8 +1108,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
            'echo "All specs already up to date, nothing to rebuild."',
        ]

        noop_job['retry'] = service_job_retries

        sorted_output = {'no-specs-to-rebuild': noop_job}

    if known_broken_specs_encountered:
@@ -1352,20 +1316,17 @@ def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,

    request = Request(cdash_api_url, data=enc_data, headers=headers)

    try:
        response = opener.open(request)
        response_code = response.getcode()
    response = opener.open(request)
    response_code = response.getcode()

        if response_code != 200 and response_code != 201:
            msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
                job_build_id, dep_build_id, response_code)
            tty.warn(msg)
            return
    if response_code != 200 and response_code != 201:
        msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
            job_build_id, dep_build_id, response_code)
        tty.warn(msg)
        return

        response_text = response.read()
        tty.debug('Relate builds response: {0}'.format(response_text))
    except Exception as e:
        print("Relating builds in CDash failed: {0}".format(e))
    response_text = response.read()
    tty.debug('Relate builds response: {0}'.format(response_text))


def write_cdashid_to_mirror(cdashid, spec, mirror_url):
@@ -1412,13 +1373,13 @@ def read_cdashid_from_mirror(spec, mirror_url):
    return int(contents)


def push_mirror_contents(env, spec, specfile_path, mirror_url, sign_binaries):
def push_mirror_contents(env, spec, yaml_path, mirror_url, sign_binaries):
    try:
        unsigned = not sign_binaries
        tty.debug('Creating buildcache ({0})'.format(
            'unsigned' if unsigned else 'signed'))
        spack.cmd.buildcache._createtarball(
            env, spec_file=specfile_path, add_deps=False,
            env, spec_yaml=yaml_path, add_deps=False,
            output_location=mirror_url, force=True, allow_root=True,
            unsigned=unsigned)
    except Exception as inst:
@@ -1434,7 +1395,7 @@ def push_mirror_contents(env, spec, specfile_path, mirror_url, sign_binaries):
        # BaseException
        # object
        err_msg = 'Error msg: {0}'.format(inst)
        if any(x in err_msg for x in ['Access Denied', 'InvalidAccessKeyId']):
        if 'Access Denied' in err_msg:
            tty.msg('Permission problem writing to {0}'.format(
                mirror_url))
            tty.msg(err_msg)
@@ -1551,7 +1512,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):

    # Next attempt to clone your local spack repo into the repro dir
    with fs.working_dir(repro_dir):
        clone_out = git("clone", spack_git_path, "spack",
        clone_out = git("clone", spack_git_path,
                        output=str, error=os.devnull,
                        fail_on_error=False)
@@ -21,13 +21,11 @@
from llnl.util.tty.color import colorize

import spack.config
import spack.environment as ev
import spack.error
import spack.extensions
import spack.paths
import spack.spec
import spack.store
import spack.user_environment as uenv
import spack.util.spack_json as sjson
import spack.util.string

@@ -154,7 +152,6 @@ def parse_specs(args, **kwargs):
    concretize = kwargs.get('concretize', False)
    normalize = kwargs.get('normalize', False)
    tests = kwargs.get('tests', False)
    reuse = kwargs.get('reuse', False)

    try:
        sargs = args
@@ -163,7 +160,7 @@ def parse_specs(args, **kwargs):
        specs = spack.spec.parse(sargs)
        for spec in specs:
            if concretize:
                spec.concretize(tests=tests, reuse=reuse)  # implies normalize
                spec.concretize(tests=tests)  # implies normalize
            elif normalize:
                spec.normalize(tests=tests)
@@ -189,13 +186,29 @@ def matching_spec_from_env(spec):
    If no matching spec is found in the environment (or if no environment is
    active), this will return the given spec but concretized.
    """
    env = ev.active_environment()
    env = spack.environment.get_env({}, cmd_name)
    if env:
        return env.matching_spec(spec) or spec.concretized()
    else:
        return spec.concretized()


def elide_list(line_list, max_num=10):
    """Takes a long list and limits it to a smaller number of elements,
    replacing intervening elements with '...'. For example::

        elide_list([1,2,3,4,5,6], 4)

    gives::

        [1, 2, 3, '...', 6]
    """
    if len(line_list) > max_num:
        return line_list[:max_num - 1] + ['...'] + line_list[-1:]
    else:
        return line_list
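A quick check of `elide_list` against its own docstring (runnable as-is):

```python
def elide_list(line_list, max_num=10):
    if len(line_list) > max_num:
        return line_list[:max_num - 1] + ['...'] + line_list[-1:]
    return line_list


assert elide_list([1, 2, 3, 4, 5, 6], 4) == [1, 2, 3, '...', 6]
assert elide_list([1, 2, 3], 4) == [1, 2, 3]  # short lists pass through
```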
def disambiguate_spec(spec, env, local=False, installed=True, first=False):
    """Given a spec, figure out which installed package it refers to.

@@ -203,10 +216,10 @@ def disambiguate_spec(spec, env, local=False, installed=True, first=False):
        spec (spack.spec.Spec): a spec to disambiguate
        env (spack.environment.Environment): a spack environment,
            if one is active, or None if no environment is active
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
        local (boolean, default False): do not search chained spack instances
        installed (boolean or any, or spack.database.InstallStatus or iterable
            of spack.database.InstallStatus): install status argument passed to
            database query. See ``spack.database.Database._query`` for details.
    """
    hashes = env.all_hashes() if env else None
    return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
@@ -218,11 +231,11 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False,

    Arguments:
        spec (spack.spec.Spec): a spec to disambiguate
        hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
        hashes (iterable): a set of hashes of specs among which to disambiguate
        local (boolean, default False): do not search chained spack instances
        installed (boolean or any, or spack.database.InstallStatus or iterable
            of spack.database.InstallStatus): install status argument passed to
            database query. See ``spack.database.Database._query`` for details.
    """
    if local:
        matching_specs = spack.store.db.query_local(spec, hashes=hashes,
@@ -261,19 +274,17 @@ def display_specs_as_json(specs, deps=False):
    seen = set()
    records = []
    for spec in specs:
        dag_hash = spec.dag_hash()
        if dag_hash in seen:
        if spec.dag_hash() in seen:
            continue
        records.append(spec.node_dict_with_hashes())
        seen.add(dag_hash)
        seen.add(spec.dag_hash())
        records.append(spec.to_record_dict())

        if deps:
            for dep in spec.traverse():
                dep_dag_hash = dep.dag_hash()
                if dep_dag_hash in seen:
                if dep.dag_hash() in seen:
                    continue
                records.append(dep.node_dict_with_hashes())
                seen.add(dep_dag_hash)
                seen.add(dep.dag_hash())
                records.append(dep.to_record_dict())

    sjson.dump(records, sys.stdout)
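One side of this hunk computes `dag_hash()` once per spec and reuses it; the other recomputes it at each use. The hoisted form is the usual way to deduplicate by a key that is expensive to compute; a minimal sketch:

```python
def dedup_by_key(items, key):
    """Keep the first item for each key, computing the key only once."""
    seen = set()
    out = []
    for item in items:
        k = key(item)  # hoisted: computed once, reused for check and add
        if k in seen:
            continue
        out.append(item)
        seen.add(k)
    return out


print(dedup_by_key(['aa', 'ab', 'ba'], lambda s: s[0]))  # ['aa', 'ba']
```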
@@ -322,8 +333,9 @@ def display_specs(specs, args=None, **kwargs):
    namespace.

    Args:
        specs (list): the specs to display
        args (argparse.Namespace or None): namespace containing formatting arguments
        specs (list of spack.spec.Spec): the specs to display
        args (optional argparse.Namespace): namespace containing
            formatting arguments

    Keyword Args:
        paths (bool): Show paths with each displayed spec
@@ -336,9 +348,9 @@ def display_specs(specs, args=None, **kwargs):
        indent (int): indent each line this much
        groups (bool): display specs grouped by arch/compiler (default True)
        decorators (dict): dictionary mapping specs to decorators
        header_callback (typing.Callable): called at start of arch/compiler groups
        header_callback (function): called at start of arch/compiler groups
        all_headers (bool): show headers even when arch/compiler aren't defined
        output (typing.IO): A file object to write to. Default is ``sys.stdout``
        output (stream): A file object to write to. Default is ``sys.stdout``

    """
    def get_arg(name, default=None):
@@ -443,28 +455,6 @@ def format_list(specs):
    output.flush()
def filter_loaded_specs(specs):
    """Filter a list of specs returning only those that are
    currently loaded."""
    hashes = os.environ.get(uenv.spack_loaded_hashes_var, '').split(':')
    return [x for x in specs if x.dag_hash() in hashes]


def print_how_many_pkgs(specs, pkg_type=""):
    """Given a list of specs, this will print a message about how many
    specs are in that list.

    Args:
        specs (list): depending on how many items are in this list, choose
            the plural or singular form of the word "package"
        pkg_type (str): the output string will mention this provided
            category, e.g. if pkg_type is "installed" then the message
            would be "3 installed packages"
    """
    tty.msg("%s" % spack.util.string.plural(
        len(specs), pkg_type + " package"))


def spack_is_git_repo():
    """Ensure that this instance of Spack is a git clone."""
    return is_git_repo(spack.paths.prefix)
@@ -512,71 +502,3 @@ def extant_file(f):
    if not os.path.isfile(f):
        raise argparse.ArgumentTypeError('%s does not exist' % f)
    return f


def require_active_env(cmd_name):
    """Used by commands to get the active environment.

    If an environment is not found, print an error message that says the
    calling command *needs* an active environment.

    Arguments:
        cmd_name (str): name of calling command

    Returns:
        (spack.environment.Environment): the active environment
    """
    env = ev.active_environment()

    if env:
        return env
    else:
        tty.die(
            '`spack %s` requires an environment' % cmd_name,
            'activate an environment first:',
            '    spack env activate ENV',
            'or use:',
            '    spack -e ENV %s ...' % cmd_name)


def find_environment(args):
    """Find active environment from args or environment variable.

    Check for an environment in this order:
    1. via ``spack -e ENV`` or ``spack -D DIR`` (arguments)
    2. via a path in the spack.environment.spack_env_var environment variable.

    If an environment is found, read it in. If not, return None.

    Arguments:
        args (argparse.Namespace): argparse namespace with command arguments

    Returns:
        (spack.environment.Environment): a found environment, or ``None``
    """

    # treat env as a name
    env = args.env
    if env:
        if ev.exists(env):
            return ev.read(env)

    else:
        # if env was specified, see if it is a directory; otherwise, look
        # at env_dir (env and env_dir are mutually exclusive)
        env = args.env_dir

        # if no argument, look for the environment variable
        if not env:
            env = os.environ.get(ev.spack_env_var)

            # nothing was set; there's no active environment
            if not env:
                return None

    # if we get here, env isn't the name of a spack environment; it has
    # to be a path to an environment, or there is something wrong.
    if ev.is_env_dir(env):
        return ev.Environment(env)

    raise ev.SpackEnvironmentError('no environment in %s' % env)
@@ -30,7 +30,8 @@ def activate(parser, args):
    if len(specs) != 1:
        tty.die("activate requires one spec. %d given." % len(specs))

    spec = spack.cmd.disambiguate_spec(specs[0], ev.active_environment())
    env = ev.get_env(args, 'activate')
    spec = spack.cmd.disambiguate_spec(specs[0], env)
    if not spec.package.is_extension:
        tty.die("%s is not an extension." % spec.name)


@@ -7,6 +7,7 @@

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev

description = 'add a spec to an environment'
section = "environments"
@@ -21,7 +22,7 @@ def setup_parser(subparser):


def add(parser, args):
    env = spack.cmd.require_active_env(cmd_name='add')
    env = ev.get_env(args, 'add', required=True)

    with env.write_transaction():
        for spec in spack.cmd.parse_specs(args.specs):
@@ -58,9 +58,9 @@ def analyze_spec(spec, analyzers=None, outdir=None, monitor=None, overwrite=Fals
        analyze_spec(spec, args.analyzers, args.outdir, monitor)

    Args:
        spec (spack.spec.Spec): spec object of installed package
        spec (Spec): spec object of installed package
        analyzers (list): list of analyzer (keys) to run
        monitor (spack.monitor.SpackMonitorClient): a monitor client
        monitor (monitor.SpackMonitorClient): a monitor client
        overwrite (bool): overwrite result if already exists
    """
    analyzers = analyzers or list(spack.analyzers.analyzer_types.keys())
@@ -95,7 +95,7 @@ def analyze(parser, args, **kwargs):
        sys.exit(0)

    # handle active environment, if any
    env = ev.active_environment()
    env = ev.get_env(args, 'analyze')

    # Get a disambiguated spec (we should only have one)
    specs = spack.cmd.parse_specs(args.spec)
@@ -12,7 +12,7 @@
import llnl.util.tty.colify as colify
import llnl.util.tty.color as color

import spack.platforms
import spack.architecture as architecture

description = "print architecture information about this machine"
section = "system"
@@ -20,10 +20,6 @@


def setup_parser(subparser):
    subparser.add_argument(
        '-g', '--generic-target', action='store_true',
        help='show the best generic target'
    )
    subparser.add_argument(
        '--known-targets', action='store_true',
        help='show a list of all known targets and exit'
@@ -78,32 +74,25 @@ def display_target_group(header, target_group):


def arch(parser, args):
    if args.generic_target:
        print(archspec.cpu.host().generic)
        return

    if args.known_targets:
        display_targets(archspec.cpu.TARGETS)
        return

    os_args, target_args = 'default_os', 'default_target'
    if args.frontend:
        os_args, target_args = 'frontend', 'frontend'
        arch = architecture.Arch(architecture.platform(),
                                 'frontend', 'frontend')
    elif args.backend:
        os_args, target_args = 'backend', 'backend'

    host_platform = spack.platforms.host()
    host_os = host_platform.operating_system(os_args)
    host_target = host_platform.target(target_args)
    architecture = spack.spec.ArchSpec(
        (str(host_platform), str(host_os), str(host_target))
    )
        arch = architecture.Arch(architecture.platform(),
                                 'backend', 'backend')
    else:
        arch = architecture.Arch(architecture.platform(),
                                 'default_os', 'default_target')

    if args.platform:
        print(architecture.platform)
        print(arch.platform)
    elif args.operating_system:
        print(architecture.os)
        print(arch.os)
    elif args.target:
        print(architecture.target)
        print(arch.target)
    else:
        print(architecture)
        print(arch)
@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
import llnl.util.tty.color as cl

import spack.audit
@@ -20,24 +19,12 @@ def setup_parser(subparser):
    # Audit configuration files
    sp.add_parser('configs', help='audit configuration files')

    # HTTPS and other linting
    https_parser = sp.add_parser('packages-https', help='check https in packages')
    https_parser.add_argument(
        '--all',
        action='store_true',
        default=False,
        dest='check_all',
        help="audit all packages"
    )

    # Audit package recipes
    pkg_parser = sp.add_parser('packages', help='audit package recipes')

    for group in [pkg_parser, https_parser]:
        group.add_argument(
            'name', metavar='PKG', nargs='*',
            help='package to be analyzed (if none, all packages will be processed)',
        )
    pkg_parser.add_argument(
        'name', metavar='PKG', nargs='*',
        help='package to be analyzed (if none, all packages will be processed)',
    )

    # List all checks
    sp.add_parser('list', help='list available checks and exits')
@@ -54,17 +41,6 @@ def packages(parser, args):
    _process_reports(reports)


def packages_https(parser, args):

    # Since packages takes a long time, --all is required without name
    if not args.check_all and not args.name:
        tty.die("Please specify one or more packages to audit, or --all.")

    pkgs = args.name or spack.repo.path.all_package_names()
    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
    _process_reports(reports)


def list(parser, args):
    for subcommand, check_tags in spack.audit.GROUPS.items():
        print(cl.colorize('@*b{' + subcommand + '}:'))
@@ -82,7 +58,6 @@ def audit(parser, args):
    subcommands = {
        'configs': configs,
        'packages': packages,
        'packages-https': packages_https,
        'list': list
    }
    subcommands[args.subcommand](parser, args)
@@ -1,220 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import os.path
import shutil

import llnl.util.tty
import llnl.util.tty.color

import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.util.path

description = "manage bootstrap configuration"
section = "system"
level = "long"


def _add_scope_option(parser):
    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar
    parser.add_argument(
        '--scope', choices=scopes, metavar=scopes_metavar,
        help="configuration scope to read/modify"
    )


def setup_parser(subparser):
    sp = subparser.add_subparsers(dest='subcommand')

    enable = sp.add_parser('enable', help='enable bootstrapping')
    _add_scope_option(enable)

    disable = sp.add_parser('disable', help='disable bootstrapping')
    _add_scope_option(disable)

    reset = sp.add_parser(
        'reset', help='reset bootstrapping configuration to Spack defaults'
    )
    spack.cmd.common.arguments.add_common_arguments(
        reset, ['yes_to_all']
    )

    root = sp.add_parser(
        'root', help='get/set the root bootstrap directory'
    )
    _add_scope_option(root)
    root.add_argument(
        'path', nargs='?', default=None,
        help='set the bootstrap directory to this value'
    )

    list = sp.add_parser(
        'list', help='list the methods available for bootstrapping'
    )
    _add_scope_option(list)

    trust = sp.add_parser(
        'trust', help='trust a bootstrapping method'
    )
    _add_scope_option(trust)
    trust.add_argument(
        'name', help='name of the method to be trusted'
    )

    untrust = sp.add_parser(
        'untrust', help='untrust a bootstrapping method'
    )
    _add_scope_option(untrust)
    untrust.add_argument(
        'name', help='name of the method to be untrusted'
    )


def _enable_or_disable(args):
    # Set to True if we called "enable", otherwise set to False
    value = args.subcommand == 'enable'
    spack.config.set('bootstrap:enable', value, scope=args.scope)


def _reset(args):
    if not args.yes_to_all:
        msg = [
            "Bootstrapping configuration is being reset to Spack's defaults. "
            "Current configuration will be lost.\n",
            "Do you want to continue?"
        ]
        ok_to_continue = llnl.util.tty.get_yes_or_no(
            ''.join(msg), default=True
        )
        if not ok_to_continue:
            raise RuntimeError('Aborting')

    for scope in spack.config.config.file_scopes:
        # The default scope should stay untouched
        if scope.name == 'defaults':
            continue

        # If we are in an env scope we can't delete a file, but the best we
        # can do is nullify the corresponding configuration
        if (scope.name.startswith('env') and
                spack.config.get('bootstrap', scope=scope.name)):
            spack.config.set('bootstrap', {}, scope=scope.name)
            continue

        # If we are outside of an env scope delete the bootstrap.yaml file
        bootstrap_yaml = os.path.join(scope.path, 'bootstrap.yaml')
        backup_file = bootstrap_yaml + '.bkp'
        if os.path.exists(bootstrap_yaml):
            shutil.move(bootstrap_yaml, backup_file)


def _root(args):
    if args.path:
        spack.config.set('bootstrap:root', args.path, scope=args.scope)

    root = spack.config.get('bootstrap:root', default=None, scope=args.scope)
    if root:
        root = spack.util.path.canonicalize_path(root)
    print(root)


def _list(args):
    sources = spack.config.get(
        'bootstrap:sources', default=None, scope=args.scope
    )

    if not sources:
        llnl.util.tty.msg(
            "No method available for bootstrapping Spack's dependencies"
        )
        return

    def _print_method(source, trusted):
        color = llnl.util.tty.color

        def fmt(header, content):
            header_fmt = "@*b{{{0}:}} {1}"
            color.cprint(header_fmt.format(header, content))

        trust_str = "@*y{UNKNOWN}"
        if trusted is True:
            trust_str = "@*g{TRUSTED}"
        elif trusted is False:
            trust_str = "@*r{UNTRUSTED}"

        fmt("Name", source['name'] + ' ' + trust_str)
        print()
        fmt("  Type", source['type'])
        print()

        info_lines = ['\n']
        for key, value in source.get('info', {}).items():
            info_lines.append(' ' * 4 + '@*{{{0}}}: {1}\n'.format(key, value))
        if len(info_lines) > 1:
            fmt("  Info", ''.join(info_lines))

        description_lines = ['\n']
        for line in source['description'].split('\n'):
            description_lines.append(' ' * 4 + line + '\n')

        fmt("  Description", ''.join(description_lines))

    trusted = spack.config.get('bootstrap:trusted', {})
    for s in sources:
        _print_method(s, trusted.get(s['name'], None))


def _write_trust_state(args, value):
    name = args.name
    sources = spack.config.get('bootstrap:sources')

    matches = [s for s in sources if s['name'] == name]
    if not matches:
        names = [s['name'] for s in sources]
        msg = ('there is no bootstrapping method named "{0}". Valid '
               'method names are: {1}'.format(name, ', '.join(names)))
        raise RuntimeError(msg)

    if len(matches) > 1:
        msg = ('there is more than one bootstrapping method named "{0}". '
               'Please delete all methods but one from bootstrap.yaml '
               'before proceeding').format(name)
        raise RuntimeError(msg)

    # Setting the scope explicitly is needed to not copy over to a new scope
    # the entire default configuration for bootstrap.yaml
    scope = args.scope or spack.config.default_modify_scope('bootstrap')
    spack.config.add(
        'bootstrap:trusted:{0}:{1}'.format(name, str(value)), scope=scope
    )


def _trust(args):
    _write_trust_state(args, value=True)
    msg = '"{0}" is now trusted for bootstrapping'
    llnl.util.tty.msg(msg.format(args.name))


def _untrust(args):
    _write_trust_state(args, value=False)
    msg = '"{0}" is now untrusted and will not be used for bootstrapping'
    llnl.util.tty.msg(msg.format(args.name))


def bootstrap(parser, args):
    callbacks = {
        'enable': _enable_or_disable,
        'disable': _enable_or_disable,
        'reset': _reset,
        'root': _root,
        'list': _list,
        'trust': _trust,
        'untrust': _untrust
    }
    callbacks[args.subcommand](args)
@@ -2,33 +2,29 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse

import os
import shutil
import sys
import tempfile

import llnl.util.tty as tty

import spack.architecture
import spack.binary_distribution as bindist
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.fetch_strategy as fs
import spack.hash_types as ht
import spack.mirror
import spack.relocate
import spack.repo
import spack.spec
import spack.store
import spack.util.crypto
import spack.util.url as url_util
import spack.util.web as web_util
from spack.cmd import display_specs
from spack.error import SpecError
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage
from spack.spec import Spec, save_dependency_spec_yamls
from spack.util.string import plural

description = "create, download and install binary packages"
@@ -74,9 +70,8 @@ def setup_parser(subparser):
    create.add_argument('--rebuild-index', action='store_true',
                        default=False, help="Regenerate buildcache index " +
                        "after building package(s)")
    create.add_argument('--spec-file', default=None,
                        help=('Create buildcache entry for spec from json or ' +
                              'yaml file'))
    create.add_argument('-y', '--spec-yaml', default=None,
                        help='Create buildcache entry for spec from yaml file')
    create.add_argument('--only', default='package,dependencies',
                        dest='things_to_install',
                        choices=['package', 'dependencies'],
@@ -102,11 +97,6 @@ def setup_parser(subparser):
    install.add_argument('-o', '--otherarch', action='store_true',
                         help="install specs from other architectures" +
                         " instead of default platform and OS")
    # This argument is needed by the bootstrapping logic to verify checksums
    install.add_argument('--sha256', help=argparse.SUPPRESS)
    install.add_argument(
        '--only-root', action='store_true', help=argparse.SUPPRESS
    )

    arguments.add_common_arguments(install, ['specs'])
    install.set_defaults(func=installtarball)
@@ -166,9 +156,8 @@ def setup_parser(subparser):
        help='Check single spec instead of release specs file')

    check.add_argument(
        '--spec-file', default=None,
        help=('Check single spec from json or yaml file instead of release ' +
              'specs file'))
        '-y', '--spec-yaml', default=None,
        help='Check single spec from yaml file instead of release specs file')

    check.add_argument(
        '--rebuild-on-error', default=False, action='store_true',
@@ -177,15 +166,14 @@ def setup_parser(subparser):

    check.set_defaults(func=check_binaries)

    # Download tarball and specfile
    # Download tarball and spec.yaml
    dltarball = subparsers.add_parser('download', help=get_tarball.__doc__)
    dltarball.add_argument(
        '-s', '--spec', default=None,
        help="Download built tarball for spec from mirror")
    dltarball.add_argument(
        '--spec-file', default=None,
        help=("Download built tarball for spec (from json or yaml file) " +
              "from mirror"))
        '-y', '--spec-yaml', default=None,
        help="Download built tarball for spec (from yaml file) from mirror")
    dltarball.add_argument(
        '-p', '--path', default=None,
        help="Path to directory where tarball should be downloaded")
@@ -201,27 +189,26 @@ def setup_parser(subparser):
        '-s', '--spec', default=None,
        help='Spec string for which buildcache name is desired')
    getbuildcachename.add_argument(
        '--spec-file', default=None,
        help=('Path to spec json or yaml file for which buildcache name is ' +
              'desired'))
        '-y', '--spec-yaml', default=None,
        help='Path to spec yaml file for which buildcache name is desired')
    getbuildcachename.set_defaults(func=get_buildcache_name)

    # Given the root spec, save the yaml of the dependent spec to a file
    savespecfile = subparsers.add_parser('save-specfile',
                                         help=save_specfiles.__doc__)
    savespecfile.add_argument(
    saveyaml = subparsers.add_parser('save-yaml',
                                     help=save_spec_yamls.__doc__)
    saveyaml.add_argument(
        '--root-spec', default=None,
        help='Root spec of dependent spec')
    savespecfile.add_argument(
        '--root-specfile', default=None,
        help='Path to json or yaml file containing root spec of dependent spec')
    savespecfile.add_argument(
    saveyaml.add_argument(
        '--root-spec-yaml', default=None,
        help='Path to yaml file containing root spec of dependent spec')
    saveyaml.add_argument(
        '-s', '--specs', default=None,
        help='List of dependent specs for which saved yaml is desired')
    savespecfile.add_argument(
        '--specfile-dir', default=None,
    saveyaml.add_argument(
        '-y', '--yaml-dir', default=None,
        help='Path to directory where spec yamls should be saved')
    savespecfile.set_defaults(func=save_specfiles)
    saveyaml.set_defaults(func=save_spec_yamls)

    # Copy buildcache from some directory to another mirror url
    copy = subparsers.add_parser('copy', help=buildcache_copy.__doc__)
@@ -229,44 +216,13 @@ def setup_parser(subparser):
        '--base-dir', default=None,
        help='Path to mirror directory (root of existing buildcache)')
    copy.add_argument(
        '--spec-file', default=None,
        help=('Path to spec json or yaml file representing buildcache entry to' +
              ' copy'))
        '--spec-yaml', default=None,
        help='Path to spec yaml file representing buildcache entry to copy')
    copy.add_argument(
        '--destination-url', default=None,
        help='Destination mirror url')
    copy.set_defaults(func=buildcache_copy)

    # Sync buildcache entries from one mirror to another
    sync = subparsers.add_parser('sync', help=buildcache_sync.__doc__)
    source = sync.add_mutually_exclusive_group(required=True)
    source.add_argument('--src-directory',
                        metavar='DIRECTORY',
                        type=str,
                        help="Source mirror as a local file path")
    source.add_argument('--src-mirror-name',
                        metavar='MIRROR_NAME',
                        type=str,
                        help="Name of the source mirror")
    source.add_argument('--src-mirror-url',
                        metavar='MIRROR_URL',
                        type=str,
                        help="URL of the source mirror")
    dest = sync.add_mutually_exclusive_group(required=True)
    dest.add_argument('--dest-directory',
                      metavar='DIRECTORY',
                      type=str,
                      help="Destination mirror as a local file path")
    dest.add_argument('--dest-mirror-name',
                      metavar='MIRROR_NAME',
                      type=str,
                      help="Name of the destination mirror")
    dest.add_argument('--dest-mirror-url',
                      metavar='MIRROR_URL',
                      type=str,
                      help="URL of the destination mirror")
    sync.set_defaults(func=buildcache_sync)

    # Update buildcache index without copying any additional packages
    update_index = subparsers.add_parser(
        'update-index', help=buildcache_update_index.__doc__)
@@ -283,13 +239,12 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
        concretized specs given from cli

    Args:
        pkgs (str): spec to be matched against installed packages
        pkgs (string): spec to be matched against installed packages
        allow_multiple_matches (bool): if True multiple matches are admitted
        env (spack.environment.Environment or None): active environment, or ``None``
            if there is not one
        env (Environment): active environment, or ``None`` if there is not one

    Return:
        list: list of specs
        list of specs
    """
    hashes = env.all_hashes() if env else None

@@ -337,13 +292,9 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
    specs_from_cli = []
    has_errors = False

    try:
        specs = bindist.update_cache_and_get_specs()
    except bindist.FetchCacheError as e:
        tty.error(e)

    specs = bindist.update_cache_and_get_specs()
    if not other_arch:
        arch = spack.spec.Spec.default_arch()
        arch = spack.architecture.default_arch().to_spec()
        specs = [s for s in specs if s.satisfies(arch)]

    for pkg in pkgs:
@@ -377,19 +328,16 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
    return specs_from_cli
def _createtarball(env, spec_file=None, packages=None, add_spec=True,
def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
                   add_deps=True, output_location=os.getcwd(),
                   signing_key=None, force=False, make_relative=False,
                   unsigned=False, allow_root=False, rebuild_index=False):
    if spec_file:
        with open(spec_file, 'r') as fd:
            specfile_contents = fd.read()
            tty.debug('createtarball read specfile contents:')
            tty.debug(specfile_contents)
            if spec_file.endswith('.json'):
                s = Spec.from_json(specfile_contents)
            else:
                s = Spec.from_yaml(specfile_contents)
    if spec_yaml:
        with open(spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
        package = '/{0}'.format(s.dag_hash())
        matches = find_matching_specs(package, env=env)

@@ -402,7 +350,7 @@ def _createtarball(env, spec_file=None, packages=None, add_spec=True,
    else:
        tty.die("build cache file creation requires at least one" +
                " installed package spec, an active environment," +
                " or else a path to a json or yaml file containing a spec" +
                " or else a path to a yaml file containing a spec" +
                " to install")
    specs = set()
@@ -471,7 +419,7 @@ def createtarball(args):
    """create a binary package from an existing install"""

    # restrict matching to current environment if one is active
    env = ev.active_environment()
    env = ev.get_env(args, 'buildcache create')

    output_location = None
    if args.directory:
@@ -511,7 +459,7 @@ def createtarball(args):
    add_spec = ('package' in args.things_to_install)
    add_deps = ('dependencies' in args.things_to_install)

    _createtarball(env, spec_file=args.spec_file, packages=args.specs,
    _createtarball(env, spec_yaml=args.spec_yaml, packages=args.specs,
                   add_spec=add_spec, add_deps=add_deps,
                   output_location=output_location, signing_key=args.key,
                   force=args.force, make_relative=args.rel,
@@ -537,29 +485,15 @@ def install_tarball(spec, args):
    if s.external or s.virtual:
        tty.warn("Skipping external or virtual package %s" % spec.format())
        return

    # This argument is used only for bootstrapping specs without signatures,
    # since we need to check the sha256 of each tarball
    if not args.only_root:
        for d in s.dependencies(deptype=('link', 'run')):
            tty.msg("Installing buildcache for dependency spec %s" % d)
            install_tarball(d, args)

    for d in s.dependencies(deptype=('link', 'run')):
        tty.msg("Installing buildcache for dependency spec %s" % d)
        install_tarball(d, args)
    package = spack.repo.get(spec)
    if s.concrete and package.installed and not args.force:
        tty.warn("Package for spec %s already installed." % spec.format())
    else:
        tarball = bindist.download_tarball(spec)
        if tarball:
            if args.sha256:
                checker = spack.util.crypto.Checker(args.sha256)
                msg = ('cannot verify checksum for "{0}"'
                       ' [expected={1}]')
                msg = msg.format(tarball, args.sha256)
                if not checker.check(tarball):
                    raise spack.binary_distribution.NoChecksumException(msg)
                tty.debug('Verified SHA256 checksum of the build cache')

            tty.msg('Installing buildcache for spec %s' % spec.format())
            bindist.extract_tarball(spec, tarball, args.allow_root,
                                    args.unsigned, args.force)
@@ -572,13 +506,9 @@ def install_tarball(spec, args):

def listspecs(args):
    """list binary packages available from mirrors"""
    try:
        specs = bindist.update_cache_and_get_specs()
    except bindist.FetchCacheError as e:
        tty.error(e)

    specs = bindist.update_cache_and_get_specs()
    if not args.allarch:
        arch = spack.spec.Spec.default_arch()
        arch = spack.architecture.default_arch().to_spec()
        specs = [s for s in specs if s.satisfies(arch)]

    if args.specs:
@@ -621,10 +551,10 @@ def check_binaries(args):
    its result, specifically, if the exit code is non-zero, then at least
    one of the indicated specs needs to be rebuilt.
    """
    if args.spec or args.spec_file:
    if args.spec or args.spec_yaml:
        specs = [get_concrete_spec(args)]
    else:
        env = spack.cmd.require_active_env(cmd_name='buildcache')
        env = ev.get_env(args, 'buildcache', required=True)
        env.concretize()
        specs = env.all_specs()

@@ -658,16 +588,15 @@ def download_buildcache_files(concrete_spec, local_dest, require_cdashid,

    files_to_fetch = [
        {
            'url': [tarball_path_name],
            'url': tarball_path_name,
            'path': local_tarball_path,
            'required': True,
        }, {
            'url': [bindist.tarball_name(concrete_spec, '.spec.json'),
                    bindist.tarball_name(concrete_spec, '.spec.yaml')],
            'url': bindist.tarball_name(concrete_spec, '.spec.yaml'),
            'path': local_dest,
            'required': True,
        }, {
            'url': [bindist.tarball_name(concrete_spec, '.cdashid')],
            'url': bindist.tarball_name(concrete_spec, '.cdashid'),
            'path': local_dest,
            'required': require_cdashid,
        },
     command uses the process exit code to indicate its result, specifically,
     a non-zero exit code indicates that the command failed to download at
     least one of the required buildcache components. Normally, just the
-    tarball and .spec.json files are required, but if the --require-cdashid
+    tarball and .spec.yaml files are required, but if the --require-cdashid
     argument was provided, then a .cdashid file is also required."""
-    if not args.spec and not args.spec_file:
+    if not args.spec and not args.spec_yaml:
         tty.msg('No specs provided, exiting.')
         sys.exit(0)

@@ -700,7 +629,7 @@ def get_tarball(args):

 def get_concrete_spec(args):
     spec_str = args.spec
-    spec_yaml_path = args.spec_file
+    spec_yaml_path = args.spec_yaml

     if not spec_str and not spec_yaml_path:
         tty.msg('Must provide either spec string or path to ' +
@@ -732,14 +661,14 @@ def get_buildcache_name(args):
     sys.exit(0)


-def save_specfiles(args):
+def save_spec_yamls(args):
     """Get full spec for dependencies, relative to root spec, and write them
     to files in the specified output directory. Uses exit code to signal
     success or failure. An exit code of zero means the command was likely
     successful. If any errors or exceptions are encountered, or if expected
     command-line arguments are not provided, then the exit code will be
     non-zero."""
-    if not args.root_spec and not args.root_specfile:
+    if not args.root_spec and not args.root_spec_yaml:
         tty.msg('No root spec provided, exiting.')
         sys.exit(1)

@@ -747,20 +676,20 @@ def save_specfiles(args):
         tty.msg('No dependent specs provided, exiting.')
         sys.exit(1)

-    if not args.specfile_dir:
+    if not args.yaml_dir:
         tty.msg('No yaml directory provided, exiting.')
         sys.exit(1)

-    if args.root_specfile:
-        with open(args.root_specfile) as fd:
-            root_spec_as_json = fd.read()
+    if args.root_spec_yaml:
+        with open(args.root_spec_yaml) as fd:
+            root_spec_as_yaml = fd.read()
     else:
         root_spec = Spec(args.root_spec)
         root_spec.concretize()
-        root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
-    spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
-    save_dependency_specfiles(
-        root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
+        root_spec_as_yaml = root_spec.to_yaml(hash=ht.build_hash)
+
+    save_dependency_spec_yamls(
+        root_spec_as_yaml, args.yaml_dir, args.specs.split())

     sys.exit(0)

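Either way, the function ends by writing one spec file per requested dependency of the concrete root. A rough sketch of that output step (illustrative only, not the actual `save_dependency_spec_yamls` implementation; it assumes `root_spec` is a concrete Spack `Spec`, which supports lookup of dependencies by name):

```python
import os


def write_dependency_yamls(root_spec, dep_names, out_dir):
    """Write '<dep>.yaml' for each named dependency of a concrete root spec."""
    for dep_name in dep_names:
        dep_spec = root_spec[dep_name]   # Spec lookup by name within the DAG
        yaml_path = os.path.join(out_dir, '{0}.yaml'.format(dep_name))
        with open(yaml_path, 'w') as fd:
            fd.write(dep_spec.to_yaml())
```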
@@ -769,10 +698,10 @@ def buildcache_copy(args):
     """Copy a buildcache entry and all its files from one mirror, given as
     '--base-dir', to some other mirror, specified as '--destination-url'.
     The specific buildcache entry to be copied from one location to the
-    other is identified using the '--spec-file' argument."""
+    other is identified using the '--spec-yaml' argument."""
     # TODO: This sub-command should go away once #11117 is merged

-    if not args.spec_file:
+    if not args.spec_yaml:
         tty.msg('No spec yaml provided, exiting.')
         sys.exit(1)

@@ -792,12 +721,12 @@ def buildcache_copy(args):
         sys.exit(1)

     try:
-        with open(args.spec_file, 'r') as fd:
+        with open(args.spec_yaml, 'r') as fd:
             spec = Spec.from_yaml(fd.read())
     except Exception as e:
         tty.debug(e)
         tty.error('Unable to concrectize spec from yaml {0}'.format(
-            args.spec_file))
+            args.spec_yaml))
         sys.exit(1)

     dest_root_path = dest_url
@@ -812,15 +741,10 @@ def buildcache_copy(args):
     tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)

     specfile_rel_path = os.path.join(
-        build_cache_dir, bindist.tarball_name(spec, '.spec.json'))
+        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
     specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
     specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)

-    specfile_rel_path_yaml = os.path.join(
-        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
-    specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path)
-    specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path)
-
     cdashidfile_rel_path = os.path.join(
         build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
     cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
@@ -836,134 +760,12 @@ def buildcache_copy(args):
     tty.msg('Copying {0}'.format(specfile_rel_path))
     shutil.copyfile(specfile_src_path, specfile_dest_path)

-    tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
-    shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
-
     # Copy the cdashid file (if exists) to the destination mirror
     if os.path.exists(cdashid_src_path):
         tty.msg('Copying {0}'.format(cdashidfile_rel_path))
         shutil.copyfile(cdashid_src_path, cdashid_dest_path)
-
-
-def buildcache_sync(args):
-    """ Syncs binaries (and associated metadata) from one mirror to another.
-    Requires an active environment in order to know which specs to sync.
-
-    Args:
-        src (str): Source mirror URL
-        dest (str): Destination mirror URL
-    """
-    # Figure out the source mirror
-    source_location = None
-    if args.src_directory:
-        source_location = args.src_directory
-        scheme = url_util.parse(source_location, scheme='<missing>').scheme
-        if scheme != '<missing>':
-            raise ValueError(
-                '"--src-directory" expected a local path; got a URL, instead')
-        # Ensure that the mirror lookup does not mistake this for named mirror
-        source_location = 'file://' + source_location
-    elif args.src_mirror_name:
-        source_location = args.src_mirror_name
-        result = spack.mirror.MirrorCollection().lookup(source_location)
-        if result.name == "<unnamed>":
-            raise ValueError(
-                'no configured mirror named "{name}"'.format(
-                    name=source_location))
-    elif args.src_mirror_url:
-        source_location = args.src_mirror_url
-        scheme = url_util.parse(source_location, scheme='<missing>').scheme
-        if scheme == '<missing>':
-            raise ValueError(
-                '"{url}" is not a valid URL'.format(url=source_location))
-
-    src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
-    src_mirror_url = url_util.format(src_mirror.fetch_url)
-
-    # Figure out the destination mirror
-    dest_location = None
-    if args.dest_directory:
-        dest_location = args.dest_directory
-        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
-        if scheme != '<missing>':
-            raise ValueError(
-                '"--dest-directory" expected a local path; got a URL, instead')
-        # Ensure that the mirror lookup does not mistake this for named mirror
-        dest_location = 'file://' + dest_location
-    elif args.dest_mirror_name:
-        dest_location = args.dest_mirror_name
-        result = spack.mirror.MirrorCollection().lookup(dest_location)
-        if result.name == "<unnamed>":
-            raise ValueError(
-                'no configured mirror named "{name}"'.format(
-                    name=dest_location))
-    elif args.dest_mirror_url:
-        dest_location = args.dest_mirror_url
-        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
-        if scheme == '<missing>':
-            raise ValueError(
-                '"{url}" is not a valid URL'.format(url=dest_location))
-
-    dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
-    dest_mirror_url = url_util.format(dest_mirror.fetch_url)
-
-    # Get the active environment
-    env = spack.cmd.require_active_env(cmd_name='buildcache sync')
-
-    tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
-        src_mirror_url, dest_mirror_url))
-
-    build_cache_dir = bindist.build_cache_relative_path()
-    buildcache_rel_paths = []
-
-    tty.debug('Syncing the following specs:')
-    for s in env.all_specs():
-        tty.debug('  {0}{1}: {2}'.format(
-            '* ' if s in env.roots() else '  ', s.name, s.dag_hash()))
-
-        buildcache_rel_paths.extend([
-            os.path.join(
-                build_cache_dir, bindist.tarball_path_name(s, '.spack')),
-            os.path.join(
-                build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
-            os.path.join(
-                build_cache_dir, bindist.tarball_name(s, '.spec.json')),
-            os.path.join(
-                build_cache_dir, bindist.tarball_name(s, '.cdashid'))
-        ])
-
-    tmpdir = tempfile.mkdtemp()
-
-    try:
-        for rel_path in buildcache_rel_paths:
-            src_url = url_util.join(src_mirror_url, rel_path)
-            local_path = os.path.join(tmpdir, rel_path)
-            dest_url = url_util.join(dest_mirror_url, rel_path)
-
-            tty.debug('Copying {0} to {1} via {2}'.format(
-                src_url, dest_url, local_path))
-
-            stage = Stage(src_url,
-                          name="temporary_file",
-                          path=os.path.dirname(local_path),
-                          keep=True)
-
-            try:
-                stage.create()
-                stage.fetch()
-                web_util.push_to_url(
-                    local_path,
-                    dest_url,
-                    keep_original=True)
-            except fs.FetchError as e:
-                tty.debug('spack buildcache unable to sync {0}'.format(rel_path))
-                tty.debug(e)
-            finally:
-                stage.destroy()
-    finally:
-        shutil.rmtree(tmpdir)
-
-
 def update_index(mirror_url, update_keys=False):
     mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
     outdir = url_util.format(mirror.push_url)
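The deleted `buildcache_sync` reduces to a fetch-then-push loop over relative paths. Stripped of Spack's `Stage` and `web_util` machinery, the pattern looks roughly like this for file/HTTP sources and a local destination directory (a sketch, not the deleted code):

```python
import os
import shutil
import tempfile
try:
    from urllib.request import urlretrieve  # Python 3
except ImportError:
    from urllib import urlretrieve           # Python 2


def sync_paths(rel_paths, src_base_url, dest_dir):
    """Fetch each relative path from a source base URL, then copy it into a
    destination directory, preserving the relative layout."""
    tmpdir = tempfile.mkdtemp()
    try:
        for rel_path in rel_paths:
            # Fetch into a temporary file first, mirroring Stage's behavior.
            local_path = os.path.join(tmpdir, os.path.basename(rel_path))
            urlretrieve('{0}/{1}'.format(src_base_url, rel_path), local_path)
            # 'Push' to the destination, creating directories as needed.
            dest_path = os.path.join(dest_dir, rel_path)
            if not os.path.isdir(os.path.dirname(dest_path)):
                os.makedirs(os.path.dirname(dest_path))
            shutil.copyfile(local_path, dest_path)
    finally:
        shutil.rmtree(tmpdir)
```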

lib/spack/spack/cmd/checksum.py
@@ -14,7 +14,6 @@
 import spack.repo
 import spack.stage
 import spack.util.crypto
-from spack.package import preferred_version
 from spack.util.naming import valid_fully_qualified_module_name
 from spack.version import Version, ver

@@ -27,16 +26,9 @@ def setup_parser(subparser):
     subparser.add_argument(
         '--keep-stage', action='store_true',
         help="don't clean up staging area when command completes")
-    sp = subparser.add_mutually_exclusive_group()
-    sp.add_argument(
+    subparser.add_argument(
         '-b', '--batch', action='store_true',
         help="don't ask which versions to checksum")
-    sp.add_argument(
-        '-l', '--latest', action='store_true',
-        help="checksum the latest available version only")
-    sp.add_argument(
-        '-p', '--preferred', action='store_true',
-        help="checksum the preferred version only")
     arguments.add_common_arguments(subparser, ['package'])
     subparser.add_argument(
         'versions', nargs=argparse.REMAINDER,
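On the v0.17.2 side, `--batch`, `--latest`, and `--preferred` live in a mutually exclusive group, so argparse itself rejects contradictory combinations. A self-contained illustration of that argparse feature:

```python
import argparse

parser = argparse.ArgumentParser(prog='demo')
group = parser.add_mutually_exclusive_group()
group.add_argument('-b', '--batch', action='store_true')
group.add_argument('-l', '--latest', action='store_true')
group.add_argument('-p', '--preferred', action='store_true')

# argparse errors out on conflicting flags, e.g. 'demo -l -p' fails with
# "argument -p/--preferred: not allowed with argument -l/--latest".
print(parser.parse_args(['-l']))
```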
@@ -56,38 +48,25 @@ def checksum(parser, args):
     # Get the package we're going to generate checksums for
     pkg = spack.repo.get(args.package)

-    url_dict = {}
     if args.versions:
         # If the user asked for specific versions, use those
+        url_dict = {}
         for version in args.versions:
             version = ver(version)
             if not isinstance(version, Version):
                 tty.die("Cannot generate checksums for version lists or "
                         "version ranges. Use unambiguous versions.")
             url_dict[version] = pkg.url_for_version(version)
-    elif args.preferred:
-        version = preferred_version(pkg)
-        url_dict = dict([(version, pkg.url_for_version(version))])
     else:
         # Otherwise, see what versions we can find online
         url_dict = pkg.fetch_remote_versions()
-        if not url_dict:
-            tty.die("Could not find any versions for {0}".format(pkg.name))
-
-    # And ensure the specified version URLs take precedence, if available
-    try:
-        explicit_dict = {}
-        for v in pkg.versions:
-            if not v.isdevelop():
-                explicit_dict[v] = pkg.url_for_version(v)
-        url_dict.update(explicit_dict)
-    except spack.package.NoURLError:
-        pass

     version_lines = spack.stage.get_checksums_for_versions(
         url_dict, pkg.name, keep_stage=args.keep_stage,
         batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
-        latest=args.latest, fetch_options=pkg.fetch_options)
+        fetch_options=pkg.fetch_options)

     print()
     print(version_lines)
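The precedence step deleted above hinges on `dict.update`: URLs derived from explicitly declared versions overwrite scraped URLs for the same version key. In miniature (plain string keys here; the real code keys on `Version` objects):

```python
url_dict = {'1.0': 'https://example.com/scraped/pkg-1.0.tar.gz'}
explicit_dict = {'1.0': 'https://example.com/declared/pkg-1.0.tar.gz'}

# update() overwrites existing keys, so declared URLs win over scraped ones.
url_dict.update(explicit_dict)
assert url_dict['1.0'] == 'https://example.com/declared/pkg-1.0.tar.gz'
```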

lib/spack/spack/cmd/ci.py
@@ -22,7 +22,6 @@
 import spack.environment as ev
 import spack.hash_types as ht
 import spack.mirror
-import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util

@@ -31,7 +30,6 @@
 level = "long"

 CI_REBUILD_INSTALL_BASE_ARGS = ['spack', '-d', '-v']
-INSTALL_FAIL_CODE = 1


 def get_env_var(variable_name):
@@ -78,8 +76,8 @@ def setup_parser(subparser):
     default=False, help="""Spack always check specs against configured
 binary mirrors when generating the pipeline, regardless of whether or not
 DAG pruning is enabled. This flag controls whether it might attempt to
-fetch remote spec files directly (ensuring no spec is rebuilt if it
-is present on the mirror), or whether it should reduce pipeline generation time
+fetch remote spec.yaml files directly (ensuring no spec is rebuilt if it is
+present on the mirror), or whether it should reduce pipeline generation time
 by assuming all remote buildcache indices are up to date and only use those
 to determine whether a given spec is up to date on mirrors. In the latter
 case, specs might be needlessly rebuilt if remote buildcache indices are out
@@ -118,7 +116,7 @@ def ci_generate(args):
     for creating a build group for the generated workload and registering
     all generated jobs under that build group. If this environment
     variable is not set, no build group will be created on CDash."""
-    env = spack.cmd.require_active_env(cmd_name='ci generate')
+    env = ev.get_env(args, 'ci generate', required=True)

     output_file = args.output_file
     copy_yaml_to = args.copy_to
@@ -152,7 +150,7 @@ def ci_generate(args):
 def ci_reindex(args):
     """Rebuild the buildcache index associated with the mirror in the
     active, gitlab-enabled environment. """
-    env = spack.cmd.require_active_env(cmd_name='ci rebuild-index')
+    env = ev.get_env(args, 'ci rebuild-index', required=True)
     yaml_root = ev.config_dict(env.yaml)

     if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
@@ -169,7 +167,7 @@ def ci_rebuild(args):
     """Check a single spec against the remote mirror, and rebuild it from
     source if the mirror does not contain the full hash match of the spec
     as computed locally. """
-    env = spack.cmd.require_active_env(cmd_name='ci rebuild')
+    env = ev.get_env(args, 'ci rebuild', required=True)

     # Make sure the environment is "gitlab-enabled", or else there's nothing
     # to do.
@@ -362,10 +360,8 @@ def ci_rebuild(args):
     # Write information about spack into an artifact in the repro dir
     spack_info = spack_ci.get_spack_info()
     spack_info_file = os.path.join(repro_dir, 'spack_info.txt')
-    with open(spack_info_file, 'wb') as fd:
-        fd.write(b'\n')
-        fd.write(spack_info.encode('utf8'))
-        fd.write(b'\n')
+    with open(spack_info_file, 'w') as fd:
+        fd.write('\n{0}\n'.format(spack_info))

     # If we decided there should be a temporary storage mechanism, add that
     # mirror now so it's used when we check for a full hash match already
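The two sides of this hunk write identical bytes for ASCII content on POSIX; the binary-mode variant just encodes explicitly. A quick demonstration:

```python
info = u'all spack debug output'

with open('a.txt', 'wb') as fd:        # binary mode: encode by hand
    fd.write(b'\n')
    fd.write(info.encode('utf8'))
    fd.write(b'\n')

with open('b.txt', 'w') as fd:         # text mode: one formatted write
    fd.write('\n{0}\n'.format(info))

with open('a.txt', 'rb') as a, open('b.txt', 'rb') as b:
    assert a.read() == b.read()        # identical on POSIX for ASCII content
```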
@@ -495,7 +491,7 @@ def ci_rebuild(args):
     # If a spec fails to build in a spack develop pipeline, we add it to a
     # list of known broken full hashes. This allows spack PR pipelines to
     # avoid wasting compute cycles attempting to build those hashes.
-    if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
+    if install_exit_code == 1 and spack_is_develop_pipeline:
         tty.debug('Install failed on develop')
         if 'broken-specs-url' in gitlab_ci:
             broken_specs_url = gitlab_ci['broken-specs-url']
@@ -506,17 +502,9 @@ def ci_rebuild(args):
             tmpdir = tempfile.mkdtemp()
             empty_file_path = os.path.join(tmpdir, 'empty.txt')

-            broken_spec_details = {
-                'broken-spec': {
-                    'job-url': get_env_var('CI_JOB_URL'),
-                    'pipeline-url': get_env_var('CI_PIPELINE_URL'),
-                    'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
-                }
-            }
-
             try:
                 with open(empty_file_path, 'w') as efd:
-                    efd.write(syaml.dump(broken_spec_details))
+                    efd.write('')
                 web_util.push_to_url(
                     empty_file_path,
                     broken_spec_path,
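On the v0.17.2 side the "empty" marker file actually carries a small YAML document describing the broken spec. A sketch of producing a similar marker with PyYAML (an assumption here; Spack itself uses its vendored `spack.util.spack_yaml`, and the URLs below are placeholders):

```python
import yaml  # PyYAML, assumed available

broken_spec_details = {
    'broken-spec': {
        'job-url': 'https://gitlab.example.com/job/123',       # placeholder
        'pipeline-url': 'https://gitlab.example.com/pipe/45',  # placeholder
    }
}

with open('empty.txt', 'w') as efd:
    efd.write(yaml.safe_dump(broken_spec_details, default_flow_style=False))
```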
@@ -578,26 +566,6 @@ def ci_rebuild(args):
                 cdash_build_id, pipeline_mirror_url))
             spack_ci.write_cdashid_to_mirror(
                 cdash_build_id, job_spec, pipeline_mirror_url)

-        # If this is a develop pipeline, check if the spec that we just built is
-        # on the broken-specs list. If so, remove it.
-        if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
-            broken_specs_url = gitlab_ci['broken-specs-url']
-            just_built_hash = job_spec.full_hash()
-            broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
-            if web_util.url_exists(broken_spec_path):
-                tty.msg('Removing {0} from the list of broken specs'.format(
-                    broken_spec_path))
-                try:
-                    web_util.remove_url(broken_spec_path)
-                except Exception as err:
-                    # If we got some kind of S3 (access denied or other connection
-                    # error), the first non boto-specific class in the exception
-                    # hierarchy is Exception. Just print a warning and return
-                    msg = 'Error removing {0} from broken specs list: {1}'.format(
-                        broken_spec_path, err)
-                    tty.warn(msg)

     else:
         tty.debug('spack install exited non-zero, will not create buildcache')

lib/spack/spack/cmd/clean.py
@@ -7,17 +7,15 @@
 import os
 import shutil

-import llnl.util.filesystem
 import llnl.util.tty as tty

-import spack.bootstrap
 import spack.caches
 import spack.cmd.common.arguments as arguments
 import spack.cmd.test
 import spack.config
+import spack.main
 import spack.repo
 import spack.stage
-import spack.util.path
 from spack.paths import lib_path, var_path

 description = "remove temporary build files and/or downloaded archives"
@@ -28,7 +26,7 @@
 class AllClean(argparse.Action):
     """Activates flags -s -d -f -m and -p simultaneously"""
     def __call__(self, parser, namespace, values, option_string=None):
-        parser.parse_args(['-sdfmp'], namespace=namespace)
+        parser.parse_args(['-sdfmpb'], namespace=namespace)


 def setup_parser(subparser):
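`AllClean` works by re-invoking the parser on a synthetic combined flag string so each individual switch gets set on the same namespace. A standalone demo of the trick:

```python
import argparse


class AllFlags(argparse.Action):
    """Setting -a turns on -x and -y by re-parsing a combined flag string."""
    def __call__(self, parser, namespace, values, option_string=None):
        parser.parse_args(['-xy'], namespace=namespace)


parser = argparse.ArgumentParser()
parser.add_argument('-x', action='store_true')
parser.add_argument('-y', action='store_true')
parser.add_argument('-a', action=AllFlags, nargs=0)

args = parser.parse_args(['-a'])
assert args.x and args.y
```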
@@ -49,11 +47,9 @@ def setup_parser(subparser):
         help="remove .pyc, .pyo files and __pycache__ folders")
     subparser.add_argument(
         '-b', '--bootstrap', action='store_true',
-        help="remove software and configuration needed to bootstrap Spack")
+        help="remove software needed to bootstrap Spack")
     subparser.add_argument(
-        '-a', '--all', action=AllClean,
-        help="equivalent to -sdfmp (does not include --bootstrap)",
-        nargs=0
+        '-a', '--all', action=AllClean, help="equivalent to -sdfmpb", nargs=0
     )
     arguments.add_common_arguments(subparser, ['specs'])
|

     if args.stage:
         tty.msg('Removing all temporary build stages')
         spack.stage.purge()
-        # Temp directory where buildcaches are extracted
-        extract_tmp = os.path.join(spack.store.layout.root, '.tmp')
-        if os.path.exists(extract_tmp):
-            tty.debug('Removing {0}'.format(extract_tmp))
-            shutil.rmtree(extract_tmp)

     if args.downloads:
         tty.msg('Removing cached downloads')
         spack.caches.fetch_cache.destroy()
@@ -109,9 +101,8 @@ def clean(parser, args):
             shutil.rmtree(dname)

     if args.bootstrap:
-        bootstrap_prefix = spack.util.path.canonicalize_path(
-            spack.config.get('bootstrap:root')
-        )
-        msg = 'Removing bootstrapped software and configuration in "{0}"'
-        tty.msg(msg.format(bootstrap_prefix))
-        llnl.util.filesystem.remove_directory_contents(bootstrap_prefix)
+        msg = 'Removing software in "{0}"'
+        tty.msg(msg.format(spack.paths.user_bootstrap_store))
+        with spack.store.use_store(spack.paths.user_bootstrap_store):
+            uninstall = spack.main.SpackCommand('uninstall')
+            uninstall('-a', '-y')
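On the features/k side, cleaning the bootstrap store shells back into Spack through `spack.main.SpackCommand`, which wraps a subcommand as a Python callable that returns its output. A sketch of that usage (runnable only where Spack is importable):

```python
import spack.main

# SpackCommand turns 'spack <name>' into a Python callable; arguments are
# passed as separate strings and stdout comes back as a string.
find = spack.main.SpackCommand('find')
output = find('--format', '{name}')
print(output)
```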

lib/spack/spack/cmd/commands.py
@@ -117,7 +117,7 @@ def format(self, cmd):
             'virtual': '_providers',
             'namespace': '_repos',
             'hash': '_all_resource_hashes',
-            'pytest': '_unit_tests',
+            'pytest': '_tests',
         }

lib/spack/spack/cmd/common/arguments.py
@@ -69,7 +69,7 @@ def _specs(self, **kwargs):

         # If an environment is provided, we'll restrict the search to
         # only its installed packages.
-        env = ev.active_environment()
+        env = ev._active_environment
         if env:
             kwargs['hashes'] = set(env.all_hashes())

@@ -320,11 +320,3 @@ def add_cdash_args(subparser, add_help):
         default=None,
         help=cdash_help['buildstamp']
     )
-
-
-@arg
-def reuse():
-    return Args(
-        '--reuse', action='store_true', default=False,
-        help='reuse installed dependencies'
-    )
Some files were not shown because too many files have changed in this diff.