Merge remote-tracking branch 'upstream/develop' into develop
13  .codecov.yml
@@ -19,3 +19,16 @@ comment: off
# annotations in files that seemingly have nothing to do with the PR.
github_checks:
  annotations: false

# Attempt to fix "Missing base commit" messages in the codecov UI.
# Because we do not run full tests on package PRs, package PRs' merge
# commits on `develop` don't have coverage info. It appears that
# codecov will give you an error if the pseudo-base's coverage data
# doesn't all apply properly to the real PR base.
#
# See here for docs:
# https://docs.codecov.com/docs/comparing-commits#pseudo-comparison
# See here for another potential solution:
# https://community.codecov.com/t/2480/15
codecov:
  allow_coverage_offsets: true
42  .github/ISSUE_TEMPLATE/bug_report.md (vendored, deleted)
@@ -1,42 +0,0 @@
---
name: "\U0001F41E Bug report"
about: Report a bug in the core of Spack (command not working as expected, etc.)
labels: "bug,triage"
---

<!-- Explain, in a clear and concise way, the command you ran and the result you were trying to achieve.
Example: "I ran `spack find` to list all the installed packages and ..." -->

### Steps to reproduce the issue

```console
$ spack <command1> <spec>
$ spack <command2> <spec>
...
```

### Error Message

<!-- If Spack reported an error, provide the error message. If it did not report an error but the output appears incorrect, provide the incorrect output. If there was no error message and no output but the result is incorrect, describe how it does not match what you expect. -->
```console
$ spack --debug --stacktrace <command>
```

### Information on your system

<!-- Please include the output of `spack debug report` -->

<!-- If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well. -->

### Additional information

<!-- These boxes can be checked by replacing [ ] with [x] or by clicking them after submitting the issue. -->
- [ ] I have run `spack debug report` and reported the version of Spack/Python/Platform
- [ ] I have searched the issues of this repo and believe this is not a duplicate
- [ ] I have run the failing commands in debug mode and reported the output

<!-- We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!

If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on our Slack first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

Other than that, thanks for taking the time to contribute to Spack! -->
58  .github/ISSUE_TEMPLATE/bug_report.yml (vendored, new file)
@@ -0,0 +1,58 @@
name: "\U0001F41E Bug report"
description: Report a bug in the core of Spack (command not working as expected, etc.)
labels: [bug, triage]
body:
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce
      description: |
        Explain, in a clear and concise way, the command you ran and the result you were trying to achieve.
        Example: "I ran `spack find` to list all the installed packages and ..."
      placeholder: |
        ```console
        $ spack <command1> <spec>
        $ spack <command2> <spec>
        ...
        ```
    validations:
      required: true
  - type: textarea
    id: error
    attributes:
      label: Error message
      description: |
        If Spack reported an error, provide the error message. If it did not report an error but the output appears incorrect, provide the incorrect output. If there was no error message and no output but the result is incorrect, describe how it does not match what you expect.
      placeholder: |
        ```console
        $ spack --debug --stacktrace <command>
        ```
  - type: textarea
    id: information
    attributes:
      label: Information on your system
      description: Please include the output of `spack debug report`
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have run `spack debug report` and reported the version of Spack/Python/Platform
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
        - label: I have run the failing commands in debug mode and reported the output
          required: true
  - type: markdown
    attributes:
      value: |
        We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!
        If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on [our Slack](https://slack.spack.io/) first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

        Other than that, thanks for taking the time to contribute to Spack!
43  .github/ISSUE_TEMPLATE/build_error.md (vendored, deleted)
@@ -1,43 +0,0 @@
---
name: "\U0001F4A5 Build error"
about: Some package in Spack didn't build correctly
title: "Installation issue: "
labels: "build-error"
---

<!-- Thanks for taking the time to report this build failure. To proceed with the report please:

1. Title the issue "Installation issue: <name-of-the-package>".
2. Provide the information required below.

We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively! -->

### Steps to reproduce the issue

<!-- Fill in the exact spec you are trying to build and the relevant part of the error message -->
```console
$ spack install <spec>
...
```

### Information on your system

<!-- Please include the output of `spack debug report` -->

<!-- If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well. -->

### Additional information

<!-- Please upload the following files. They should be present in the stage directory of the failing build. Also upload any config.log or similar file if one exists. -->
* [spack-build-out.txt]()
* [spack-build-env.txt]()

<!-- Some packages have maintainers who have volunteered to debug build failures. Run `spack maintainers <name-of-the-package>` and @mention them here if they exist. -->

### General information

<!-- These boxes can be checked by replacing [ ] with [x] or by clicking them after submitting the issue. -->
- [ ] I have run `spack debug report` and reported the version of Spack/Python/Platform
- [ ] I have run `spack maintainers <name-of-the-package>` and @mentioned any maintainers
- [ ] I have uploaded the build log and environment files
- [ ] I have searched the issues of this repo and believe this is not a duplicate
64  .github/ISSUE_TEMPLATE/build_error.yml (vendored, new file)
@@ -0,0 +1,64 @@
name: "\U0001F4A5 Build error"
description: Some package in Spack didn't build correctly
title: "Installation issue: "
labels: [build-error]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to report this build failure. To proceed with the report please:
        1. Title the issue `Installation issue: <name-of-the-package>`.
        2. Provide the information required below.

        We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to reproduce the issue
      description: |
        Fill in the exact spec you are trying to build and the relevant part of the error message
      placeholder: |
        ```console
        $ spack install <spec>
        ...
        ```
    validations:
      required: true
  - type: textarea
    id: information
    attributes:
      label: Information on your system
      description: Please include the output of `spack debug report`
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well.
  - type: textarea
    id: additional_information
    attributes:
      label: Additional information
      description: |
        Please upload the following files:
        * **`spack-build-out.txt`**
        * **`spack-build-env.txt`**

        They should be present in the stage directory of the failing build. Also upload any `config.log` or similar file if one exists.
  - type: markdown
    attributes:
      value: |
        Some packages have maintainers who have volunteered to debug build failures. Run `spack maintainers <name-of-the-package>` and **@mention** them here if they exist.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have run `spack debug report` and reported the version of Spack/Python/Platform
          required: true
        - label: I have run `spack maintainers <name-of-the-package>` and **@mentioned** any maintainers
          required: true
        - label: I have uploaded the build log and environment files
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
1  .github/ISSUE_TEMPLATE/config.yml (vendored, new file)
@@ -0,0 +1 @@
blank_issues_enabled: true
33  .github/ISSUE_TEMPLATE/feature_request.md (vendored, deleted)
@@ -1,33 +0,0 @@
---
name: "\U0001F38A Feature request"
about: Suggest adding a feature that is not yet in Spack
labels: feature

---

<!--*Please add a concise summary of your suggestion here.*-->

### Rationale

<!--*Is your feature request related to a problem? Please describe it!*-->

### Description

<!--*Describe the solution you'd like and the alternatives you have considered.*-->


### Additional information
<!--*Add any other context about the feature request here.*-->


### General information

- [ ] I have run `spack --version` and reported the version of Spack
- [ ] I have searched the issues of this repo and believe this is not a duplicate



<!--If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on our Slack first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

Other than that, thanks for taking the time to contribute to Spack!
-->
41  .github/ISSUE_TEMPLATE/feature_request.yml (vendored, new file)
@@ -0,0 +1,41 @@
name: "\U0001F38A Feature request"
description: Suggest adding a feature that is not yet in Spack
labels: [feature]
body:
  - type: textarea
    id: summary
    attributes:
      label: Summary
      description: Please add a concise summary of your suggestion here.
    validations:
      required: true
  - type: textarea
    id: rationale
    attributes:
      label: Rationale
      description: Is your feature request related to a problem? Please describe it!
  - type: textarea
    id: description
    attributes:
      label: Description
      description: Describe the solution you'd like and the alternatives you have considered.
  - type: textarea
    id: additional_information
    attributes:
      label: Additional information
      description: Add any other context about the feature request here.
  - type: checkboxes
    id: checks
    attributes:
      label: General information
      options:
        - label: I have run `spack --version` and reported the version of Spack
          required: true
        - label: I have searched the issues of this repo and believe this is not a duplicate
          required: true
  - type: markdown
    attributes:
      value: |
        If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on [our Slack](https://slack.spack.io/) first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

        Other than that, thanks for taking the time to contribute to Spack!
161  .github/workflows/bootstrap.yml (vendored, new file)
@@ -0,0 +1,161 @@
name: Bootstrapping

on:
  pull_request:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the
      # built-in repository or documentation
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
      - '!var/spack/repos/builtin/packages/python/**'
      - '!var/spack/repos/builtin/packages/re2c/**'
      - 'lib/spack/docs/**'
  schedule:
    # nightly at 2:16 AM
    - cron: '16 2 * * *'

jobs:

  fedora-sources:
    runs-on: ubuntu-latest
    container: "fedora:latest"
    steps:
      - name: Install dependencies
        run: |
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch unzip which xz python3 python3-devel tree \
              cmake bison bison-devel libstdc++-static
      - uses: actions/checkout@v2
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd spack-test
          chown -R spack-test .
      - name: Bootstrap clingo
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust github-actions
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  ubuntu-sources:
    runs-on: ubuntu-latest
    container: "ubuntu:latest"
    steps:
      - name: Install dependencies
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          apt-get update -y && apt-get upgrade -y
          apt-get install -y \
              bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
              make patch unzip xz-utils python3 python3-dev tree \
              cmake bison
      - uses: actions/checkout@v2
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd -m spack-test
          chown -R spack-test .
      - name: Bootstrap clingo
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust github-actions
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  opensuse-sources:
    runs-on: ubuntu-latest
    container: "opensuse/tumbleweed:latest"
    steps:
      - name: Install dependencies
        run: |
          zypper update -y
          zypper install -y \
              bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
              make patch unzip which xz python3 python3-devel tree \
              cmake bison
      - uses: actions/checkout@v2
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust github-actions
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  macos-sources:
    runs-on: macos-latest
    steps:
      - name: Install dependencies
        run: |
          brew install cmake bison@2.7 tree
      - uses: actions/checkout@v2
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
          spack bootstrap untrust github-actions
          spack external find --not-buildable cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  macos-clingo-binaries:
    runs-on: macos-latest
    strategy:
      matrix:
        python-version: ['3.5', '3.6', '3.7', '3.8', '3.9']
    steps:
      - name: Install dependencies
        run: |
          brew install tree
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/


  ubuntu-clingo-binaries:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup repo and non-root user
        run: |
          git --version
          git fetch --unshallow
          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap untrust spack-install
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/
72  .github/workflows/build-containers.yml (vendored, new file)
@@ -0,0 +1,72 @@
name: Build & Deploy Docker Containers
on:
  # Build new Spack develop containers nightly.
  schedule:
    - cron: '34 0 * * *'
  # Let's also build & tag Spack containers on releases.
  release:
    types: [published]

jobs:
  deploy-images:
    runs-on: ubuntu-latest
    strategy:
      # Even if one container fails to build we still want the others
      # to continue their builds.
      fail-fast: false
      # A matrix of Dockerfile paths, associated tags, and which architectures
      # they support.
      matrix:
        dockerfile: [[amazon-linux, amazonlinux-2.dockerfile, 'linux/amd64,linux/arm64'],
                     [centos7, centos-7.dockerfile, 'linux/amd64,linux/arm64'],
                     [leap15, leap-15.dockerfile, 'linux/amd64,linux/arm64'],
                     [ubuntu-xenial, ubuntu-1604.dockerfile, 'linux/amd64,linux/arm64'],
                     [ubuntu-bionic, ubuntu-1804.dockerfile, 'linux/amd64,linux/arm64']]
    name: Build ${{ matrix.dockerfile[0] }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Set Container Tag Normal (Nightly)
        run: |
          container="ghcr.io/spack/${{ matrix.dockerfile[0]}}:latest"
          echo "container=${container}" >> $GITHUB_ENV
          echo "versioned=${container}" >> $GITHUB_ENV

      # On a new release create a container with the same tag as the release.
      - name: Set Container Tag on Release
        if: github.event_name == 'release'
        run: |
          versioned="ghcr.io/spack/${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
          echo "versioned=${versioned}" >> $GITHUB_ENV

      - name: Check ${{ matrix.dockerfile[1] }} Exists
        run: |
          printf "Preparing to build ${{ env.container }} from ${{ matrix.dockerfile[1] }}"
          if [ ! -f "share/spack/docker/${{ matrix.dockerfile[1]}}" ]; then
              printf "Dockerfile ${{ matrix.dockerfile[0]}} does not exist"
              exit 1;
          fi

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Build & Deploy ${{ matrix.dockerfile[1] }}
        uses: docker/build-push-action@v2
        with:
          file: share/spack/docker/${{matrix.dockerfile[1]}}
          platforms: ${{ matrix.dockerfile[2] }}
          push: true
          tags: |
            ${{ env.container }}
            ${{ env.versioned }}
77  .github/workflows/linux_build_tests.yaml (vendored, deleted)
@@ -1,77 +0,0 @@
name: linux builds

on:
  push:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the built-in repository
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/lz4/**'
      - '!var/spack/repos/builtin/packages/mpich/**'
      - '!var/spack/repos/builtin/packages/tut/**'
      - '!var/spack/repos/builtin/packages/py-setuptools/**'
      - '!var/spack/repos/builtin/packages/openjpeg/**'
      - '!var/spack/repos/builtin/packages/r-rcpp/**'
      - '!var/spack/repos/builtin/packages/ruby-rake/**'
      # Don't run if we only modified documentation
      - 'lib/spack/docs/**'
  pull_request:
    branches:
      - develop
      - releases/**
    paths-ignore:
      # Don't run if we only modified packages in the built-in repository
      - 'var/spack/repos/builtin/**'
      - '!var/spack/repos/builtin/packages/lz4/**'
      - '!var/spack/repos/builtin/packages/mpich/**'
      - '!var/spack/repos/builtin/packages/tut/**'
      - '!var/spack/repos/builtin/packages/py-setuptools/**'
      - '!var/spack/repos/builtin/packages/openjpeg/**'
      - '!var/spack/repos/builtin/packages/r-rcpp/**'
      - '!var/spack/repos/builtin/packages/ruby-rake/**'
      # Don't run if we only modified documentation
      - 'lib/spack/docs/**'

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        package:
          - lz4                  # MakefilePackage
          - mpich~fortran        # AutotoolsPackage
          - 'tut%gcc@:10.99.99'  # WafPackage
          - py-setuptools        # PythonPackage
          - openjpeg             # CMakePackage
          - r-rcpp               # RPackage
          - ruby-rake            # RubyPackage
    steps:
      - uses: actions/checkout@v2
      - uses: actions/cache@v2.1.6
        with:
          path: ~/.ccache
          key: ccache-build-${{ matrix.package }}
          restore-keys: |
            ccache-build-${{ matrix.package }}
      - uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install System Packages
        run: |
          sudo apt-get update
          sudo apt-get -yqq install ccache gfortran perl perl-base r-base r-base-core r-base-dev ruby findutils openssl libssl-dev libpciaccess-dev
          R --version
          perl --version
          ruby --version
      - name: Copy Configuration
        run: |
          ccache -M 300M && ccache -z
          # Set up external deps for build tests, b/c they take too long to compile
          cp share/spack/qa/configuration/*.yaml etc/spack/
      - name: Run the build test
        run: |
          . share/spack/setup-env.sh
          SPEC=${{ matrix.package }} share/spack/qa/run-build-tests
          ccache -s
64  .github/workflows/unit_tests.yaml (vendored)
@@ -131,10 +131,7 @@ jobs:
          # Needed for unit tests
          sudo apt-get -y install \
              coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
              patchelf
          # Needed for kcov
          sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
          sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
              patchelf cmake bison libbison-dev kcov
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools codecov coverage[toml]
@@ -148,24 +145,13 @@
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/setup_git.sh
      - name: Install kcov for bash script coverage
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        env:
          KCOV_VERSION: 34
        run: |
          KCOV_ROOT=$(mktemp -d)
          wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
          tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
          mkdir -p ${KCOV_ROOT}/build
          cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
          make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
      - name: Bootstrap clingo from sources
      - name: Bootstrap clingo
        if: ${{ matrix.concretizer == 'clingo' }}
        env:
          SPACK_PYTHON: python
        run: |
          . share/spack/setup-env.sh
          spack external find --not-buildable cmake bison
          spack bootstrap untrust spack-install
          spack -v solve zlib
      - name: Run unit tests (full suite with coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
@@ -185,7 +171,7 @@
          SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@v2.0.2
      - uses: codecov/codecov-action@v2.0.3
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
@@ -204,10 +190,7 @@
        run: |
          sudo apt-get -y update
          # Needed for shell tests
          sudo apt-get install -y coreutils csh zsh tcsh fish dash bash
          # Needed for kcov
          sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
          sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
          sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools codecov coverage[toml]
@@ -216,17 +199,6 @@
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/setup_git.sh
      - name: Install kcov for bash script coverage
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        env:
          KCOV_VERSION: 38
        run: |
          KCOV_ROOT=$(mktemp -d)
          wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
          tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
          mkdir -p ${KCOV_ROOT}/build
          cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
          make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
      - name: Run shell tests (without coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'false' }}
        run: |
@@ -237,7 +209,7 @@
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
      - uses: codecov/codecov-action@v2.0.2
      - uses: codecov/codecov-action@v2.0.3
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: shelltests,linux
@@ -314,21 +286,7 @@
          # Needed for unit tests
          sudo apt-get -y install \
              coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
              patchelf
          # Needed for kcov
          sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
          sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
      - name: Install kcov for bash script coverage
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        env:
          KCOV_VERSION: 34
        run: |
          KCOV_ROOT=$(mktemp -d)
          wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
          tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
          mkdir -p ${KCOV_ROOT}/build
          cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
          make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
              patchelf kcov
      - name: Install Python packages
        run: |
          pip install --upgrade pip six setuptools codecov coverage[toml] clingo
@@ -353,7 +311,7 @@
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@v2.0.2
      - uses: codecov/codecov-action@v2.0.3
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,clingo
@@ -379,10 +337,14 @@
        run: |
          brew install dash fish gcc gnupg2 kcov
      - name: Run unit tests
        env:
          SPACK_TEST_SOLVER: clingo
        run: |
          git --version
          . .github/workflows/setup_git.sh
          . share/spack/setup-env.sh
          $(which spack) bootstrap untrust spack-install
          $(which spack) solve zlib
          if [ "${{ needs.changes.outputs.with_coverage }}" == "true" ]
          then
            coverage run $(which spack) unit-test -x
@@ -395,7 +357,7 @@
            echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
            $(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
          fi
      - uses: codecov/codecov-action@v2.0.2
      - uses: codecov/codecov-action@v2.0.3
        if: ${{ needs.changes.outputs.with_coverage == 'true' }}
        with:
          files: ./coverage.xml
1  .gitignore (vendored)
@@ -132,6 +132,7 @@ celerybeat.pid
.env
.venv
env/
!/lib/spack/env
venv/
ENV/
env.bak/
README.md
@@ -1,7 +1,7 @@
# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack

[](https://github.com/spack/spack/actions)
[](https://github.com/spack/spack/actions)
[](https://github.com/spack/spack/actions/workflows/bootstrap.yml)
[](https://github.com/spack/spack/actions?query=workflow%3A%22macOS+builds+nightly%22)
[](https://codecov.io/gh/spack/spack)
[](https://spack.readthedocs.io)
23  bin/spack
@@ -28,6 +28,7 @@ exit 1
from __future__ import print_function

import os
import os.path
import sys

min_python3 = (3, 5)

@@ -70,6 +71,28 @@ if "ruamel.yaml" in sys.modules:
if "ruamel" in sys.modules:
    del sys.modules["ruamel"]

# The following code is here to avoid failures when updating
# the develop version, due to spurious argparse.pyc files remaining
# in the libs/spack/external directory, see:
# https://github.com/spack/spack/pull/25376
# TODO: Remove in v0.18.0 or later
try:
    import argparse
except ImportError:
    argparse_pyc = os.path.join(spack_external_libs, 'argparse.pyc')
    if not os.path.exists(argparse_pyc):
        raise
    try:
        os.remove(argparse_pyc)
        import argparse  # noqa
    except Exception:
        msg = ('The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. '
               'Either delete it manually or ask some administrator to '
               'delete it for you.')
        print(msg.format(argparse_pyc))
        sys.exit(1)


import spack.main  # noqa

# Once we've set up the system path, run the spack main method
etc/spack/defaults/bootstrap.yaml
@@ -5,3 +5,28 @@ bootstrap:
  # Root directory for bootstrapping work. The software bootstrapped
  # by Spack is installed in a "store" subfolder of this root directory
  root: ~/.spack/bootstrap
  # Methods that can be used to bootstrap software. Each method may or
  # may not be able to bootstrap all of the software that Spack needs,
  # depending on its type.
  sources:
  - name: 'github-actions'
    type: buildcache
    description: |
      Buildcache generated from a public workflow using Github Actions.
      The sha256 checksum of binaries is checked before installation.
    info:
      url: https://mirror.spack.io/bootstrap/github-actions/v0.1
      homepage: https://github.com/alalazo/spack-bootstrap-mirrors
      releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
  # This method is just Spack bootstrapping the software it needs from sources.
  # It has been added here so that users can selectively disable bootstrapping
  # from sources by "untrusting" it.
  - name: spack-install
    type: install
    description: |
      Specs built from sources by Spack. May take a long time.
  trusted:
    # By default we trust bootstrapping from sources and from binaries
    # produced on Github via the workflow
    github-actions: true
    spack-install: true
etc/spack/defaults/config.yaml
@@ -134,6 +134,10 @@ config:
  # enabling locks.
  locks: true

  # The default url fetch method to use.
  # If set to 'curl', Spack will require curl on the user's system
  # If set to 'urllib', Spack will use python built-in libs to fetch
  url_fetch_method: urllib

  # The maximum number of jobs to use for the build system (e.g. `make`), when
  # the -j flag is not given on the command line. Defaults to 16 when not set.
lib/spack/docs/basic_usage.rst
@@ -695,6 +695,136 @@ structured the way you want:
   }


^^^^^^^^^^^^^^
``spack diff``
^^^^^^^^^^^^^^

It's often the case that you have two versions of a spec that you need to
disambiguate. Let's say that we've installed two variants of zlib, one with
and one without the optimize variant:

.. code-block:: console

   $ spack install zlib
   $ spack install zlib -optimize

When we do ``spack find`` we see the two versions.

.. code-block:: console

   $ spack find zlib
   ==> 2 installed packages
   -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
   zlib@1.2.11  zlib@1.2.11


Let's now say that we want to uninstall zlib. We run the command, and hit a problem
real quickly since we have two!

.. code-block:: console

   $ spack uninstall zlib
   ==> Error: zlib matches multiple packages:

   -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
   efzjziy zlib@1.2.11  sl7m27m zlib@1.2.11

   ==> Error: You can either:
       a) use a more specific spec, or
       b) specify the spec by its hash (e.g. `spack uninstall /hash`), or
       c) use `spack uninstall --all` to uninstall ALL matching specs.

Oh no! We can see from the above that we have two different versions of zlib installed,
and the only difference between the two is the hash. This is a good use case for
``spack diff``, which can easily show us the "diff" or set difference
between properties for two packages. Let's try it out.
Since the only difference we see in the ``spack find`` view is the hash, let's use
``spack diff`` to look for more detail. We will provide the two hashes:

.. code-block:: console

   $ spack diff /efzjziy /sl7m27m
   ==> Warning: This interface is subject to change.

   --- zlib@1.2.11efzjziyc3dmb5h5u5azsthgbgog5mj7g
   +++ zlib@1.2.11sl7m27mzkbejtkrajigj3a3m37ygv4u2
   @@ variant_value @@
   -  zlib optimize False
   +  zlib optimize True


The output is colored, and written in the style of a git diff. This means that you
can copy and paste it into GitHub markdown as a code block with language "diff"
and it will render nicely! Here is an example:

.. code-block:: md

   ```diff
   --- zlib@1.2.11/efzjziyc3dmb5h5u5azsthgbgog5mj7g
   +++ zlib@1.2.11/sl7m27mzkbejtkrajigj3a3m37ygv4u2
   @@ variant_value @@
   -  zlib optimize False
   +  zlib optimize True
   ```

Awesome! Now let's read the diff. It tells us that our first zlib was built with ``~optimize``
(``False``) and the second was built with ``+optimize`` (``True``). You can't see it in the docs
here, but the output above is also colored based on the content being an addition (+) or
subtraction (-).

This is a small example, but you will be able to see differences for any attributes on the
installation spec. Running ``spack diff A B`` means we'll see which spec attributes are on
``B`` but not on ``A`` (green) and which are on ``A`` but not on ``B`` (red). Here is another
example with an additional difference type, ``version``:

.. code-block:: console

   $ spack diff python@2.7.8 python@3.8.11
   ==> Warning: This interface is subject to change.

   --- python@2.7.8/tsxdi6gl4lihp25qrm4d6nys3nypufbf
   +++ python@3.8.11/yjtseru4nbpllbaxb46q7wfkyxbuvzxx
   @@ variant_value @@
   -  python patches a8c52415a8b03c0e5f28b5d52ae498f7a7e602007db2b9554df28cd5685839b8
   +  python patches 0d98e93189bc278fbc37a50ed7f183bd8aaf249a8e1670a465f0db6bb4f8cf87
   @@ version @@
   -  openssl 1.0.2u
   +  openssl 1.1.1k
   -  python 2.7.8
   +  python 3.8.11

Let's say that we were only interested in one kind of attribute above, ``version``.
We can ask the command to only output this attribute. To do this, you'd add
the ``--attribute`` parameter, which defaults to showing all attributes. Here is how you
would filter to show just versions:

.. code-block:: console

   $ spack diff --attribute version python@2.7.8 python@3.8.11
   ==> Warning: This interface is subject to change.

   --- python@2.7.8/tsxdi6gl4lihp25qrm4d6nys3nypufbf
   +++ python@3.8.11/yjtseru4nbpllbaxb46q7wfkyxbuvzxx
   @@ version @@
   -  openssl 1.0.2u
   +  openssl 1.1.1k
   -  python 2.7.8
   +  python 3.8.11

And you can add as many attributes as you'd like with multiple ``--attribute`` arguments
(for lots of attributes, you can use ``-a`` for short). Finally, if you want to view the
data as json (and possibly pipe into an output file) just add ``--json``:


.. code-block:: console

   $ spack diff --json python@2.7.8 python@3.8.11


This data will be much longer because along with the differences for ``A`` vs. ``B`` and
``B`` vs. ``A``, the JSON output also shows the intersection.
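For example, a small script can post-process the JSON output. The following
is only a sketch: the top-level key names used here (``a_not_b``,
``b_not_a``, ``intersect``) are assumptions made for illustration and should
be checked against the actual output of your Spack version.

.. code-block:: python

   import json
   import subprocess

   # Run `spack diff --json` and capture the JSON object it prints to stdout.
   out = subprocess.run(
       ["spack", "diff", "--json", "python@2.7.8", "python@3.8.11"],
       capture_output=True, text=True, check=True,
   ).stdout
   data = json.loads(out)

   # Summarize whichever sections are present; the key names here are
   # illustrative assumptions, not a documented schema.
   for section in ("a_not_b", "b_not_a", "intersect"):
       if section in data:
           print(section, "->", len(data[section]), "entries")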

------------------------
Using installed packages
------------------------
lib/spack/docs/build_systems.rst
@@ -63,6 +63,7 @@ on these ideas for each distinct build system that Spack supports:
   build_systems/intelpackage
   build_systems/rocmpackage
   build_systems/custompackage
   build_systems/multiplepackage

For reference, the :py:mod:`Build System API docs <spack.build_systems>`
provide a list of build systems and methods/attributes that can be
350  lib/spack/docs/build_systems/multiplepackage.rst (new file)
@@ -0,0 +1,350 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _multiplepackage:

----------------------
Multiple Build Systems
----------------------

Quite frequently, a package will change build systems from one version to the
next. For example, a small project that once used a single Makefile to build
may now require Autotools to handle the increased number of files that need to
be compiled. Or, a package that once used Autotools may switch to CMake for
Windows support. In this case, it becomes a bit more challenging to write a
single build recipe for this package in Spack.

There are several ways that this can be handled in Spack:

#. Subclass the new build system, and override phases as needed (preferred)
#. Subclass ``Package`` and implement ``install`` as needed (see the sketch below)
#. Create separate ``*-cmake``, ``*-autotools``, etc. packages for each build system
#. Rename the old package to ``*-legacy`` and create a new package
#. Move the old package to a ``legacy`` repository and create a new package
#. Drop older versions that only support the older build system

Of these options, 1 is preferred, and will be demonstrated in this
documentation. Options 3-5 have issues with concretization, so shouldn't be
used. Options 4-5 also don't support more than two build systems. Option 6 only
works if the old versions are no longer needed. Option 1 is preferred over 2
because it makes it easier to drop the old build system entirely.
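For contrast, a minimal sketch of option 2 (subclassing ``Package`` directly
and branching inside ``install``) might look like the following. The package
name and the version boundary are illustrative, not taken from an existing
Spack package:

.. code-block:: python

   class Foo(Package):
       version("1.3.0", sha256="...")
       version("1.2.0", sha256="...")

       def install(self, spec, prefix):
           if spec.satisfies("@1.3:"):
               # Newer releases ship a configure script.
               configure("--prefix=" + prefix)
               make()
               make("install")
           else:
               # Older releases only have a plain Makefile and no install target.
               filter_file("CC=", "CC=" + spack_cc, "Makefile")
               make()
               install_tree(".", prefix)

This works, but every build step is folded into a single method, which is why
subclassing the new build system (option 1) is easier to clean up once the old
versions are dropped.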
The exact syntax of the package depends on which build systems you need to
support. Below are a couple of common examples.

^^^^^^^^^^^^^^^^^^^^^
Makefile -> Autotools
^^^^^^^^^^^^^^^^^^^^^

Let's say we have the following package:

.. code-block:: python

   class Foo(MakefilePackage):
       version("1.2.0", sha256="...")

       def edit(self, spec, prefix):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       def install(self, spec, prefix):
           install_tree(".", prefix)


The package subclasses from :ref:`makefilepackage`, which has three phases:

#. ``edit`` (does nothing by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

In this case, the ``install`` phase needed to be overridden because the
Makefile did not have an install target. We also modify the Makefile to use
Spack's compiler wrappers. The default ``build`` phase is not changed.

Starting with version 1.3.0, we want to use Autotools to build instead.
:ref:`autotoolspackage` has four phases:

#. ``autoreconf`` (does nothing if a configure script already exists)
#. ``configure`` (runs ``./configure --prefix=...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

If the only version we need to support is 1.3.0, the package would look as
simple as:

.. code-block:: python

   class Foo(AutotoolsPackage):
       version("1.3.0", sha256="...")

       def configure_args(self):
           return ["--enable-shared"]


In this case, we use the default methods for each phase and only override
``configure_args`` to specify additional flags to pass to ``./configure``.

If we wanted to write a single package that supports both versions 1.2.0 and
1.3.0, it would look something like:

.. code-block:: python

   class Foo(AutotoolsPackage):
       version("1.3.0", sha256="...")
       version("1.2.0", sha256="...", deprecated=True)

       def configure_args(self):
           return ["--enable-shared"]

       # Remove the following once version 1.2.0 is dropped
       @when("@:1.2")
       def patch(self):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       @when("@:1.2")
       def autoreconf(self, spec, prefix):
           pass

       @when("@:1.2")
       def configure(self, spec, prefix):
           pass

       @when("@:1.2")
       def install(self, spec, prefix):
           install_tree(".", prefix)


There are a few interesting things to note here:

* We added ``deprecated=True`` to version 1.2.0. This signifies that version
  1.2.0 is deprecated and shouldn't be used. However, if a user still relies
  on version 1.2.0, it's still there and builds just fine.
* We moved the contents of the ``edit`` phase to the ``patch`` function. Since
  ``AutotoolsPackage`` doesn't have an ``edit`` phase, the only way for this
  step to be executed is to move it to the ``patch`` function, which always
  gets run.
* The ``autoreconf`` and ``configure`` phases become no-ops. Since the old
  Makefile-based build system doesn't use these, we ignore these phases when
  building ``foo@1.2.0``.
* The ``@when`` decorator is used to override these phases only for older
  versions. The default methods are used for ``foo@1.3:``.
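To build intuition for what ``@when`` does, here is a self-contained toy
sketch of condition-based dispatch in plain Python. It is only an
illustration of the idea; Spack's real implementation lives in
``spack.multimethod`` and matches conditions against the package's concrete
spec rather than a bare version tuple:

.. code-block:: python

   _overrides = {}

   def when(condition):
       """Register the decorated function for use when `condition` holds."""
       def decorator(fn):
           _overrides.setdefault(fn.__name__, []).append((condition, fn))
           return fn
       return decorator

   def dispatch(name, default, version):
       """Pick the first registered override whose condition matches."""
       for condition, fn in _overrides.get(name, []):
           if condition(version):
               return fn
       return default

   @when(lambda v: v < (1, 3))
   def install(pkg):
       print("old Makefile-based install")

   def default_install(pkg):
       print("default AutotoolsPackage install")

   dispatch("install", default_install, (1, 2))(None)  # old install
   dispatch("install", default_install, (1, 3))(None)  # default install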
Once a new Spack release comes out, version 1.2.0 and everything below the
comment can be safely deleted. The result is the same as if we had written a
package for version 1.3.0 from scratch.

^^^^^^^^^^^^^^^^^^
Autotools -> CMake
^^^^^^^^^^^^^^^^^^

Let's say we have the following package:

.. code-block:: python

   class Bar(AutotoolsPackage):
       version("1.2.0", sha256="...")

       def configure_args(self):
           return ["--enable-shared"]


The package subclasses from :ref:`autotoolspackage`, which has four phases:

#. ``autoreconf`` (does nothing if a configure script already exists)
#. ``configure`` (runs ``./configure --prefix=...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

In this case, we use the default methods for each phase and only override
``configure_args`` to specify additional flags to pass to ``./configure``.

Starting with version 1.3.0, we want to use CMake to build instead.
:ref:`cmakepackage` has three phases:

#. ``cmake`` (runs ``cmake ...`` by default)
#. ``build`` (runs ``make`` by default)
#. ``install`` (runs ``make install`` by default)

If the only version we need to support is 1.3.0, the package would look as
simple as:

.. code-block:: python

   class Bar(CMakePackage):
       version("1.3.0", sha256="...")

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]


In this case, we use the default methods for each phase and only override
``cmake_args`` to specify additional flags to pass to ``cmake``.

If we wanted to write a single package that supports both versions 1.2.0 and
1.3.0, it would look something like:

.. code-block:: python

   class Bar(CMakePackage):
       version("1.3.0", sha256="...")
       version("1.2.0", sha256="...", deprecated=True)

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]

       # Remove the following once version 1.2.0 is dropped
       def configure_args(self):
           return ["--enable-shared"]

       @when("@:1.2")
       def cmake(self, spec, prefix):
           configure("--prefix=" + prefix, *self.configure_args())


There are a few interesting things to note here:

* We added ``deprecated=True`` to version 1.2.0. This signifies that version
  1.2.0 is deprecated and shouldn't be used. However, if a user still relies
  on version 1.2.0, it's still there and builds just fine.
* Since CMake and Autotools are so similar, we only need to override the
  ``cmake`` phase; we can use the default ``build`` and ``install`` phases.
* We override ``cmake`` to run ``./configure`` for older versions.
  ``configure_args`` remains the same.
* The ``@when`` decorator is used to override these phases only for older
  versions. The default methods are used for ``bar@1.3:``.

Once a new Spack release comes out, version 1.2.0 and everything below the
comment can be safely deleted. The result is the same as if we had written a
package for version 1.3.0 from scratch.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Multiple build systems for the same version
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

During the transition from one build system to another, developers often
support multiple build systems at the same time. Spack can only use a single
build system for a single version. To decide which build system to use for a
particular version, take the following things into account:

1. If the developers explicitly state that one build system is preferred over
   another, use that one.
2. If one build system is considered "experimental" while another is considered
   "stable", use the stable build system.
3. Otherwise, use the newer build system.

The developer preference for which build system to use can change over time as
a newer build system becomes stable/recommended.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dropping support for old build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

When older versions of a package don't support a newer build system, it can be
tempting to simply delete them from a package. This significantly reduces
package complexity and makes the build recipe much easier to maintain. However,
other packages or Spack users may rely on these older versions. The recommended
approach is to first support both build systems (as demonstrated above),
:ref:`deprecate <deprecate>` versions that rely on the old build system, and
remove those versions and any phases that needed to be overridden in the next
Spack release.

^^^^^^^^^^^^^^^^^^^^^^^^^^^
Three or more build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^

In rare cases, a package may change build systems multiple times. For example,
a package may start with Makefiles, then switch to Autotools, then switch to
CMake. The same logic used above can be extended to any number of build systems.
For example:

.. code-block:: python

   class Baz(CMakePackage):
       version("1.4.0", sha256="...")  # CMake
       version("1.3.0", sha256="...")  # Autotools
       version("1.2.0", sha256="...")  # Makefile

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]

       # Remove the following once version 1.3.0 is dropped
       def configure_args(self):
           return ["--enable-shared"]

       @when("@1.3")
       def cmake(self, spec, prefix):
           configure("--prefix=" + prefix, *self.configure_args())

       # Remove the following once version 1.2.0 is dropped
       @when("@:1.2")
       def patch(self):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       @when("@:1.2")
       def cmake(self, spec, prefix):
           pass

       @when("@:1.2")
       def install(self, spec, prefix):
           install_tree(".", prefix)


^^^^^^^^^^^^^^^^^^^
Additional examples
^^^^^^^^^^^^^^^^^^^

When writing new packages, it often helps to see examples of existing packages.
Here is an incomplete list of existing Spack packages that have changed build
systems before:

================ ===================== ================
Package          Previous Build System New Build System
================ ===================== ================
amber            custom                CMake
arpack-ng        Autotools             CMake
atk              Autotools             Meson
blast            None                  Autotools
dyninst          Autotools             CMake
evtgen           Autotools             CMake
fish             Autotools             CMake
gdk-pixbuf       Autotools             Meson
glib             Autotools             Meson
glog             Autotools             CMake
gmt              Autotools             CMake
gtkplus          Autotools             Meson
hpl              Makefile              Autotools
interproscan     Perl                  Maven
jasper           Autotools             CMake
kahip            SCons                 CMake
kokkos           Makefile              CMake
kokkos-kernels   Makefile              CMake
leveldb          Makefile              CMake
libdrm           Autotools             Meson
libjpeg-turbo    Autotools             CMake
mesa             Autotools             Meson
metis            None                  CMake
mpifileutils     Autotools             CMake
muparser         Autotools             CMake
mxnet            Makefile              CMake
nest             Autotools             CMake
neuron           Autotools             CMake
nsimd            CMake                 nsconfig
opennurbs        Makefile              CMake
optional-lite    None                  CMake
plasma           Makefile              CMake
preseq           Makefile              Autotools
protobuf         Autotools             CMake
py-pygobject     Autotools             Python
singularity      Autotools             Makefile
span-lite        None                  CMake
ssht             Makefile              CMake
string-view-lite None                  CMake
superlu          Makefile              CMake
superlu-dist     Makefile              CMake
uncrustify       Autotools             CMake
================ ===================== ================

Packages that support multiple build systems can be a bit confusing to write.
Don't hesitate to open an issue or draft pull request and ask for advice from
other Spack developers!
lib/spack/docs/conf.py
@@ -107,6 +107,7 @@ def setup(sphinx):
    'sphinx.ext.intersphinx',
    'sphinx.ext.napoleon',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
    'sphinxcontrib.programoutput',
]
lib/spack/docs/environments.rst
@@ -732,13 +732,17 @@ Configuring environment views
The Spack Environment manifest file has a top-level keyword
``view``. Each entry under that heading is a view descriptor, headed
by a name. The view descriptor contains the root of the view, and
optionally the projections for the view, and ``select`` and
``exclude`` lists for the view. For example, in the following manifest
optionally the projections for the view, ``select`` and
``exclude`` lists for the view and link information via ``link`` and
``link_type``. For example, in the following manifest
file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
package. This view selects all packages that depend on MPI, and
excludes those built with the PGI compiler at version 18.5.
All the dependencies of each root spec in the environment will be linked
in the view due to the command ``link: all`` and the files in the view will
be symlinks to the spack install directories.

.. code-block:: yaml

@@ -751,11 +755,16 @@ excludes those built with the PGI compiler at version 18.5.
         exclude: ['%pgi@18.5']
         projections:
           all: {name}/{version}-{compiler.name}
         link: all
         link_type: symlink

For more information on using view projections, see the section on
:ref:`adding_projections_to_views`. The default for the ``select`` and
``exclude`` values is to select everything and exclude nothing. The
default projection is the default view projection (``{}``).
default projection is the default view projection (``{}``). The ``link``
defaults to ``all`` but can also be ``roots`` when only the root specs
in the environment are desired in the view. The ``link_type`` defaults
to ``symlink`` but can also take the value of ``hardlink`` or ``copy``.

Any number of views may be defined under the ``view`` heading in a
Spack Environment.
@@ -9,21 +9,16 @@
Getting Started
===============

-------------
Prerequisites
-------------
--------------------
System Prerequisites
--------------------

Spack has the following minimum requirements, which must be installed
before Spack is run:
Spack has the following minimum system requirements, which are assumed to
be present on the machine where Spack is run:

#. Python 2 (2.6 or 2.7) or 3 (3.5 - 3.9) to run Spack
#. A C/C++ compiler for building
#. The ``make`` executable for building
#. The ``tar``, ``gzip``, ``unzip``, ``bzip2``, ``xz`` and optionally ``zstd``
   executables for extracting source code
#. The ``patch`` command to apply patches
#. The ``git`` and ``curl`` commands for fetching
#. If using the ``gpg`` subcommand, ``gnupg2`` is required
.. csv-table:: System prerequisites for Spack
   :file: tables/system_prerequisites.csv
   :header-rows: 1

These requirements can be easily installed on most modern Linux systems;
on macOS, XCode is required. Spack is designed to run on HPC
@@ -89,6 +84,151 @@ sourcing time, ensuring future invocations of the ``spack`` command will
continue to use the same consistent python version regardless of changes in
the environment.

^^^^^^^^^^^^^^^^^^^^
Bootstrapping clingo
^^^^^^^^^^^^^^^^^^^^

Spack supports using ``clingo`` as an external solver to compute which software
needs to be installed. The default configuration allows Spack to install
``clingo`` from a public buildcache, created by a GitHub Actions workflow. In this
case the bootstrapping procedure is transparent to the user, except for a
slightly long waiting time on the first concretization of a spec:

.. code-block:: console
   $ spack find -b
   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
   ==> 0 installed packages

   $ time spack solve zlib
   ==> Best of 2 considered solutions.
   ==> Optimization Criteria:
     Priority  Criterion                                   Value
     1         deprecated versions used                    0
     2         version weight                              0
     3         number of non-default variants (roots)      0
     4         multi-valued variants                       0
     5         preferred providers for roots               0
     6         number of non-default variants (non-roots)  0
     7         preferred providers (non-roots)             0
     8         compiler mismatches                         0
     9         version badness                             0
     10        count of non-root multi-valued variants     0
     11        non-preferred compilers                     0
     12        target mismatches                           0
     13        non-preferred targets                       0

   zlib@1.2.11%gcc@11.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell

   real    0m30,618s
   user    0m27,278s
   sys     0m1,549s
After this command you'll see that ``clingo`` has been installed for Spack's own use:

.. code-block:: console

   $ spack find -b
   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
   ==> 2 installed packages
   -- linux-rhel5-x86_64 / gcc@9.3.0 -------------------------------
   clingo-bootstrap@spack  python@3.6

Subsequent calls to the concretizer will then be much faster:

.. code-block:: console

   $ time spack solve zlib
   [ ... ]
   real    0m1,222s
   user    0m1,146s
   sys     0m0,059s
If, for security or other reasons, you don't want to or can't install precompiled
binaries, Spack can fall back to bootstrapping ``clingo`` from sources. To forbid
Spack from retrieving binaries from the bootstrapping buildcache, the following
command must be given:

.. code-block:: console

   $ spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

since an "untrusted" way of bootstrapping software will not be considered
by Spack. You can verify the new settings are effective with:

.. code-block:: console
   $ spack bootstrap list
   Name: github-actions UNTRUSTED

     Type: buildcache

     Info:
       url: https://mirror.spack.io/bootstrap/github-actions/v0.1
       homepage: https://github.com/alalazo/spack-bootstrap-mirrors
       releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases

     Description:
       Buildcache generated from a public workflow using Github Actions.
       The sha256 checksum of binaries is checked before installation.


   Name: spack-install TRUSTED

     Type: install

     Description:
       Specs built from sources by Spack. May take a long time.
When bootstrapping from sources, Spack requires a compiler with support
for C++14 (GCC on ``linux``, Apple Clang on ``darwin``) and static C++
standard libraries on ``linux``. Spack will build the required software
on the first request to concretize a spec:

.. code-block:: console

   $ spack solve zlib
   [+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
   [+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
   [+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
   ==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
   [ ... ]
   ==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0]
   zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell
.. tip::

   If you want to speed up bootstrapping ``clingo`` from sources, you may try to
   search for ``cmake`` and ``bison`` on your system:

   .. code-block:: console

      $ spack external find cmake bison
      ==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml
      bison@3.0.4  cmake@3.19.4
"""""""""""""""""""
|
||||
The Bootstrap Store
|
||||
"""""""""""""""""""
|
||||
|
||||
All the tools Spack needs for its own functioning are installed in a separate store, which lives
|
||||
under the ``${HOME}/.spack`` directory. The software installed there can be queried with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find --bootstrap
|
||||
==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
|
||||
==> 3 installed packages
|
||||
-- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
|
||||
clingo-bootstrap@spack python@3.6.9 re2c@1.2.1
|
||||
|
||||
In case it's needed the bootstrap store can also be cleaned with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack clean -b
|
||||
==> Removing software in "/home/spack/.spack/bootstrap/store"
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
Check Installation
@@ -117,53 +257,6 @@ environment*, especially for ``PATH``. Only software that comes with
the system, or that you know you wish to use with Spack, should be
included. This procedure will avoid many strange build errors.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Optional: Bootstrapping clingo
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack supports using clingo as an external solver to compute which software
needs to be installed. If you have a default compiler supporting C++14 Spack
can automatically bootstrap this tool from sources the first time it is
needed:

.. code-block:: console

   $ spack solve zlib
   [+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
   [+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
   [+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
   ==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
   [ ... ]
   ==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0]
   zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell

If you want to speed-up bootstrapping, you may try to search for ``cmake`` and ``bison``
on your system:

.. code-block:: console

   $ spack external find cmake bison
   ==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml
   bison@3.0.4  cmake@3.19.4

All the tools Spack needs for its own functioning are installed in a separate store, which lives
under the ``${HOME}/.spack`` directory. The software installed there can be queried with:

.. code-block:: console

   $ spack find --bootstrap
   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
   ==> 3 installed packages
   -- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
   clingo-bootstrap@spack  python@3.6.9  re2c@1.2.1

In case it's needed the bootstrap store can also be cleaned with:

.. code-block:: console

   $ spack clean -b
   ==> Removing software in "/home/spack/.spack/bootstrap/store"

^^^^^^^^^^^^^^^^^^^^^^^^^^
Optional: Alternate Prefix
^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -367,6 +460,34 @@ then inject those flags into the compiler command. Compiler flags
entered from the command line will be discussed in more detail in the
following section.

Some compilers also require additional environment configuration.
Examples include Intel's oneAPI and AMD's AOCC compiler suites,
which have custom scripts for loading environment variables and setting paths.
These variables should be specified in the ``environment`` section of the compiler
specification. The operations available to modify the environment are ``set``, ``unset``,
``prepend_path``, ``append_path``, and ``remove_path``. For example:

.. code-block:: yaml

   compilers:
   - compiler:
       modules: []
       operating_system: centos6
       paths:
         cc: /opt/intel/oneapi/compiler/latest/linux/bin/icx
         cxx: /opt/intel/oneapi/compiler/latest/linux/bin/icpx
         f77: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
         fc: /opt/intel/oneapi/compiler/latest/linux/bin/ifx
       spec: oneapi@latest
       environment:
         set:
           MKL_ROOT: "/path/to/mkl/root"
         unset: # A list of environment variables to unset
         - CC
         prepend_path: # Similar for append|remove_path
           LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
^^^^^^^^^^^^^^^^^^^^^^^
Build Your Own Compiler
^^^^^^^^^^^^^^^^^^^^^^^

@@ -612,6 +612,7 @@ it executable, then runs it with some arguments.

   installer = Executable(self.stage.archive_file)
   installer('--prefix=%s' % prefix, 'arg1', 'arg2', 'etc.')

.. _deprecate:

^^^^^^^^^^^^^^^^^^^^^^^^
Deprecating old versions
@@ -4367,9 +4368,9 @@ The signature for ``cache_extra_test_sources`` is:

where ``srcs`` is a string or a list of strings corresponding to
the paths for the files and/or subdirectories, relative to the staged
source, that are to be copied to the corresponding path relative to
``self.install_test_root``. All of the contents within each subdirectory
will be also be copied.
source, that are to be copied to the corresponding relative test path
under the prefix. All of the contents within each subdirectory will
also be copied.

For example, a package method for copying everything in the ``tests``
subdirectory plus the ``foo.c`` and ``bar.c`` files from ``examples``
@@ -4377,8 +4378,13 @@ can be implemented as shown below.

.. note::

   The ``run_after`` directive ensures associated files are copied
   **after** the package is installed by the build process.
   The method name ``copy_test_sources`` here is for illustration
   purposes. You are free to use a name that is more suited to your
   package.

   The key to copying the files at build time for stand-alone testing
   is use of the ``run_after`` directive, which ensures the associated
   files are copied **after** the provided build stage.

.. code-block:: python
@@ -4395,18 +4401,13 @@ can be implemented as shown below.

In this case, the method copies the associated files from the build
stage **after** the software is installed to the package's metadata
directory. The result is the directory and files will be cached in
paths under ``self.install_test_root`` as follows:

* ``join_path(self.install_test_root, 'tests')`` along with its files
  and subdirectories
* ``join_path(self.install_test_root, 'examples', 'foo.c')``
* ``join_path(self.install_test_root, 'examples', 'bar.c')``
a special test subdirectory under the installation prefix.

These paths are **automatically copied** to the test stage directory
where they are available to the package's ``test`` method through the
``self.test_suite.current_test_cache_dir`` property. In our example,
the method can access the directory and files using the following
paths:
during stand-alone testing. The package's ``test`` method can access
them using the ``self.test_suite.current_test_cache_dir`` property.
In our example, the method would use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:

* ``join_path(self.test_suite.current_test_cache_dir, 'tests')``
* ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')``
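As a sketch (hypothetical method body, not from a real package), a ``test``
method could then build and run one of the cached examples against the
installed library:

.. code-block:: python

   def test(self):
       foo_c = join_path(
           self.test_suite.current_test_cache_dir, 'examples', 'foo.c')
       # Build the cached example against the installed library, then run it
       self.run_test('cc', options=['-o', 'foo', foo_c],
                     purpose='compile the cached foo.c example')
       self.run_test('foo', purpose='run the compiled example')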
@@ -4414,9 +4415,8 @@ paths:

.. note::

   Library developers will want to build the associated tests under
   the ``self.test_suite.current_test_cache_dir`` and against their
   **installed** libraries before running them.
   Library developers will want to build the associated tests
   against their **installed** libraries before running them.

.. note::

@@ -4426,11 +4426,6 @@ paths:
   would be appropriate for ensuring the installed software continues
   to work as the underlying system evolves.

.. note::

   You are free to use a method name that is more suitable for
   your package.

.. _cache_custom_files:

"""""""""""""""""""
@@ -4509,7 +4504,8 @@ can retrieve the expected output from ``examples/foo.out`` using:

   def test(self):
       ..
       filename = join_path(self.install_test_root, 'examples', 'foo.out')
       filename = join_path(self.test_suite.current_test_cache_dir,
                            'examples', 'foo.out')
       expected = get_escaped_text_output(filename)
       ..

@@ -4677,9 +4673,6 @@ directory paths are provided in the table below.
   * - Test Suite Stage Files
     - ``self.test_suite.stage``
     - ``join_path(self.test_suite.stage, 'results.txt')``
   * - Cached Build-time Files
     - ``self.install_test_root``
     - ``join_path(self.install_test_root, 'examples', 'foo.c')``
   * - Staged Cached Build-time Files
     - ``self.test_suite.current_test_cache_dir``
     - ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')``
17
lib/spack/docs/tables/system_prerequisites.csv
Normal file
@@ -0,0 +1,17 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 2.6/2.7/3.5-3.9, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives
bzip2, , , Compress/Decompress archives
xz, , , Compress/Decompress archives
zstd, , Optional, Compress/Decompress archives
file, , , Create/Use Buildcaches
gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories
svn, , Optional, Manage Software Repositories
hg, , Optional, Manage Software Repositories
2
lib/spack/env/cc
vendored
@@ -163,7 +163,7 @@ case "$command" in
        lang_flags=F
        debug_flags="-g"
        ;;
    ld)
    ld|ld.gold|ld.lld)
        mode=ld
        ;;
    *)
1
lib/spack/env/ld.gold
vendored
Symbolic link
@@ -0,0 +1 @@
cc

1
lib/spack/env/ld.lld
vendored
Symbolic link
@@ -0,0 +1 @@
cc
@@ -692,7 +692,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):

    try:
        yield tmp_dir
    except (Exception, KeyboardInterrupt, SystemExit) as e:
    except (Exception, KeyboardInterrupt, SystemExit):
        # Delete what was there, before copying back the original content
        if os.path.exists(directory_name):
            shutil.rmtree(directory_name)
@@ -701,10 +701,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
            dst=os.path.dirname(directory_name)
        )
        tty.debug('DIRECTORY RECOVERED [{0}]'.format(directory_name))

        msg = 'the transactional move of "{0}" failed.'
        msg += '\n    ' + str(e)
        raise RuntimeError(msg.format(directory_name))
        raise
    else:
        # Otherwise delete the temporary directory
        shutil.rmtree(tmp_dir)
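# A sketch of how this transaction is typically consumed (hypothetical
# caller): the context manager moves the directory aside, the body rewrites
# it, and the bare `raise` above propagates the original exception after the
# backup is restored, instead of masking it behind a RuntimeError.
#
#     with replace_directory_transaction('/path/to/dir'):
#         rebuild_directory('/path/to/dir')  # hypothetical helper; on error
#                                            # the old content is restored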
@@ -1102,14 +1099,14 @@ def find(root, files, recursive=True):

    Accepts any glob characters accepted by fnmatch:

    =======  ====================================
    Pattern  Meaning
    =======  ====================================
    *        matches everything
    ?        matches any single character
    [seq]    matches any character in ``seq``
    [!seq]   matches any character not in ``seq``
    =======  ====================================
    ==========  ====================================
    Pattern     Meaning
    ==========  ====================================
    ``*``       matches everything
    ``?``       matches any single character
    ``[seq]``   matches any character in ``seq``
    ``[!seq]``  matches any character not in ``seq``
    ==========  ====================================

    Parameters:
        root (str): The root directory to start searching from
@@ -7,7 +7,6 @@

import functools
import inspect
import multiprocessing
import os
import re
import sys
@@ -31,23 +30,6 @@
ignore_modules = [r'^\.#', '~$']
# On macOS, Python 3.8 multiprocessing now defaults to the 'spawn' start
# method. Spack cannot currently handle this, so force the process to start
# using the 'fork' start method.
#
# TODO: This solution is not ideal, as the 'fork' start method can lead to
# crashes of the subprocess. Figure out how to make 'spawn' work.
#
# See:
# * https://github.com/spack/spack/pull/18124
# * https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods  # noqa: E501
# * https://bugs.python.org/issue33725
if sys.version_info >= (3,):  # novm
    fork_context = multiprocessing.get_context('fork')
else:
    fork_context = multiprocessing
def index_by(objects, *funcs):
    """Create a hierarchy of dictionaries by splitting the supplied
    set of objects on unique values of the supplied functions.
@@ -258,6 +240,47 @@ def new_dec(*args, **kwargs):
    return new_dec
def key_ordering(cls):
    """Decorates a class with extra methods that implement rich comparison
    operations and ``__hash__``. The decorator assumes that the class
    implements a function called ``_cmp_key()``. The rich comparison
    operations will compare objects using this key, and the ``__hash__``
    function will return the hash of this key.

    If a class already has ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
    ``__gt__``, or ``__ge__`` defined, this decorator will overwrite them.

    Raises:
        TypeError: If the class does not have a ``_cmp_key`` method
    """
    def setter(name, value):
        value.__name__ = name
        setattr(cls, name, value)

    if not has_method(cls, '_cmp_key'):
        raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)

    setter('__eq__',
           lambda s, o:
           (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
    setter('__lt__',
           lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
    setter('__le__',
           lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())

    setter('__ne__',
           lambda s, o:
           (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
    setter('__gt__',
           lambda s, o: o is None or s._cmp_key() > o._cmp_key())
    setter('__ge__',
           lambda s, o: o is None or s._cmp_key() >= o._cmp_key())

    setter('__hash__', lambda self: hash(self._cmp_key()))

    return cls
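# Usage sketch (hypothetical class, not defined in this module):
#
#     @key_ordering
#     class Version(object):
#         def __init__(self, major, minor):
#             self.major, self.minor = major, minor
#
#         def _cmp_key(self):
#             return (self.major, self.minor)
#
#     assert Version(1, 2) < Version(1, 10)              # __lt__ from _cmp_key
#     assert hash(Version(1, 2)) == hash(Version(1, 2))  # __hash__ too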
#: sentinel for testing that iterators are done in lazy_lexicographic_ordering
done = object()


@@ -892,3 +915,19 @@ class Devnull(object):
    """
    def write(self, *_):
        pass
def elide_list(line_list, max_num=10):
    """Takes a long list and limits it to a smaller number of elements,
    replacing intervening elements with '...'. For example::

        elide_list([1,2,3,4,5,6], 4)

    gives::

        [1, 2, 3, '...', 6]
    """
    if len(line_list) > max_num:
        return line_list[:max_num - 1] + ['...'] + line_list[-1:]
    else:
        return line_list
@@ -9,6 +9,7 @@
import socket
import time
from datetime import datetime
from typing import Dict, Tuple  # novm

import llnl.util.tty as tty
@@ -36,6 +37,126 @@
true_fn = lambda: True


class OpenFile(object):
    """Record for keeping track of open lockfiles (with reference counting).

    There's really only one ``OpenFile`` per inode, per process, but we record the
    filehandle here as it's the thing we end up using in python code. You can get
    the file descriptor from the file handle if needed -- or we could make this track
    file descriptors as well in the future.
    """
    def __init__(self, fh):
        self.fh = fh
        self.refs = 0
class OpenFileTracker(object):
    """Track open lockfiles, to minimize number of open file descriptors.

    The ``fcntl`` locks that Spack uses are associated with an inode and a process.
    This is convenient, because if a process exits, it releases its locks.
    Unfortunately, this also means that if you close a file, *all* locks associated
    with that file's inode are released, regardless of whether the process has any
    other open file descriptors on it.

    Because of this, we need to track open lock files so that we only close them when
    a process no longer needs them. We do this by tracking each lockfile by its
    inode and process id. This has several nice properties:

    1. Tracking by pid ensures that, if we fork, we don't inadvertently track the parent
       process's lockfiles. ``fcntl`` locks are not inherited across forks, so we'll
       just track new lockfiles in the child.
    2. Tracking by inode ensures that references are counted per inode, and that we don't
       inadvertently close a file whose inode still has open locks.
    3. Tracking by both pid and inode ensures that we only open lockfiles the minimum
       number of times necessary for the locks we have.

    Note: as mentioned elsewhere, these locks aren't thread safe -- they're designed to
    work in Python and assume the GIL.
    """

    def __init__(self):
        """Create a new ``OpenFileTracker``."""
        self._descriptors = {}  # type: Dict[Tuple[int, int], OpenFile]
    def get_fh(self, path):
        """Get a filehandle for a lockfile.

        This routine will open writable files for read/write even if you're asking
        for a shared (read-only) lock. This is so that we can upgrade to an exclusive
        (write) lock later if requested.

        Arguments:
            path (str): path to lock file we want a filehandle for
        """
        # Open writable files as 'r+' so we can upgrade to write later
        os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), 'r+'

        pid = os.getpid()
        open_file = None  # OpenFile object, if there is one
        stat = None       # stat result for the lockfile, if it exists

        try:
            # see whether we've seen this inode/pid before
            stat = os.stat(path)
            key = (stat.st_ino, pid)
            open_file = self._descriptors.get(key)

        except OSError as e:
            if e.errno != errno.ENOENT:  # only handle file not found
                raise

            # path does not exist -- fail if we won't be able to create it
            parent = os.path.dirname(path) or '.'
            if not os.access(parent, os.W_OK):
                raise CantCreateLockError(path)

        # if there was no already open file, we'll need to open one
        if not open_file:
            if stat and not os.access(path, os.W_OK):
                # we know path exists but not if it's writable. If it's read-only,
                # only open the file for reading (and fail if we're trying to get
                # an exclusive (write) lock on it)
                os_mode, fh_mode = os.O_RDONLY, 'r'

            fd = os.open(path, os_mode)
            fh = os.fdopen(fd, fh_mode)
            open_file = OpenFile(fh)

            # if we just created the file, we'll need to get its inode here
            if not stat:
                inode = os.fstat(fd).st_ino
                key = (inode, pid)

            self._descriptors[key] = open_file

        open_file.refs += 1
        return open_file.fh
    def release_fh(self, path):
        """Release a filehandle, only closing it if there are no more references."""
        try:
            inode = os.stat(path).st_ino
        except OSError as e:
            if e.errno != errno.ENOENT:  # only handle file not found
                raise
            inode = None  # this will not be in self._descriptors

        key = (inode, os.getpid())
        open_file = self._descriptors.get(key)
        assert open_file, "Attempted to close non-existing lock path: %s" % path

        open_file.refs -= 1
        if not open_file.refs:
            del self._descriptors[key]
            open_file.fh.close()


#: Open file descriptors for locks in this process. Used to prevent one process
#: from opening the same file many times for different byte range locks
file_tracker = OpenFileTracker()
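# Usage sketch (hypothetical path): calls are reference counted per
# (inode, pid), so the descriptor is only closed on the last release and
# no byte-range locks are dropped early.
#
#     fh1 = file_tracker.get_fh('/tmp/example.lock')  # opens file, refs = 1
#     fh2 = file_tracker.get_fh('/tmp/example.lock')  # same handle, refs = 2
#     file_tracker.release_fh('/tmp/example.lock')    # refs = 1, stays open
#     file_tracker.release_fh('/tmp/example.lock')    # refs = 0, fh closed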
def _attempts_str(wait_time, nattempts):
    # Don't print anything if we succeeded on the first try
    if nattempts <= 1:
@@ -56,7 +177,8 @@ class Lock(object):
    Note that this is for managing contention over resources *between*
    processes and not for managing contention between threads in a process: the
    functions of this object are not thread-safe. A process also must not
    maintain multiple locks on the same file.
    maintain multiple locks on the same file (or, more specifically, on
    overlapping byte ranges in the same file).
    """

    def __init__(self, path, start=0, length=0, default_timeout=None,
@@ -161,25 +283,10 @@ def _lock(self, op, timeout=None):

        # Create file and parent directories if they don't exist.
        if self._file is None:
            parent = self._ensure_parent_directory()
            self._ensure_parent_directory()
            self._file = file_tracker.get_fh(self.path)

            # Open writable files as 'r+' so we can upgrade to write later
            os_mode, fd_mode = (os.O_RDWR | os.O_CREAT), 'r+'
            if os.path.exists(self.path):
                if not os.access(self.path, os.W_OK):
                    if op == fcntl.LOCK_SH:
                        # can still lock read-only files if we open 'r'
                        os_mode, fd_mode = os.O_RDONLY, 'r'
                    else:
                        raise LockROFileError(self.path)

            elif not os.access(parent, os.W_OK):
                raise CantCreateLockError(self.path)

            fd = os.open(self.path, os_mode)
            self._file = os.fdopen(fd, fd_mode)

        elif op == fcntl.LOCK_EX and self._file.mode == 'r':
        if op == fcntl.LOCK_EX and self._file.mode == 'r':
            # Attempt to upgrade to write lock w/a read-only file.
            # If the file were writable, we'd have opened it 'r+'
            raise LockROFileError(self.path)
@@ -292,7 +399,8 @@ def _unlock(self):
        """
        fcntl.lockf(self._file, fcntl.LOCK_UN,
                    self._length, self._start, os.SEEK_SET)
        self._file.close()

        file_tracker.release_fh(self.path)
        self._file = None
        self._reads = 0
        self._writes = 0
@@ -5,6 +5,7 @@

from __future__ import unicode_literals

import contextlib
import fcntl
import os
import struct
@@ -28,6 +29,7 @@
_msg_enabled = True
_warn_enabled = True
_error_enabled = True
_output_filter = lambda s: s
indent = "  "


@@ -90,6 +92,18 @@ def error_enabled():
    return _error_enabled
@contextlib.contextmanager
def output_filter(filter_fn):
    """Context manager that applies a filter to all output."""
    global _output_filter
    saved_filter = _output_filter
    try:
        _output_filter = filter_fn
        yield
    finally:
        _output_filter = saved_filter
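# Usage sketch: while the context is active every message emitted through
# this module is passed through filter_fn first (hypothetical filter):
#
#     with output_filter(lambda s: s.replace('/private/prefix', '...')):
#         msg('installing to /private/prefix/lib')  # prints '... to .../lib'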
class SuppressOutput:
    """Class for disabling output in a scope using 'with' keyword"""

@@ -166,13 +180,23 @@ def msg(message, *args, **kwargs):
    if _stacktrace:
        st_text = process_stacktrace(2)
    if newline:
        cprint("@*b{%s==>} %s%s" % (
            st_text, get_timestamp(), cescape(message)))
        cprint(
            "@*b{%s==>} %s%s" % (
                st_text,
                get_timestamp(),
                cescape(_output_filter(message))
            )
        )
    else:
        cwrite("@*b{%s==>} %s%s" % (
            st_text, get_timestamp(), cescape(message)))
        cwrite(
            "@*b{%s==>} %s%s" % (
                st_text,
                get_timestamp(),
                cescape(_output_filter(message))
            )
        )
    for arg in args:
        print(indent + six.text_type(arg))
        print(indent + _output_filter(six.text_type(arg)))
def info(message, *args, **kwargs):
@@ -188,18 +212,29 @@ def info(message, *args, **kwargs):
    st_text = ""
    if _stacktrace:
        st_text = process_stacktrace(st_countback)
    cprint("@%s{%s==>} %s%s" % (
        format, st_text, get_timestamp(), cescape(six.text_type(message))
    ), stream=stream)
    cprint(
        "@%s{%s==>} %s%s" % (
            format,
            st_text,
            get_timestamp(),
            cescape(_output_filter(six.text_type(message)))
        ),
        stream=stream
    )
    for arg in args:
        if wrap:
            lines = textwrap.wrap(
                six.text_type(arg), initial_indent=indent,
                subsequent_indent=indent, break_long_words=break_long_words)
                _output_filter(six.text_type(arg)),
                initial_indent=indent,
                subsequent_indent=indent,
                break_long_words=break_long_words
            )
            for line in lines:
                stream.write(line + '\n')
        else:
            stream.write(indent + six.text_type(arg) + '\n')
            stream.write(
                indent + _output_filter(six.text_type(arg)) + '\n'
            )


def verbose(message, *args, **kwargs):
@@ -533,13 +533,15 @@ def __enter__(self):
        # Sets a daemon that writes to file what it reads from a pipe
        try:
            # need to pass this b/c multiprocessing closes stdin in child.
            input_multiprocess_fd = None
            try:
                input_multiprocess_fd = MultiProcessFd(
                    os.dup(sys.stdin.fileno())
                )
                if sys.stdin.isatty():
                    input_multiprocess_fd = MultiProcessFd(
                        os.dup(sys.stdin.fileno())
                    )
            except BaseException:
                # just don't forward input if this fails
                input_multiprocess_fd = None
                pass

        with replace_environment(self.env):
            self.process = multiprocessing.Process(
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 16, 1)
spack_version_info = (0, 16, 2)

#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)
@@ -72,6 +72,7 @@
import spack.config
import spack.error as serr
import spack.paths
import spack.spec
import spack.util.classes
import spack.util.executable
import spack.version
@@ -197,7 +198,6 @@ def optimization_flags(self, compiler):
        contains both the name and the version of the compiler we want to use
        """
        # Mixed toolchains are not supported yet
        import spack.compilers
        if isinstance(compiler, spack.compiler.Compiler):
            if spack.compilers.is_mixed_toolchain(compiler):
                msg = ('microarchitecture specific optimizations are not '
@@ -218,7 +218,6 @@ def optimization_flags(self, compiler):
        # of its name in compilers.yaml. Depending on where this function
        # is called we might get either a CompilerSpec or a fully fledged
        # compiler object.
        import spack.spec
        if isinstance(compiler, spack.spec.CompilerSpec):
            compiler = spack.compilers.compilers_for_spec(compiler).pop()
        try:
@@ -593,17 +592,20 @@ def use_platform(new_platform):
    assert isinstance(new_platform, Platform), msg.format(new_platform)

    original_platform_fn, original_all_platforms_fn = platform, all_platforms
    platform = _PickleableCallable(new_platform)
    all_platforms = _PickleableCallable([type(new_platform)])

    # Clear configuration and compiler caches
    spack.config.config.clear_caches()
    spack.compilers._cache_config_files = []
    try:
        platform = _PickleableCallable(new_platform)
        all_platforms = _PickleableCallable([type(new_platform)])

    yield new_platform
        # Clear configuration and compiler caches
        spack.config.config.clear_caches()
        spack.compilers._cache_config_files = []

    platform, all_platforms = original_platform_fn, original_all_platforms_fn
        yield new_platform

    # Clear configuration and compiler caches
    spack.config.config.clear_caches()
    spack.compilers._cache_config_files = []
    finally:
        platform, all_platforms = original_platform_fn, original_all_platforms_fn

        # Clear configuration and compiler caches
        spack.config.config.clear_caches()
        spack.compilers._cache_config_files = []
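# The rewrite above is the usual generator-context-manager shape: mutate
# global state, yield inside try, restore in finally so cleanup runs even
# when the with-body raises. A minimal sketch of the pattern (hypothetical
# names, same structure):
#
#     @contextlib.contextmanager
#     def use_value(new_value):
#         global current_value
#         saved, current_value = current_value, new_value
#         try:
#             yield new_value
#         finally:
#             current_value = saved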
@@ -37,12 +37,16 @@ def _search_duplicate_compilers(error_cls):
    """
    import collections
    import itertools
    import re

    from six.moves.urllib.request import urlopen

    try:
        from collections.abc import Sequence  # novm
    except ImportError:
        from collections import Sequence


#: Map an audit tag to a list of callables implementing checks
CALLBACKS = {}
@@ -261,6 +265,45 @@ def _search_duplicate_specs_in_externals(error_cls):
    kwargs=('pkgs',)
)

#: Sanity checks on linting
# This can take some time, so it's run separately from packages
package_https_directives = AuditClass(
    group='packages-https',
    tag='PKG-HTTPS-DIRECTIVES',
    description='Sanity checks on https checks of package urls, etc.',
    kwargs=('pkgs',)
)


@package_https_directives
def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links
    """
    import llnl.util.lang

    import spack.repo
    import spack.spec

    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)

        # Does the homepage have http, and if so, does https work?
        if pkg.homepage.startswith('http://'):
            https = re.sub("http", "https", pkg.homepage, 1)
            try:
                response = urlopen(https)
            except Exception as e:
                msg = 'Error with attempting https for "{0}": '
                errors.append(error_cls(msg.format(pkg.name), [str(e)]))
                continue

            if response.getcode() == 200:
                msg = 'Package "{0}" uses http but has a valid https endpoint.'
                errors.append(msg.format(pkg.name))

    return llnl.util.lang.dedupe(errors)


@package_directives
def _unknown_variants_in_directives(pkgs, error_cls):
@@ -29,6 +29,7 @@
import spack.database as spack_db
import spack.fetch_strategy as fs
import spack.hash_types as ht
import spack.hooks.sbang
import spack.mirror
import spack.relocate as relocate
import spack.util.file_cache as file_cache
@@ -615,9 +616,8 @@ def write_buildinfo_file(spec, workdir, rel=False):
            prefix_to_hash[str(d.prefix)] = d.dag_hash()

    # Create buildinfo data and write it to disk
    import spack.hooks.sbang as sbang
    buildinfo = {}
    buildinfo['sbang_install_path'] = sbang.sbang_install_path()
    buildinfo['sbang_install_path'] = spack.hooks.sbang.sbang_install_path()
    buildinfo['relative_rpaths'] = rel
    buildinfo['buildpath'] = spack.store.layout.root
    buildinfo['spackprefix'] = spack.paths.prefix
@@ -1169,8 +1169,6 @@ def relocate_package(spec, allow_root):
    """
    Relocate the given package
    """
    import spack.hooks.sbang as sbang

    workdir = str(spec.prefix)
    buildinfo = read_buildinfo_file(workdir)
    new_layout_root = str(spack.store.layout.root)
@@ -1209,7 +1207,8 @@ def relocate_package(spec, allow_root):
    prefix_to_prefix_bin = OrderedDict({})

    if old_sbang_install_path:
        prefix_to_prefix_text[old_sbang_install_path] = sbang.sbang_install_path()
        install_path = spack.hooks.sbang.sbang_install_path()
        prefix_to_prefix_text[old_sbang_install_path] = install_path

    prefix_to_prefix_text[old_prefix] = new_prefix
    prefix_to_prefix_bin[old_prefix] = new_prefix
@@ -1223,7 +1222,7 @@ def relocate_package(spec, allow_root):
    # now a POSIX script that lives in the install prefix. Old packages
    # will have the old sbang location in their shebangs.
    orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(old_spack_prefix)
    new_sbang = sbang.sbang_shebang_line()
    new_sbang = spack.hooks.sbang.sbang_shebang_line()
    prefix_to_prefix_text[orig_sbang] = new_sbang

    tty.debug("Relocating package from",
@@ -2,7 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import contextlib
import json
import os
import sys

@@ -18,7 +21,10 @@
import llnl.util.tty as tty

import spack.architecture
import spack.binary_distribution
import spack.config
import spack.environment
import spack.main
import spack.paths
import spack.repo
import spack.spec
@@ -28,6 +34,214 @@
import spack.util.path
from spack.util.environment import EnvironmentModifications
#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}


def _bootstrapper(type):
    """Decorator to register classes implementing bootstrapping
    methods.

    Args:
        type (str): string identifying the class
    """
    def _register(cls):
        _bootstrap_methods[type] = cls
        return cls
    return _register
def _try_import_from_store(module, abstract_spec_str):
    """Return True if the module can be imported from an already
    installed spec, False otherwise.

    Args:
        module: Python module to be imported
        abstract_spec_str: abstract spec that may provide the module
    """
    bincache_platform = spack.architecture.real_platform()
    if str(bincache_platform) == 'cray':
        bincache_platform = spack.platforms.linux.Linux()
        with spack.architecture.use_platform(bincache_platform):
            abstract_spec_str = str(spack.spec.Spec(abstract_spec_str))

    # We have to run as part of this python interpreter
    abstract_spec_str += ' ^' + spec_for_current_python()

    installed_specs = spack.store.db.query(abstract_spec_str, installed=True)

    for candidate_spec in installed_specs:
        lib_spd = candidate_spec['python'].package.default_site_packages_dir
        lib64_spd = lib_spd.replace('lib/', 'lib64/')
        module_paths = [
            os.path.join(candidate_spec.prefix, lib_spd),
            os.path.join(candidate_spec.prefix, lib64_spd)
        ]
        sys.path.extend(module_paths)

        try:
            if _python_import(module):
                msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
                       'provides the "{0}" Python module').format(
                    module, abstract_spec_str, candidate_spec.dag_hash()
                )
                tty.debug(msg)
                return True
        except Exception as e:
            msg = ('unexpected error while trying to import module '
                   '"{0}" from spec "{1}" [error="{2}"]')
            tty.warn(msg.format(module, candidate_spec, str(e)))
        else:
            msg = "Spec {0} did not provide module {1}"
            tty.warn(msg.format(candidate_spec, module))

        sys.path = sys.path[:-2]

    return False
@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(object):
    """Install the software needed during bootstrapping from a buildcache."""
    def __init__(self, conf):
        self.name = conf['name']
        self.url = conf['info']['url']

    def try_import(self, module, abstract_spec_str):
        # This import is local since it is needed only on Cray
        import spack.platforms.linux

        if _try_import_from_store(module, abstract_spec_str):
            return True

        # Try to install from an unsigned binary cache
        abstract_spec = spack.spec.Spec(
            abstract_spec_str + ' ^' + spec_for_current_python()
        )

        # On Cray we want to use Linux binaries if available from mirrors
        bincache_platform = spack.architecture.real_platform()
        if str(bincache_platform) == 'cray':
            bincache_platform = spack.platforms.linux.Linux()
            with spack.architecture.use_platform(bincache_platform):
                abstract_spec = spack.spec.Spec(
                    abstract_spec_str + ' ^' + spec_for_current_python()
                )

        # Read information on verified clingo binaries
        json_filename = '{0}.json'.format(module)
        json_path = os.path.join(
            spack.paths.share_path, 'bootstrap', self.name, json_filename
        )
        with open(json_path) as f:
            data = json.load(f)

        buildcache = spack.main.SpackCommand('buildcache')
        # Ensure we see only the buildcache being used to bootstrap
        mirror_scope = spack.config.InternalConfigScope(
            'bootstrap', {'mirrors:': {self.name: self.url}}
        )
        with spack.config.override(mirror_scope):
            # This index is currently needed to get the compiler used to build some
            # specs that we know by dag hash.
            spack.binary_distribution.binary_index.regenerate_spec_cache()
            index = spack.binary_distribution.update_cache_and_get_specs()
            for item in data['verified']:
                candidate_spec = item['spec']
                python_spec = item['python']
                # Skip specs which are not compatible
                if not abstract_spec.satisfies(candidate_spec):
                    continue

                if python_spec not in abstract_spec:
                    continue

                for pkg_name, pkg_hash, pkg_sha256 in item['binaries']:
                    msg = ('[BOOTSTRAP MODULE {0}] Try installing "{1}" from binary '
                           'cache at "{2}"')
                    tty.debug(msg.format(module, pkg_name, self.url))
                    index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
                    # Reconstruct the compiler that we need to use for bootstrapping
                    compiler_entry = {
                        "modules": [],
                        "operating_system": str(index_spec.os),
                        "paths": {
                            "cc": "/dev/null",
                            "cxx": "/dev/null",
                            "f77": "/dev/null",
                            "fc": "/dev/null"
                        },
                        "spec": str(index_spec.compiler),
                        "target": str(index_spec.target.family)
                    }
                    with spack.architecture.use_platform(bincache_platform):
                        with spack.config.override(
                                'compilers', [{'compiler': compiler_entry}]
                        ):
                            spec_str = '/' + pkg_hash
                            install_args = [
                                'install',
                                '--sha256', pkg_sha256,
                                '-a', '-u', '-o', '-f', spec_str
                            ]
                            buildcache(*install_args, fail_on_error=False)
                    # TODO: undo installations that didn't complete?

        if _try_import_from_store(module, abstract_spec_str):
            return True
        return False
@_bootstrapper(type='install')
class _SourceBootstrapper(object):
    """Install the software needed during bootstrapping from sources."""
    def __init__(self, conf):
        self.conf = conf

    @staticmethod
    def try_import(module, abstract_spec_str):
        if _try_import_from_store(module, abstract_spec_str):
            return True

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            if str(spack.architecture.platform()) == 'cray':
                abstract_spec_str += ' os=fe'

            concrete_spec = spack.spec.Spec(
                abstract_spec_str + ' ^' + spec_for_current_python()
            )

            if module == 'clingo':
                # TODO: remove when the old concretizer is deprecated
                concrete_spec._old_concretize()
            else:
                concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        concrete_spec.package.do_install()

        return _try_import_from_store(module, abstract_spec_str=abstract_spec_str)
def _make_bootstrapper(conf):
    """Return a bootstrap object built according to the
    configuration argument
    """
    btype = conf['type']
    return _bootstrap_methods[btype](conf)


def _source_is_trusted(conf):
    trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
    if name not in trusted:
        return False
    return trusted[name]
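# How the pieces above compose (sketch with a hypothetical config entry):
#
#     conf = {'type': 'install', 'name': 'spack-install'}
#     if _source_is_trusted(conf):        # checks the bootstrap:trusted config
#         b = _make_bootstrapper(conf)    # registry lookup -> _SourceBootstrapper
#         b.try_import('clingo', 'clingo-bootstrap@spack')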
def spec_for_current_python():
    """For bootstrapping purposes we are just interested in the Python
@@ -54,7 +268,7 @@ def spack_python_interpreter():
    which Spack is currently running as the only Python external spec
    available.
    """
    python_prefix = os.path.dirname(os.path.dirname(sys.executable))
    python_prefix = sys.exec_prefix
    external_python = spec_for_current_python()

    entry = {
@@ -68,63 +282,58 @@ def spack_python_interpreter():
    yield
def make_module_available(module, spec=None, install=False):
    """Ensure module is importable"""
    # If we already can import it, that's great
    try:
        __import__(module)
def ensure_module_importable_or_raise(module, abstract_spec=None):
    """Make the requested module available for import, or raise.

    This function tries to import a Python module in the current interpreter
    using, in order, the methods configured in bootstrap.yaml.

    If none of the methods succeed, an exception is raised. The function exits
    on first success.

    Args:
        module (str): module to be imported in the current interpreter
        abstract_spec (str): abstract spec that might provide the module. If not
            given it defaults to "module"

    Raises:
        ImportError: if the module couldn't be imported
    """
    # If we can import it already, that's great
    tty.debug("[BOOTSTRAP MODULE {0}] Try importing from Python".format(module))
    if _python_import(module):
        return
    except ImportError:
        pass

    # If it's already installed, use it
    # Search by spec
    spec = spack.spec.Spec(spec or module)
    abstract_spec = abstract_spec or module
    source_configs = spack.config.get('bootstrap:sources', [])
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
                   'not trusted').format(module, current_config['name'])
            tty.debug(msg)
            continue

    # We have to run as part of this python
    # We can constrain by a shortened version in place of a version range
    # because this spec is only used for querying or as a placeholder to be
    # replaced by an external that already has a concrete version. This syntax
    # is not sufficient when concretizing without an external, as it will
    # concretize to python@X.Y instead of python@X.Y.Z
    python_requirement = '^' + spec_for_current_python()
    spec.constrain(python_requirement)
    installed_specs = spack.store.db.query(spec, installed=True)

    for ispec in installed_specs:
        # TODO: make sure run-environment is appropriate
        module_path = ispec['python'].package.get_python_lib(prefix=ispec.prefix)
        b = _make_bootstrapper(current_config)
        try:
            sys.path.append(module_path)
            __import__(module)
            return
        except ImportError:
            tty.warn("Spec %s did not provide module %s" % (ispec, module))
            sys.path = sys.path[:-1]
            if b.try_import(module, abstract_spec):
                return
        except Exception as e:
            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
            tty.debug(msg.format(module, str(e)))

    def _raise_error(module_name, module_spec):
        error_msg = 'cannot import module "{0}"'.format(module_name)
        if module_spec:
            error_msg += ' from spec "{0}'.format(module_spec)
        raise ImportError(error_msg)
    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
    if abstract_spec:
        msg += ' from spec "{0}"'.format(abstract_spec)
    raise ImportError(msg)

    if not install:
        _raise_error(module, spec)

    with spack_python_interpreter():
        # We will install for ourselves, using this python if needed
        # Concretize the spec
        spec.concretize()
        spec.package.do_install()

        module_path = spec['python'].package.get_python_lib(prefix=spec.prefix)
def _python_import(module):
    try:
        sys.path.append(module_path)
        __import__(module)
        return
    except ImportError:
        sys.path = sys.path[:-1]
        _raise_error(module, spec)
        return False
    return True
def get_executable(exe, spec=None, install=False):
@@ -138,7 +347,8 @@ def get_executable(exe, spec=None, install=False):
    When ``install`` is True, Spack will use the python used to run Spack as an
    external. The ``install`` option should only be used with packages that
    install quickly (when using external python) or are guaranteed by Spack
    organization to be in a binary mirror (clingo)."""
    organization to be in a binary mirror (clingo).
    """
    # Search the system first
    runner = spack.util.executable.which(exe)
    if runner:
@@ -212,15 +422,16 @@ def _bootstrap_config_scopes():
@contextlib.contextmanager
def ensure_bootstrap_configuration():
    bootstrap_store_path = store_path()
    with spack.architecture.use_platform(spack.architecture.real_platform()):
        with spack.repo.use_repositories(spack.paths.packages_path):
            with spack.store.use_store(bootstrap_store_path):
                # Default configuration scopes excluding command line
                # and builtin but accounting for platform specific scopes
                config_scopes = _bootstrap_config_scopes()
                with spack.config.use_configuration(*config_scopes):
                    with spack_python_interpreter():
                        yield
    with spack.environment.deactivate_environment():
        with spack.architecture.use_platform(spack.architecture.real_platform()):
            with spack.repo.use_repositories(spack.paths.packages_path):
                with spack.store.use_store(bootstrap_store_path):
                    # Default configuration scopes excluding command line
                    # and builtin but accounting for platform specific scopes
                    config_scopes = _bootstrap_config_scopes()
                    with spack.config.use_configuration(*config_scopes):
                        with spack_python_interpreter():
                            yield
def store_path():
@@ -251,14 +462,17 @@ def clingo_root_spec():
    else:
        spec_str += ' %gcc'

    # Add hint to use frontend operating system on Cray
    if str(spack.architecture.platform()) == 'cray':
        spec_str += ' os=fe'

    # Add the generic target
    generic_target = archspec.cpu.host().family
    spec_str += ' target={0}'.format(str(generic_target))

    tty.debug('[BOOTSTRAP ROOT SPEC] clingo: {0}'.format(spec_str))

    return spack.spec.Spec(spec_str)
    return spec_str


def ensure_clingo_importable_or_raise():
    """Ensure that the clingo module is available for import."""
    ensure_module_importable_or_raise(
        module='clingo', abstract_spec=clingo_root_spec()
    )
@@ -61,6 +61,7 @@
import spack.schema.environment
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.path
from spack.error import NoHeadersError, NoLibrariesError
from spack.util.cpus import cpus_available
@@ -69,6 +70,7 @@
    env_flag,
    filter_system_paths,
    get_path,
    inspect_path,
    is_system_path,
    preserve_environment,
    system_dirs,
@@ -781,6 +783,13 @@ def setup_package(pkg, dirty, context='build'):
                "config to assume that the package is part of the system"
                " includes and omit it when invoked with '--cflags'.")
    elif context == 'test':
        env.extend(
            inspect_path(
                pkg.spec.prefix,
                spack.user_environment.prefix_inspections(pkg.spec.platform),
                exclude=is_system_path
            )
        )
        pkg.setup_run_environment(env)
        env.prepend_path('PATH', '.')
@@ -254,9 +254,9 @@ def define_from_variant(self, cmake_var, variant=None):

    .. code-block:: python

        [define_from_variant('BUILD_SHARED_LIBS', 'shared'),
         define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
         define_from_variant('SWR')]
        [self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
         self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
         self.define_from_variant('SWR')]

    will generate the following configuration options:
@@ -5,6 +5,7 @@
|
||||
|
||||
import spack.variant
|
||||
from spack.directives import conflicts, depends_on, variant
|
||||
from spack.multimethod import when
|
||||
from spack.package import PackageBase
|
||||
|
||||
|
||||
@@ -87,90 +88,87 @@ def cuda_flags(arch_list):

# Linux x86_64 compiler conflicts from here:
# https://gist.github.com/ax3l/9489132
conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=x86_64:')
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')
conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5 target=x86_64:')
conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1 target=x86_64:')
conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10 target=x86_64:')
conflicts('%pgi@:17,20:',
          when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
conflicts('%pgi@:17,21:',
          when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
conflicts('%clang@:3.7,4:',
          when='+cuda ^cuda@8.0:9.0 target=x86_64:')
conflicts('%clang@:3.7,4.1:',
          when='+cuda ^cuda@9.1 target=x86_64:')
conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2 target=x86_64:')
conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130 target=x86_64:')
conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105 target=x86_64:')
conflicts('%clang@:3.7,8.1:',
          when='+cuda ^cuda@10.1.105:10.1.243 target=x86_64:')
conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89 target=x86_64:')
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=x86_64:')
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')
with when('~allow-unsupported-compilers'):
    conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
    conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
    conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
    conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
    conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
    conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=x86_64:')
    conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
    conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')
    conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
    conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5 target=x86_64:')
    conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
    conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1 target=x86_64:')
    conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10 target=x86_64:')
    conflicts('%pgi@:17,20:', when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
    conflicts('%pgi@:17,21:', when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
    conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
    conflicts('%clang@:3.7,4:', when='+cuda ^cuda@8.0:9.0 target=x86_64:')
    conflicts('%clang@:3.7,4.1:', when='+cuda ^cuda@9.1 target=x86_64:')
    conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2 target=x86_64:')
    conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130 target=x86_64:')
    conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105 target=x86_64:')
    conflicts('%clang@:3.7,8.1:',
              when='+cuda ^cuda@10.1.105:10.1.243 target=x86_64:')
    conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89 target=x86_64:')
    conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=x86_64:')
    conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
    conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')

# x86_64 vs. ppc64le differ according to NVidia docs
# Linux ppc64le compiler conflicts from Table from the docs below:
# https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/9.2/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/9.1/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html
# https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html
    # x86_64 vs. ppc64le differ according to NVidia docs
    # Linux ppc64le compiler conflicts from Table from the docs below:
    # https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/9.2/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/9.1/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html

# information prior to CUDA 9 difficult to find
conflicts('%gcc@6:', when='+cuda ^cuda@:9 target=ppc64le:')
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=ppc64le:')
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.3 target=ppc64le:')
conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176 target=ppc64le:')
conflicts('%clang@5:', when='+cuda ^cuda@:9.1 target=ppc64le:')
conflicts('%clang@6:', when='+cuda ^cuda@:9.2 target=ppc64le:')
conflicts('%clang@7:', when='+cuda ^cuda@10.0.130 target=ppc64le:')
conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=ppc64le:')
conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=ppc64le:')
conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
    # information prior to CUDA 9 difficult to find
    conflicts('%gcc@6:', when='+cuda ^cuda@:9 target=ppc64le:')
    conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
    conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
    # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
    conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=ppc64le:')
    conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.2 target=ppc64le:')
    conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
    conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
    conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
    conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
    conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176 target=ppc64le:')
    conflicts('%clang@5:', when='+cuda ^cuda@:9.1 target=ppc64le:')
    conflicts('%clang@6:', when='+cuda ^cuda@:9.2 target=ppc64le:')
    conflicts('%clang@7:', when='+cuda ^cuda@10.0.130 target=ppc64le:')
    conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
    conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
    conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=ppc64le:')
    conflicts('%clang@10:', when='+cuda ^cuda@:11.0.2 target=ppc64le:')
    conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')

# Intel is mostly relevant for x86_64 Linux, even though it also
# exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
conflicts('%intel@:11.0', when='+cuda ^cuda@:3.1')
conflicts('%intel@:12.0', when='+cuda ^cuda@5.5:')
conflicts('%intel@:13.0', when='+cuda ^cuda@6.0:')
conflicts('%intel@:13.2', when='+cuda ^cuda@6.5:')
conflicts('%intel@:14.9', when='+cuda ^cuda@7:')
# Intel 15.x is compatible with CUDA 7 thru current CUDA
conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
conflicts('%intel@19.1:', when='+cuda ^cuda@:10.1')
conflicts('%intel@19.2:', when='+cuda ^cuda@:11.1.0')
    # Intel is mostly relevant for x86_64 Linux, even though it also
    # exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
    conflicts('%intel@:11.0', when='+cuda ^cuda@:3.1')
    conflicts('%intel@:12.0', when='+cuda ^cuda@5.5:')
    conflicts('%intel@:13.0', when='+cuda ^cuda@6.0:')
    conflicts('%intel@:13.2', when='+cuda ^cuda@6.5:')
    conflicts('%intel@:14.9', when='+cuda ^cuda@7:')
    # Intel 15.x is compatible with CUDA 7 thru current CUDA
    conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
    conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
    conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
    conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
    conflicts('%intel@19.1:', when='+cuda ^cuda@:10.1')
    conflicts('%intel@19.2:', when='+cuda ^cuda@:11.1.0')

# XL is mostly relevant for ppc64le Linux
conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')
    # XL is mostly relevant for ppc64le Linux
    conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
    conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
    conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')

# Darwin.
# TODO: add missing conflicts for %apple-clang cuda@:10
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2:')
    # Darwin.
    # TODO: add missing conflicts for %apple-clang cuda@:10
    conflicts('platform=darwin', when='+cuda ^cuda@11.0.2: ')

# Make sure cuda_arch can not be used without +cuda
for value in cuda_arch_values:
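The with when('~allow-unsupported-compilers') guard above makes every
compiler/CUDA conflict skippable per spec. A hedged console sketch, assuming
the mixin also defines the corresponding variant (the package name is
illustrative, not from this diff):

    $ spack install mypkg +cuda %gcc@11 ^cuda@11.0.2
    # hits the gcc/cuda conflicts above
    $ spack install mypkg +cuda +allow-unsupported-compilers %gcc@11 ^cuda@11.0.2
    # the same spec with the conflicts waived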
@@ -127,7 +127,10 @@ def import_modules(self):
            list: list of strings of module names
        """
        modules = []
        root = self.spec['python'].package.get_python_lib(prefix=self.prefix)
        root = os.path.join(
            self.prefix,
            self.spec['python'].package.config_vars['python_lib']['false']['false'],
        )

        # Some Python libraries are packages: collections of modules
        # distributed in directories containing __init__.py files
@@ -252,12 +255,11 @@ def install_args(self, spec, prefix):
        # Get all relative paths since we set the root to `prefix`
        # We query the python with which these will be used for the lib and inc
        # directories. This ensures we use `lib`/`lib64` as expected by python.
        pure_site_packages_dir = spec['python'].package.get_python_lib(
            plat_specific=False, prefix='')
        plat_site_packages_dir = spec['python'].package.get_python_lib(
            plat_specific=True, prefix='')
        inc_dir = spec['python'].package.get_python_inc(
            plat_specific=True, prefix='')
        pure_site_packages_dir = spec['python'].package.config_vars[
            'python_lib']['false']['false']
        plat_site_packages_dir = spec['python'].package.config_vars[
            'python_lib']['true']['false']
        inc_dir = spec['python'].package.config_vars['python_inc']['true']

        args += ['--root=%s' % prefix,
                 '--install-purelib=%s' % pure_site_packages_dir,

@@ -64,7 +64,10 @@ def import_modules(self):
            list: list of strings of module names
        """
        modules = []
        root = self.spec['python'].package.get_python_lib(prefix=self.prefix)
        root = os.path.join(
            self.prefix,
            self.spec['python'].package.config_vars['python_lib']['false']['false'],
        )

        # Some Python libraries are packages: collections of modules
        # distributed in directories containing __init__.py files

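The hunks above replace distutils-style get_python_lib()/get_python_inc()
calls with lookups into the python package's cached config_vars. A rough
sketch of the shape that mapping must have for these lookups to resolve
(paths are illustrative, not from this diff):

    config_vars = {
        # plat_specific ('true'/'false') -> prefix ('true'/'false') -> path
        'python_lib': {
            'false': {'false': 'lib/python3.8/site-packages'},
            'true':  {'false': 'lib64/python3.8/site-packages'},
        },
        'python_inc': {'true': 'include/python3.8'},
    }
    # so config_vars['python_lib']['false']['false'] stands in for
    # get_python_lib(plat_specific=False, prefix='')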
@@ -45,6 +45,8 @@
]

SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
                                           'shared_pr_mirror')
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'

spack_gpg = spack.main.SpackCommand('gpg')
@@ -612,11 +614,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
            'strip-compilers': False,
        })

    # Add this mirror if it's enabled, as some specs might be up to date
    # here and thus not need to be rebuilt.
    # Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
    # be up to date in one of those and thus not need to be rebuilt.
    if pr_mirror_url:
        spack.mirror.add(
            'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
        spack.mirror.add('ci_shared_pr_mirror',
                         SPACK_SHARED_PR_MIRROR_URL,
                         cfg.default_modify_scope())

    pipeline_artifacts_dir = artifacts_root
    if not pipeline_artifacts_dir:
@@ -1316,17 +1321,20 @@ def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,

        request = Request(cdash_api_url, data=enc_data, headers=headers)

        response = opener.open(request)
        response_code = response.getcode()
        try:
            response = opener.open(request)
            response_code = response.getcode()

        if response_code != 200 and response_code != 201:
            msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
                job_build_id, dep_build_id, response_code)
            tty.warn(msg)
            return
            if response_code != 200 and response_code != 201:
                msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
                    job_build_id, dep_build_id, response_code)
                tty.warn(msg)
                return

        response_text = response.read()
        tty.debug('Relate builds response: {0}'.format(response_text))
            response_text = response.read()
            tty.debug('Relate builds response: {0}'.format(response_text))
        except Exception as e:
            print("Relating builds in CDash failed: {0}".format(e))


def write_cdashid_to_mirror(cdashid, spec, mirror_url):

@@ -21,6 +21,7 @@
from llnl.util.tty.color import colorize

import spack.config
import spack.environment as ev
import spack.error
import spack.extensions
import spack.paths
@@ -186,29 +187,13 @@ def matching_spec_from_env(spec):
    If no matching spec is found in the environment (or if no environment is
    active), this will return the given spec but concretized.
    """
    env = spack.environment.get_env({}, cmd_name)
    env = ev.active_environment()
    if env:
        return env.matching_spec(spec) or spec.concretized()
    else:
        return spec.concretized()


def elide_list(line_list, max_num=10):
    """Takes a long list and limits it to a smaller number of elements,
    replacing intervening elements with '...'. For example::

        elide_list([1,2,3,4,5,6], 4)

    gives::

        [1, 2, 3, '...', 6]
    """
    if len(line_list) > max_num:
        return line_list[:max_num - 1] + ['...'] + line_list[-1:]
    else:
        return line_list


def disambiguate_spec(spec, env, local=False, installed=True, first=False):
    """Given a spec, figure out which installed package it refers to.

@@ -501,3 +486,71 @@ def extant_file(f):
    if not os.path.isfile(f):
        raise argparse.ArgumentTypeError('%s does not exist' % f)
    return f


def require_active_env(cmd_name):
    """Used by commands to get the active environment

    If an environment is not found, print an error message that says the calling
    command *needs* an active environment.

    Arguments:
        cmd_name (str): name of calling command

    Returns:
        (spack.environment.Environment): the active environment
    """
    env = ev.active_environment()

    if env:
        return env
    else:
        tty.die(
            '`spack %s` requires an environment' % cmd_name,
            'activate an environment first:',
            '    spack env activate ENV',
            'or use:',
            '    spack -e ENV %s ...' % cmd_name)

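A short sketch of how a command is expected to use the new helper (the
command name is illustrative):

    import spack.cmd

    def mycmd(parser, args):
        # dies with an actionable hint unless an environment is active
        env = spack.cmd.require_active_env(cmd_name='mycmd')
        with env.write_transaction():
            ...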
def find_environment(args):
    """Find active environment from args or environment variable.

    Check for an environment in this order:
        1. via ``spack -e ENV`` or ``spack -D DIR`` (arguments)
        2. via a path in the spack.environment.spack_env_var environment variable.

    If an environment is found, read it in. If not, return None.

    Arguments:
        args (argparse.Namespace): argparse namespace with command arguments

    Returns:
        (spack.environment.Environment): a found environment, or ``None``
    """

    # treat env as a name
    env = args.env
    if env:
        if ev.exists(env):
            return ev.read(env)

    else:
        # if env was specified, see if it is a directory otherwise, look
        # at env_dir (env and env_dir are mutually exclusive)
        env = args.env_dir

        # if no argument, look for the environment variable
        if not env:
            env = os.environ.get(ev.spack_env_var)

            # nothing was set; there's no active environment
            if not env:
                return None

    # if we get here, env isn't the name of a spack environment; it has
    # to be a path to an environment, or there is something wrong.
    if ev.is_env_dir(env):
        return ev.Environment(env)

    raise ev.SpackEnvironmentError('no environment in %s' % env)

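The precedence above can be exercised from the shell; a hedged sketch
(environment names and paths illustrative):

    $ spack -e myenv find          # 1. explicit name via -e
    $ spack -D /path/to/env find   # 1. explicit directory via -D
    $ spack env activate myenv     # 2. exports the variable read as fallback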
@@ -30,8 +30,7 @@ def activate(parser, args):
    if len(specs) != 1:
        tty.die("activate requires one spec.  %d given." % len(specs))

    env = ev.get_env(args, 'activate')
    spec = spack.cmd.disambiguate_spec(specs[0], env)
    spec = spack.cmd.disambiguate_spec(specs[0], ev.active_environment())
    if not spec.package.is_extension:
        tty.die("%s is not an extension." % spec.name)


@@ -7,7 +7,6 @@

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev

description = 'add a spec to an environment'
section = "environments"
@@ -22,7 +21,7 @@ def setup_parser(subparser):


def add(parser, args):
    env = ev.get_env(args, 'add', required=True)
    env = spack.cmd.require_active_env(cmd_name='add')

    with env.write_transaction():
        for spec in spack.cmd.parse_specs(args.specs):

@@ -95,7 +95,7 @@ def analyze(parser, args, **kwargs):
        sys.exit(0)

    # handle active environment, if any
    env = ev.get_env(args, 'analyze')
    env = ev.active_environment()

    # Get an disambiguate spec (we should only have one)
    specs = spack.cmd.parse_specs(args.spec)

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
import llnl.util.tty.color as cl

import spack.audit
@@ -19,12 +20,24 @@ def setup_parser(subparser):
    # Audit configuration files
    sp.add_parser('configs', help='audit configuration files')

    # Https and other linting
    https_parser = sp.add_parser('packages-https', help='check https in packages')
    https_parser.add_argument(
        '--all',
        action='store_true',
        default=False,
        dest='check_all',
        help="audit all packages"
    )

    # Audit package recipes
    pkg_parser = sp.add_parser('packages', help='audit package recipes')
    pkg_parser.add_argument(
        'name', metavar='PKG', nargs='*',
        help='package to be analyzed (if none all packages will be processed)',
    )

    for group in [pkg_parser, https_parser]:
        group.add_argument(
            'name', metavar='PKG', nargs='*',
            help='package to be analyzed (if none all packages will be processed)',
        )

    # List all checks
    sp.add_parser('list', help='list available checks and exits')
@@ -41,6 +54,17 @@ def packages(parser, args):
    _process_reports(reports)


def packages_https(parser, args):

    # Since packages takes a long time, --all is required without name
    if not args.check_all and not args.name:
        tty.die("Please specify one or more packages to audit, or --all.")

    pkgs = args.name or spack.repo.path.all_package_names()
    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
    _process_reports(reports)


def list(parser, args):
    for subcommand, check_tags in spack.audit.GROUPS.items():
        print(cl.colorize('@*b{' + subcommand + '}:'))
@@ -58,6 +82,7 @@ def audit(parser, args):
    subcommands = {
        'configs': configs,
        'packages': packages,
        'packages-https': packages_https,
        'list': list
    }
    subcommands[args.subcommand](parser, args)
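A hedged console sketch of the new subcommand (package names illustrative):

    $ spack audit packages-https --all           # audit every package
    $ spack audit packages-https zlib openssl    # or only the named ones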
@@ -2,10 +2,13 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import os.path
import shutil

import llnl.util.tty
import llnl.util.tty.color

import spack.cmd.common.arguments
import spack.config
@@ -51,6 +54,27 @@ def setup_parser(subparser):
        help='set the bootstrap directory to this value'
    )

    list = sp.add_parser(
        'list', help='list the methods available for bootstrapping'
    )
    _add_scope_option(list)

    trust = sp.add_parser(
        'trust', help='trust a bootstrapping method'
    )
    _add_scope_option(trust)
    trust.add_argument(
        'name', help='name of the method to be trusted'
    )

    untrust = sp.add_parser(
        'untrust', help='untrust a bootstrapping method'
    )
    _add_scope_option(untrust)
    untrust.add_argument(
        'name', help='name of the method to be untrusted'
    )


def _enable_or_disable(args):
    # Set to True if we called "enable", otherwise set to false
@@ -100,11 +124,97 @@ def _root(args):
    print(root)


def _list(args):
    sources = spack.config.get(
        'bootstrap:sources', default=None, scope=args.scope
    )

    if not sources:
        llnl.util.tty.msg(
            "No method available for bootstrapping Spack's dependencies"
        )
        return

    def _print_method(source, trusted):
        color = llnl.util.tty.color

        def fmt(header, content):
            header_fmt = "@*b{{{0}:}} {1}"
            color.cprint(header_fmt.format(header, content))

        trust_str = "@*y{UNKNOWN}"
        if trusted is True:
            trust_str = "@*g{TRUSTED}"
        elif trusted is False:
            trust_str = "@*r{UNTRUSTED}"

        fmt("Name", source['name'] + ' ' + trust_str)
        print()
        fmt("  Type", source['type'])
        print()

        info_lines = ['\n']
        for key, value in source.get('info', {}).items():
            info_lines.append(' ' * 4 + '@*{{{0}}}: {1}\n'.format(key, value))
        if len(info_lines) > 1:
            fmt("  Info", ''.join(info_lines))

        description_lines = ['\n']
        for line in source['description'].split('\n'):
            description_lines.append(' ' * 4 + line + '\n')

        fmt("  Description", ''.join(description_lines))

    trusted = spack.config.get('bootstrap:trusted', {})
    for s in sources:
        _print_method(s, trusted.get(s['name'], None))


def _write_trust_state(args, value):
    name = args.name
    sources = spack.config.get('bootstrap:sources')

    matches = [s for s in sources if s['name'] == name]
    if not matches:
        names = [s['name'] for s in sources]
        msg = ('there is no bootstrapping method named "{0}". Valid '
               'method names are: {1}'.format(name, ', '.join(names)))
        raise RuntimeError(msg)

    if len(matches) > 1:
        msg = ('there is more than one bootstrapping method named "{0}". '
               'Please delete all methods but one from bootstrap.yaml '
               'before proceeding').format(name)
        raise RuntimeError(msg)

    # Setting the scope explicitly is needed to not copy over to a new scope
    # the entire default configuration for bootstrap.yaml
    scope = args.scope or spack.config.default_modify_scope('bootstrap')
    spack.config.add(
        'bootstrap:trusted:{0}:{1}'.format(name, str(value)), scope=scope
    )


def _trust(args):
    _write_trust_state(args, value=True)
    msg = '"{0}" is now trusted for bootstrapping'
    llnl.util.tty.msg(msg.format(args.name))


def _untrust(args):
    _write_trust_state(args, value=False)
    msg = '"{0}" is now untrusted and will not be used for bootstrapping'
    llnl.util.tty.msg(msg.format(args.name))


def bootstrap(parser, args):
    callbacks = {
        'enable': _enable_or_disable,
        'disable': _enable_or_disable,
        'reset': _reset,
        'root': _root
        'root': _root,
        'list': _list,
        'trust': _trust,
        'untrust': _untrust
    }
    callbacks[args.subcommand](args)
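A hedged console sketch of the new subcommands (the method name is
illustrative; real names come from bootstrap:sources in bootstrap.yaml, and
the scope flag assumes _add_scope_option wires up --scope):

    $ spack bootstrap list
    $ spack bootstrap untrust github-actions
    $ spack bootstrap trust github-actions --scope=user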
@@ -2,10 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
import os
import shutil
import sys
import tempfile

import llnl.util.tty as tty

@@ -15,16 +16,20 @@
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.fetch_strategy as fs
import spack.hash_types as ht
import spack.mirror
import spack.relocate
import spack.repo
import spack.spec
import spack.store
import spack.util.crypto
import spack.util.url as url_util
import spack.util.web as web_util
from spack.cmd import display_specs
from spack.error import SpecError
from spack.spec import Spec, save_dependency_spec_yamls
from spack.stage import Stage
from spack.util.string import plural

description = "create, download and install binary packages"
@@ -97,6 +102,8 @@ def setup_parser(subparser):
    install.add_argument('-o', '--otherarch', action='store_true',
                         help="install specs from other architectures" +
                              " instead of default platform and OS")
    # This argument is needed by the bootstrapping logic to verify checksums
    install.add_argument('--sha256', help=argparse.SUPPRESS)

    arguments.add_common_arguments(install, ['specs'])
    install.set_defaults(func=installtarball)
@@ -223,6 +230,36 @@ def setup_parser(subparser):
                      help='Destination mirror url')
    copy.set_defaults(func=buildcache_copy)

    # Sync buildcache entries from one mirror to another
    sync = subparsers.add_parser('sync', help=buildcache_sync.__doc__)
    source = sync.add_mutually_exclusive_group(required=True)
    source.add_argument('--src-directory',
                        metavar='DIRECTORY',
                        type=str,
                        help="Source mirror as a local file path")
    source.add_argument('--src-mirror-name',
                        metavar='MIRROR_NAME',
                        type=str,
                        help="Name of the source mirror")
    source.add_argument('--src-mirror-url',
                        metavar='MIRROR_URL',
                        type=str,
                        help="URL of the source mirror")
    dest = sync.add_mutually_exclusive_group(required=True)
    dest.add_argument('--dest-directory',
                      metavar='DIRECTORY',
                      type=str,
                      help="Destination mirror as a local file path")
    dest.add_argument('--dest-mirror-name',
                      metavar='MIRROR_NAME',
                      type=str,
                      help="Name of the destination mirror")
    dest.add_argument('--dest-mirror-url',
                      metavar='MIRROR_URL',
                      type=str,
                      help="URL of the destination mirror")
    sync.set_defaults(func=buildcache_sync)
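A hedged console sketch of the sync subcommand wired up above (URLs and
paths illustrative); note that buildcache_sync below requires an active
environment:

    $ spack -e myenv buildcache sync \
          --src-mirror-url s3://spack-binaries-prs/shared_pr_mirror \
          --dest-directory /tmp/local-mirror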
    # Update buildcache index without copying any additional packages
    update_index = subparsers.add_parser(
        'update-index', help=buildcache_update_index.__doc__)
@@ -420,7 +457,7 @@ def createtarball(args):
    """create a binary package from an existing install"""

    # restrict matching to current environment if one is active
    env = ev.get_env(args, 'buildcache create')
    env = ev.active_environment()

    output_location = None
    if args.directory:
@@ -495,6 +532,15 @@ def install_tarball(spec, args):
    else:
        tarball = bindist.download_tarball(spec)
        if tarball:
            if args.sha256:
                checker = spack.util.crypto.Checker(args.sha256)
                msg = ('cannot verify checksum for "{0}"'
                       ' [expected={1}]')
                msg = msg.format(tarball, args.sha256)
                if not checker.check(tarball):
                    raise spack.binary_distribution.NoChecksumException(msg)
                tty.debug('Verified SHA256 checksum of the build cache')

            tty.msg('Installing buildcache for spec %s' % spec.format())
            bindist.extract_tarball(spec, tarball, args.allow_root,
                                    args.unsigned, args.force)
@@ -555,7 +601,7 @@ def check_binaries(args):
    if args.spec or args.spec_yaml:
        specs = [get_concrete_spec(args)]
    else:
        env = ev.get_env(args, 'buildcache', required=True)
        env = spack.cmd.require_active_env(cmd_name='buildcache')
        env.concretize()
        specs = env.all_specs()

@@ -767,6 +813,123 @@ def buildcache_copy(args):
            shutil.copyfile(cdashid_src_path, cdashid_dest_path)


def buildcache_sync(args):
    """ Syncs binaries (and associated metadata) from one mirror to another.
    Requires an active environment in order to know which specs to sync.

    Args:
        src (str): Source mirror URL
        dest (str): Destination mirror URL
    """
    # Figure out the source mirror
    source_location = None
    if args.src_directory:
        source_location = args.src_directory
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--src-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        source_location = 'file://' + source_location
    elif args.src_mirror_name:
        source_location = args.src_mirror_name
        result = spack.mirror.MirrorCollection().lookup(source_location)
        if result.name == "<unnamed>":
            raise ValueError(
                'no configured mirror named "{name}"'.format(
                    name=source_location))
    elif args.src_mirror_url:
        source_location = args.src_mirror_url
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=source_location))

    src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
    src_mirror_url = url_util.format(src_mirror.fetch_url)

    # Figure out the destination mirror
    dest_location = None
    if args.dest_directory:
        dest_location = args.dest_directory
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--dest-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        dest_location = 'file://' + dest_location
    elif args.dest_mirror_name:
        dest_location = args.dest_mirror_name
        result = spack.mirror.MirrorCollection().lookup(dest_location)
        if result.name == "<unnamed>":
            raise ValueError(
                'no configured mirror named "{name}"'.format(
                    name=dest_location))
    elif args.dest_mirror_url:
        dest_location = args.dest_mirror_url
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=dest_location))

    dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
    dest_mirror_url = url_util.format(dest_mirror.fetch_url)

    # Get the active environment
    env = spack.cmd.require_active_env(cmd_name='buildcache sync')

    tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
        src_mirror_url, dest_mirror_url))

    build_cache_dir = bindist.build_cache_relative_path()
    buildcache_rel_paths = []

    tty.debug('Syncing the following specs:')
    for s in env.all_specs():
        tty.debug(' {0}{1}: {2}'.format(
            '* ' if s in env.roots() else ' ', s.name, s.dag_hash()))

        buildcache_rel_paths.extend([
            os.path.join(
                build_cache_dir, bindist.tarball_path_name(s, '.spack')),
            os.path.join(
                build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
            os.path.join(
                build_cache_dir, bindist.tarball_name(s, '.cdashid'))
        ])

    tmpdir = tempfile.mkdtemp()

    try:
        for rel_path in buildcache_rel_paths:
            src_url = url_util.join(src_mirror_url, rel_path)
            local_path = os.path.join(tmpdir, rel_path)
            dest_url = url_util.join(dest_mirror_url, rel_path)

            tty.debug('Copying {0} to {1} via {2}'.format(
                src_url, dest_url, local_path))

            stage = Stage(src_url,
                          name="temporary_file",
                          path=os.path.dirname(local_path),
                          keep=True)

            try:
                stage.create()
                stage.fetch()
                web_util.push_to_url(
                    local_path,
                    dest_url,
                    keep_original=True)
            except fs.FetchError as e:
                tty.debug('spack buildcache unable to sync {0}'.format(rel_path))
                tty.debug(e)
            finally:
                stage.destroy()
    finally:
        shutil.rmtree(tmpdir)


def update_index(mirror_url, update_keys=False):
    mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
    outdir = url_util.format(mirror.push_url)

@@ -22,6 +22,7 @@
import spack.environment as ev
import spack.hash_types as ht
import spack.mirror
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util

@@ -30,6 +31,7 @@
level = "long"

CI_REBUILD_INSTALL_BASE_ARGS = ['spack', '-d', '-v']
INSTALL_FAIL_CODE = 1


def get_env_var(variable_name):
@@ -116,7 +118,7 @@ def ci_generate(args):
    for creating a build group for the generated workload and registering
    all generated jobs under that build group.  If this environment
    variable is not set, no build group will be created on CDash."""
    env = ev.get_env(args, 'ci generate', required=True)
    env = spack.cmd.require_active_env(cmd_name='ci generate')

    output_file = args.output_file
    copy_yaml_to = args.copy_to
@@ -150,7 +152,7 @@ def ci_generate(args):
def ci_reindex(args):
    """Rebuild the buildcache index associated with the mirror in the
    active, gitlab-enabled environment. """
    env = ev.get_env(args, 'ci rebuild-index', required=True)
    env = spack.cmd.require_active_env(cmd_name='ci rebuild-index')
    yaml_root = ev.config_dict(env.yaml)

    if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
@@ -167,7 +169,7 @@ def ci_rebuild(args):
    """Check a single spec against the remote mirror, and rebuild it from
    source if the mirror does not contain the full hash match of the spec
    as computed locally. """
    env = ev.get_env(args, 'ci rebuild', required=True)
    env = spack.cmd.require_active_env(cmd_name='ci rebuild')

    # Make sure the environment is "gitlab-enabled", or else there's nothing
    # to do.
@@ -491,7 +493,7 @@ def ci_rebuild(args):
    # If a spec fails to build in a spack develop pipeline, we add it to a
    # list of known broken full hashes.  This allows spack PR pipelines to
    # avoid wasting compute cycles attempting to build those hashes.
    if install_exit_code == 1 and spack_is_develop_pipeline:
    if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
        tty.debug('Install failed on develop')
        if 'broken-specs-url' in gitlab_ci:
            broken_specs_url = gitlab_ci['broken-specs-url']
@@ -502,9 +504,17 @@ def ci_rebuild(args):
            tmpdir = tempfile.mkdtemp()
            empty_file_path = os.path.join(tmpdir, 'empty.txt')

            broken_spec_details = {
                'broken-spec': {
                    'job-url': get_env_var('CI_JOB_URL'),
                    'pipeline-url': get_env_var('CI_PIPELINE_URL'),
                    'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
                }
            }

            try:
                with open(empty_file_path, 'w') as efd:
                    efd.write('')
                    efd.write(syaml.dump(broken_spec_details))
                web_util.push_to_url(
                    empty_file_path,
                    broken_spec_path,
@@ -566,6 +576,26 @@ def ci_rebuild(args):
            cdash_build_id, pipeline_mirror_url))
        spack_ci.write_cdashid_to_mirror(
            cdash_build_id, job_spec, pipeline_mirror_url)

        # If this is a develop pipeline, check if the spec that we just built is
        # on the broken-specs list. If so, remove it.
        if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
            broken_specs_url = gitlab_ci['broken-specs-url']
            just_built_hash = job_spec.full_hash()
            broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
            if web_util.url_exists(broken_spec_path):
                tty.msg('Removing {0} from the list of broken specs'.format(
                    broken_spec_path))
                try:
                    web_util.remove_url(broken_spec_path)
                except Exception as err:
                    # If we got some kind of S3 (access denied or other connection
                    # error), the first non boto-specific class in the exception
                    # hierarchy is Exception. Just print a warning and return
                    msg = 'Error removing {0} from broken specs list: {1}'.format(
                        broken_spec_path, err)
                    tty.warn(msg)

    else:
        tty.debug('spack install exited non-zero, will not create buildcache')

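For reference, a sketch of the YAML document that replaces the former empty
placeholder file on the broken-specs mirror (all values illustrative):

    broken-spec:
      job-url: https://gitlab.example.com/group/project/-/jobs/12345
      pipeline-url: https://gitlab.example.com/group/project/-/pipelines/678
      concrete-spec-yaml:
        spec:
          # full concrete spec dict as produced by
          # job_spec.to_dict(hash=ht.full_hash)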
@@ -104,6 +104,6 @@ def clean(parser, args):
    if args.bootstrap:
        msg = 'Removing software in "{0}"'
        tty.msg(msg.format(spack.bootstrap.store_path()))
        with spack.store.use_store(spack.bootstrap.store_path()):
        with spack.bootstrap.ensure_bootstrap_configuration():
            uninstall = spack.main.SpackCommand('uninstall')
            uninstall('-a', '-y')

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.cmd
import spack.environment as ev

description = 'concretize an environment and write a lockfile'
@@ -23,7 +24,7 @@ def setup_parser(subparser):


def concretize(parser, args):
    env = ev.get_env(args, 'concretize', required=True)
    env = spack.cmd.require_active_env(cmd_name='concretize')

    if args.test == 'all':
        tests = True

@@ -118,7 +118,7 @@ def _get_scope_and_section(args):

    # w/no args and an active environment, point to env manifest
    if not section:
        env = ev.get_env(args, 'config edit')
        env = ev.active_environment()
        if env:
            scope = env.env_file_config_scope_name()


@@ -36,7 +36,7 @@ def deactivate(parser, args):
    if len(specs) != 1:
        tty.die("deactivate requires one spec.  %d given." % len(specs))

    env = ev.get_env(args, 'deactivate')
    env = ev.active_environment()
    spec = spack.cmd.disambiguate_spec(specs[0], env)
    pkg = spec.package


@@ -41,7 +41,7 @@ def dependencies(parser, args):
        tty.die("spack dependencies takes only one spec.")

    if args.installed:
        env = ev.get_env(args, 'dependencies')
        env = ev.active_environment()
        spec = spack.cmd.disambiguate_spec(specs[0], env)

        format_string = '{name}{@version}{%compiler}{/hash:7}'

@@ -82,7 +82,7 @@ def dependents(parser, args):
        tty.die("spack dependents takes only one spec.")

    if args.installed:
        env = ev.get_env(args, 'dependents')
        env = ev.active_environment()
        spec = spack.cmd.disambiguate_spec(specs[0], env)

        format_string = '{name}{@version}{%compiler}{/hash:7}'

@@ -71,7 +71,7 @@ def setup_parser(sp):


def deprecate(parser, args):
    """Deprecate one spec in favor of another"""
    env = ev.get_env(args, 'deprecate')
    env = ev.active_environment()
    specs = spack.cmd.parse_specs(args.specs)

    if len(specs) != 2:

@@ -9,7 +9,6 @@

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
from spack.error import SpackError

description = "add a spec to an environment's dev-build information"
@@ -37,7 +36,7 @@ def setup_parser(subparser):


def develop(parser, args):
    env = ev.get_env(args, 'develop', required=True)
    env = spack.cmd.require_active_env(cmd_name='develop')

    if not args.spec:
        if args.clone is False:

lib/spack/spack/cmd/diff.py (new file, 197 lines)
@@ -0,0 +1,197 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


import sys

import llnl.util.tty as tty
from llnl.util.tty.color import cprint, get_color_when

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.solver.asp as asp
import spack.util.environment
import spack.util.spack_json as sjson

description = "compare two specs"
section = "basic"
level = "long"


def setup_parser(subparser):
    arguments.add_common_arguments(
        subparser, ['specs'])

    subparser.add_argument(
        '--json',
        action='store_true',
        default=False,
        dest='dump_json',
        help="Dump json output instead of pretty printing."
    )
    subparser.add_argument(
        '--first',
        action='store_true',
        default=False,
        dest='load_first',
        help="load the first match if multiple packages match the spec"
    )
    subparser.add_argument(
        '-a', '--attribute',
        action='append',
        help="select the attributes to show (defaults to all)"
    )


def compare_specs(a, b, to_string=False, color=None):
    """
    Generate a comparison, including diffs (for each side) and an intersection.

    We can either print the result to the console, or parse
    into a json object for the user to save. We return an object that shows
    the differences, intersection, and names for a pair of specs a and b.

    Arguments:
        a (spack.spec.Spec): the first spec to compare
        b (spack.spec.Spec): the second spec to compare
        a_name (str): the name of spec a
        b_name (str): the name of spec b
        to_string (bool): return an object that can be json dumped
        color (bool): whether to format the names for the console
    """
    if color is None:
        color = get_color_when()

    # Prepare a solver setup to parse differences
    setup = asp.SpackSolverSetup()

    a_facts = set(t for t in setup.spec_clauses(a, body=True))
    b_facts = set(t for t in setup.spec_clauses(b, body=True))

    # We want to present them to the user as simple key: values
    intersect = sorted(a_facts.intersection(b_facts))
    spec1_not_spec2 = sorted(a_facts.difference(b_facts))
    spec2_not_spec1 = sorted(b_facts.difference(a_facts))

    # Format the spec names to be colored
    fmt = "{name}{@version}{/hash}"
    a_name = a.format(fmt, color=color)
    b_name = b.format(fmt, color=color)

    # We want to show what is the same, and then difference for each
    return {
        "intersect": flatten(intersect) if to_string else intersect,
        "a_not_b": flatten(spec1_not_spec2) if to_string else spec1_not_spec2,
        "b_not_a": flatten(spec2_not_spec1) if to_string else spec2_not_spec1,
        "a_name": a_name,
        "b_name": b_name,
    }


def flatten(functions):
    """
    Given a list of ASP functions, convert into a list of key: value tuples.

    We are squashing whatever is after the first index into one string for
    easier parsing in the interface
    """
    updated = []
    for fun in functions:
        updated.append([fun.name, " ".join(str(a) for a in fun.args)])
    return updated


def print_difference(c, attributes="all", out=None):
    """
    Print the difference.

    Given a diffset for A and a diffset for B, print red/green diffs to show
    the differences.
    """
    # Default to standard out unless another stream is provided
    out = out or sys.stdout

    A = c['b_not_a']
    B = c['a_not_b']

    cprint("@R{--- %s}" % c["a_name"])  # bright red
    cprint("@G{+++ %s}" % c["b_name"])  # bright green

    # Cut out early if we don't have any differences!
    if not A and not B:
        print("No differences\n")
        return

    def group_by_type(diffset):
        grouped = {}
        for entry in diffset:
            if entry[0] not in grouped:
                grouped[entry[0]] = []
            grouped[entry[0]].append(entry[1])

        # Sort by second value to make comparison slightly closer
        for key, values in grouped.items():
            values.sort()
        return grouped

    A = group_by_type(A)
    B = group_by_type(B)

    # print a directionally relevant diff
    keys = list(A) + list(B)

    category = None
    for key in keys:
        if "all" not in attributes and key not in attributes:
            continue

        # Write the attribute, B is subtraction A is addition
        subtraction = [] if key not in B else B[key]
        addition = [] if key not in A else A[key]

        # Bail out early if we don't have any entries
        if not subtraction and not addition:
            continue

        # If we have a new category, create a new section
        if category != key:
            category = key

            # print category in bold, colorized
            cprint("@*b{@@ %s @@}" % category)  # bold blue

        # Print subtractions first
        while subtraction:
            cprint("@R{- %s}" % subtraction.pop(0))  # bright red
            if addition:
                cprint("@G{+ %s}" % addition.pop(0))  # bright green

        # Any additions left?
        while addition:
            cprint("@G{+ %s}" % addition.pop(0))


def diff(parser, args):
    env = ev.active_environment()

    if len(args.specs) != 2:
        tty.die("You must provide two specs to diff.")

    specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
             for spec in spack.cmd.parse_specs(args.specs)]

    # Calculate the comparison (c)
    color = False if args.dump_json else get_color_when()
    c = compare_specs(specs[0], specs[1], to_string=True, color=color)

    # Default to all attributes
    attributes = args.attribute or ["all"]

    if args.dump_json:
        print(sjson.dump(c))
    else:
        tty.warn("This interface is subject to change.\n")
        print_difference(c, attributes)
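A hedged console sketch of the new command (specs illustrative; the
attribute names follow the ASP fact names and are assumptions here):

    $ spack diff zlib@1.2.8 zlib@1.2.11            # colored --- / +++ output
    $ spack diff --json zlib@1.2.8 zlib@1.2.11     # machine-readable comparison
    $ spack diff -a version -a variant_value hdf5/abc1234 hdf5/def5678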
@@ -6,7 +6,6 @@
import os
import shutil
import sys
from collections import namedtuple

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -89,6 +88,11 @@ def env_activate(args):
        )
        return 1

    # Error out when -e, -E, -D flags are given, cause they are ambiguous.
    if args.env or args.no_env or args.env_dir:
        tty.die('Calling spack env activate with --env, --env-dir and --no-env '
                'is ambiguous')

    if ev.exists(env) and not args.dir:
        spack_env = ev.root(env)
        short_name = env
@@ -103,12 +107,11 @@ def env_activate(args):
        tty.die("No such environment: '%s'" % env)

    if spack_env == os.environ.get('SPACK_ENV'):
        tty.die("Environment %s is already active" % args.activate_env)
        tty.debug("Environment %s is already active" % args.activate_env)
        return

    active_env = ev.get_env(namedtuple('args', ['env'])(env),
                            'activate')
    cmds = ev.activate(
        active_env, add_view=args.with_view, shell=args.shell,
        ev.Environment(spack_env), add_view=args.with_view, shell=args.shell,
        prompt=env_prompt if args.prompt else None
    )
    sys.stdout.write(cmds)
@@ -139,6 +142,11 @@ def env_deactivate(args):
        )
        return 1

    # Error out when -e, -E, -D flags are given, cause they are ambiguous.
    if args.env or args.no_env or args.env_dir:
        tty.die('Calling spack env deactivate with --env, --env-dir and --no-env '
                'is ambiguous')

    if 'SPACK_ENV' not in os.environ:
        tty.die('No environment is currently active.')

@@ -314,7 +322,7 @@ def env_view_setup_parser(subparser):


def env_view(args):
    env = ev.get_env(args, 'env view')
    env = ev.active_environment()

    if env:
        if args.action == ViewAction.regenerate:
@@ -341,7 +349,7 @@ def env_status_setup_parser(subparser):


def env_status(args):
    env = ev.get_env(args, 'env status')
    env = ev.active_environment()
    if env:
        if env.path == os.getcwd():
            tty.msg('Using %s in current directory: %s'
@@ -372,7 +380,7 @@ def env_loads_setup_parser(subparser):


def env_loads(args):
    env = ev.get_env(args, 'env loads', required=True)
    env = spack.cmd.require_active_env(cmd_name='env loads')

    # Set the module types that have been selected
    module_type = args.module_type

@@ -67,7 +67,7 @@ def extensions(parser, args):
    if not spec[0].package.extendable:
        tty.die("%s is not an extendable package." % spec[0].name)

    env = ev.get_env(args, 'extensions')
    env = ev.active_environment()
    spec = cmd.disambiguate_spec(spec[0], env)

    if not spec.package.extendable:

@@ -47,7 +47,7 @@ def fetch(parser, args):
    # fetch all uninstalled specs from it otherwise fetch all.
    # If we are also not in an environment, complain to the
    # user that we don't know what to do.
    env = ev.get_env(args, "fetch")
    env = ev.active_environment()
    if env:
        if args.missing:
            specs = env.uninstalled_specs()

@@ -205,24 +205,24 @@ def display_env(env, args, decorator):


def find(parser, args):
    q_args = query_arguments(args)
    # Query the current store or the internal bootstrap store if required
    if args.bootstrap:
        bootstrap_store_path = spack.bootstrap.store_path()
        msg = 'Showing internal bootstrap store at "{0}"'
        tty.msg(msg.format(bootstrap_store_path))
        with spack.store.use_store(bootstrap_store_path):
            results = args.specs(**q_args)
    else:
        results = args.specs(**q_args)
        with spack.bootstrap.ensure_bootstrap_configuration():
            msg = 'Showing internal bootstrap store at "{0}"'
            tty.msg(msg.format(bootstrap_store_path))
            _find(parser, args)
        return
    _find(parser, args)


def _find(parser, args):
    q_args = query_arguments(args)
    results = args.specs(**q_args)

    env = ev.active_environment()
    decorator = lambda s, f: f
    added = set()
    removed = set()

    env = ev.get_env(args, 'find')
    if env:
        decorator, added, roots, removed = setup_env(env)
        decorator, _, roots, _ = setup_env(env)

    # use groups by default except with format.
    if args.groups is None:
@@ -233,7 +233,7 @@ def find(parser, args):
        msg = "No package matches the query: {0}"
        msg = msg.format(' '.join(args.constraint))
        tty.msg(msg)
        return 1
        raise SystemExit(1)

    # If tags have been specified on the command line, filter by tags
    if args.tags:

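A hedged console sketch of the reworked query path (the store path shown is
illustrative):

    $ spack find --bootstrap
    ==> Showing internal bootstrap store at "~/.spack/bootstrap/store"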
@@ -7,7 +7,7 @@

import spack.cmd.common.arguments
import spack.cmd.uninstall
import spack.environment
import spack.environment as ev
import spack.store

description = "remove specs that are now no longer needed"
@@ -24,7 +24,7 @@ def gc(parser, args):

    # Restrict garbage collection to the active environment
    # speculating over roots that are yet to be installed
    env = spack.environment.get_env(args=None, cmd_name='gc')
    env = ev.active_environment()
    if env:
        msg = 'Restricting the garbage collection to the "{0}" environment'
        tty.msg(msg.format(env.name))

@@ -10,6 +10,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.store
from spack.graph import graph_ascii, graph_dot

@@ -35,7 +36,7 @@ def setup_parser(subparser):

    subparser.add_argument(
        '-i', '--installed', action='store_true',
        help="graph all installed specs in dot format (implies --dot)")
        help="graph installed specs, or specs in the active env (implies --dot)")

    arguments.add_common_arguments(subparser, ['deptype', 'specs'])

@@ -45,7 +46,12 @@ def graph(parser, args):
        if args.specs:
            tty.die("Can't specify specs with --installed")
        args.dot = True
        specs = spack.store.db.query()

        env = ev.active_environment()
        if env:
            specs = env.all_specs()
        else:
            specs = spack.store.db.query()

    else:
        specs = spack.cmd.parse_specs(args.specs, concretize=not args.static)

@@ -204,7 +204,7 @@ def install_specs(cli_args, kwargs, specs):
"""

# handle active environment, if any
env = ev.get_env(cli_args, 'install')
env = ev.active_environment()

try:
if env:

@@ -219,7 +219,7 @@ def install_specs(cli_args, kwargs, specs):

# If there is any ambiguity in the above call to matching_spec
# (i.e. if more than one spec in the environment matches), then
# SpackEnvironmentError is rasied, with a message listing the
# SpackEnvironmentError is raised, with a message listing the
# the matches. Getting to this point means there were either
# no matches or exactly one match.

@@ -243,7 +243,7 @@ def install_specs(cli_args, kwargs, specs):

if m_spec in env.roots() or cli_args.no_add:
# either the single match is a root spec (and --no-add is
# the default for roots) or --no-add was stated explictly
# the default for roots) or --no-add was stated explicitly
tty.debug('just install {0}'.format(m_spec.name))
specs_to_install.append(m_spec)
else:

@@ -324,10 +324,14 @@ def get_tests(specs):
else:
return False

# Parse cli arguments and construct a dictionary
# that will be passed to the package installer
update_kwargs_from_args(args, kwargs)

if not args.spec and not args.specfiles:
# if there are no args but an active environment
# then install the packages from it.
env = ev.get_env(args, 'install')
env = ev.active_environment()
if env:
tests = get_tests(env.user_specs)
kwargs['tests'] = tests

@@ -352,7 +356,7 @@ def get_tests(specs):

tty.msg("Installing environment {0}".format(env.name))
with reporter('build'):
env.install_all(args, **kwargs)
env.install_all(**kwargs)

tty.debug("Regenerating environment views for {0}"
.format(env.name))

@@ -381,10 +385,6 @@ def get_tests(specs):

if args.deprecated:
spack.config.set('config:deprecated', True, scope='command_line')

# Parse cli arguments and construct a dictionary
# that will be passed to the package installer
update_kwargs_from_args(args, kwargs)

# 1. Abstract specs from cli
abstract_specs = spack.cmd.parse_specs(args.spec)
tests = get_tests(abstract_specs)
@@ -175,7 +175,8 @@ def wrong_spdx_identifier(line, path):
if error:
return error

print('{0}: the license does not match the expected format'.format(path))
print('{0}: the license header at the top of the file does not match the \
expected format'.format(path))
return GENERAL_MISMATCH

@@ -55,7 +55,7 @@ def setup_parser(subparser):

def load(parser, args):
env = ev.get_env(args, 'load')
env = ev.active_environment()
specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
for spec in spack.cmd.parse_specs(args.specs)]
@@ -11,7 +11,6 @@

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment
import spack.environment as ev
import spack.paths
import spack.repo

@@ -73,7 +72,7 @@ def location(parser, args):
return

if args.location_env:
path = spack.environment.root(args.location_env)
path = ev.root(args.location_env)
if not os.path.isdir(path):
tty.die("no such environment: '%s'" % args.location_env)
print(path)

@@ -97,7 +96,7 @@ def location(parser, args):

# install_dir command matches against installed specs.
if args.install_dir:
env = ev.get_env(args, 'location')
env = ev.active_environment()
spec = spack.cmd.disambiguate_spec(specs[0], env)
print(spec.prefix)
return

@@ -253,7 +253,7 @@ def _determine_specs_to_mirror(args):
"To mirror all packages, use the '--all' option"
" (this will require significant time and space).")

env = ev.get_env(args, 'mirror')
env = ev.active_environment()
if env:
env_specs = env.all_specs()
else:
@@ -3,11 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
from typing import Callable, Dict  # novm

import llnl.util.tty as tty

import spack.cmd.modules.lmod
import spack.cmd.modules.tcl

@@ -18,49 +15,12 @@

_subcommands = {}  # type: Dict[str, Callable]

_deprecated_commands = ('refresh', 'find', 'rm', 'loads')

def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
spack.cmd.modules.lmod.add_command(sp, _subcommands)
spack.cmd.modules.tcl.add_command(sp, _subcommands)

for name in _deprecated_commands:
add_deprecated_command(sp, name)

def add_deprecated_command(subparser, name):
parser = subparser.add_parser(name)
parser.add_argument(
'-m', '--module-type', help=argparse.SUPPRESS,
choices=spack.modules.module_types.keys(), action='append'
)

def handle_deprecated_command(args, unknown_args):
command = args.module_command
unknown = ' '.join(unknown_args)

module_types = args.module_type or ['tcl']

msg = '`spack module {0} {1}` has moved. Use these commands instead:\n'
msg = msg.format(command, ' '.join('-m ' + x for x in module_types))
for x in module_types:
msg += '\n\t$ spack module {0} {1} {2}'.format(x, command, unknown)
msg += '\n'
tty.die(msg)

def module(parser, args, unknown_args):

# Here we permit unknown arguments to intercept deprecated calls
if args.module_command in _deprecated_commands:
handle_deprecated_command(args, unknown_args)

# Fail if unknown arguments are present, once we excluded a deprecated
# command
if unknown_args:
tty.die('unrecognized arguments: {0}'.format(' '.join(unknown_args)))

def module(parser, args):
_subcommands[args.module_command](parser, args)
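Following the string formatting in the removed `handle_deprecated_command`, invoking a deprecated subcommand such as `spack module refresh` (with the default `tcl` module type and no extra arguments) would have died with output along these lines; this transcript is reconstructed from the format strings above, not copied from a real session:

`spack module refresh -m tcl` has moved. Use these commands instead:

	$ spack module tcl refresh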
@@ -7,7 +7,6 @@

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev

description = 'remove specs from an environment'
section = "environments"

@@ -28,7 +27,7 @@ def setup_parser(subparser):

def remove(parser, args):
env = ev.get_env(args, 'remove', required=True)
env = spack.cmd.require_active_env(cmd_name='remove')

with env.write_transaction():
if args.all:
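`spack.cmd.require_active_env` is the replacement for `get_env(..., required=True)`: it hands back the active environment or aborts with the "requires an environment" hint. A rough sketch of that behavior, assumed from the old `get_env` error path rather than taken from this diff (the real helper lives in `spack/cmd/__init__.py`):

import llnl.util.tty as tty
import spack.environment as ev

def require_active_env(cmd_name):
    # Sketch: return the active environment, or die with the same
    # guidance the removed get_env(required=True) printed.
    env = ev.active_environment()
    if env:
        return env
    tty.die(
        '`spack %s` requires an environment' % cmd_name,
        'activate an environment first:',
        '    spack env activate ENV',
        'or use:',
        '    spack -e ENV %s ...' % cmd_name)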
@@ -34,7 +34,7 @@ def stage(parser, args):
spack.stage.create_stage_root(custom_path)

if not args.specs:
env = ev.get_env(args, 'stage')
env = ev.active_environment()
if env:
tty.msg("Staging specs from environment %s" % env.name)
for spec in env.specs_by_hash.values():

@@ -312,7 +312,7 @@ def run_mypy(mypy_cmd, file_list, args):
@tool("isort")
def run_isort(isort_cmd, file_list, args):
# always run with config from running spack prefix
isort_args = ("--settings-file", os.path.join(spack.paths.prefix, "pyproject.toml"))
isort_args = ("--settings-path", os.path.join(spack.paths.prefix, "pyproject.toml"))
if not args.fix:
isort_args += ("--check", "--diff")
@@ -155,7 +155,7 @@ def test_run(args):
spack.config.set('config:fail_fast', True, scope='command_line')

# Get specs to test
env = ev.get_env(args, 'test')
env = ev.active_environment()
hashes = env.all_hashes() if env else None

specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]

@@ -221,7 +221,7 @@ def test_list(args):

# TODO: This can be extended to have all of the output formatting options
# from `spack find`.
env = ev.get_env(args, 'test')
env = ev.active_environment()
hashes = env.all_hashes() if env else None

specs = spack.store.db.query(hashes=hashes)

@@ -7,7 +7,6 @@

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev

description = 'remove specs from an environment'
section = "environments"

@@ -22,7 +21,7 @@ def setup_parser(subparser):

def undevelop(parser, args):
env = ev.get_env(args, 'undevelop', required=True)
env = spack.cmd.require_active_env(cmd_name='undevelop')

if args.all:
specs = env.dev_specs.keys()

@@ -311,7 +311,7 @@ def get_uninstall_list(args, specs, env):

def uninstall_specs(args, specs):
env = ev.get_env(args, 'uninstall')
env = ev.active_environment()

uninstall_list, remove_list = get_uninstall_list(args, specs, env)
anything_to_do = set(uninstall_list).union(set(remove_list))

@@ -74,7 +74,7 @@ def verify(parser, args):

elif args.specs_or_files:
# construct disambiguated spec list
env = ev.get_env(args, 'verify')
env = ev.active_environment()
specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env,
local=local),
spec_args))
@@ -42,12 +42,7 @@
import spack.schema.projections
import spack.store
from spack.config import validate
from spack.filesystem_view import (
YamlFilesystemView,
view_copy,
view_hardlink,
view_symlink,
)
from spack.filesystem_view import YamlFilesystemView, view_func_parser
from spack.util import spack_yaml as s_yaml

description = "project packages to a compact naming scheme on the filesystem."

@@ -187,12 +182,10 @@ def view(parser, args):
ordered_projections = {}

# What method are we using for this view
if args.action in ("hardlink", "hard"):
link_fn = view_hardlink
elif args.action in ("copy", "relocate"):
link_fn = view_copy
if args.action in actions_link:
link_fn = view_func_parser(args.action)
else:
link_fn = view_symlink
link_fn = view_func_parser('symlink')

view = YamlFilesystemView(
path, spack.store.layout,

@@ -209,7 +202,7 @@ def view(parser, args):

elif args.action in actions_link:
# only link commands need to disambiguate specs
env = ev.get_env(args, 'view link')
env = ev.active_environment()
specs = [spack.cmd.disambiguate_spec(s, env) for s in specs]

elif args.action in actions_status:
@@ -98,15 +98,12 @@ def fc_pic_flag(self):
@classmethod
@llnl.util.lang.memoized
def extract_version_from_output(cls, output):
loc_ver = 'unknown'

match = re.search(
r'AMD clang version ([^ )]+)',
r'AOCC_(\d+)[._](\d+)[._](\d+)',
output
)
if match:
loc_ver = output.split('AOCC_')[1].split('-')[0]
return loc_ver
return '.'.join(match.groups())

@classmethod
def fc_version(cls, fortran_compiler):
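The new pattern captures the three AOCC version components directly instead of splitting the string on 'AOCC_'. For instance (the sample banner below is illustrative of what an AOCC compiler's `--version` output looks like, not taken from this diff):

import re

output = "AMD clang version 12.0.0 (CLANG: AOCC_3.1.0-Build#126 2021_06_07)"
match = re.search(r'AOCC_(\d+)[._](\d+)[._](\d+)', output)
if match:
    # groups are ('3', '1', '0'); joined back into a dotted version
    print('.'.join(match.groups()))  # -> 3.1.0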
@@ -33,6 +33,7 @@
import spack.abi
import spack.architecture
import spack.compilers
import spack.environment
import spack.error
import spack.repo
import spack.spec

@@ -66,7 +67,7 @@ def concretize_develop(self, spec):
"""
Add ``dev_path=*`` variant to packages built from local source.
"""
env = spack.environment.get_env(None, None)
env = spack.environment.active_environment()
dev_info = env.dev_specs.get(spec.name, {}) if env else {}
if not dev_info:
return False

@@ -536,7 +536,7 @@ def update_config(self, section, update_data, scope=None, force=False):
msg = ('The "{0}" section of the configuration needs to be written'
' to disk, but is currently using a deprecated format. '
'Please update it using:\n\n'
'\tspack config [--scope=<scope] update {0}\n\n'
'\tspack config [--scope=<scope>] update {0}\n\n'
'Note that previous versions of Spack will not be able to '
'use the updated configuration.')
msg = msg.format(section)
@@ -1238,11 +1238,12 @@ def use_configuration(*scopes_or_paths):

saved_config, config = config, configuration

yield configuration

# Restore previous config files
spack.compilers._cache_config_file = saved_compiler_cache
config = saved_config
try:
yield configuration
finally:
# Restore previous config files
spack.compilers._cache_config_file = saved_compiler_cache
config = saved_config

@llnl.util.lang.memoized
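Moving the `yield` into a `try`/`finally` is what makes the restoration unconditional: before this change, an exception raised inside the `with use_configuration(...)` block skipped the cleanup lines entirely and left the swapped-in configuration live. A self-contained sketch of the pattern:

import contextlib

_state = 'original'

@contextlib.contextmanager
def use_state(temporary):
    global _state
    saved, _state = _state, temporary
    try:
        yield _state
    finally:
        # runs even if the with-block raises, so _state is always restored
        _state = saved

try:
    with use_state('temporary'):
        raise RuntimeError('boom')
except RuntimeError:
    pass
assert _state == 'original'  # restored despite the exception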
@@ -7,7 +7,7 @@
"""
import warnings

import spack.environment
import spack.environment as ev
import spack.schema.env as env
import spack.util.spack_yaml as syaml

@@ -36,7 +36,7 @@ def validate(configuration_file):
config = syaml.load(f)

# Ensure we have a "container" attribute with sensible defaults set
env_dict = spack.environment.config_dict(config)
env_dict = ev.config_dict(config)
env_dict.setdefault('container', {
'format': 'docker',
'images': {'os': 'ubuntu:18.04', 'spack': 'develop'}

@@ -8,7 +8,7 @@
import collections
import copy

import spack.environment
import spack.environment as ev
import spack.schema.env
import spack.tengine as tengine
import spack.util.spack_yaml as syaml

@@ -37,7 +37,7 @@ def create(configuration):
Args:
configuration: how to generate the current recipe
"""
name = spack.environment.config_dict(configuration)['container']['format']
name = ev.config_dict(configuration)['container']['format']
return _writer_factory[name](configuration)

@@ -56,7 +56,7 @@ class PathContext(tengine.Context):
directly via PATH.
"""
def __init__(self, config):
self.config = spack.environment.config_dict(config)
self.config = ev.config_dict(config)
self.container_config = self.config['container']

@tengine.context_property
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import contextlib
import copy
import os
import re

@@ -34,7 +35,12 @@
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
from spack.filesystem_view import YamlFilesystemView
from spack.filesystem_view import (
YamlFilesystemView,
inverse_view_func_parser,
view_func_parser,
)
from spack.installer import PackageInstaller
from spack.spec import Spec
from spack.spec_list import InvalidSpecConstraintError, SpecList
from spack.util.path import substitute_path_variables

@@ -259,100 +265,9 @@ def deactivate(shell='sh'):
return cmds

def find_environment(args):
"""Find active environment from args, spack.yaml, or environment variable.

This is called in ``spack.main`` to figure out which environment to
activate.

Check for an environment in this order:
1. via ``spack -e ENV`` or ``spack -D DIR`` (arguments)
2. as a spack.yaml file in the current directory, or
3. via a path in the SPACK_ENV environment variable.

If an environment is found, read it in. If not, return None.

Arguments:
args (argparse.Namespace): argparse namespace wtih command arguments

Returns:
(Environment): a found environment, or ``None``
"""
# try arguments
env = getattr(args, 'env', None)

# treat env as a name
if env:
if exists(env):
return read(env)

else:
# if env was specified, see if it is a dirctory otherwise, look
# at env_dir (env and env_dir are mutually exclusive)
env = getattr(args, 'env_dir', None)

# if no argument, look for the environment variable
if not env:
env = os.environ.get(spack_env_var)

# nothing was set; there's no active environment
if not env:
return None

# if we get here, env isn't the name of a spack environment; it has
# to be a path to an environment, or there is something wrong.
if is_env_dir(env):
return Environment(env)

raise SpackEnvironmentError('no environment in %s' % env)

def get_env(args, cmd_name, required=False):
"""Used by commands to get the active environment.

This first checks for an ``env`` argument, then looks at the
``active`` environment. We check args first because Spack's
subcommand arguments are parsed *after* the ``-e`` and ``-D``
arguments to ``spack``. So there may be an ``env`` argument that is
*not* the active environment, and we give it precedence.

This is used by a number of commands for determining whether there is
an active environment.

If an environment is not found *and* is required, print an error
message that says the calling command *needs* an active environment.

Arguments:
args (argparse.Namespace): argparse namespace wtih command arguments
cmd_name (str): name of calling command
required (bool): if ``True``, raise an exception when no environment
is found; if ``False``, just return ``None``

Returns:
(Environment): if there is an arg or active environment
"""
# try argument first
env = getattr(args, 'env', None)
if env:
if exists(env):
return read(env)
elif is_env_dir(env):
return Environment(env)
else:
raise SpackEnvironmentError('no environment in %s' % env)

# try the active environment. This is set by find_environment() (above)
if _active_environment:
return _active_environment
elif not required:
return None
else:
tty.die(
'`spack %s` requires an environment' % cmd_name,
'activate an environment first:',
' spack env activate ENV',
'or use:',
' spack -e ENV %s ...' % cmd_name)
def active_environment():
"""Returns the active environment when there is any"""
return _active_environment

def _root(name):
@@ -456,12 +371,13 @@ def _eval_conditional(string):

class ViewDescriptor(object):
def __init__(self, base_path, root, projections={}, select=[], exclude=[],
link=default_view_link):
link=default_view_link, link_type='symlink'):
self.base = base_path
self.root = spack.util.path.canonicalize_path(root)
self.projections = projections
self.select = select
self.exclude = exclude
self.link_type = view_func_parser(link_type)
self.link = link

def select_fn(self, spec):

@@ -475,7 +391,8 @@ def __eq__(self, other):
self.projections == other.projections,
self.select == other.select,
self.exclude == other.exclude,
self.link == other.link])
self.link == other.link,
self.link_type == other.link_type])

def to_dict(self):
ret = syaml.syaml_dict([('root', self.root)])

@@ -490,6 +407,8 @@ def to_dict(self):
ret['select'] = self.select
if self.exclude:
ret['exclude'] = self.exclude
if self.link_type:
ret['link_type'] = inverse_view_func_parser(self.link_type)
if self.link != default_view_link:
ret['link'] = self.link
return ret

@@ -501,7 +420,8 @@ def from_dict(base_path, d):
d.get('projections', {}),
d.get('select', []),
d.get('exclude', []),
d.get('link', default_view_link))
d.get('link', default_view_link),
d.get('link_type', 'symlink'))

@property
def _current_root(self):

@@ -565,7 +485,8 @@ def view(self, new=None):
raise SpackEnvironmentViewError(msg)
return YamlFilesystemView(root, spack.store.layout,
ignore_conflicts=True,
projections=self.projections)
projections=self.projections,
link=self.link_type)

def __contains__(self, spec):
"""Is the spec described by the view descriptor
@@ -1180,6 +1101,10 @@ def undevelop(self, spec):
return True
return False

def is_develop(self, spec):
"""Returns true when the spec is built from local sources"""
return spec.name in self.dev_specs

def concretize(self, force=False, tests=False):
"""Concretize user_specs in this environment.

@@ -1546,21 +1471,18 @@ def uninstalled_specs(self):
uninstalled_specs.append(spec)
return uninstalled_specs

def install_all(self, args=None, **install_args):
def install_all(self, **install_args):
"""Install all concretized specs in an environment.

Note: this does not regenerate the views for the environment;
that needs to be done separately with a call to write().

Args:
args (argparse.Namespace): argparse namespace with command arguments
install_args (dict): keyword install arguments
"""
self.install_specs(None, args=args, **install_args)

def install_specs(self, specs=None, args=None, **install_args):
from spack.installer import PackageInstaller
self.install_specs(None, **install_args)

def install_specs(self, specs=None, **install_args):
tty.debug('Assessing installation status of environment packages')
# If "spack install" is invoked repeatedly for a large environment
# where all specs are already installed, the operation can take

@@ -1594,15 +1516,7 @@ def install_specs(self, specs=None, args=None, **install_args):

installs = []
for spec in specs_to_install:
# Parse cli arguments and construct a dictionary
# that will be passed to the package installer
kwargs = dict()
if install_args:
kwargs.update(install_args)
if args:
spack.cmd.install.update_kwargs_from_args(args, kwargs)

installs.append((spec.package, kwargs))
installs.append((spec.package, install_args))

try:
builder = PackageInstaller(installs)
@@ -1689,7 +1603,22 @@ def matching_spec(self, spec):
# Dependency-only specs will have value None
matches = {}

if not isinstance(spec, spack.spec.Spec):
spec = spack.spec.Spec(spec)

for user_spec, concretized_user_spec in self.concretized_specs():
# Deal with concrete specs differently
if spec.concrete:
# Matching a concrete spec is more restrictive
# than just matching the dag hash
is_match = (
spec in concretized_user_spec and
concretized_user_spec[spec.name].build_hash() == spec.build_hash()
)
if is_match:
matches[spec] = spec
continue

if concretized_user_spec.satisfies(spec):
matches[concretized_user_spec] = user_spec
for dep_spec in concretized_user_spec.traverse(root=False):

@@ -2216,6 +2145,17 @@ def is_latest_format(manifest):
return not changed

@contextlib.contextmanager
def deactivate_environment():
"""Deactivate an active environment for the duration of the context."""
global _active_environment
current, _active_environment = _active_environment, None
try:
yield
finally:
_active_environment = current

class SpackEnvironmentError(spack.error.SpackError):
"""Superclass for all errors to do with Spack environments."""
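The new `deactivate_environment` helper applies the same try/finally discipline to the module-level `_active_environment` global. Its usage is the usual context-manager shape; a brief sketch:

import spack.environment as ev

# Temporarily pretend no environment is active, e.g. so a query hits
# the global store instead of the environment's own specs.
with ev.deactivate_environment():
    assert ev.active_environment() is None
# the previously active environment (if any) is restored here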
@@ -338,7 +338,7 @@ def fetch(self):
def _existing_url(self, url):
tty.debug('Checking existence of {0}'.format(url))

if spack.config.get('config:use_curl'):
if spack.config.get('config:url_fetch_method') == 'curl':
curl = self.curl
# Telling curl to fetch the first byte (-r 0-0) is supposed to be
# portable.

@@ -357,7 +357,7 @@ def _existing_url(self, url):
return (response.getcode() is None or response.getcode() == 200)

def _fetch_from_url(self, url):
if spack.config.get('config:use_curl'):
if spack.config.get('config:url_fetch_method') == 'curl':
return self._fetch_curl(url)
else:
return self._fetch_urllib(url)
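The boolean `config:use_curl` flag becomes the string-valued `config:url_fetch_method` ('curl' selects the external curl executable; anything else keeps the urllib path), so the dispatch is a string comparison rather than a truthiness test. A small sketch of flipping the setting for the current process, reusing the `command_line` scope seen elsewhere in this diff:

import spack.config

# Select curl for this process only; a YAML scope would set the same
# key under config: in its configuration file.
spack.config.set('config:url_fetch_method', 'curl', scope='command_line')
assert spack.config.get('config:url_fetch_method') == 'curl'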
@@ -98,6 +98,29 @@ def view_copy(src, dst, view, spec=None):
)

def view_func_parser(parsed_name):
# What method are we using for this view
if parsed_name in ("hardlink", "hard"):
return view_hardlink
elif parsed_name in ("copy", "relocate"):
return view_copy
elif parsed_name in ("add", "symlink", "soft"):
return view_symlink
else:
raise ValueError("invalid link type for view: '%s'" % parsed_name)

def inverse_view_func_parser(view_type):
# get string based on view type
if view_type is view_hardlink:
link_name = 'hardlink'
elif view_type is view_copy:
link_name = 'copy'
else:
link_name = 'symlink'
return link_name

class FilesystemView(object):
"""
Governs a filesystem view that is located at certain root-directory.
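`view_func_parser` and `inverse_view_func_parser` are near-inverses: every accepted name maps to one of the three link functions, and mapping back yields the canonical spelling ('hard' -> view_hardlink -> 'hardlink'). A quick sketch directly from the definitions above:

from spack.filesystem_view import (
    inverse_view_func_parser,
    view_func_parser,
)

for name in ('hardlink', 'hard', 'copy', 'relocate', 'add', 'symlink', 'soft'):
    fn = view_func_parser(name)
    # round-tripping collapses aliases to the canonical name
    print(name, '->', inverse_view_func_parser(fn))

# unknown names are rejected
try:
    view_func_parser('junction')
except ValueError as e:
    print(e)  # invalid link type for view: 'junction'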
@@ -550,11 +550,21 @@ def dynamic_graph(spec, deptypes):
out.write(' style="rounded,filled"')
out.write(' ]\n')

# write nodes
out.write('\n')
for key, label in nodes:
out.write(' "%s" [label="%s"]\n' % (key, label))

# write edges
out.write('\n')
for src, dest in edges:
out.write(' "%s" -> "%s"\n' % (src, dest))

# ensure that roots are all at the top of the plot
dests = set([d for _, d in edges])
roots = ['"%s"' % k for k, _ in nodes if k not in dests]
out.write('\n')
out.write(' { rank=min; %s; }' % "; ".join(roots))

out.write('\n')
out.write('}\n')

@@ -37,8 +37,8 @@ def _for_each_enabled(spec, method_name):

def post_install(spec):
import spack.environment  # break import cycle
if spack.environment.get_env({}, ''):
import spack.environment as ev  # break import cycle
if ev.active_environment():
# If the installed through an environment, we skip post_install
# module generation and generate the modules on env_write so Spack
# can manage interactions between env views and modules

@@ -12,7 +12,6 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.modules
import spack.paths
import spack.store

@@ -54,6 +54,7 @@
import spack.package_prefs as prefs
import spack.repo
import spack.store
import spack.util.executable
from spack.util.environment import dump_environment
from spack.util.executable import which
from spack.util.timer import Timer
@@ -144,14 +145,8 @@ def _handle_external_and_upstream(pkg, explicit):

def _do_fake_install(pkg):
"""Make a fake install directory with fake executables, headers, and libraries.
"""
Make a fake install directory containing fake executables, headers,
and libraries.

Args:
pkg (spack.package.PackageBase): the package whose installation is to be faked
"""

command = pkg.name
header = pkg.name
library = pkg.name

@@ -372,8 +367,13 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,

pkg_id = package_id(pkg)
tty.msg('Extracting {0} from binary cache'.format(pkg_id))
binary_distribution.extract_tarball(binary_spec, tarball, allow_root=False,
unsigned=unsigned, force=False)

# don't print long padded paths while extracting/relocating binaries
with spack.util.path.filter_padding():
binary_distribution.extract_tarball(
binary_spec, tarball, allow_root=False, unsigned=unsigned, force=False
)

pkg.installed_from_binary_cache = True
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
return True

@@ -1168,10 +1168,9 @@ def _install_task(self, task):
except spack.build_environment.StopPhase as e:
# A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point
pid = '{0}: '.format(pkg.pid) if tty.show_pid() else ''
pid = '{0}: '.format(self.pid) if tty.show_pid() else ''
tty.debug('{0}{1}'.format(pid, str(e)))
tty.debug('Package stage directory: {0}'
.format(pkg.stage.source_path))
tty.debug('Package stage directory: {0}' .format(pkg.stage.source_path))

def _next_is_pri0(self):
"""

@@ -1570,6 +1569,9 @@ def install(self):
if os.path.exists(rec.path):
with fs.replace_directory_transaction(
rec.path):
# fs transaction will put the old prefix
# back on failure, so make sure to keep it.
keep_prefix = True
self._install_task(task)
else:
tty.debug("Missing installation to overwrite")
@@ -1678,152 +1680,222 @@ def install(self):
'reported errors for failing package(s).')

def build_process(pkg, kwargs):
class BuildProcessInstaller(object):
"""This class implements the part installation that happens in the child process."""

def __init__(self, pkg, install_args):
"""Create a new BuildProcessInstaller.

It is assumed that the lifecycle of this object is the same as the child
process in the build.

Arguments:
pkg (spack.package.PackageBase) the package being installed.
install_args (dict) arguments to do_install() from parent process.

"""
self.pkg = pkg

# whether to do a fake install
self.fake = install_args.get('fake', False)

# whether to install source code with the packag
self.install_source = install_args.get('install_source', False)

# whether to keep the build stage after installation
self.keep_stage = install_args.get('keep_stage', False)

# whether to skip the patch phase
self.skip_patch = install_args.get('skip_patch', False)

# whether to enable echoing of build output initially or not
self.verbose = install_args.get('verbose', False)

# env before starting installation
self.unmodified_env = install_args.get('unmodified_env', {})

# timer for build phases
self.timer = Timer()

# If we are using a padded path, filter the output to compress padded paths
# The real log still has full-length paths.
filter_padding = spack.config.get("config:install_tree:padded_length", None)
self.filter_fn = spack.util.path.padding_filter if filter_padding else None

# info/debug information
pid = '{0}: '.format(os.getpid()) if tty.show_pid() else ''
self.pre = '{0}{1}:'.format(pid, pkg.name)
self.pkg_id = package_id(pkg)

def run(self):
"""Main entry point from ``build_process`` to kick off install in child."""

if not self.fake:
if not self.skip_patch:
self.pkg.do_patch()
else:
self.pkg.do_stage()

tty.debug(
'{0} Building {1} [{2}]' .format(
self.pre,
self.pkg_id,
self.pkg.build_system_class
)
)

# get verbosity from do_install() parameter or saved value
self.echo = self.verbose
if spack.package.PackageBase._verbose is not None:
self.echo = spack.package.PackageBase._verbose

self.pkg.stage.keep = self.keep_stage

with self.pkg.stage:
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self.pkg.spec)
if self.fake:
_do_fake_install(self.pkg)
else:
if self.install_source:
self._install_source()

self._real_install()

# Stop the timer and save results
self.timer.stop()
with open(self.pkg.times_log_path, 'w') as timelog:
self.timer.write_json(timelog)

# Run post install hooks before build stage is removed.
spack.hooks.post_install(self.pkg.spec)

build_time = self.timer.total - self.pkg._fetch_time
tty.msg('{0} Successfully installed {1}'.format(self.pre, self.pkg_id),
'Fetch: {0}. Build: {1}. Total: {2}.'
.format(_hms(self.pkg._fetch_time), _hms(build_time),
_hms(self.timer.total)))
_print_installed_pkg(self.pkg.prefix)

# Send final status that install is successful
spack.hooks.on_install_success(self.pkg.spec)

# preserve verbosity across runs
return self.echo

def _install_source(self):
"""Install source code from stage into share/pkg/src if necessary."""
pkg = self.pkg
if not os.path.isdir(pkg.stage.source_path):
return

src_target = os.path.join(pkg.spec.prefix, 'share', pkg.name, 'src')
tty.debug('{0} Copying source to {1}' .format(self.pre, src_target))

fs.install_tree(pkg.stage.source_path, src_target)

def _real_install(self):
pkg = self.pkg

# Do the real install in the source directory.
with fs.working_dir(pkg.stage.source_path):
# Save the build environment in a file before building.
dump_environment(pkg.env_path)

for attr in ('configure_args', 'cmake_args'):
try:
configure_args = getattr(pkg, attr)()
configure_args = ' '.join(configure_args)

with open(pkg.configure_args_path, 'w') as \
args_file:
args_file.write(configure_args)

break
except Exception:
pass

# cache debug settings
debug_level = tty.debug_level()

# Spawn a daemon that reads from a pipe and redirects
# everything to log_path, and provide the phase for logging
for i, (phase_name, phase_attr) in enumerate(zip(
pkg.phases, pkg._InstallPhase_phases)):

# Keep a log file for each phase
log_dir = os.path.dirname(pkg.log_path)
log_file = "spack-build-%02d-%s-out.txt" % (
i + 1, phase_name.lower()
)
log_file = os.path.join(log_dir, log_file)

try:
# DEBUGGING TIP - to debug this section, insert an IPython
# embed here, and run the sections below without log capture
log_contextmanager = log_output(
log_file,
self.echo,
True,
env=self.unmodified_env,
filter_fn=self.filter_fn
)

with log_contextmanager as logger:
with logger.force_echo():
inner_debug_level = tty.debug_level()
tty.set_debug(debug_level)
tty.msg(
"{0} Executing phase: '{1}'" .format(
self.pre,
phase_name
)
)
tty.set_debug(inner_debug_level)

# Redirect stdout and stderr to daemon pipe
phase = getattr(pkg, phase_attr)
self.timer.phase(phase_name)

# Catch any errors to report to logging
phase(pkg.spec, pkg.prefix)
spack.hooks.on_phase_success(pkg, phase_name, log_file)

except BaseException:
combine_phase_logs(pkg.phase_log_files, pkg.log_path)
spack.hooks.on_phase_error(pkg, phase_name, log_file)
raise

# We assume loggers share echo True/False
self.echo = logger.echo

# After log, we can get all output/error files from the package stage
combine_phase_logs(pkg.phase_log_files, pkg.log_path)
log(pkg)

def build_process(pkg, install_args):
"""Perform the installation/build of the package.

This runs in a separate child process, and has its own process and
python module space set up by build_environment.start_build_process().

This essentially wraps an instance of ``BuildProcessInstaller`` so that we can
more easily create one in a subprocess.

This function's return value is returned to the parent process.

Arguments:
pkg (spack.package.PackageBase): the package being installed.
install_args (dict): arguments to do_install() from parent process.

"""
fake = kwargs.get('fake', False)
install_source = kwargs.get('install_source', False)
keep_stage = kwargs.get('keep_stage', False)
skip_patch = kwargs.get('skip_patch', False)
unmodified_env = kwargs.get('unmodified_env', {})
verbose = kwargs.get('verbose', False)
installer = BuildProcessInstaller(pkg, install_args)

timer = Timer()

# If we are using a padded path, filter the output to compress padded paths
# The real log still has full-length paths.
filter_padding = spack.config.get("config:install_tree:padded_length", None)
filter_fn = spack.util.path.padding_filter if filter_padding else None

if not fake:
if not skip_patch:
pkg.do_patch()
else:
pkg.do_stage()

pid = '{0}: '.format(pkg.pid) if tty.show_pid() else ''
pre = '{0}{1}:'.format(pid, pkg.name)
pkg_id = package_id(pkg)

tty.debug('{0} Building {1} [{2}]'
.format(pre, pkg_id, pkg.build_system_class))

# get verbosity from do_install() parameter or saved value
echo = verbose
if spack.package.PackageBase._verbose is not None:
echo = spack.package.PackageBase._verbose

pkg.stage.keep = keep_stage

with pkg.stage:
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(pkg.spec)
if fake:
_do_fake_install(pkg)
else:
source_path = pkg.stage.source_path
if install_source and os.path.isdir(source_path):
src_target = os.path.join(pkg.spec.prefix, 'share',
pkg.name, 'src')
tty.debug('{0} Copying source to {1}'
.format(pre, src_target))
fs.install_tree(pkg.stage.source_path, src_target)

# Do the real install in the source directory.
with fs.working_dir(pkg.stage.source_path):

# Save the build environment in a file before building.
dump_environment(pkg.env_path)

for attr in ('configure_args', 'cmake_args'):
try:
configure_args = getattr(pkg, attr)()
configure_args = ' '.join(configure_args)

with open(pkg.configure_args_path, 'w') as \
args_file:
args_file.write(configure_args)

break
except Exception:
pass

# cache debug settings
debug_level = tty.debug_level()

# Spawn a daemon that reads from a pipe and redirects
# everything to log_path, and provide the phase for logging
for i, (phase_name, phase_attr) in enumerate(zip(
pkg.phases, pkg._InstallPhase_phases)):

# Keep a log file for each phase
log_dir = os.path.dirname(pkg.log_path)
log_file = "spack-build-%02d-%s-out.txt" % (
i + 1, phase_name.lower()
)
log_file = os.path.join(log_dir, log_file)

try:
# DEBUGGING TIP - to debug this section, insert an IPython
# embed here, and run the sections below without log capture
with log_output(
log_file, echo, True, env=unmodified_env,
filter_fn=filter_fn
) as logger:

with logger.force_echo():
inner_debug_level = tty.debug_level()
tty.set_debug(debug_level)
tty.msg("{0} Executing phase: '{1}'"
.format(pre, phase_name))
tty.set_debug(inner_debug_level)

# Redirect stdout and stderr to daemon pipe
phase = getattr(pkg, phase_attr)
timer.phase(phase_name)

# Catch any errors to report to logging
phase(pkg.spec, pkg.prefix)
spack.hooks.on_phase_success(pkg, phase_name, log_file)

except BaseException:
combine_phase_logs(pkg.phase_log_files, pkg.log_path)
spack.hooks.on_phase_error(pkg, phase_name, log_file)
raise

# We assume loggers share echo True/False
echo = logger.echo

# After log, we can get all output/error files from the package stage
combine_phase_logs(pkg.phase_log_files, pkg.log_path)
log(pkg)

# Stop the timer and save results
timer.stop()
with open(pkg.times_log_path, 'w') as timelog:
timer.write_json(timelog)

# Run post install hooks before build stage is removed.
spack.hooks.post_install(pkg.spec)

build_time = timer.total - pkg._fetch_time
tty.msg('{0} Successfully installed {1}'.format(pre, pkg_id),
'Fetch: {0}. Build: {1}. Total: {2}.'
.format(_hms(pkg._fetch_time), _hms(build_time),
_hms(timer.total)))
_print_installed_pkg(pkg.prefix)

# Send final status that install is successful
spack.hooks.on_install_success(pkg.spec)

# preserve verbosity across runs
return echo
# don't print long padded paths in executable debug output.
with spack.util.path.filter_padding():
return installer.run()

class BuildTask(object):
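The net effect of this hunk is that `build_process` shrinks to a thin wrapper: all of the inline logic moves onto `BuildProcessInstaller`, and the remaining body is just the two steps at the end of the hunk. Per the new docstring, the parent still reaches it through `build_environment.start_build_process()`; a hedged sketch of that call shape (the wrapper function name and exact argument order here are assumptions, not taken from this diff):

import spack.build_environment

def do_install_in_child(pkg, install_args):
    # start_build_process forks a child with its own module space and
    # runs build_process(pkg, install_args) there; the child's return
    # value (the final echo setting) comes back to the parent.
    return spack.build_environment.start_build_process(
        pkg, build_process, install_args)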
@@ -27,6 +27,7 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.colify
import llnl.util.tty.color as color
from llnl.util.tty.log import log_output

@@ -173,14 +174,16 @@ def _format_actions_usage(self, actions, groups):
usage = super(
SpackHelpFormatter, self)._format_actions_usage(actions, groups)

# Eliminate any occurrence of two or more consecutive spaces
usage = re.sub(r'[ ]{2,}', ' ', usage)

# compress single-character flags that are not mutually exclusive
# at the beginning of the usage string
chars = ''.join(re.findall(r'\[-(.)\]', usage))
usage = re.sub(r'\[-.\] ?', '', usage)
if chars:
return '[-%s] %s' % (chars, usage)
else:
return usage
usage = '[-%s] %s' % (chars, usage)
return usage.strip()

class SpackArgumentParser(argparse.ArgumentParser):

@@ -293,7 +296,18 @@ def add_subcommand_group(title, commands):
def add_subparsers(self, **kwargs):
"""Ensure that sensible defaults are propagated to subparsers"""
kwargs.setdefault('metavar', 'SUBCOMMAND')

# From Python 3.7 we can require a subparser, earlier versions
# of argparse will error because required=True is unknown
if sys.version_info[:2] > (3, 6):
kwargs.setdefault('required', True)

sp = super(SpackArgumentParser, self).add_subparsers(**kwargs)
# This monkey patching is needed for Python 3.5 and 3.6, which support
# having a required subparser but don't expose the API used above
if sys.version_info[:2] == (3, 5) or sys.version_info[:2] == (3, 6):
sp.required = True

old_add_parser = sp.add_parser

def add_parser(name, **kwargs):
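The version split matters because argparse only accepts a `required=` keyword for `add_subparsers` from Python 3.7 onward; on 3.5/3.6 the attribute exists but can only be set after the fact. A compact sketch of the same compatibility dance:

import argparse
import sys

parser = argparse.ArgumentParser(prog='demo')
kwargs = {'dest': 'command', 'metavar': 'SUBCOMMAND'}
if sys.version_info[:2] > (3, 6):
    kwargs['required'] = True   # accepted from Python 3.7 onward
sp = parser.add_subparsers(**kwargs)
if sys.version_info[:2] <= (3, 6):
    sp.required = True          # older argparse: set the attribute directly
sp.add_parser('run')
# parser.parse_args([]) now errors out instead of silently doing nothing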
@@ -336,6 +350,15 @@ def format_help(self, level='short'):
# in subparsers, self.prog is, e.g., 'spack install'
return super(SpackArgumentParser, self).format_help()

def _check_value(self, action, value):
# converted value must be one of the choices (if specified)
if action.choices is not None and value not in action.choices:
cols = llnl.util.tty.colify.colified(
sorted(action.choices), indent=4, tty=True
)
msg = 'invalid choice: %r choose from:\n%s' % (value, cols)
raise argparse.ArgumentError(action, msg)

def make_argument_parser(**kwargs):
"""Create an basic argument parser without any subcommands added."""

@@ -720,7 +743,7 @@ def main(argv=None):

# activate an environment if one was specified on the command line
if not args.no_env:
env = ev.find_environment(args)
env = spack.cmd.find_environment(args)
if env:
ev.activate(env, args.use_env_repo, add_view=False)
@@ -698,7 +698,11 @@ def environment_modifications(self):
if use_view is True:
use_view = ev.default_view_name

env = ev.get_env({}, 'post_env_write_hook', required=True)
env = ev.active_environment()
if not env:
raise ev.SpackEnvironmentViewError("Module generation with views "
"requires active environment")

view = env.views[use_view]

spec.prefix = view.get_projection_for_spec(spec)

@@ -87,7 +87,7 @@ def get_monitor_group(subparser):
monitor_group = subparser.add_argument_group()
monitor_group.add_argument(
'--monitor', action='store_true', dest='use_monitor', default=False,
help="interact with a montor server during builds.")
help="interact with a monitor server during builds.")
monitor_group.add_argument(
'--monitor-save-local', action='store_true', dest='monitor_save_local',
default=False, help="save monitor results to .spack instead of server.")
@@ -41,6 +41,7 @@
import spack.dependency
import spack.directives
import spack.directory_layout
import spack.environment
import spack.error
import spack.fetch_strategy as fs
import spack.hooks

@@ -1251,18 +1252,14 @@ def installed(self):
Returns:
True if the package has been installed, False otherwise.
"""
has_prefix = os.path.isdir(self.prefix)
try:
# If the spec is in the DB, check the installed
# attribute of the record
rec = spack.store.db.get_record(self.spec)
db_says_installed = rec.installed
return spack.store.db.get_record(self.spec).installed
except KeyError:
# If the spec is not in the DB, the method
# above raises a Key error
db_says_installed = False

return has_prefix and db_says_installed
return False

@property
def prefix(self):

@@ -1537,7 +1534,9 @@ def content_hash(self, content=None):
# should this attempt to download the source and set one? This
# probably only happens for source repositories which are
# referenced by branch name rather than tag or commit ID.
if not self.spec.external:
env = spack.environment.active_environment()
from_local_sources = env and env.is_develop(self.spec)
if not self.spec.external and not from_local_sources:
message = 'Missing a source id for {s.name}@{s.version}'
tty.warn(message.format(s=self))
hash_content.append(''.encode('utf-8'))
@@ -16,7 +16,6 @@
import llnl.util.tty as tty

import spack.architecture
import spack.cmd
import spack.repo
import spack.spec
import spack.util.executable as executable

@@ -88,7 +87,8 @@ def _patchelf():
return patchelf.path

# Check if patchelf spec is installed
spec = spack.spec.Spec('patchelf').concretized()
spec = spack.spec.Spec('patchelf')
spec._old_concretize()
exe_path = os.path.join(spec.prefix.bin, "patchelf")
if spec.package.installed and os.path.exists(exe_path):
return exe_path

@@ -1299,19 +1299,24 @@ def use_repositories(*paths_and_repos):
"""
global path

remove_from_meta = None

# Construct a temporary RepoPath object from
temporary_repositories = RepoPath(*paths_and_repos)

# Swap the current repository out
saved = path
remove_from_meta = set_path(temporary_repositories)

yield temporary_repositories
try:
remove_from_meta = set_path(temporary_repositories)

# Restore _path and sys.meta_path
if remove_from_meta:
sys.meta_path.remove(temporary_repositories)
path = saved
yield temporary_repositories

finally:
# Restore _path and sys.meta_path
if remove_from_meta:
sys.meta_path.remove(temporary_repositories)
path = saved

class RepoError(spack.error.SpackError):
Some files were not shown because too many files have changed in this diff.