Compare commits
39 Commits
Author and date columns were not preserved in this capture; the 39 commit SHA1s are:

f70d2c7ccb, 62d18d9af7, 8824040eda, d744b83584, 14601b59ef, 8d7360106f, 881f184eab, 63bec85a24, 159975a561, 25ba7d071a, 54b79d5661, 235f93c241, a73c5ffb0b, ce53ce284b, ae68318475, bd63c19b94, 3f3bcacd16, d349d4ab0b, e61b3c96f6, bcbb0a3b85, 3c556ab318, 563ae5188e, f4b3561f71, 0748a1b290, 0bfece0c5e, 6e9b16279a, cf71baff30, b9831acb44, 5f2edb1860, 8b6809cf66, a93a4274aa, 59bb42ee7f, 11d382bff1, 57889ec446, 66bda49c44, 94a2ab5359, 6d0044f703, 3fa447a84a, d501ce0c7e
.github/workflows/bootstrap.yml (vendored, 11 changed lines)
@@ -24,7 +24,6 @@ jobs:
fedora-clingo-sources:
runs-on: ubuntu-latest
container: "fedora:latest"
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |
@@ -58,7 +57,6 @@ jobs:
ubuntu-clingo-sources:
runs-on: ubuntu-latest
container: "ubuntu:latest"
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
env:
@@ -95,7 +93,6 @@ jobs:
ubuntu-clingo-binaries-and-patchelf:
runs-on: ubuntu-latest
container: "ubuntu:latest"
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
env:
@@ -129,7 +126,6 @@ jobs:
opensuse-clingo-sources:
runs-on: ubuntu-latest
container: "opensuse/leap:latest"
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |
@@ -158,7 +154,6 @@ jobs:

macos-clingo-sources:
runs-on: macos-latest
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |
@@ -179,7 +174,6 @@ jobs:
strategy:
matrix:
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |
@@ -201,7 +195,6 @@ jobs:
strategy:
matrix:
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
-if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
@@ -223,7 +216,6 @@ jobs:
ubuntu-gnupg-binaries:
runs-on: ubuntu-latest
container: "ubuntu:latest"
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
env:
@@ -258,7 +250,6 @@ jobs:
ubuntu-gnupg-sources:
runs-on: ubuntu-latest
container: "ubuntu:latest"
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
env:
@@ -294,7 +285,6 @@ jobs:

macos-gnupg-binaries:
runs-on: macos-latest
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |
@@ -312,7 +302,6 @@ jobs:

macos-gnupg-sources:
runs-on: macos-latest
-if: github.repository == 'spack/spack'
steps:
- name: Install dependencies
run: |
.github/workflows/build-containers.yml (vendored, 5 changed lines)
@@ -43,7 +43,6 @@ jobs:
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
name: Build ${{ matrix.dockerfile[0] }}
-if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
@@ -76,7 +75,7 @@ jobs:
fi

- name: Upload Dockerfile
-uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
with:
name: dockerfiles
path: dockerfiles
@@ -95,7 +94,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Log in to DockerHub
-if: github.event_name != 'pull_request'
+if: ${{ github.event_name != 'pull_request' }}
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
.github/workflows/macos_python.yml (vendored, 3 changed lines)
@@ -22,7 +22,6 @@ on:
jobs:
install_gcc:
name: gcc with clang
-if: github.repository == 'spack/spack'
runs-on: macos-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
@@ -37,7 +36,6 @@ jobs:

install_jupyter_clang:
name: jupyter
-if: github.repository == 'spack/spack'
runs-on: macos-latest
timeout-minutes: 700
steps:
@@ -52,7 +50,6 @@ jobs:

install_scipy_clang:
name: scipy, mpl, pd
-if: github.repository == 'spack/spack'
runs-on: macos-latest
steps:
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
.github/workflows/setup_git.ps1 (vendored, 1 changed line)
@@ -4,7 +4,6 @@ Set-Location spack

git config --global user.email "spack@example.com"
git config --global user.name "Test User"
git config --global core.longpaths true

if ($(git branch --show-current) -ne "develop")
{
.github/workflows/unit_tests.yaml (vendored, 2 changed lines)
@@ -39,7 +39,7 @@ jobs:
python-version: '3.10'
- name: Install Python packages
run: |
-pip install --upgrade pip six setuptools==62.3.4 types-six
+pip install --upgrade pip six setuptools types-six
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
.github/workflows/windows_python.yml (vendored, 6 changed lines)
@@ -41,7 +41,7 @@ jobs:
python-version: 3.9
- name: Install Python packages
run: |
-python -m pip install --upgrade pip six setuptools==62.3.4 flake8 isort>=4.3.5 mypy>=0.800 black pywin32 types-python-dateutil
+python -m pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black pywin32 types-python-dateutil
- name: Create local develop
run: |
.\spack\.github\workflows\setup_git.ps1
@@ -139,11 +139,11 @@ jobs:
echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
env:
ProgressPreference: SilentlyContinue
-- uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+- uses: actions/upload-artifact@v3
with:
name: Windows Spack Installer Bundle
path: ${{ env.installer_root }}\pkg\Spack.exe
-- uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+- uses: actions/upload-artifact@v3
with:
name: Windows Spack Installer
path: ${{ env.installer_root}}\pkg\Spack.msi
CHANGELOG.md (222 changed lines)
@@ -1,223 +1,3 @@
# v0.18.1 (2022-07-19)

### Spack Bugfixes
* Fix several bugs related to bootstrapping (#30834,#31042,#31180)
* Fix a regression that was causing spec hashes to differ between
  Python 2 and Python 3 (#31092)
* Fixed compiler flags for oneAPI and DPC++ (#30856)
* Fixed several issues related to concretization (#31142,#31153,#31170,#31226)
* Improved support for Cray manifest file and `spack external find` (#31144,#31201,#31173,#31186)
* Assign a version to openSUSE Tumbleweed according to the GLIBC version
  in the system (#19895)
* Improved Dockerfile generation for `spack containerize` (#29741,#31321)
* Fixed a few bugs related to concurrent execution of commands (#31509,#31493,#31477)

### Package updates
* WarpX: add v22.06, fixed libs property (#30866,#31102)
* openPMD: add v0.14.5, update recipe for @develop (#29484,#31023)

# v0.18.0 (2022-05-28)

`v0.18.0` is a major feature release.

## Major features in this release

1. **Concretizer now reuses by default**

   `spack install --reuse` was introduced in `v0.17.0`, and `--reuse`
   is now the default concretization mode. Spack will try hard to
   resolve dependencies using installed packages or binaries (#30396).

   To avoid reuse and to use the latest package configurations, (the
   old default), you can use `spack install --fresh`, or add
   configuration like this to your environment or `concretizer.yaml`:

   ```yaml
   concretizer:
     reuse: false
   ```

2. **Finer-grained hashes**

   Spack hashes now include `link`, `run`, *and* `build` dependencies,
   as well as a canonical hash of package recipes. Previously, hashes
   only included `link` and `run` dependencies (though `build`
   dependencies were stored by environments). We coarsened the hash to
   reduce churn in user installations, but the new default concretizer
   behavior mitigates this concern and gets us reuse *and* provenance.
   You will be able to see the build dependencies of new installations
   with `spack find`. Old installations will not change and their
   hashes will not be affected. (#28156, #28504, #30717, #30861)

3. **Improved error messages**

   Error handling with the new concretizer is now done with
   optimization criteria rather than with unsatisfiable cores, and
   Spack reports many more details about conflicting constraints.
   (#30669)

4. **Unify environments when possible**

   Environments have thus far supported `concretization: together` or
   `concretization: separately`. These have been replaced by a new
   preference in `concretizer.yaml`:

   ```yaml
   concretizer:
     unify: [true|false|when_possible]
   ```

   `concretizer:unify:when_possible` will *try* to resolve a fully
   unified environment, but if it cannot, it will create multiple
   configurations of some packages where it has to. For large
   environments that previously had to be concretized separately, this
   can result in a huge speedup (40-50x). (#28941)

5. **Automatically find externals on Cray machines**

   Spack can now automatically discover installed packages in the Cray
   Programming Environment by running `spack external find` (or `spack
   external read-cray-manifest` to *only* query the PE). Packages from
   the PE (e.g., `cray-mpich`) are added to the database with full
   dependency information, and compilers from the PE are added to
   `compilers.yaml`. Available with the June 2022 release of the Cray
   Programming Environment. (#24894, #30428)

6. **New binary format and hardened signing**

   Spack now has an updated binary format, with improvements for
   security. The new format has a detached signature file, and Spack
   verifies the signature before untarring or decompressing the binary
   package. The previous format embedded the signature in a `tar`
   file, which required the client to run `tar` *before* verifying
   (#30750). Spack can still install from build caches using the old
   format, but we encourage users to switch to the new format going
   forward.

   Production GitLab pipelines have been hardened to securely sign
   binaries. There is now a separate signing stage so that signing
   keys are never exposed to build system code, and signing keys are
   ephemeral and only live as long as the signing pipeline stage.
   (#30753)

7. **Bootstrap mirror generation**

   The `spack bootstrap mirror` command can automatically create a
   mirror for bootstrapping the concretizer and other needed
   dependencies in an air-gapped environment. (#28556)

8. **Nascent Windows support**

   Spack now has initial support for Windows. Spack core has been
   refactored to run in the Windows environment, and a small number of
   packages can now build for Windows. More details are
   [in the documentation](https://spack.rtfd.io/en/latest/getting_started.html#spack-on-windows)
   (#27021, #28385, many more)

9. **Makefile generation**

   `spack env depfile` can be used to generate a `Makefile` from an
   environment, which can be used to build packages in the environment
   in parallel on a single node. e.g.:

   ```console
   spack -e myenv env depfile > Makefile
   make
   ```

   Spack propagates `gmake` jobserver information to builds so that
   their jobs can share cores. (#30039, #30254, #30302, #30526)

10. **New variant features**

    In addition to being conditional themselves, variants can now have
    [conditional *values*](https://spack.readthedocs.io/en/latest/packaging_guide.html#conditional-possible-values)
    that are only possible for certain configurations of a package. (#29530)

    Variants can be
    [declared "sticky"](https://spack.readthedocs.io/en/latest/packaging_guide.html#sticky-variants),
    which prevents them from being enabled or disabled by the
    concretizer. Sticky variants must be set explicitly by users
    on the command line or in `packages.yaml`. (#28630)

* Allow conditional possible values in variants
* Add a "sticky" property to variants

## Other new features of note

* Environment views can optionally link only `run` dependencies
  with `link:run` (#29336)
* `spack external find --all` finds library-only packages in
  addition to build dependencies (#28005)
* Customizable `config:license_dir` option (#30135)
* `spack external find --path PATH` takes a custom search path (#30479)
* `spack spec` has a new `--format` argument like `spack find` (#27908)
* `spack concretize --quiet` skips printing concretized specs (#30272)
* `spack info` now has cleaner output and displays test info (#22097)
* Package-level submodule option for git commit versions (#30085, #30037)
* Using `/hash` syntax to refer to concrete specs in an environment
  now works even if `/hash` is not installed. (#30276)

## Major internal refactors

* full hash (see above)
* new develop versioning scheme `0.19.0-dev0`
* Allow for multiple dependencies/dependents from the same package (#28673)
* Splice differing virtual packages (#27919)

## Performance Improvements

* Concretization of large environments with `unify: when_possible` is
  much faster than concretizing separately (#28941, see above)
* Single-pass view generation algorithm is 2.6x faster (#29443)

## Archspec improvements

* `oneapi` and `dpcpp` flag support (#30783)
* better support for `M1` and `a64fx` (#30683)

## Removals and Deprecations

* Spack no longer supports Python `2.6` (#27256)
* Removed deprecated `--run-tests` option of `spack install`;
  use `spack test` (#30461)
* Removed deprecated `spack flake8`; use `spack style` (#27290)

* Deprecate `spack:concretization` config option; use
  `concretizer:unify` (#30038)
* Deprecate top-level module configuration; use module sets (#28659)
* `spack activate` and `spack deactivate` are deprecated in favor of
  environments; will be removed in `0.19.0` (#29430; see also `link:run`
  in #29336 above)

## Notable Bugfixes

* Fix bug that broke locks with many parallel builds (#27846)
* Many bugfixes and consistency improvements for the new concretizer
  and `--reuse` (#30357, #30092, #29835, #29933, #28605, #29694, #28848)

## Packages

* `CMakePackage` uses `CMAKE_INSTALL_RPATH_USE_LINK_PATH` (#29703)
* Refactored `lua` support: `lua-lang` virtual supports both
  `lua` and `luajit` via new `LuaPackage` build system (#28854)
* PythonPackage: now installs packages with `pip` (#27798)
* Python: improve site_packages_dir handling (#28346)
* Extends: support spec, not just package name (#27754)
* `find_libraries`: search for both .so and .dylib on macOS (#28924)
* Use stable URLs and `?full_index=1` for all github patches (#29239)

## Spack community stats

* 6,416 total packages, 458 new since `v0.17.0`
  * 219 new Python packages
  * 60 new R packages
* 377 people contributed to this release
  * 337 committers to packages
  * 85 committers to core

# v0.17.2 (2022-04-13)

### Spack bugfixes
@@ -231,7 +11,7 @@
* Fixed a few bugs affecting the spack ci command (#29518, #29419)
* Fix handling of Intel compiler environment (#29439)
* Fix a few edge cases when reindexing the DB (#28764)
-* Remove "Known issues" from documentation (#29664)
+* Remove "Known issues" from documentation (#29664)
* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)

# v0.17.1 (2021-12-23)
@@ -6,15 +6,34 @@ bootstrap:
# by Spack is installed in a "store" subfolder of this root directory
root: $user_cache_path/bootstrap
# Methods that can be used to bootstrap software. Each method may or
-# may not be able to bootstrap all the software that Spack needs,
+# may not be able to bootstrap all of the software that Spack needs,
# depending on its type.
sources:
- name: 'github-actions-v0.2'
metadata: $spack/share/spack/bootstrap/github-actions-v0.2
type: buildcache
description: |
Buildcache generated from a public workflow using Github Actions.
The sha256 checksum of binaries is checked before installation.
info:
url: https://mirror.spack.io/bootstrap/github-actions/v0.2
homepage: https://github.com/spack/spack-bootstrap-mirrors
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
- name: 'github-actions-v0.1'
metadata: $spack/share/spack/bootstrap/github-actions-v0.1
- name: 'spack-install'
metadata: $spack/share/spack/bootstrap/spack-install
type: buildcache
description: |
Buildcache generated from a public workflow using Github Actions.
The sha256 checksum of binaries is checked before installation.
info:
url: https://mirror.spack.io/bootstrap/github-actions/v0.1
homepage: https://github.com/spack/spack-bootstrap-mirrors
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
# This method is just Spack bootstrapping the software it needs from sources.
# It has been added here so that users can selectively disable bootstrapping
# from sources by "untrusting" it.
- name: spack-install
type: install
description: |
Specs built from sources by Spack. May take a long time.
trusted:
# By default we trust bootstrapping from sources and from binaries
# produced on Github via the workflow
@@ -28,9 +28,3 @@ concretizer:
# instance concretize with target "icelake" while running on "haswell").
# If "true" only allow targets that are compatible with the host.
host_compatible: true
-# When "true" concretize root specs of environments together, so that each unique
-# package in an environment corresponds to one concrete spec. This ensures
-# environments can always be activated. When "false" perform concretization separately
-# on each root spec, allowing different versions and variants of the same package in
-# an environment.
-unify: false
@@ -33,9 +33,6 @@ config:
template_dirs:
- $spack/share/spack/templates

-# Directory where licenses should be located
-license_dir: $spack/etc/spack/licenses

# Temporary locations Spack can try to use for builds.
#
# Recommended options are given below.
@@ -50,13 +50,6 @@ build cache files for the "ninja" spec:
Note that the targeted spec must already be installed. Once you have a build cache,
you can add it as a mirror, discussed next.

-.. warning::
-
-   Spack improved the format used for binary caches in v0.18. The entire v0.18 series
-   will be able to verify and install binary caches both in the new and in the old format.
-   Support for using the old format is expected to end in v0.19, so we advise users to
-   recreate relevant buildcaches using Spack v0.18 or higher.

---------------------------------------
Finding or installing build cache files
---------------------------------------
@@ -1,160 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _bootstrapping:

=============
Bootstrapping
=============

In the :ref:`Getting started <getting_started>` Section we already mentioned that
Spack can bootstrap some of its dependencies, including ``clingo``. In fact, there
is an entire command dedicated to the management of every aspect of bootstrapping:

.. command-output:: spack bootstrap --help

The first thing to know to understand bootstrapping in Spack is that each of
Spack's dependencies is bootstrapped lazily; i.e. the first time it is needed and
can't be found. You can readily check if any prerequisite for using Spack
is missing by running:

.. code-block:: console

   % spack bootstrap status
   Spack v0.17.1 - python@3.8

   [FAIL] Core Functionalities
     [B] MISSING "clingo": required to concretize specs

   [FAIL] Binary packages
     [B] MISSING "gpg2": required to sign/verify buildcaches


   Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing and it's giving detailed information on why they are needed and whether
they can be bootstrapped. Running a command that concretize a spec, like:

.. code-block:: console

   % spack solve zlib
   ==> Bootstrapping clingo from pre-built binaries
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.1/build_cache/darwin-catalina-x86_64/apple-clang-12.0.0/clingo-bootstrap-spack/darwin-catalina-x86_64-apple-clang-12.0.0-clingo-bootstrap-spack-p5on7i4hejl775ezndzfdkhvwra3hatn.spack
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

triggers the bootstrapping of clingo from pre-built binaries as expected.

-----------------------
The Bootstrapping store
-----------------------

The software installed for bootstrapping purposes is deployed in a separate store.
Its location can be checked with the following command:

.. code-block:: console

   % spack bootstrap root

It can also be changed with the same command by just specifying the newly desired path:

.. code-block:: console

   % spack bootstrap root /opt/spack/bootstrap

You can check what is installed in the bootstrapping store at any time using:

.. code-block:: console

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 11 installed packages
   -- darwin-catalina-x86_64 / apple-clang@12.0.0 ------------------
   clingo-bootstrap@spack  libassuan@2.5.5  libgpg-error@1.42  libksba@1.5.1  pinentry@1.1.1  zlib@1.2.11
   gnupg@2.3.1             libgcrypt@1.9.3  libiconv@1.16      npth@1.6       python@3.8

In case it is needed you can remove all the software in the current bootstrapping store with:

.. code-block:: console

   % spack clean -b
   ==> Removing bootstrapped software and configuration in "/Users/spack/.spack/bootstrap"

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 0 installed packages

--------------------------------------------
Enabling and disabling bootstrapping methods
--------------------------------------------

Bootstrapping is always performed by trying the methods listed by:

.. command-output:: spack bootstrap list

in the order they appear, from top to bottom. By default Spack is
configured to try first bootstrapping from pre-built binaries and to
fall-back to bootstrapping from sources if that failed.

If need be, you can disable bootstrapping altogether by running:

.. code-block:: console

   % spack bootstrap disable

in which case it's your responsibility to ensure Spack runs in an
environment where all its prerequisites are installed. You can
also configure Spack to skip certain bootstrapping methods by *untrusting*
them. For instance:

.. code-block:: console

   % spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can:

.. code-block:: console

   % spack bootstrap trust github-actions

There is also an option to reset the bootstrapping configuration to Spack's defaults:

.. code-block:: console

   % spack bootstrap reset
   ==> Bootstrapping configuration is being reset to Spack's defaults. Current configuration will be lost.
   Do you want to continue? [Y/n]
   %

----------------------------------------
Creating a mirror for air-gapped systems
----------------------------------------

Spack's default configuration for bootstrapping relies on the user having
access to the internet, either to fetch pre-compiled binaries or source tarballs.
Sometimes though Spack is deployed on air-gapped systems where such access is denied.

To help with similar situations Spack has a command that recreates, in a local folder
of choice, a mirror containing the source tarballs and/or binary packages needed for
bootstrapping.

.. code-block:: console

   % spack bootstrap mirror --binary-packages /opt/bootstrap
   ==> Adding "clingo-bootstrap@spack+python %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "gnupg@2.3: %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "patchelf@0.13.1:0.13.99 %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding binary packages from "https://github.com/alalazo/spack-bootstrap-mirrors/releases/download/v0.1-rc.2/bootstrap-buildcache.tar.gz" to the mirror at /opt/bootstrap/local-mirror

   To register the mirror on the platform where it's supposed to be used run the following command(s):
     % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
     % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries

This command needs to be run on a machine with internet access and the resulting folder
has to be moved over to the air-gapped system. Once the local sources are added using the
commands suggested at the prompt, they can be used to bootstrap Spack.
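The deleted page above describes Spack's lazy, on-first-use bootstrapping. A minimal Python sketch of that behavior, for orientation only (the `bootstrap_from_configured_sources` helper below is hypothetical, not Spack's actual API):

```python
import importlib


def bootstrap_from_configured_sources(name):
    # Hypothetical stand-in for trying each configured method in order
    # (pre-built binaries first, then building from sources), as the
    # deleted docs describe for `spack bootstrap list`.
    raise RuntimeError("no bootstrapping method succeeded for " + name)


def ensure_clingo():
    """Import clingo, bootstrapping it only on first use if it is missing."""
    try:
        return importlib.import_module("clingo")
    except ImportError:
        bootstrap_from_configured_sources("clingo")
        return importlib.import_module("clingo")
```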
@@ -39,7 +39,6 @@ on these ideas for each distinct build system that Spack supports:

build_systems/autotoolspackage
build_systems/cmakepackage
-build_systems/cachedcmakepackage
build_systems/mesonpackage
build_systems/qmakepackage
build_systems/sippackage
@@ -1,123 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _cachedcmakepackage:

------------------
CachedCMakePackage
------------------

The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is
useful for packages with arguments longer than the system limit, and
for reproducibility.

The documentation for this class assumes that the user is familiar with
the ``CMakePackage`` class from which it inherits. See the documentation
for :ref:`CMakePackage <cmakepackage>`.

^^^^^^
Phases
^^^^^^

The ``CachedCMakePackage`` base class comes with the following phases:

#. ``initconfig`` - generate the CMake cache file
#. ``cmake`` - generate the Makefile
#. ``build`` - build the package
#. ``install`` - install the package

By default, these phases run:

.. code-block:: console

   $ mkdir spack-build
   $ cd spack-build
   $ cat << EOF > name-arch-compiler@version.cmake
   # Write information on compilers and dependencies
   # includes information on mpi and cuda if applicable
   $ cmake .. -DCMAKE_INSTALL_PREFIX=/path/to/installation/prefix -C name-arch-compiler@version.cmake
   $ make
   $ make test  # optional
   $ make install

The ``CachedCMakePackage`` class inherits from the ``CMakePackage``
class, and accepts all of the same options and adds all of the same
flags to the ``cmake`` command. Similar to the ``CMakePAckage`` class,
you may need to add a few arguments yourself, and the
``CachedCMakePackage`` provides the same interface to add those
flags.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding entries to the CMake cache
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding flags to the ``cmake`` command, you may need to
add entries to the CMake cache in the ``initconfig`` phase. This can
be done by overriding one of four methods:

#. ``CachedCMakePackage.initconfig_compiler_entries``
#. ``CachedCMakePackage.initconfig_mpi_entries``
#. ``CachedCMakePackage.initconfig_hardware_entries``
#. ``CachedCMakePackage.initconfig_package_entries``

Each of these methods returns a list of CMake cache strings. The
distinction between these methods is merely to provide a
well-structured and legible cmake cache file -- otherwise, entries
from each of these methods are handled identically.

Spack also provides convenience methods for generating CMake cache
entries. These methods are available at module scope in every Spack
package. Because CMake parses boolean options, strings, and paths
differently, there are three such methods:

#. ``cmake_cache_option``
#. ``cmake_cache_string``
#. ``cmake_cache_path``

These methods each accept three parameters -- the name of the CMake
variable associated with the entry, the value of the entry, and an
optional comment -- and return strings in the appropriate format to be
returned from any of the ``initconfig*`` methods. Additionally, these
methods may return comments beginning with the ``#`` character.

A typical usage of these methods may look something like this:

.. code-block:: python

   def initconfig_mpi_entries(self)
       # Get existing MPI configurations
       entries = super(self, Foo).initconfig_mpi_entries()

       # The existing MPI configurations key on whether ``mpi`` is in the spec
       # This spec has an MPI variant, and we need to enable MPI when it is on.
       # This hypothetical package controls MPI with the ``FOO_MPI`` option to
       # cmake.
       if '+mpi' in self.spec:
           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
       else:
           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))

   def initconfig_package_entries(self):
       # Package specific options
       entries = []

       entries.append('#Entries for build options')

       bar_on = '+bar' in self.spec
       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))

       entries.append('#Entries for dependencies')

       if self.spec['blas'].name == 'baz':  # baz is our blas provider
           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on CMake cache files, see:
https://cmake.org/cmake/help/latest/manual/cmake.1.html
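For orientation, the three `cmake_cache_*` helpers described in the deleted page render ordinary CMake cache entries. A minimal sketch, assuming the standard `set(<name> <value> CACHE <type> "<comment>")` format (the exact strings Spack emits may differ):

```python
def cmake_cache_option(name, boolean_value, comment=""):
    # Booleans are rendered as CMake ON/OFF cache entries.
    value = "ON" if boolean_value else "OFF"
    return 'set({0} {1} CACHE BOOL "{2}")'.format(name, value, comment)


def cmake_cache_string(name, string, comment=""):
    # Arbitrary string values.
    return 'set({0} "{1}" CACHE STRING "{2}")'.format(name, string, comment)


def cmake_cache_path(name, path, comment=""):
    # Filesystem paths, e.g. a dependency prefix.
    return 'set({0} "{1}" CACHE PATH "{2}")'.format(name, path, comment)


# Example: cmake_cache_option("FOO_MPI", True, "enable mpi") returns
#   set(FOO_MPI ON CACHE BOOL "enable mpi")
```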
@@ -59,8 +59,7 @@ other techniques to minimize the size of the final image:
&& echo " specs:" \
&& echo " - gromacs+mpi" \
&& echo " - mpich" \
-&& echo " concretizer:" \
-&& echo " unify: true" \
+&& echo " concretization: together" \
&& echo " config:" \
&& echo " install_tree: /opt/software" \
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -246,8 +245,7 @@ software is respectively built and installed:
&& echo " specs:" \
&& echo " - gromacs+mpi" \
&& echo " - mpich" \
-&& echo " concretizer:" \
-&& echo " unify: true" \
+&& echo " concretization: together" \
&& echo " config:" \
&& echo " install_tree: /opt/software" \
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -368,8 +366,7 @@ produces, for instance, the following ``Dockerfile``:
&& echo " externals:" \
&& echo " - spec: cuda%gcc" \
&& echo " prefix: /usr/local/cuda" \
-&& echo " concretizer:" \
-&& echo " unify: true" \
+&& echo " concretization: together" \
&& echo " config:" \
&& echo " install_tree: /opt/software" \
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -273,9 +273,19 @@ or
Concretizing
^^^^^^^^^^^^

-Once some user specs have been added to an environment, they can be concretized.
-There are at the moment three different modes of operation to concretize an environment,
-which are explained in details in :ref:`environments_concretization_config`.
+Once some user specs have been added to an environment, they can be
+concretized. *By default specs are concretized separately*, one after
+the other. This mode of operation permits to deploy a full
+software stack where multiple configurations of the same package
+need to be installed alongside each other. Central installations done
+at HPC centers by system administrators or user support groups
+are a common case that fits in this behavior.
+Environments *can also be configured to concretize all
+the root specs in a self-consistent way* to ensure that
+each package in the environment comes with a single configuration. This
+mode of operation is usually what is required by software developers that
+want to deploy their development environment.

Regardless of which mode of operation has been chosen, the following
command will ensure all the root specs are concretized according to the
constraints that are prescribed in the configuration:
@@ -483,76 +493,32 @@ Appending to this list in the yaml is identical to using the ``spack
add`` command from the command line. However, there is more power
available from the yaml file.

-.. _environments_concretization_config:
-
^^^^^^^^^^^^^^^^^^^
Spec concretization
^^^^^^^^^^^^^^^^^^^
-An environment can be concretized in three different modes and the behavior active under any environment
-is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other:
+Specs can be concretized separately or together, as already
+explained in :ref:`environments_concretization`. The behavior active
+under any environment is determined by the ``concretization`` property:

.. code-block:: yaml

   spack:
     specs:
-    - hdf5~mpi
-    - hdf5+mpi
-    - zlib@1.2.8
-    concretizer:
-      unify: false
+    - ncview
+    - netcdf
+    - nco
+    - py-sphinx
+    concretization: together

-This mode of operation permits to deploy a full software stack where multiple configurations of the same package
-need to be installed alongside each other using the best possible selection of transitive dependencies. The downside
-is that redundancy of installations is disregarded completely, and thus environments might be more bloated than
-strictly needed. In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack,
-then it will be used for both ``hdf5`` installations.
-
-If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*,
-i.e. trying to maximize reuse of dependencies across different specs:
-
-.. code-block:: yaml
-
-   spack:
-     specs:
-     - hdf5~mpi
-     - hdf5+mpi
-     - zlib@1.2.8
-     concretizer:
-       unify: when_possible
-
-Also in this case Spack allows having multiple configurations of the same package, but privileges the reuse of
-specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use
-``zlib@1.2.8`` as a dependency even if newer versions of that library are available.
-Central installations done at HPC centers by system administrators or user support groups are a common case
-that fits either of these two modes.
-
-Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to
-ensure that each package in the environment comes with a single configuration:
-
-.. code-block:: yaml
-
-   spack:
-     specs:
-     - hdf5+mpi
-     - zlib@1.2.8
-     concretizer:
-       unify: true
-
-This mode of operation is usually what is required by software developers that want to deploy their development
-environment and have a single view of it in the filesystem.
-
-.. note::
-
-   The ``concretizer:unify`` config option was introduced in Spack 0.18 to
-   replace the ``concretization`` property. For reference,
-   ``concretization: together`` is replaced by ``concretizer:unify:true``,
-   and ``concretization: separately`` is replaced by ``concretizer:unify:false``.
+which can currently take either one of the two allowed values ``together`` or ``separately``
+(the default).

.. admonition:: Re-concretization of user specs

-   When concretizing specs *together* or *together where possible* the entire set of specs will be
+   When concretizing specs together the entire set of specs will be
   re-concretized after any addition of new user specs, to ensure that
-   the environment remains consistent / minimal. When instead the specs are concretized
+   the environment remains consistent. When instead the specs are concretized
   separately only the new specs will be re-concretized after any addition.

^^^^^^^^^^^^^
@@ -799,7 +765,7 @@ directories.
select: [^mpi]
exclude: ['%pgi@18.5']
projections:
-all: '{name}/{version}-{compiler.name}'
+all: {name}/{version}-{compiler.name}
link: all
link_type: symlink

@@ -1051,4 +1017,4 @@ the include is conditional.
the ``--make-target-prefix`` flag and use the non-phony targets
``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
prerequisites, instead of the phony targets ``<target-prefix>/all``
-and ``<target-prefix>/fetch-all`` respectively.
+and ``<target-prefix>/fetch-all`` respectively.
@@ -63,7 +63,6 @@ or refer to the full manual below.

configuration
config_yaml
-bootstrapping
build_settings
environments
containers
@@ -115,8 +115,7 @@ And here's the spack environment built by the pipeline represented as a

spack:
view: false
-concretizer:
-  unify: false
+concretization: separately

definitions:
- pkgs:
@@ -61,7 +61,7 @@ You can see the packages we added earlier in the ``specs:`` section. If you
ever want to add more packages, you can either use ``spack add`` or manually
edit this file.

-We also need to change the ``concretizer:unify`` option. By default, Spack
+We also need to change the ``concretization:`` option. By default, Spack
concretizes each spec *separately*, allowing multiple versions of the same
package to coexist. Since we want a single consistent environment, we want to
concretize all of the specs *together*.
@@ -78,8 +78,7 @@ Here is what your ``spack.yaml`` looks like with this new setting:
# add package specs to the `specs` list
specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
view: true
-concretizer:
-  unify: true
+concretization: together

^^^^^^^^^^^^^^^^
Symlink location
@@ -25,5 +25,4 @@ spack:
- subversion
# Plotting
- graphviz
-concretizer:
-  unify: true
+concretization: together
lib/spack/env/cc (vendored, 6 changed lines)
@@ -1,4 +1,4 @@
-#!/bin/sh -f
+#!/bin/sh
# shellcheck disable=SC2034 # evals in this script fool shellcheck
#
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
@@ -768,9 +768,7 @@ if [ "$SPACK_DEBUG" = TRUE ]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
echo "[$mode] $command $input_command" >> "$input_log"
-IFS="$lsep"
-echo "[$mode] "$full_command_list >> "$output_log"
-unset IFS
+echo "[$mode] ${full_command_list}" >> "$output_log"
fi

# Execute the full command, preserving spaces with IFS set
lib/spack/external/__init__.py (vendored, 2 changed lines)
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.1.4 (commit b8eea9df2b4204ff27d204452cd46f5199a0b423)
+* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)

argparse
--------
lib/spack/external/archspec/cpu/detect.py (vendored, 75 changed lines)
@@ -61,7 +61,7 @@ def proc_cpuinfo():
    ``/proc/cpuinfo``
    """
    info = {}
-    with open("/proc/cpuinfo") as file:  # pylint: disable=unspecified-encoding
+    with open("/proc/cpuinfo") as file:
        for line in file:
            key, separator, value = line.partition(":")

@@ -80,46 +80,26 @@

def _check_output(args, env):
-    output = subprocess.Popen(  # pylint: disable=consider-using-with
-        args, stdout=subprocess.PIPE, env=env
-    ).communicate()[0]
+    output = subprocess.Popen(args, stdout=subprocess.PIPE, env=env).communicate()[0]
    return six.text_type(output.decode("utf-8"))

-def _machine():
-    """ "Return the machine architecture we are on"""
-    operating_system = platform.system()
-
-    # If we are not on Darwin, trust what Python tells us
-    if operating_system != "Darwin":
-        return platform.machine()
-
-    # On Darwin it might happen that we are on M1, but using an interpreter
-    # built for x86_64. In that case "platform.machine() == 'x86_64'", so we
-    # need to fix that.
-    #
-    # See: https://bugs.python.org/issue42704
-    output = _check_output(
-        ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
-    ).strip()
-
-    if "Apple" in output:
-        # Note that a native Python interpreter on Apple M1 would return
-        # "arm64" instead of "aarch64". Here we normalize to the latter.
-        return "aarch64"
-
-    return "x86_64"

@info_dict(operating_system="Darwin")
def sysctl_info_dict():
    """Returns a raw info dictionary parsing the output of sysctl."""
-    child_environment = _ensure_bin_usrbin_in_path()
+    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
+    # usually found there
+    child_environment = dict(os.environ.items())
+    search_paths = child_environment.get("PATH", "").split(os.pathsep)
+    for additional_path in ("/sbin", "/usr/sbin"):
+        if additional_path not in search_paths:
+            search_paths.append(additional_path)
+    child_environment["PATH"] = os.pathsep.join(search_paths)

    def sysctl(*args):
        return _check_output(["sysctl"] + list(args), env=child_environment).strip()

-    if _machine() == "x86_64":
+    if platform.machine() == "x86_64":
        flags = (
            sysctl("-n", "machdep.cpu.features").lower()
            + " "
@@ -145,18 +125,6 @@ def sysctl(*args):
    return info

-def _ensure_bin_usrbin_in_path():
-    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
-    # usually found there
-    child_environment = dict(os.environ.items())
-    search_paths = child_environment.get("PATH", "").split(os.pathsep)
-    for additional_path in ("/sbin", "/usr/sbin"):
-        if additional_path not in search_paths:
-            search_paths.append(additional_path)
-    child_environment["PATH"] = os.pathsep.join(search_paths)
-    return child_environment

def adjust_raw_flags(info):
    """Adjust the flags detected on the system to homogenize
    slightly different representations.
@@ -216,7 +184,12 @@ def compatible_microarchitectures(info):
    Args:
        info (dict): dictionary containing information on the host cpu
    """
-    architecture_family = _machine()
+    architecture_family = platform.machine()
+    # On Apple M1 platform.machine() returns "arm64" instead of "aarch64"
+    # so we should normalize the name here
+    if architecture_family == "arm64":
+        architecture_family = "aarch64"

    # If a tester is not registered, be conservative and assume no known
    # target is compatible with the host
    tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)
@@ -271,7 +244,12 @@ def compatibility_check(architecture_family):
        architecture_family = (architecture_family,)

    def decorator(func):
-        COMPATIBILITY_CHECKS.update({family: func for family in architecture_family})
+        # pylint: disable=fixme
+        # TODO: on removal of Python 2.6 support this can be re-written as
+        # TODO: an update + a dict comprehension
+        for arch_family in architecture_family:
+            COMPATIBILITY_CHECKS[arch_family] = func

        return func

    return decorator
@@ -310,7 +288,7 @@ def compatibility_check_for_x86_64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
-        and target.vendor in (vendor, "generic")
+        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
    )
@@ -325,9 +303,8 @@ def compatibility_check_for_aarch64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
-        and target.vendor in (vendor, "generic")
-        # On macOS it seems impossible to get all the CPU features with syctl info
-        and (target.features.issubset(features) or platform.system() == "Darwin")
+        and (target.vendor == vendor or target.vendor == "generic")
+        and target.features.issubset(features)
    )
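The `compatibility_check` decorator in the hunk above registers one predicate per architecture family in `COMPATIBILITY_CHECKS`, and `compatible_microarchitectures` later looks the predicate up by the detected family. A minimal sketch of that registration pattern (the `riscv64` family and the predicate body are illustrative, not archspec's actual logic):

```python
COMPATIBILITY_CHECKS = {}


def compatibility_check(architecture_family):
    """Register a function as a compatibility check for one or more families."""
    if not isinstance(architecture_family, tuple):
        architecture_family = (architecture_family,)

    def decorator(func):
        for arch_family in architecture_family:
            COMPATIBILITY_CHECKS[arch_family] = func
        return func

    return decorator


@compatibility_check(architecture_family="riscv64")
def compatibility_check_for_riscv64(info, target):
    # Illustrative predicate: a target is compatible when its required
    # features are a subset of the flags detected on the host.
    features = set(info.get("flags", "").split())
    return target.features.issubset(features)
```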
lib/spack/external/archspec/cpu/schema.py (vendored, 4 changed lines)
@@ -11,7 +11,7 @@
try:
    from collections.abc import MutableMapping  # novm
except ImportError:
-    from collections import MutableMapping  # pylint: disable=deprecated-class
+    from collections import MutableMapping

class LazyDictionary(MutableMapping):
@@ -56,7 +56,7 @@ def _load_json_file(json_file):

    def _factory():
        filename = os.path.join(json_dir, json_file)
-        with open(filename, "r") as file:  # pylint: disable=unspecified-encoding
+        with open(filename, "r") as file:
            return json.load(file)

    return _factory
@@ -85,21 +85,7 @@
"intel": [
{
"versions": ":",
-"name": "x86-64",
+"name": "pentium4",
"flags": "-march={name} -mtune=generic"
}
],
-"oneapi": [
-{
-"versions": ":",
-"name": "x86-64",
-"flags": "-march={name} -mtune=generic"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"name": "x86-64",
-"flags": "-march={name} -mtune=generic"
-}
-]
@@ -143,20 +129,6 @@
"name": "x86-64",
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
}
],
-"oneapi": [
-{
-"versions": "2021.2.0:",
-"name": "x86-64-v2",
-"flags": "-march={name} -mtune=generic"
-}
-],
-"dpcpp": [
-{
-"versions": "2021.2.0:",
-"name": "x86-64-v2",
-"flags": "-march={name} -mtune=generic"
-}
-]
}
},
@@ -214,20 +186,6 @@
"name": "x86-64",
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
}
],
-"oneapi": [
-{
-"versions": "2021.2.0:",
-"name": "x86-64-v3",
-"flags": "-march={name} -mtune=generic"
-}
-],
-"dpcpp": [
-{
-"versions": "2021.2.0:",
-"name": "x86-64-v3",
-"flags": "-march={name} -mtune=generic"
-}
-]
}
},
@@ -290,20 +248,6 @@
"name": "x86-64",
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
}
],
-"oneapi": [
-{
-"versions": "2021.2.0:",
-"name": "x86-64-v4",
-"flags": "-march={name} -mtune=generic"
-}
-],
-"dpcpp": [
-{
-"versions": "2021.2.0:",
-"name": "x86-64-v4",
-"flags": "-march={name} -mtune=generic"
-}
-]
}
},
@@ -344,19 +288,8 @@
"intel": [
{
"versions": "16.0:",
-"flags": "-march={name} -mtune={name}"
+"name": "pentium4",
+"flags": "-march={name} -mtune=generic"
}
],
-"oneapi": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
@@ -400,18 +333,6 @@
"versions": "16.0:",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -463,20 +384,6 @@
"name": "corei7",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"name": "corei7",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"name": "corei7",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -525,20 +432,6 @@
"name": "corei7",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"name": "corei7",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"name": "corei7",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -597,18 +490,6 @@
"versions": "18.0:",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -669,18 +550,6 @@
"versions": "18.0:",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -746,18 +615,6 @@
"versions": "18.0:",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -815,18 +672,6 @@
"versions": "18.0:",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -887,18 +732,6 @@
"versions": "18.0:",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -965,20 +798,6 @@
"name": "knl",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"name": "knl",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"name": "knl",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -1049,20 +868,6 @@
"name": "skylake-avx512",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"name": "skylake-avx512",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"name": "skylake-avx512",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -1132,18 +937,6 @@
"versions": "18.0:",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -1211,18 +1004,6 @@
"versions": "19.0.1:",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -1317,20 +1098,6 @@
"name": "icelake-client",
"flags": "-march={name} -mtune={name}"
}
],
-"oneapi": [
-{
-"versions": ":",
-"name": "icelake-client",
-"flags": "-march={name} -mtune={name}"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"name": "icelake-client",
-"flags": "-march={name} -mtune={name}"
-}
-]
}
},
@@ -1375,20 +1142,6 @@
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
"flags": "-msse2"
}
],
-"oneapi": [
-{
-"versions": ":",
-"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
-"flags": "-msse2"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
-"flags": "-msse2"
-}
-]
}
},
@@ -1439,20 +1192,6 @@
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
"flags": "-msse3"
}
],
-"oneapi": [
-{
-"versions": ":",
-"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
-"flags": "-msse3"
-}
-],
-"dpcpp": [
-{
-"versions": ":",
-"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors"
|
||||
"flags": "-msse3"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1507,20 +1246,6 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1576,20 +1301,6 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1649,22 +1360,6 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1727,22 +1422,6 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1806,22 +1485,6 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1880,30 +1543,6 @@
|
||||
"name": "znver3",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"intel": [
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -2149,6 +1788,7 @@
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
@@ -2181,26 +1821,18 @@
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16"
|
||||
},
|
||||
{
|
||||
"versions": "8:10.2",
|
||||
"flags": "-march=armv8.2-a+crc+sha2+fp16+sve -msve-vector-bits=512"
|
||||
},
|
||||
{
|
||||
"versions": "10.3:",
|
||||
"flags": "-mcpu=a64fx -msve-vector-bits=512"
|
||||
"versions": "8:",
|
||||
"flags": "-march=armv8.2-a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": "3.9:4.9",
|
||||
"flags": "-march=armv8.2-a+crc+sha2+fp16"
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16"
|
||||
},
|
||||
{
|
||||
"versions": "5:10",
|
||||
"flags": "-march=armv8.2-a+crc+sha2+fp16+sve"
|
||||
},
|
||||
{
|
||||
"versions": "11:",
|
||||
"flags": "-mcpu=a64fx"
|
||||
"versions": "5:",
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
|
||||
}
|
||||
],
|
||||
"arm": [
|
||||
@@ -2322,40 +1954,7 @@
|
||||
"m1": {
|
||||
"from": ["aarch64"],
|
||||
"vendor": "Apple",
|
||||
"features": [
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"crc32",
|
||||
"atomics",
|
||||
"fphp",
|
||||
"asimdhp",
|
||||
"cpuid",
|
||||
"asimdrdm",
|
||||
"jscvt",
|
||||
"fcma",
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"asimdfhm",
|
||||
"dit",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
"ssbs",
|
||||
"sb",
|
||||
"paca",
|
||||
"pacg",
|
||||
"dcpodp",
|
||||
"flagm2",
|
||||
"frint"
|
||||
],
|
||||
"features": [],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
@@ -2365,22 +1964,14 @@
|
||||
],
|
||||
"clang" : [
|
||||
{
|
||||
"versions": "9.0:12.0",
|
||||
"versions": "9.0:",
|
||||
"flags" : "-march=armv8.4-a"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=apple-m1"
|
||||
}
|
||||
],
|
||||
"apple-clang": [
|
||||
{
|
||||
"versions": "11.0:12.5",
|
||||
"versions": "11.0:",
|
||||
"flags" : "-march=armv8.4-a"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=apple-m1"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
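The hunks above edit archspec's microarchitectures.json, which maps each CPU target and compiler to the optimization flags Spack should use. As a rough sketch of how these entries are consumed (assuming the archspec Python package, which carries this file, is importable; the exact flag string returned depends on which side of this diff is installed):

    import archspec.cpu

    # Look up a target touched above and render its "flags" template for a
    # given compiler and version.
    target = archspec.cpu.TARGETS['a64fx']
    print(target.optimization_flags('gcc', '10.3.0'))
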
@@ -367,7 +367,7 @@ def group_ids(uid=None):


@system_path_filter(arg_slice=slice(1))
def chgrp(path, group, follow_symlinks=True):
def chgrp(path, group):
    """Implement the bash chgrp function on a single path"""
    if is_windows:
        raise OSError("Function 'chgrp' is not supported on Windows")
@@ -376,10 +376,7 @@ def chgrp(path, group, follow_symlinks=True):
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = group
    if follow_symlinks:
        os.chown(path, -1, gid)
    else:
        os.lchown(path, -1, gid)
    os.chown(path, -1, gid)


@system_path_filter(arg_slice=slice(1))

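For reference, the follow_symlinks flag in this hunk toggles between the two chown variants: os.chown() follows a symbolic link and changes the group of its target, while os.lchown() changes the link itself. A minimal sketch of the non-following behavior (POSIX only):

    import grp
    import os

    def chgrp_link_itself(path, group):
        # Resolve a group name to a gid; numeric gids pass through unchanged.
        gid = grp.getgrnam(group).gr_gid if isinstance(group, str) else group
        os.lchown(path, -1, gid)  # -1 leaves the current owner untouched
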
@@ -11,9 +11,7 @@
import os
import re
import sys
import traceback
from datetime import datetime, timedelta
from typing import List, Tuple

import six
from six import string_types
@@ -1011,64 +1009,3 @@ def __repr__(self):

    def __str__(self):
        return str(self.data)


class GroupedExceptionHandler(object):
    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

    def __init__(self):
        self.exceptions = []  # type: List[Tuple[str, Exception, List[str]]]

    def __bool__(self):
        """Whether any exceptions were handled."""
        return bool(self.exceptions)

    def forward(self, context):
        # type: (str) -> GroupedExceptionForwarder
        """Return a contextmanager which extracts tracebacks and prefixes a message."""
        return GroupedExceptionForwarder(context, self)

    def _receive_forwarded(self, context, exc, tb):
        # type: (str, Exception, List[str]) -> None
        self.exceptions.append((context, exc, tb))

    def grouped_message(self, with_tracebacks=True):
        # type: (bool) -> str
        """Print out an error message coalescing all the forwarded errors."""
        each_exception_message = [
            '{0} raised {1}: {2}{3}'.format(
                context,
                exc.__class__.__name__,
                exc,
                '\n{0}'.format(''.join(tb)) if with_tracebacks else '',
            )
            for context, exc, tb in self.exceptions
        ]
        return 'due to the following failures:\n{0}'.format(
            '\n'.join(each_exception_message)
        )


class GroupedExceptionForwarder(object):
    """A contextmanager to capture exceptions and forward them to a
    GroupedExceptionHandler."""

    def __init__(self, context, handler):
        # type: (str, GroupedExceptionHandler) -> None
        self._context = context
        self._handler = handler

    def __enter__(self):
        return None

    def __exit__(self, exc_type, exc_value, tb):
        if exc_value is not None:
            self._handler._receive_forwarded(
                self._context,
                exc_value,
                traceback.format_tb(tb),
            )

        # Suppress any exception from being re-raised:
        # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
        return True

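A minimal usage sketch of the handler defined above (the source names are illustrative):

    handler = GroupedExceptionHandler()

    for name in ('source-a', 'source-b'):
        with handler.forward(name):
            raise RuntimeError('{0} failed'.format(name))  # captured, not raised

    if handler:  # truthy once any exception has been forwarded
        print(handler.grouped_message(with_tracebacks=False))
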
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: (major, minor, micro, dev release) tuple
spack_version_info = (0, 18, 1)
spack_version_info = (0, 18, 0, 'dev0')

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
spack_version = '.'.join(str(s) for s in spack_version_info)

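Worked example of the join above: a four-element tuple renders as a PEP 440 dev release, a three-element tuple as a plain release.

    >>> '.'.join(str(s) for s in (0, 18, 0, 'dev0'))
    '0.18.0.dev0'
    >>> '.'.join(str(s) for s in (0, 18, 1))
    '0.18.1'
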
@@ -298,8 +298,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
def _check_patch_urls(pkgs, error_cls):
    """Ensure that patches fetched from GitHub have stable sha256 hashes."""
    github_patch_url_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )

    errors = []

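A quick check of what each side of this hunk accepts (the URL is illustrative):

    import re

    old = (r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
           r".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)")
    new = r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"

    url = "https://patch-diff.githubusercontent.com/raw/spack/spack/pull/1.patch"
    print(bool(re.match(old, url)))  # True: the patch-diff mirror host is allowed
    print(bool(re.match(new, url)))  # False: only github.com is accepted
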
@@ -210,7 +210,7 @@ def get_all_built_specs(self):

        return spec_list

    def find_built_spec(self, spec, mirrors_to_check=None):
    def find_built_spec(self, spec):
        """Look in our cache for the built spec corresponding to ``spec``.

        If the spec can be found among the configured binary mirrors, a
@@ -225,8 +225,6 @@ def find_built_spec(self, spec, mirrors_to_check=None):

        Args:
            spec (spack.spec.Spec): Concrete spec to find
            mirrors_to_check: Optional mapping containing mirrors to check. If
                None, just assumes all configured mirrors.

        Returns:
            A list of objects containing the found specs and mirror url where
@@ -242,23 +240,17 @@ def find_built_spec(self, spec, mirrors_to_check=None):
            ]
        """
        self.regenerate_spec_cache()
        return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check)
        return self.find_by_hash(spec.dag_hash())

    def find_by_hash(self, find_hash, mirrors_to_check=None):
    def find_by_hash(self, find_hash):
        """Same as find_built_spec but uses the hash of a spec.

        Args:
            find_hash (str): hash of the spec to search
            mirrors_to_check: Optional mapping containing mirrors to check. If
                None, just assumes all configured mirrors.
        """
        if find_hash not in self._mirrors_for_spec:
            return None
        results = self._mirrors_for_spec[find_hash]
        if not mirrors_to_check:
            return results
        mirror_urls = mirrors_to_check.values()
        return [r for r in results if r['mirror_url'] in mirror_urls]
        return self._mirrors_for_spec[find_hash]

    def update_spec(self, spec, found_list):
        """
@@ -571,13 +563,6 @@ def __init__(self, msg):
        super(NewLayoutException, self).__init__(msg)


class UnsignedPackageException(spack.error.SpackError):
    """
    Raised if installation of unsigned package is attempted without
    the use of ``--no-check-signature``.
    """


def compute_hash(data):
    return hashlib.sha256(data.encode('utf-8')).hexdigest()

@@ -766,16 +751,15 @@ def select_signing_key(key=None):
    return key


def sign_specfile(key, force, specfile_path):
    signed_specfile_path = '%s.sig' % specfile_path
    if os.path.exists(signed_specfile_path):
def sign_tarball(key, force, specfile_path):
    if os.path.exists('%s.asc' % specfile_path):
        if force:
            os.remove(signed_specfile_path)
            os.remove('%s.asc' % specfile_path)
        else:
            raise NoOverwriteException(signed_specfile_path)
            raise NoOverwriteException('%s.asc' % specfile_path)

    key = select_signing_key(key)
    spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)
    spack.util.gpg.sign(key, specfile_path, '%s.asc' % specfile_path)


def _fetch_spec_from_mirror(spec_url):
@@ -784,10 +768,7 @@ def _fetch_spec_from_mirror(spec_url):
    _, _, spec_file = web_util.read_from_url(spec_url)
    spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
    # Need full spec.json name or this gets confused with index.json.
    if spec_url.endswith('.json.sig'):
        specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
        s = Spec.from_dict(specfile_json)
    elif spec_url.endswith('.json'):
    if spec_url.endswith('.json'):
        s = Spec.from_json(spec_file_contents)
    elif spec_url.endswith('.yaml'):
        s = Spec.from_yaml(spec_file_contents)
@@ -848,9 +829,7 @@ def generate_package_index(cache_prefix):
        file_list = (
            entry
            for entry in web_util.list_url(cache_prefix)
            if entry.endswith('.yaml') or
            entry.endswith('spec.json') or
            entry.endswith('spec.json.sig'))
            if entry.endswith('.yaml') or entry.endswith('spec.json'))
    except KeyError as inst:
        msg = 'No packages at {0}: {1}'.format(cache_prefix, inst)
        tty.warn(msg)
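A minimal sketch contrasting the two signing modes visible in this hunk; the paths are illustrative and key stands in for a GPG key id (see select_signing_key()):

    import spack.util.gpg

    # Clearsigned spec file: the signature wraps the spec JSON itself.
    spack.util.gpg.sign(key, 'pkg.spec.json', 'pkg.spec.json.sig', clearsign=True)

    # Detached ASCII-armored signature written alongside the spec file.
    spack.util.gpg.sign(key, 'pkg.spec.json', 'pkg.spec.json.asc')
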
@@ -965,7 +944,7 @@ def _build_tarball(
    tmpdir = tempfile.mkdtemp()
    cache_prefix = build_cache_prefix(tmpdir)

    tarfile_name = tarball_name(spec, '.spack')
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
    tarfile_path = os.path.join(tarfile_dir, tarfile_name)
    spackfile_path = os.path.join(
@@ -988,12 +967,10 @@ def _build_tarball(
    spec_file = spack.store.layout.spec_file_path(spec)
    specfile_name = tarball_name(spec, '.spec.json')
    specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
    signed_specfile_path = '{0}.sig'.format(specfile_path)
    deprecated_specfile_path = specfile_path.replace('.spec.json', '.spec.yaml')

    remote_specfile_path = url_util.join(
        outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))
    remote_signed_specfile_path = '{0}.sig'.format(remote_specfile_path)
    remote_specfile_path_deprecated = url_util.join(
        outdir, os.path.relpath(deprecated_specfile_path,
                                os.path.realpath(tmpdir)))
@@ -1002,12 +979,9 @@ def _build_tarball(
    if force:
        if web_util.url_exists(remote_specfile_path):
            web_util.remove_url(remote_specfile_path)
        if web_util.url_exists(remote_signed_specfile_path):
            web_util.remove_url(remote_signed_specfile_path)
        if web_util.url_exists(remote_specfile_path_deprecated):
            web_util.remove_url(remote_specfile_path_deprecated)
    elif (web_util.url_exists(remote_specfile_path) or
          web_util.url_exists(remote_signed_specfile_path) or
          web_util.url_exists(remote_specfile_path_deprecated)):
        raise NoOverwriteException(url_util.format(remote_specfile_path))

@@ -1069,7 +1043,6 @@ def _build_tarball(
        raise ValueError(
            '{0} not a valid spec file type (json or yaml)'.format(
                spec_file))
    spec_dict['buildcache_layout_version'] = 1
    bchecksum = {}
    bchecksum['hash_algorithm'] = 'sha256'
    bchecksum['hash'] = checksum
@@ -1088,15 +1061,25 @@ def _build_tarball(
    # sign the tarball and spec file with gpg
    if not unsigned:
        key = select_signing_key(key)
        sign_specfile(key, force, specfile_path)
        sign_tarball(key, force, specfile_path)

    # put tarball, spec and signature files in .spack archive
    with closing(tarfile.open(spackfile_path, 'w')) as tar:
        tar.add(name=tarfile_path, arcname='%s' % tarfile_name)
        tar.add(name=specfile_path, arcname='%s' % specfile_name)
        if not unsigned:
            tar.add(name='%s.asc' % specfile_path,
                    arcname='%s.asc' % specfile_name)

    # cleanup file moved to archive
    os.remove(tarfile_path)
    if not unsigned:
        os.remove('%s.asc' % specfile_path)

    # push tarball and signed spec json to remote mirror
    web_util.push_to_url(
        spackfile_path, remote_spackfile_path, keep_original=False)
    web_util.push_to_url(
        signed_specfile_path if not unsigned else specfile_path,
        remote_signed_specfile_path if not unsigned else remote_specfile_path,
        keep_original=False)
        specfile_path, remote_specfile_path, keep_original=False)

    tty.debug('Buildcache for "{0}" written to \n {1}'
              .format(spec, remote_spackfile_path))
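For orientation, a sketch (illustrative path) of peeking inside a produced .spack archive; per _extract_inner_tarball() further down, the older layout bundles an inner .tar.gz plus the spec file and, when signed, a detached .asc:

    import tarfile

    with tarfile.open('pkg.spack') as tar:
        for member in tar.getnames():
            print(member)  # e.g. pkg.tar.gz, pkg.spec.json, pkg.spec.json.asc
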
@@ -1179,174 +1162,48 @@ def push(specs, push_url, specs_kwargs=None, **kwargs):
            warnings.warn(str(e))


def try_verify(specfile_path):
    """Utility function to attempt to verify a local file. Assumes the
    file is a clearsigned signature file.

    Args:
        specfile_path (str): Path to file to be verified.

    Returns:
        ``True`` if the signature could be verified, ``False`` otherwise.
    """
    suppress = config.get('config:suppress_gpg_warnings', False)

    try:
        spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
    except Exception:
        return False

    return True


def try_fetch(url_to_fetch):
    """Utility function to try and fetch a file from a url, stage it
    locally, and return the path to the staged file.

    Args:
        url_to_fetch (str): Url pointing to remote resource to fetch

    Returns:
        Path to locally staged resource or ``None`` if it could not be fetched.
    """
    stage = Stage(url_to_fetch, keep=True)
    stage.create()

    try:
        stage.fetch()
    except fs.FetchError:
        stage.destroy()
        return None

    return stage


def _delete_staged_downloads(download_result):
    """Clean up stages used to download tarball and specfile"""
    download_result['tarball_stage'].destroy()
    download_result['specfile_stage'].destroy()


def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
def download_tarball(spec, preferred_mirrors=None):
    """
    Download binary tarball for given package into stage area, returning
    path to downloaded tarball if successful, None otherwise.

    Args:
        spec (spack.spec.Spec): Concrete spec
        unsigned (bool): Whether or not to require signed binaries
        mirrors_for_spec (list): Optional list of concrete specs and mirrors
            obtained by calling binary_distribution.get_mirrors_for_spec().
            These will be checked in order first before looking in other
            configured mirrors.
        preferred_mirrors (list): If provided, this is a list of preferred
            mirror urls. Other configured mirrors will only be used if the
            tarball can't be retrieved from one of these.

    Returns:
        ``None`` if the tarball could not be downloaded (maybe also verified,
        depending on whether new-style signed binary packages were found).
        Otherwise, return an object indicating the path to the downloaded
        tarball, the path to the downloaded specfile (in the case of new-style
        buildcache), and whether or not the tarball is already verified.

        .. code-block:: JSON

           {
               "tarball_path": "path-to-locally-saved-tarfile",
               "specfile_path": "none-or-path-to-locally-saved-specfile",
               "signature_verified": "true-if-binary-pkg-was-already-verified"
           }
        Path to the downloaded tarball, or ``None`` if the tarball could not
        be downloaded from any configured mirrors.
    """
    if not spack.mirror.MirrorCollection():
        tty.die("Please add a spack mirror to allow " +
                "download of pre-compiled packages.")

    tarball = tarball_path_name(spec, '.spack')
    specfile_prefix = tarball_name(spec, '.spec')

    mirrors_to_try = []
    urls_to_try = []

    # Note on try_first and try_next:
    # mirrors_for_spec most likely came from spack caching remote
    # mirror indices locally and adding their specs to a local data
    # structure supporting quick lookup of concrete specs. Those
    # mirrors are likely a subset of all configured mirrors, and
    # we'll probably find what we need in one of them. But we'll
    # look in all configured mirrors if needed, as maybe the spec
    # we need was in an un-indexed mirror. No need to check any
    # mirror for the spec twice though.
    try_first = [i['mirror_url'] for i in mirrors_for_spec] if mirrors_for_spec else []
    try_next = [
        i.fetch_url for i in spack.mirror.MirrorCollection().values()
        if i.fetch_url not in try_first
    ]
    if preferred_mirrors:
        for preferred_url in preferred_mirrors:
            urls_to_try.append(url_util.join(
                preferred_url, _build_cache_relative_path, tarball))

    for url in try_first + try_next:
        mirrors_to_try.append({
            'specfile': url_util.join(url,
                _build_cache_relative_path, specfile_prefix),
            'spackfile': url_util.join(url,
                _build_cache_relative_path, tarball)
        })
    for mirror in spack.mirror.MirrorCollection().values():
        if not preferred_mirrors or mirror.fetch_url not in preferred_mirrors:
            urls_to_try.append(url_util.join(
                mirror.fetch_url, _build_cache_relative_path, tarball))

    tried_to_verify_sigs = []

    # Assumes we care more about finding a spec file by preferred ext
    # than by mirror priority. This can be made less complicated as
    # we remove support for deprecated spec formats and buildcache layouts.
    for ext in ['json.sig', 'json', 'yaml']:
        for mirror_to_try in mirrors_to_try:
            specfile_url = '{0}.{1}'.format(mirror_to_try['specfile'], ext)
            spackfile_url = mirror_to_try['spackfile']
            local_specfile_stage = try_fetch(specfile_url)
            if local_specfile_stage:
                local_specfile_path = local_specfile_stage.save_filename
                signature_verified = False

                if ext.endswith('.sig') and not unsigned:
                    # If we found a signed specfile at the root, try to verify
                    # the signature immediately. We will not download the
                    # tarball if we could not verify the signature.
                    tried_to_verify_sigs.append(specfile_url)
                    signature_verified = try_verify(local_specfile_path)
                    if not signature_verified:
                        tty.warn("Failed to verify: {0}".format(specfile_url))

                if unsigned or signature_verified or not ext.endswith('.sig'):
                    # We will download the tarball in one of three cases:
                    # 1. user asked for --no-check-signature
                    # 2. user didn't ask for --no-check-signature, but we
                    # found a spec.json.sig and verified the signature already
                    # 3. neither of the first two cases are true, but this file
                    # is *not* a signed json (not a spec.json.sig file). That
                    # means we already looked at all the mirrors and either didn't
                    # find any .sig files or couldn't verify any of them. But it
                    # is still possible to find an old style binary package where
                    # the signature is a detached .asc file in the outer archive
                    # of the tarball, and in that case, the only way to know is to
                    # download the tarball. This is a deprecated use case, so if
                    # something goes wrong during the extraction process (can't
                    # verify signature, checksum doesn't match) we will fail at
                    # that point instead of trying to download more tarballs from
                    # the remaining mirrors, looking for one we can use.
                    tarball_stage = try_fetch(spackfile_url)
                    if tarball_stage:
                        return {
                            'tarball_stage': tarball_stage,
                            'specfile_stage': local_specfile_stage,
                            'signature_verified': signature_verified,
                        }

                local_specfile_stage.destroy()

    # Falling through the nested loops means we exhaustively searched
    # for all known kinds of spec files on all mirrors and did not find
    # an acceptable one for which we could download a tarball.

    if tried_to_verify_sigs:
        raise NoVerifyException(("Spack found new style signed binary packages, "
                                 "but was unable to verify any of them. Please "
                                 "obtain and trust the correct public key. If "
                                 "these are public spack binaries, please see the "
                                 "spack docs for locations where keys can be found."))
    for try_url in urls_to_try:
        # stage the tarball into standard place
        stage = Stage(try_url, name="build_cache", keep=True)
        stage.create()
        try:
            stage.fetch()
            return stage.save_filename
        except fs.FetchError:
            continue

    tty.warn("download_tarball() was unable to download " +
             "{0} from any configured mirrors".format(spec))
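A short sketch of consuming the dict documented above for the new-style download_tarball() (spec is assumed concrete):

    result = download_tarball(spec, unsigned=False)
    if result:
        tarball_path = result['tarball_stage'].save_filename
        if result['signature_verified']:
            print('signature already verified for {0}'.format(tarball_path))
    else:
        print('tarball not found on any configured mirror')
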
@@ -1520,55 +1377,7 @@ def is_backup_file(file):
    relocate.relocate_text(text_names, prefix_to_prefix_text)


def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(extract_to, tarfile_name)
    deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
    deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
    json_name = tarball_name(spec, '.spec.json')
    json_path = os.path.join(extract_to, json_name)
    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(extract_to)
    # some buildcache tarfiles use bzip2 compression
    if not os.path.exists(tarfile_path):
        tarfile_name = tarball_name(spec, '.tar.bz2')
        tarfile_path = os.path.join(extract_to, tarfile_name)

    if os.path.exists(json_path):
        specfile_path = json_path
    elif os.path.exists(deprecated_yaml_path):
        specfile_path = deprecated_yaml_path
    else:
        raise ValueError('Cannot find spec file for {0}.'.format(extract_to))

    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            suppress = config.get('config:suppress_gpg_warnings', False)
            try:
                spack.util.gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
            except Exception:
                raise NoVerifyException("Spack was unable to verify package "
                                        "signature, please obtain and trust the "
                                        "correct public key.")
        else:
            raise UnsignedPackageException(
                "To install unsigned packages, use the --no-check-signature option.")
    # get the sha256 checksum of the tarball
    local_checksum = checksum_tarball(tarfile_path)

    # if the checksums don't match don't install
    if local_checksum != remote_checksum['hash']:
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")

    return tarfile_path


def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area
@@ -1579,56 +1388,66 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
    else:
        raise NoOverwriteException(str(spec.prefix))

    specfile_path = download_result['specfile_stage'].save_filename
    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_is_json = True
    deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
    deprecated_yaml_path = os.path.join(tmpdir, deprecated_yaml_name)
    json_name = tarball_name(spec, '.spec.json')
    json_path = os.path.join(tmpdir, json_name)
    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    # some buildcache tarfiles use bzip2 compression
    if not os.path.exists(tarfile_path):
        tarfile_name = tarball_name(spec, '.tar.bz2')
        tarfile_path = os.path.join(tmpdir, tarfile_name)

    if os.path.exists(json_path):
        specfile_path = json_path
    elif os.path.exists(deprecated_yaml_path):
        specfile_is_json = False
        specfile_path = deprecated_yaml_path
    else:
        raise ValueError('Cannot find spec file for {0}.'.format(tmpdir))

    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                suppress = config.get('config:suppress_gpg_warnings', False)
                spack.util.gpg.verify(
                    '%s.asc' % specfile_path, specfile_path, suppress)
            except Exception as e:
                shutil.rmtree(tmpdir)
                raise e
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        if specfile_path.endswith('.json.sig'):
            spec_dict = Spec.extract_json_from_clearsig(content)
        elif specfile_path.endswith('.json'):
        if specfile_is_json:
            spec_dict = sjson.load(content)
        else:
            spec_dict = syaml.load(content)

    bchecksum = spec_dict['binary_cache_checksum']
    filename = download_result['tarball_stage'].save_filename
    signature_verified = download_result['signature_verified']
    tmpdir = None

    if ('buildcache_layout_version' not in spec_dict or
            int(spec_dict['buildcache_layout_version']) < 1):
        # Handle the older buildcache layout where the .spack file
        # contains a spec json/yaml, maybe an .asc file (signature),
        # and another tarball containing the actual install tree.
        tmpdir = tempfile.mkdtemp()
        try:
            tarfile_path = _extract_inner_tarball(
                spec, filename, tmpdir, unsigned, bchecksum)
        except Exception as e:
            _delete_staged_downloads(download_result)
            shutil.rmtree(tmpdir)
            raise e
    else:
        # Newer buildcache layout: the .spack file contains just
        # the install tree, the signature, if it exists, is
        # wrapped around the spec.json at the root. If sig verify
        # was required, it was already done before downloading
        # the tarball.
        tarfile_path = filename

        if not unsigned and not signature_verified:
            raise UnsignedPackageException(
                "To install unsigned packages, use the --no-check-signature option.")

    # compute the sha256 checksum of the tarball
    local_checksum = checksum_tarball(tarfile_path)

    # if the checksums don't match don't install
    if local_checksum != bchecksum['hash']:
        _delete_staged_downloads(download_result)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")
    # if the checksums don't match don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")

    new_relative_prefix = str(os.path.relpath(spec.prefix,
                                              spack.store.layout.root))
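For reference, a streaming SHA-256 in the spirit of the checksum_tarball() helper used above (the real helper lives elsewhere in this module):

    import hashlib

    def sha256_of(path, blocksize=1 << 20):
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            for block in iter(lambda: f.read(blocksize), b''):
                h.update(block)
        return h.hexdigest()
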
@@ -1653,13 +1472,11 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
    try:
        tar.extractall(path=extract_tmp)
    except Exception as e:
        _delete_staged_downloads(download_result)
        shutil.rmtree(extracted_dir)
        raise e
    try:
        shutil.move(extracted_dir, spec.prefix)
    except Exception as e:
        _delete_staged_downloads(download_result)
        shutil.rmtree(extracted_dir)
        raise e
    os.remove(tarfile_path)
@@ -1678,11 +1495,9 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
            spec_id = spec.format('{name}/{hash:7}')
            tty.warn('No manifest file in tarball for spec %s' % spec_id)
    finally:
        if tmpdir:
            shutil.rmtree(tmpdir)
        shutil.rmtree(tmpdir)
        if os.path.exists(filename):
            os.remove(filename)
        _delete_staged_downloads(download_result)


def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None):
@@ -1710,23 +1525,21 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
        warnings.warn("Package for spec {0} already installed.".format(spec.format()))
        return

    download_result = download_tarball(spec, unsigned)
    if not download_result:
    tarball = download_tarball(spec)
    if not tarball:
        msg = 'download of binary cache file for spec "{0}" failed'
        raise RuntimeError(msg.format(spec.format()))

    if sha256:
        checker = spack.util.crypto.Checker(sha256)
        msg = 'cannot verify checksum for "{0}" [expected={1}]'
        tarball_path = download_result['tarball_stage'].save_filename
        msg = msg.format(tarball_path, sha256)
        if not checker.check(tarball_path):
            _delete_staged_downloads(download_result)
        msg = msg.format(tarball, sha256)
        if not checker.check(tarball):
            raise spack.binary_distribution.NoChecksumException(msg)
        tty.debug('Verified SHA256 checksum of the build cache')

    tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
    extract_tarball(spec, download_result, allow_root, unsigned, force)
    extract_tarball(spec, tarball, allow_root, unsigned, force)
    spack.hooks.post_install(spec)
    spack.store.db.add(spec, spack.store.layout)

@@ -1752,8 +1565,6 @@ def try_direct_fetch(spec, mirrors=None):
    """
    deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_name = tarball_name(spec, '.spec.json')
    signed_specfile_name = tarball_name(spec, '.spec.json.sig')
    specfile_is_signed = False
    specfile_is_json = True
    found_specs = []

@@ -1762,35 +1573,24 @@ def try_direct_fetch(spec, mirrors=None):
            mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name)
        buildcache_fetch_url_json = url_util.join(
            mirror.fetch_url, _build_cache_relative_path, specfile_name)
        buildcache_fetch_url_signed_json = url_util.join(
            mirror.fetch_url, _build_cache_relative_path, signed_specfile_name)
        try:
            _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
            specfile_is_signed = True
            _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
            try:
                _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
                try:
                    _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
                    specfile_is_json = False
                except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
                    tty.debug('Did not find {0} on {1}'.format(
                        specfile_name, buildcache_fetch_url_signed_json), url_err)
                    tty.debug('Did not find {0} on {1}'.format(
                        specfile_name, buildcache_fetch_url_json), url_err_x)
                    tty.debug('Did not find {0} on {1}'.format(
                        specfile_name, buildcache_fetch_url_yaml), url_err_y)
                    continue
                _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
                specfile_is_json = False
            except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
                tty.debug('Did not find {0} on {1}'.format(
                    specfile_name, buildcache_fetch_url_json), url_err)
                tty.debug('Did not find {0} on {1}'.format(
                    specfile_name, buildcache_fetch_url_yaml), url_err_y)
                continue
        specfile_contents = codecs.getreader('utf-8')(fs).read()

        # read the spec from the build cache file. All specs in build caches
        # are concrete (as they are built) so we need to mark this spec
        # concrete on read-in.
        if specfile_is_signed:
            specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
            fetched_spec = Spec.from_dict(specfile_json)
        elif specfile_is_json:
        if specfile_is_json:
            fetched_spec = Spec.from_json(specfile_contents)
        else:
            fetched_spec = Spec.from_yaml(specfile_contents)
@@ -1827,7 +1627,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
        tty.debug("No Spack mirrors are currently configured")
        return {}

    results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
    results = binary_index.find_built_spec(spec)

    # Maybe we just didn't have the latest information from the mirror, so
    # try to fetch directly, unless we are only considering the indices.
@@ -2117,8 +1917,7 @@ def download_single_spec(
            'path': local_tarball_path,
            'required': True,
        }, {
            'url': [tarball_name(concrete_spec, '.spec.json.sig'),
                    tarball_name(concrete_spec, '.spec.json'),
            'url': [tarball_name(concrete_spec, '.spec.json'),
                    tarball_name(concrete_spec, '.spec.yaml')],
            'path': destination,
            'required': True,

@@ -5,7 +5,6 @@
from __future__ import print_function

import contextlib
import copy
import fnmatch
import functools
import json
@@ -22,7 +21,6 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.config
@@ -38,11 +36,6 @@
import spack.util.environment
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url

#: Name of the file containing metadata about the bootstrapping source
METADATA_YAML_FILENAME = 'metadata.yaml'

#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}
@@ -210,43 +203,12 @@ def _executables_in_store(executables, query_spec, query_info=None):
    return False


class _BootstrapperBase(object):
    """Base class to derive types that can bootstrap software for Spack"""
    config_scope_name = ''

@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(object):
    """Install the software needed during bootstrapping from a buildcache."""
    def __init__(self, conf):
        self.name = conf['name']
        self.url = conf['info']['url']

    @property
    def mirror_url(self):
        # Absolute paths
        if os.path.isabs(self.url):
            return spack.util.url.format(self.url)

        # Check for :// and assume it's a url if we find it
        if '://' in self.url:
            return self.url

        # Otherwise, it's a relative path
        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            self.config_scope_name, {'mirrors:': {self.name: self.mirror_url}}
        )


@_bootstrapper(type='buildcache')
class _BuildcacheBootstrapper(_BootstrapperBase):
    """Install the software needed during bootstrapping from a buildcache."""

    config_scope_name = 'bootstrap_buildcache'

    def __init__(self, conf):
        super(_BuildcacheBootstrapper, self).__init__(conf)
        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
        self.last_search = None

    @staticmethod
@@ -269,8 +231,9 @@ def _spec_and_platform(abstract_spec_str):
    def _read_metadata(self, package_name):
        """Return metadata about the given package."""
        json_filename = '{0}.json'.format(package_name)
        json_dir = self.metadata_dir
        json_path = os.path.join(json_dir, json_filename)
        json_path = os.path.join(
            spack.paths.share_path, 'bootstrap', self.name, json_filename
        )
        with open(json_path) as f:
            data = json.load(f)
        return data
@@ -344,6 +307,12 @@ def _install_and_test(
                        return True
        return False

    @property
    def mirror_scope(self):
        return spack.config.InternalConfigScope(
            'bootstrap_buildcache', {'mirrors:': {self.name: self.url}}
        )

    def try_import(self, module, abstract_spec_str):
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
@@ -373,13 +342,9 @@ def try_search_path(self, executables, abstract_spec_str):


@_bootstrapper(type='install')
class _SourceBootstrapper(_BootstrapperBase):
class _SourceBootstrapper(object):
    """Install the software needed during bootstrapping from sources."""
    config_scope_name = 'bootstrap_source'

    def __init__(self, conf):
        super(_SourceBootstrapper, self).__init__(conf)
        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
        self.conf = conf
        self.last_search = None

@@ -412,8 +377,7 @@ def try_import(self, module, abstract_spec_str):
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install(fail_fast=True)
        concrete_spec.package.do_install(fail_fast=True)

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
@@ -426,8 +390,6 @@ def try_search_path(self, executables, abstract_spec_str):
            self.last_search = info
            return True

        tty.info("Bootstrapping {0} from sources".format(abstract_spec_str))

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()
@@ -440,8 +402,7 @@ def try_search_path(self, executables, abstract_spec_str):

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install()
        concrete_spec.package.do_install()
        if _executables_in_store(executables, concrete_spec, query_info=info):
            self.last_search = info
            return True
@@ -456,11 +417,11 @@ def _make_bootstrapper(conf):
    return _bootstrap_methods[btype](conf)


def source_is_enabled_or_raise(conf):
    """Raise ValueError if the source is not enabled for bootstrapping"""
def _source_is_trusted(conf):
    trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
    if not trusted.get(name, False):
        raise ValueError('source is not trusted')
    if name not in trusted:
        return False
    return trusted[name]


def spec_for_current_python():
@@ -525,26 +486,36 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
        return

    abstract_spec = abstract_spec or module
    source_configs = spack.config.get('bootstrap:sources', [])

    h = GroupedExceptionHandler()
    errors = {}

    for current_config in bootstrapping_sources():
        with h.forward(current_config['name']):
            source_is_enabled_or_raise(current_config)
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
                   'not trusted').format(module, current_config['name'])
            tty.debug(msg)
            continue

            b = _make_bootstrapper(current_config)
        b = _make_bootstrapper(current_config)
        try:
            if b.try_import(module, abstract_spec):
                return
        except Exception as e:
            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
            tty.debug(msg.format(module, str(e)))
            errors[current_config['name']] = e

    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(module)  # noqa: E501
    msg = 'cannot bootstrap the "{0}" Python module '.format(module)
    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
    if abstract_spec:
        msg += 'from spec "{0}" '.format(abstract_spec)
    if tty.is_debug():
        msg += h.grouped_message(with_tracebacks=True)
    else:
        msg += h.grouped_message(with_tracebacks=False)
        msg += '\nRun `spack --debug ...` for more detailed errors'
        msg += ' from spec "{0}"'.format(abstract_spec)
    msg += ' due to the following failures:\n'
    for method in errors:
        err = errors[method]
        msg += " '{0}' raised {1}: {2}\n".format(
            method, err.__class__.__name__, str(err))
    msg += ' Please run `spack -d spec zlib` for more verbose error messages'
    raise ImportError(msg)

@@ -567,14 +538,16 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
            return cmd

    executables_str = ', '.join(executables)
    source_configs = spack.config.get('bootstrap:sources', [])
    for current_config in source_configs:
        if not _source_is_trusted(current_config):
            msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
                   'not trusted').format(executables_str, current_config['name'])
            tty.debug(msg)
            continue

    h = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with h.forward(current_config['name']):
            source_is_enabled_or_raise(current_config)

            b = _make_bootstrapper(current_config)
        b = _make_bootstrapper(current_config)
        try:
            if b.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
                concrete_spec, cmd = b.last_search['spec'], b.last_search['command']
@@ -589,16 +562,14 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
                )
                cmd.add_default_envmod(env_mods)
                return cmd
        except Exception as e:
            msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
            tty.debug(msg.format(executables_str, str(e)))

    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(executables_str)  # noqa: E501
    msg = 'cannot bootstrap any of the {0} executables '.format(executables_str)
    # We couldn't import in any way, so raise an import error
    msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
    if abstract_spec:
        msg += 'from spec "{0}" '.format(abstract_spec)
    if tty.is_debug():
        msg += h.grouped_message(with_tracebacks=True)
    else:
        msg += h.grouped_message(with_tracebacks=False)
        msg += '\nRun `spack --debug ...` for more detailed errors'
        msg += ' from spec "{0}"'.format(abstract_spec)
    raise RuntimeError(msg)

@@ -855,19 +826,6 @@ def ensure_flake8_in_path_or_raise():
    return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)


def all_root_specs(development=False):
    """Return a list of all the root specs that may be used to bootstrap Spack.

    Args:
        development (bool): if True include dev dependencies
    """
    specs = [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
    if development:
        specs += [isort_root_spec(), mypy_root_spec(),
                  black_root_spec(), flake8_root_spec()]
    return specs


def _missing(name, purpose, system_only=True):
    """Message to be printed if an executable is not found"""
    msg = '[{2}] MISSING "{0}": {1}'
@@ -1005,23 +963,3 @@ def status_message(section):
        msg += '\n'
    msg = msg.format(pass_token if not missing_software else fail_token)
    return msg, missing_software


def bootstrapping_sources(scope=None):
    """Return the list of configured sources of software for bootstrapping Spack

    Args:
        scope (str or None): if a valid configuration scope is given, return the
            list only from that scope
    """
    source_configs = spack.config.get('bootstrap:sources', default=None, scope=scope)
    source_configs = source_configs or []
    list_of_sources = []
    for entry in source_configs:
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry['metadata'])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml) as f:
            current.update(spack.util.spack_yaml.load(f))
        list_of_sources.append(current)
    return list_of_sources

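A sketch of the configuration shape bootstrapping_sources() reads; the name and path below are illustrative:

    bootstrap:
      sources:
      - name: github-actions
        metadata: $spack/share/spack/bootstrap/github-actions

Each returned dict is the bootstrap:sources entry merged with the metadata.yaml found in its metadata directory.
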
@@ -210,10 +210,6 @@ def std_initconfig_entries(self):
            "#------------------{0}\n".format("-" * 60),
        ]

    def initconfig_package_entries(self):
        """This method is to be overwritten by the package"""
        return []

    def initconfig(self, spec, prefix):
        cache_entries = (self.std_initconfig_entries() +
                         self.initconfig_compiler_entries() +

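A sketch of a package overriding the hook shown above; the cache entry is illustrative:

    class MyPackage(CachedCMakePackage):
        def initconfig_package_entries(self):
            return ['set(ENABLE_TESTS ON CACHE BOOL "")']
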
@@ -33,6 +33,7 @@
import spack.util.executable as exe
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.error import SpackError
from spack.spec import Spec
@@ -41,8 +42,10 @@
    'always',
]

SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
                                           'shared_pr_mirror')
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]

spack_gpg = spack.main.SpackCommand('gpg')
spack_compiler = spack.main.SpackCommand('compiler')
@@ -196,11 +199,6 @@ def _get_cdash_build_name(spec, build_group):
        spec.name, spec.version, spec.compiler, spec.architecture, build_group)


def _remove_reserved_tags(tags):
    """Convenience function to strip reserved tags from jobs"""
    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]


def _get_spec_string(spec):
    format_elements = [
        '{name}{@version}',
@@ -233,10 +231,8 @@ def _add_dependency(spec_label, dep_label, deps):
|
||||
deps[spec_label].add(dep_label)
|
||||
|
||||
|
||||
def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
|
||||
mirrors_to_check=None):
|
||||
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only,
|
||||
mirrors_to_check=mirrors_to_check)
|
||||
def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
|
||||
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only)
|
||||
|
||||
if spec_deps_obj:
|
||||
dependencies = spec_deps_obj['dependencies']
|
||||
@@ -253,7 +249,7 @@ def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
|
||||
_add_dependency(entry['spec'], entry['depends'], deps)
|
||||
|
||||
|
||||
def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
|
||||
def stage_spec_jobs(specs, check_index_only=False):
|
||||
"""Take a set of release specs and generate a list of "stages", where the
|
||||
jobs in any stage are dependent only on jobs in previous stages. This
|
||||
allows us to maximize build parallelism within the gitlab-ci framework.
|
||||
@@ -265,8 +261,6 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
|
||||
are up to date on those mirrors. This flag limits that search to
|
||||
the binary cache indices on those mirrors to speed the process up,
|
||||
even though there is no garantee the index is up to date.
|
||||
mirrors_to_checK: Optional mapping giving mirrors to check instead of
|
||||
any configured mirrors.
|
||||
|
||||
Returns: A tuple of information objects describing the specs, dependencies
|
||||
and stages:
|
||||
@@ -303,8 +297,8 @@ def _remove_satisfied_deps(deps, satisfied_list):
|
||||
deps = {}
|
||||
spec_labels = {}
|
||||
|
||||
_get_spec_dependencies(specs, deps, spec_labels, check_index_only=check_index_only,
|
||||
mirrors_to_check=mirrors_to_check)
|
||||
_get_spec_dependencies(
|
||||
specs, deps, spec_labels, check_index_only=check_index_only)
|
||||
|
||||
# Save the original deps, as we need to return them at the end of the
|
||||
# function. In the while loop below, the "dependencies" variable is
|
||||
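Reviewer note: the staging that `stage_spec_jobs()` computes is plain topological batching. A self-contained sketch of the idea, under stated assumptions (this is generic Python, not the Spack internals; the dict-of-sets shape and helper name are illustrative):

    # Peel off, round by round, the jobs whose dependencies are all satisfied.
    # Assumes an acyclic graph; 'deps' maps a job label to the set of labels
    # it depends on.
    def stages_from_deps(deps, all_labels):
        unstaged = set(all_labels)
        stages = []
        while unstaged:
            # jobs with no unsatisfied dependencies can run in this stage
            next_stage = {label for label in unstaged if not deps.get(label)}
            stages.append(sorted(next_stage))
            unstaged -= next_stage
            # drop satisfied jobs from every remaining dependency set
            deps = {label: dep_set - next_stage
                    for label, dep_set in deps.items() if label in unstaged}
        return stages

    # stages_from_deps({'b': {'a'}, 'c': {'a', 'b'}}, ['a', 'b', 'c'])
    # -> [['a'], ['b'], ['c']]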
@@ -346,7 +340,7 @@ def _print_staging_summary(spec_labels, dependencies, stages):
_get_spec_string(s)))


def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
def _compute_spec_deps(spec_list, check_index_only=False):
"""
Computes all the dependencies for the spec(s) and generates a JSON
object which provides both a list of unique spec names as well as a

@@ -419,7 +413,7 @@ def append_dep(s, d):
continue

up_to_date_mirrors = bindist.get_mirrors_for_spec(
spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only)
spec=s, index_only=check_index_only)

skey = _spec_deps_key(s)
spec_labels[skey] = {

@@ -608,8 +602,8 @@ def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True)
def generate_gitlab_ci_yaml(env, print_summary, output_file,
prune_dag=False, check_index_only=False,
run_optimizer=False, use_dependencies=False,
artifacts_root=None, remote_mirror_override=None):
""" Generate a gitlab yaml file to run a dynamic child pipeline from
artifacts_root=None):
""" Generate a gitlab yaml file to run a dynamic child pipeline from
the spec matrix in the active environment.

Arguments:

@@ -635,10 +629,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
artifacts_root (str): Path where artifacts like logs, environment
files (spack.yaml, spack.lock), etc should be written. GitLab
requires this to be within the project directory.
remote_mirror_override (str): Typically only needed when one spack.yaml
is used to populate several mirrors with binaries, based on some
criteria. Spack protected pipelines populate different mirrors based
on branch name, facilitated by this option.
"""
with spack.concretize.disable_compiler_existence_check():
with env.write_transaction():

@@ -688,19 +678,17 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
for s in affected_specs:
tty.debug(' {0}'.format(s.name))

# Downstream jobs will "need" (depend on, for both scheduling and
# artifacts, which include spack.lock file) this pipeline generation
# job by both name and pipeline id. If those environment variables
# do not exist, then maybe this is just running in a shell, in which
# case, there is no expectation gitlab will ever run the generated
# pipeline and those environment variables do not matter.
generate_job_name = os.environ.get('CI_JOB_NAME', 'job-does-not-exist')
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', 'pipeline-does-not-exist')
generate_job_name = os.environ.get('CI_JOB_NAME', None)
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)

# Values: "spack_pull_request", "spack_protected_branch", or not set
spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'

spack_buildcache_copy = os.environ.get('SPACK_COPY_BUILDCACHE', None)
spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
pr_mirror_url = None
if spack_pr_branch:
pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
spack_pr_branch)

if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
tty.die('spack ci generate requires an env containing a mirror')

@@ -755,25 +743,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'strip-compilers': False,
})

# If a remote mirror override (alternate buildcache destination) was
# specified, add it here in case it has already built hashes we might
# generate.
mirrors_to_check = None
if remote_mirror_override:
if spack_pipeline_type == 'spack_protected_branch':
# Overriding the main mirror in this case might result
# in skipping jobs on a release pipeline because specs are
# up to date in develop. Eventually we want to notice and take
# advantage of this by scheduling a job to copy the spec from
# develop to the release, but until we have that, this makes
# sure we schedule a rebuild job if the spec isn't already in
# override mirror.
mirrors_to_check = {
'override': remote_mirror_override
}
else:
spack.mirror.add(
'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())
# Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
# be up to date in one of those and thus not need to be rebuilt.
if pr_mirror_url:
spack.mirror.add(
'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
spack.mirror.add('ci_shared_pr_mirror',
SPACK_SHARED_PR_MIRROR_URL,
cfg.default_modify_scope())

pipeline_artifacts_dir = artifacts_root
if not pipeline_artifacts_dir:

@@ -848,13 +825,11 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
phase_spec.concretize()
staged_phases[phase_name] = stage_spec_jobs(
concrete_phase_specs,
check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)
check_index_only=check_index_only)
finally:
# Clean up remote mirror override if enabled
if remote_mirror_override:
if spack_pipeline_type != 'spack_protected_branch':
spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
# Clean up PR mirror if enabled
if pr_mirror_url:
spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())

all_job_names = []
output_object = {}

@@ -914,14 +889,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,

tags = [tag for tag in runner_attribs['tags']]

if spack_pipeline_type is not None:
# For spack pipelines "public" and "protected" are reserved tags
tags = _remove_reserved_tags(tags)
if spack_pipeline_type == 'spack_protected_branch':
tags.extend(['aws', 'protected'])
elif spack_pipeline_type == 'spack_pull_request':
tags.extend(['public'])

variables = {}
if 'variables' in runner_attribs:
variables.update(runner_attribs['variables'])

@@ -1207,10 +1174,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
cleanup_job)

if 'tags' in cleanup_job:
service_tags = _remove_reserved_tags(cleanup_job['tags'])
cleanup_job['tags'] = service_tags

cleanup_job['stage'] = 'cleanup-temp-storage'
cleanup_job['script'] = [
'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.format(

@@ -1218,74 +1181,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
cleanup_job['when'] = 'always'
cleanup_job['retry'] = service_job_retries
cleanup_job['interruptible'] = True

output_object['cleanup'] = cleanup_job

if ('signing-job-attributes' in gitlab_ci and
spack_pipeline_type == 'spack_protected_branch'):
# External signing: generate a job to check and sign binary pkgs
stage_names.append('stage-sign-pkgs')
signing_job_config = gitlab_ci['signing-job-attributes']
signing_job = {}

signing_job_attrs_to_copy = [
'image',
'tags',
'variables',
'before_script',
'script',
'after_script',
]

_copy_attributes(signing_job_attrs_to_copy,
signing_job_config,
signing_job)

signing_job_tags = []
if 'tags' in signing_job:
signing_job_tags = _remove_reserved_tags(signing_job['tags'])

for tag in ['aws', 'protected', 'notary']:
if tag not in signing_job_tags:
signing_job_tags.append(tag)
signing_job['tags'] = signing_job_tags

signing_job['stage'] = 'stage-sign-pkgs'
signing_job['when'] = 'always'
signing_job['retry'] = {
'max': 2,
'when': ['always']
}
signing_job['interruptible'] = True

output_object['sign-pkgs'] = signing_job

if spack_buildcache_copy:
# Generate a job to copy the contents from wherever the builds are getting
# pushed to the url specified in the "SPACK_BUILDCACHE_COPY" environment
# variable.
src_url = remote_mirror_override or remote_mirror_url
dest_url = spack_buildcache_copy

stage_names.append('stage-copy-buildcache')
copy_job = {
'stage': 'stage-copy-buildcache',
'tags': ['spack', 'public', 'medium', 'aws', 'x86_64'],
'image': 'ghcr.io/spack/python-aws-bash:0.0.1',
'when': 'on_success',
'interruptible': True,
'retry': service_job_retries,
'script': [
'. ./share/spack/setup-env.sh',
'spack --version',
'aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}'.format(
src_url, dest_url)
]
}

output_object['copy-mirror'] = copy_job

if rebuild_index_enabled:
# Add a final job to regenerate the index
stage_names.append('stage-rebuild-index')

@@ -1296,13 +1194,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
final_job)

if 'tags' in final_job:
service_tags = _remove_reserved_tags(final_job['tags'])
final_job['tags'] = service_tags

index_target_mirror = mirror_urls[0]
if remote_mirror_override:
index_target_mirror = remote_mirror_override
if is_pr_pipeline:
index_target_mirror = pr_mirror_url

final_job['stage'] = 'stage-rebuild-index'
final_job['script'] = [

@@ -1311,7 +1205,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
final_job['when'] = 'always'
final_job['retry'] = service_job_retries
final_job['interruptible'] = True

output_object['rebuild-index'] = final_job

@@ -1344,9 +1237,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
}

if remote_mirror_override:
(output_object['variables']
['SPACK_REMOTE_MIRROR_OVERRIDE']) = remote_mirror_override
if pr_mirror_url:
output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url

spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
if spack_stack_name:
@@ -8,10 +8,7 @@
import argparse
import os
import re
import shlex
import sys
from textwrap import dedent
from typing import List, Tuple

import ruamel.yaml as yaml
import six

@@ -150,58 +147,6 @@ def get_command(cmd_name):
return getattr(get_module(cmd_name), pname)


class _UnquotedFlags(object):
"""Use a heuristic in `.extract()` to detect whether the user is trying to set
multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').

If the heuristic finds a match (which can be checked with `__bool__()`), a warning
message explaining how to quote multiple flags correctly can be generated with
`.report()`.
"""

flags_arg_pattern = re.compile(
r'^({0})=([^\'"].*)$'.format(
'|'.join(spack.spec.FlagMap.valid_compiler_flags()),
))

def __init__(self, all_unquoted_flag_pairs):
# type: (List[Tuple[re.Match, str]]) -> None
self._flag_pairs = all_unquoted_flag_pairs

def __bool__(self):
# type: () -> bool
return bool(self._flag_pairs)

@classmethod
def extract(cls, sargs):
# type: (str) -> _UnquotedFlags
all_unquoted_flag_pairs = [] # type: List[Tuple[re.Match, str]]
prev_flags_arg = None
for arg in shlex.split(sargs):
if prev_flags_arg is not None:
all_unquoted_flag_pairs.append((prev_flags_arg, arg))
prev_flags_arg = cls.flags_arg_pattern.match(arg)
return cls(all_unquoted_flag_pairs)

def report(self):
# type: () -> str
single_errors = [
'({0}) {1} {2} => {3}'.format(
i + 1, match.group(0), next_arg,
'{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
)
for i, (match, next_arg) in enumerate(self._flag_pairs)
]
return dedent("""\
Some compiler or linker flags were provided without quoting their arguments,
which now causes spack to try to parse the *next* argument as a spec component
such as a variant instead of an additional compiler or linker flag. If the
intent was to set multiple flags, try quoting them together as described below.

Possible flag quotation errors (with the correctly-quoted version after the =>):
{0}""").format('\n'.join(single_errors))


def parse_specs(args, **kwargs):
"""Convenience function for parsing arguments from specs. Handles common
exceptions and dies if there are errors.

@@ -210,28 +155,29 @@ def parse_specs(args, **kwargs):
normalize = kwargs.get('normalize', False)
tests = kwargs.get('tests', False)

sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(args)
unquoted_flags = _UnquotedFlags.extract(sargs)

try:
sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(spack.util.string.quote(args))
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests) # implies normalize
elif normalize:
spec.normalize(tests=tests)

return specs

except spack.spec.SpecParseError as e:
msg = e.message + "\n" + str(e.string) + "\n"
msg += (e.pos + 2) * " " + "^"
raise spack.error.SpackError(msg)

except spack.error.SpecError as e:

msg = e.message
if e.long_message:
msg += e.long_message
if unquoted_flags:
msg += '\n\n'
msg += unquoted_flags.report()

raise spack.error.SpackError(msg)
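Reviewer note: a short usage sketch of the `_UnquotedFlags` heuristic shown (on the removed side) in the hunk above. The input string is hypothetical, and it assumes 'cflags' is among `spack.spec.FlagMap.valid_compiler_flags()`:

    # 'cflags=-Os -pipe' parses as the flag 'cflags=-Os' plus a stray '-pipe';
    # the heuristic pairs them up and suggests the quoted form.
    flags = _UnquotedFlags.extract('gcc cflags=-Os -pipe')
    if flags:
        print(flags.report())
        # ... (1) cflags=-Os -pipe => cflags="-Os -pipe"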
@@ -6,9 +6,7 @@

import os.path
import shutil
import tempfile

import llnl.util.filesystem
import llnl.util.tty
import llnl.util.tty.color

@@ -17,9 +15,6 @@
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path

description = "manage bootstrap configuration"

@@ -27,38 +22,6 @@
level = "long"


# Tarball to be downloaded if binary packages are requested in a local mirror
BINARY_TARBALL = 'https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz'

#: Subdirectory where to create the mirror
LOCAL_MIRROR_DIR = 'bootstrap_cache'

# Metadata for a generated binary mirror
BINARY_METADATA = {
'type': 'buildcache',
'description': ('Buildcache copied from a public tarball available on Github. '
'The sha256 checksum of binaries is checked before installation.'),
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR),
'homepage': 'https://github.com/spack/spack-bootstrap-mirrors',
'releases': 'https://github.com/spack/spack-bootstrap-mirrors/releases',
'tarball': BINARY_TARBALL
}
}

CLINGO_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json'
GNUPG_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json'

# Metadata for a generated source mirror
SOURCE_METADATA = {
'type': 'install',
'description': 'Mirror with software needed to bootstrap Spack',
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR)
}
}


def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar

@@ -104,61 +67,24 @@ def setup_parser(subparser):
)

list = sp.add_parser(
'list', help='list all the sources of software to bootstrap Spack'
'list', help='list the methods available for bootstrapping'
)
_add_scope_option(list)

trust = sp.add_parser(
'trust', help='trust a bootstrapping source'
'trust', help='trust a bootstrapping method'
)
_add_scope_option(trust)
trust.add_argument(
'name', help='name of the source to be trusted'
'name', help='name of the method to be trusted'
)

untrust = sp.add_parser(
'untrust', help='untrust a bootstrapping source'
'untrust', help='untrust a bootstrapping method'
)
_add_scope_option(untrust)
untrust.add_argument(
'name', help='name of the source to be untrusted'
)

add = sp.add_parser(
'add', help='add a new source for bootstrapping'
)
_add_scope_option(add)
add.add_argument(
'--trust', action='store_true',
help='trust the source immediately upon addition')
add.add_argument(
'name', help='name of the new source of software'
)
add.add_argument(
'metadata_dir', help='directory where to find metadata files'
)

remove = sp.add_parser(
'remove', help='remove a bootstrapping source'
)
remove.add_argument(
'name', help='name of the source to be removed'
)

mirror = sp.add_parser(
'mirror', help='create a local mirror to bootstrap Spack'
)
mirror.add_argument(
'--binary-packages', action='store_true',
help='download public binaries in the mirror'
)
mirror.add_argument(
'--dev', action='store_true',
help='download dev dependencies too'
)
mirror.add_argument(
metavar='DIRECTORY', dest='root_dir',
help='root directory in which to create the mirror and metadata'
'name', help='name of the method to be untrusted'
)


@@ -211,7 +137,10 @@ def _root(args):

def _list(args):
sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
sources = spack.config.get(
'bootstrap:sources', default=None, scope=args.scope
)

if not sources:
llnl.util.tty.msg(
"No method available for bootstrapping Spack's dependencies"

@@ -320,121 +249,6 @@ def _status(args):
print()


def _add(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]

# If the name is already used error out
if args.name in names:
msg = 'a source named "{0}" already exists. Please choose a different name'
raise RuntimeError(msg.format(args.name))

# Check that the metadata file exists
metadata_dir = spack.util.path.canonicalize_path(args.metadata_dir)
if not os.path.exists(metadata_dir) or not os.path.isdir(metadata_dir):
raise RuntimeError(
'the directory "{0}" does not exist'.format(args.metadata_dir)
)

file = os.path.join(metadata_dir, 'metadata.yaml')
if not os.path.exists(file):
raise RuntimeError('the file "{0}" does not exist'.format(file))

# Insert the new source as the highest priority one
write_scope = args.scope or spack.config.default_modify_scope(section='bootstrap')
sources = spack.config.get('bootstrap:sources', scope=write_scope) or []
sources = [
{'name': args.name, 'metadata': args.metadata_dir}
] + sources
spack.config.set('bootstrap:sources', sources, scope=write_scope)

msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
llnl.util.tty.msg(msg.format(args.name, write_scope))
if args.trust:
_trust(args)


def _remove(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]
if args.name not in names:
msg = ('cannot find any bootstrapping source named "{0}". '
'Run `spack bootstrap list` to see available sources.')
raise RuntimeError(msg.format(args.name))

for current_scope in spack.config.scopes():
sources = spack.config.get('bootstrap:sources', scope=current_scope) or []
if args.name in [s['name'] for s in sources]:
sources = [s for s in sources if s['name'] != args.name]
spack.config.set('bootstrap:sources', sources, scope=current_scope)
msg = ('Removed the bootstrapping source named "{0}" from the '
'"{1}" configuration scope.')
llnl.util.tty.msg(msg.format(args.name, current_scope))
trusted = spack.config.get('bootstrap:trusted', scope=current_scope) or []
if args.name in trusted:
trusted.pop(args.name)
spack.config.set('bootstrap:trusted', trusted, scope=current_scope)
msg = 'Deleting information on "{0}" from list of trusted sources'
llnl.util.tty.msg(msg.format(args.name))


def _mirror(args):
mirror_dir = spack.util.path.canonicalize_path(
os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
)

# TODO: Here we are adding gnuconfig manually, but this can be fixed
# TODO: as soon as we have an option to add to a mirror all the possible
# TODO: dependencies of a spec
root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ['gnuconfig']
for spec_str in root_specs:
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
# Suppress tty from the call below for terser messages
llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse():
spack.mirror.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True)

if args.binary_packages:
msg = 'Adding binary packages from "{0}" to the mirror at {1}'
llnl.util.tty.msg(msg.format(BINARY_TARBALL, mirror_dir))
llnl.util.tty.set_msg_enabled(False)
stage = spack.stage.Stage(BINARY_TARBALL, path=tempfile.mkdtemp())
stage.create()
stage.fetch()
stage.expand_archive()
build_cache_dir = os.path.join(stage.source_path, 'build_cache')
shutil.move(build_cache_dir, mirror_dir)
llnl.util.tty.set_msg_enabled(True)

def write_metadata(subdir, metadata):
metadata_rel_dir = os.path.join('metadata', subdir)
metadata_yaml = os.path.join(
args.root_dir, metadata_rel_dir, 'metadata.yaml'
)
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
with open(metadata_yaml, mode='w') as f:
spack.util.spack_yaml.dump(metadata, stream=f)
return os.path.dirname(metadata_yaml), metadata_rel_dir

instructions = ('\nTo register the mirror on the platform where it\'s supposed '
'to be used, move "{0}" to its final location and run the '
'following command(s):\n\n').format(args.root_dir)
cmd = ' % spack bootstrap add --trust {0} <final-path>/{1}\n'
_, rel_directory = write_metadata(subdir='sources', metadata=SOURCE_METADATA)
instructions += cmd.format('local-sources', rel_directory)
if args.binary_packages:
abs_directory, rel_directory = write_metadata(
subdir='binaries', metadata=BINARY_METADATA
)
shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
instructions += cmd.format('local-binaries', rel_directory)
print(instructions)


def bootstrap(parser, args):
callbacks = {
'status': _status,

@@ -444,9 +258,6 @@ def bootstrap(parser, args):
'root': _root,
'list': _list,
'trust': _trust,
'untrust': _untrust,
'add': _add,
'remove': _remove,
'mirror': _mirror
'untrust': _untrust
}
callbacks[args.subcommand](args)
@@ -478,12 +478,11 @@ def save_specfile_fn(args):
if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = 'json'
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)

@@ -64,11 +64,6 @@ def setup_parser(subparser):
'--dependencies', action='store_true', default=False,
help="(Experimental) disable DAG scheduling; use "
' "plain" dependencies.')
generate.add_argument(
'--buildcache-destination', default=None,
help="Override the mirror configured in the environment (spack.yaml) " +
"in order to push binaries from the generated pipeline to a " +
"different location.")
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
'--prune-dag', action='store_true', dest='prune_dag',

@@ -132,7 +127,6 @@ def ci_generate(args):
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
buildcache_destination = args.buildcache_destination

if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")

@@ -146,8 +140,7 @@ def ci_generate(args):
spack_ci.generate_gitlab_ci_yaml(
env, True, output_file, prune_dag=prune_dag,
check_index_only=index_only, run_optimizer=run_optimizer,
use_dependencies=use_dependencies, artifacts_root=artifacts_root,
remote_mirror_override=buildcache_destination)
use_dependencies=use_dependencies, artifacts_root=artifacts_root)

if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)

@@ -187,9 +180,6 @@ def ci_rebuild(args):
if not gitlab_ci:
tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))

# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.

@@ -206,7 +196,7 @@ def ci_rebuild(args):
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')

# Construct absolute paths relative to current $CI_PROJECT_DIR

@@ -254,10 +244,6 @@ def ci_rebuild(args):
tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
spack_is_pr_pipeline, spack_is_develop_pipeline))

# If no override url exists, then just push binary package to the
# normal remote mirror url.
buildcache_mirror_url = remote_mirror_override or remote_mirror_url

# Figure out what is our temporary storage mirror: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build

@@ -387,24 +373,7 @@ def ci_rebuild(args):
cfg.default_modify_scope())

# Check configured mirrors for a built spec with a matching hash
mirrors_to_check = None
if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
# Passing "mirrors_to_check" below means we *only* look in the override
# mirror to see if we should skip building, which is what we want.
mirrors_to_check = {
'override': remote_mirror_override
}

# Adding this mirror to the list of configured mirrors means dependencies
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
spack.mirror.add('mirror_override',
remote_mirror_override,
cfg.default_modify_scope())

matches = bindist.get_mirrors_for_spec(
job_spec, mirrors_to_check=mirrors_to_check, index_only=False)
matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)

if matches:
# Got a hash match on at least one configured mirror. All

@@ -548,6 +517,13 @@ def ci_rebuild(args):
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

# Create buildcache on remote mirror, either on pr-specific mirror or
# on the main mirror defined in the gitlab-enabled spack environment
if spack_is_pr_pipeline:
buildcache_mirror_url = pr_mirror_url
else:
buildcache_mirror_url = remote_mirror_url

# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure
@@ -5,7 +5,6 @@
from __future__ import print_function

import argparse
import errno
import os
import sys

@@ -94,21 +93,6 @@ def external_find(args):
# It's fine to not find any manifest file if we are doing the
# search implicitly (i.e. as part of 'spack external find')
pass
except Exception as e:
# For most exceptions, just print a warning and continue.
# Note that KeyboardInterrupt does not subclass Exception
# (so CTRL-C will terminate the program as expected).
skip_msg = ("Skipping manifest and continuing with other external "
"checks")
if ((isinstance(e, IOError) or isinstance(e, OSError)) and
e.errno in [errno.EPERM, errno.EACCES]):
# The manifest file does not have sufficient permissions enabled:
# print a warning and keep going
tty.warn("Unable to read manifest due to insufficient "
"permissions.", skip_msg)
else:
tty.warn("Unable to read manifest, unexpected error: {0}"
.format(str(e)), skip_msg)

# If the user didn't specify anything, search for build tools by default
if not args.tags and not args.all and not args.packages:

@@ -141,7 +125,7 @@ def external_find(args):

# If the list of packages is empty, search for every possible package
if not args.tags and not packages_to_check:
packages_to_check = list(spack.repo.path.all_packages())
packages_to_check = spack.repo.path.all_packages()

detected_packages = spack.detection.by_executable(
packages_to_check, path_hints=args.path)

@@ -193,10 +177,7 @@ def _collect_and_consume_cray_manifest_files(

for directory in manifest_dirs:
for fname in os.listdir(directory):
if fname.endswith('.json'):
fpath = os.path.join(directory, fname)
tty.debug("Adding manifest file: {0}".format(fpath))
manifest_files.append(os.path.join(directory, fpath))
manifest_files.append(os.path.join(directory, fname))

if not manifest_files:
raise NoManifestFileError(

@@ -204,7 +185,6 @@ def _collect_and_consume_cray_manifest_files(
.format(cray_manifest.default_path))

for path in manifest_files:
tty.debug("Reading manifest file: " + path)
try:
cray_manifest.read(path, not dry_run)
except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
@@ -15,8 +15,6 @@
import spack
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment
import spack.hash_types as ht
import spack.package
import spack.solver.asp as asp

@@ -76,51 +74,6 @@ def setup_parser(subparser):
spack.cmd.common.arguments.add_concretizer_args(subparser)


def _process_result(result, show, required_format, kwargs):
result.raise_if_unsat()
opt, _, _ = min(result.answers)
if ("opt" in show) and (not required_format):
tty.msg("Best of %d considered solutions." % result.nmodels)
tty.msg("Optimization Criteria:")

maxlen = max(len(s[2]) for s in result.criteria)
color.cprint(
"@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " ")
)

fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
color.cprint(
fmt % (
i,
name,
"-" if build_cost is None else installed_cost,
installed_cost if build_cost is None else build_cost,
)
)
print()

# dump the solutions as concretized specs
if 'solutions' in show:
for spec in result.specs:
# With -y, just print YAML to output.
if required_format == 'yaml':
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif required_format == 'json':
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
sys.stdout.write(
spec.tree(color=sys.stdout.isatty(), **kwargs))
print()

if result.unsolved_specs and "solutions" in show:
tty.msg("Unsolved specs")
for spec in result.unsolved_specs:
print(spec)
print()


def solve(parser, args):
# these are the same options as `spack spec`
name_fmt = '{namespace}.{name}' if args.namespaces else '{name}'

@@ -149,42 +102,58 @@ def solve(parser, args):
if models < 0:
tty.die("model count must be non-negative: %d" % models)

# Format required for the output (JSON, YAML or None)
required_format = args.format

# If we have an active environment, pick the specs from there
env = spack.environment.active_environment()
if env and args.specs:
msg = "cannot give explicit specs when an environment is active"
raise RuntimeError(msg)

specs = list(env.user_specs) if env else spack.cmd.parse_specs(args.specs)
specs = spack.cmd.parse_specs(args.specs)

# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
solver = asp.Solver()
output = sys.stdout if "asp" in show else None
setup_only = set(show) == {'asp'}
unify = spack.config.get('concretizer:unify')
if unify != 'when_possible':
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
models=models,
timers=args.timers,
stats=args.stats,
setup_only=setup_only
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for idx, result in enumerate(solver.solve_in_rounds(
specs, out=output, models=models, timers=args.timers, stats=args.stats
)):
if "solutions" in show:
tty.msg("ROUND {0}".format(idx))
tty.msg("")
result = solver.solve(
specs,
out=output,
models=models,
timers=args.timers,
stats=args.stats,
setup_only=(set(show) == {'asp'})
)
if 'solutions' not in show:
return

# die if no solution was found
result.raise_if_unsat()

# show the solutions as concretized specs
if 'solutions' in show:
opt, _, _ = min(result.answers)

if ("opt" in show) and (not args.format):
tty.msg("Best of %d considered solutions." % result.nmodels)
tty.msg("Optimization Criteria:")

maxlen = max(len(s[2]) for s in result.criteria)
color.cprint(
"@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " ")
)

fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
color.cprint(
fmt % (
i,
name,
"-" if build_cost is None else installed_cost,
installed_cost if build_cost is None else build_cost,
)
)
print()

for spec in result.specs:
# With -y, just print YAML to output.
if args.format == 'yaml':
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif args.format == 'json':
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
print("% END ROUND {0}\n".format(idx))
if not setup_only:
_process_result(result, show, required_format, kwargs)
sys.stdout.write(
spec.tree(color=sys.stdout.isatty(), **kwargs))
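Reviewer note: condensing the control flow the hunk above settles on, as a hedged sketch (names follow the diff; exact solver signatures are assumptions, not a verbatim API): a single `solve()` unless `concretizer:unify` is 'when_possible', in which case results stream in rounds.

    solver = asp.Solver()
    setup_only = set(show) == {'asp'}
    if spack.config.get('concretizer:unify') != 'when_possible':
        # one-shot solve
        result = solver.solve(specs, models=models, setup_only=setup_only)
        if not setup_only:
            _process_result(result, show, required_format, kwargs)
    else:
        # incremental solve: each round fixes what it can
        for idx, result in enumerate(solver.solve_in_rounds(specs, models=models)):
            if 'solutions' in show:
                tty.msg('ROUND {0}'.format(idx))
            if not setup_only:
                _process_result(result, show, required_format, kwargs)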
@@ -80,8 +80,7 @@ def spec(parser, args):
# Use command line specified specs, otherwise try to use environment specs.
if args.specs:
input_specs = spack.cmd.parse_specs(args.specs)
concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
specs = list(zip(input_specs, concretized_specs))
specs = [(s, s.concretized()) for s in input_specs]
else:
env = ev.active_environment()
if env:

@@ -27,6 +27,12 @@ def setup_parser(subparser):


def stage(parser, args):
# We temporarily modify the working directory when setting up a stage, so we need to
# convert this to an absolute path here in order for it to remain valid later.
custom_path = os.path.abspath(args.path) if args.path else None
if custom_path:
spack.stage.create_stage_root(custom_path)

if not args.specs:
env = ev.active_environment()
if env:

@@ -48,10 +54,6 @@ def stage(parser, args):

specs = spack.cmd.parse_specs(args.specs, concretize=False)

# We temporarily modify the working directory when setting up a stage, so we need to
# convert this to an absolute path here in order for it to remain valid later.
custom_path = os.path.abspath(args.path) if args.path else None

# prevent multiple specs from extracting in the same folder
if len(specs) > 1 and custom_path:
tty.die("`--path` requires a single spec, but multiple were provided")

@@ -24,7 +24,7 @@


# tutorial configuration parameters
tutorial_branch = "releases/v0.18"
tutorial_branch = "releases/v0.17"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -235,7 +235,7 @@ def is_ready(dag_hash):

# If this spec is only used as a build dependency, we can uninstall
return all(
dspec.deptypes == ("build",) or not dspec.parent.installed
dspec.deptypes == ("build",)
for dspec in record.spec.edges_from_dependents()
)
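Reviewer note: reading the removed condition above as one rule, a minimal sketch (the `record` API follows the diff; the wrapper name is illustrative): an installed spec is safe to uninstall when every incoming edge is either a pure build edge or comes from a parent that is itself not installed.

    def safe_to_uninstall(record):
        # every dependent edge must be build-only, or from an uninstalled parent
        return all(
            dspec.deptypes == ("build",) or not dspec.parent.installed
            for dspec in record.spec.edges_from_dependents()
        )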
|
@@ -252,13 +252,6 @@ def find_new_compilers(path_hints=None, scope=None):
|
||||
merged configuration.
|
||||
"""
|
||||
compilers = find_compilers(path_hints)
|
||||
return select_new_compilers(compilers, scope)
|
||||
|
||||
|
||||
def select_new_compilers(compilers, scope=None):
|
||||
"""Given a list of compilers, remove those that are already defined in
|
||||
the configuration.
|
||||
"""
|
||||
compilers_not_in_config = []
|
||||
for c in compilers:
|
||||
arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
|
||||
@@ -773,8 +766,7 @@ def name_matches(name, name_list):
|
||||
toolchains.add(compiler_cls.__name__)
|
||||
|
||||
if len(toolchains) > 1:
|
||||
if toolchains == set(['Clang', 'AppleClang', 'Aocc']) or \
|
||||
toolchains == set(['Dpcpp', 'Oneapi']):
|
||||
if toolchains == set(['Clang', 'AppleClang', 'Aocc']):
|
||||
return False
|
||||
tty.debug("[TOOLCHAINS] {0}".format(toolchains))
|
||||
return True
|
||||
|
@@ -88,7 +88,7 @@
|
||||
|
||||
#: Path to the default configuration
|
||||
configuration_defaults_path = (
|
||||
'defaults', os.path.join(spack.paths.etc_path, 'defaults')
|
||||
'defaults', os.path.join(spack.paths.etc_path, 'spack', 'defaults')
|
||||
)
|
||||
|
||||
#: Hard-coded default values for some key configuration options.
|
||||
@@ -104,13 +104,12 @@
|
||||
'build_jobs': min(16, cpus_available()),
|
||||
'build_stage': '$tempdir/spack-stage',
|
||||
'concretizer': 'clingo',
|
||||
'license_dir': spack.paths.default_license_dir,
|
||||
}
|
||||
}
|
||||
|
||||
#: metavar to use for commands that accept scopes
|
||||
#: this is shorter and more readable than listing all choices
|
||||
scopes_metavar = '{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT'
|
||||
scopes_metavar = '{defaults,system,site,user}[/PLATFORM]'
|
||||
|
||||
#: Base name for the (internal) overrides scope.
|
||||
overrides_base_name = 'overrides-'
|
||||
@@ -816,7 +815,7 @@ def _config():
|
||||
# Site configuration is per spack instance, for sites or projects
|
||||
# No site-level configs should be checked into spack by default.
|
||||
configuration_paths.append(
|
||||
('site', os.path.join(spack.paths.etc_path)),
|
||||
('site', os.path.join(spack.paths.etc_path, 'spack')),
|
||||
)
|
||||
|
||||
# User configuration can override both spack defaults and site config
|
||||
|
@@ -171,12 +171,11 @@ def strip(self):
|
||||
def paths(self):
|
||||
"""Important paths in the image"""
|
||||
Paths = collections.namedtuple('Paths', [
|
||||
'environment', 'store', 'hidden_view', 'view'
|
||||
'environment', 'store', 'view'
|
||||
])
|
||||
return Paths(
|
||||
environment='/opt/spack-environment',
|
||||
store='/opt/software',
|
||||
hidden_view='/opt/._view',
|
||||
view='/opt/view'
|
||||
)
|
||||
|
||||
|
@@ -20,7 +20,6 @@
|
||||
|
||||
compiler_name_translation = {
|
||||
'nvidia': 'nvhpc',
|
||||
'rocm': 'rocmcc',
|
||||
}
|
||||
|
||||
|
||||
@@ -39,6 +38,10 @@ def translated_compiler_name(manifest_compiler_name):
|
||||
elif manifest_compiler_name in spack.compilers.supported_compilers():
|
||||
return manifest_compiler_name
|
||||
else:
|
||||
# Try to fail quickly. This can occur in two cases: (1) the compiler
|
||||
# definition (2) a spec can specify a compiler that doesn't exist; the
|
||||
# first will be caught when creating compiler definition. The second
|
||||
# will result in Specs with associated undefined compilers.
|
||||
raise spack.compilers.UnknownCompilerError(
|
||||
"Manifest parsing - unknown compiler: {0}"
|
||||
.format(manifest_compiler_name))
|
||||
@@ -182,8 +185,6 @@ def read(path, apply_updates):
|
||||
tty.debug("{0}: {1} compilers read from manifest".format(
|
||||
path,
|
||||
str(len(compilers))))
|
||||
# Filter out the compilers that already appear in the configuration
|
||||
compilers = spack.compilers.select_new_compilers(compilers)
|
||||
if apply_updates and compilers:
|
||||
spack.compilers.add_compilers_to_config(
|
||||
compilers, init_config=False)
|
||||
|
@@ -356,10 +356,10 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
|
||||
self.prefix_fail_path = os.path.join(self._db_dir, 'prefix_failures')
|
||||
|
||||
# Create needed directories and files
|
||||
if not is_upstream and not os.path.exists(self._db_dir):
|
||||
if not os.path.exists(self._db_dir):
|
||||
fs.mkdirp(self._db_dir)
|
||||
|
||||
if not is_upstream and not os.path.exists(self._failure_dir):
|
||||
if not os.path.exists(self._failure_dir) and not is_upstream:
|
||||
fs.mkdirp(self._failure_dir)
|
||||
|
||||
self.is_upstream = is_upstream
|
||||
@@ -1064,7 +1064,9 @@ def _read(self):
|
||||
self._state_is_inconsistent = False
|
||||
return
|
||||
elif self.is_upstream:
|
||||
tty.warn('upstream not found: {0}'.format(self._index_path))
|
||||
raise UpstreamDatabaseLockingError(
|
||||
"No database index file is present, and upstream"
|
||||
" databases cannot generate an index file")
|
||||
|
||||
def _add(
|
||||
self,
|
||||
|
@@ -79,9 +79,8 @@
|
||||
env_subdir_name = '.spack-env'
|
||||
|
||||
|
||||
def default_manifest_yaml():
|
||||
"""default spack.yaml file to put in new environments"""
|
||||
return """\
|
||||
#: default spack.yaml file to put in new environments
|
||||
default_manifest_yaml = """\
|
||||
# This is a Spack Environment file.
|
||||
#
|
||||
# It describes a set of packages to be installed, along with
|
||||
@@ -90,11 +89,7 @@ def default_manifest_yaml():
|
||||
# add package specs to the `specs` list
|
||||
specs: []
|
||||
view: true
|
||||
concretizer:
|
||||
unify: {}
|
||||
""".format('true' if spack.config.get('concretizer:unify') else 'false')
|
||||
|
||||
|
||||
"""
|
||||
#: regex for validating enviroment names
|
||||
valid_environment_name_re = r'^\w[\w-]*$'
|
||||
|
||||
@@ -628,7 +623,7 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
|
||||
|
||||
# This attribute will be set properly from configuration
|
||||
# during concretization
|
||||
self.unify = None
|
||||
self.concretization = None
|
||||
self.clear()
|
||||
|
||||
if init_file:
|
||||
@@ -637,11 +632,11 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
|
||||
# the init file.
|
||||
with fs.open_if_filename(init_file) as f:
|
||||
if hasattr(f, 'name') and f.name.endswith('.lock'):
|
||||
self._read_manifest(default_manifest_yaml())
|
||||
self._read_manifest(default_manifest_yaml)
|
||||
self._read_lockfile(f)
|
||||
self._set_user_specs_from_lockfile()
|
||||
else:
|
||||
self._read_manifest(f, raw_yaml=default_manifest_yaml())
|
||||
self._read_manifest(f, raw_yaml=default_manifest_yaml)
|
||||
|
||||
# Rewrite relative develop paths when initializing a new
|
||||
# environment in a different location from the spack.yaml file.
|
||||
@@ -705,7 +700,7 @@ def _read(self):
|
||||
default_manifest = not os.path.exists(self.manifest_path)
|
||||
if default_manifest:
|
||||
# No manifest, use default yaml
|
||||
self._read_manifest(default_manifest_yaml())
|
||||
self._read_manifest(default_manifest_yaml)
|
||||
else:
|
||||
with open(self.manifest_path) as f:
|
||||
self._read_manifest(f)
|
||||
@@ -771,15 +766,8 @@ def _read_manifest(self, f, raw_yaml=None):
|
||||
self.views = {}
|
||||
# Retrieve the current concretization strategy
|
||||
configuration = config_dict(self.yaml)
|
||||
|
||||
# Let `concretization` overrule `concretize:unify` config for now,
|
||||
# but use a translation table to have internally a representation
|
||||
# as if we were using the new configuration
|
||||
translation = {'separately': False, 'together': True}
|
||||
try:
|
||||
self.unify = translation[configuration['concretization']]
|
||||
except KeyError:
|
||||
self.unify = spack.config.get('concretizer:unify', False)
|
||||
# default concretization to separately
|
||||
self.concretization = configuration.get('concretization', 'separately')
|
||||
|
||||
# Retrieve dev-build packages:
|
||||
self.dev_specs = configuration.get('develop', {})
|
||||
@@ -1160,44 +1148,14 @@ def concretize(self, force=False, tests=False):
|
||||
self.specs_by_hash = {}
|
||||
|
||||
# Pick the right concretization strategy
|
||||
if self.unify == 'when_possible':
|
||||
return self._concretize_together_where_possible(tests=tests)
|
||||
|
||||
if self.unify is True:
|
||||
if self.concretization == 'together':
|
||||
return self._concretize_together(tests=tests)
|
||||
|
||||
if self.unify is False:
|
||||
if self.concretization == 'separately':
|
||||
return self._concretize_separately(tests=tests)
|
||||
|
||||
msg = 'concretization strategy not implemented [{0}]'
|
||||
raise SpackEnvironmentError(msg.format(self.unify))
|
||||
|
||||
def _concretize_together_where_possible(self, tests=False):
|
||||
# Avoid cyclic dependency
|
||||
import spack.solver.asp
|
||||
|
||||
# Exit early if the set of concretized specs is the set of user specs
|
||||
user_specs_did_not_change = not bool(
|
||||
set(self.user_specs) - set(self.concretized_user_specs)
|
||||
)
|
||||
if user_specs_did_not_change:
|
||||
return []
|
||||
|
||||
# Proceed with concretization
|
||||
self.concretized_user_specs = []
|
||||
self.concretized_order = []
|
||||
self.specs_by_hash = {}
|
||||
|
||||
result_by_user_spec = {}
|
||||
solver = spack.solver.asp.Solver()
|
||||
for result in solver.solve_in_rounds(self.user_specs, tests=tests):
|
||||
result_by_user_spec.update(result.specs_by_input)
|
||||
|
||||
result = []
|
||||
for abstract, concrete in sorted(result_by_user_spec.items()):
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
result.append((abstract, concrete))
|
||||
return result
|
||||
raise SpackEnvironmentError(msg.format(self.concretization))
|
||||
|
||||
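Reviewer note: a condensed sketch of the dispatch in the hunk above (method names follow the diff's removed side; treating `unify` as the only switch is an assumption for clarity): the environment's unify setting selects one of three concretization strategies.

    def concretize(self, tests=False):
        if self.unify == 'when_possible':
            return self._concretize_together_where_possible(tests=tests)
        if self.unify is True:
            return self._concretize_together(tests=tests)
        if self.unify is False:
            return self._concretize_separately(tests=tests)
        msg = 'concretization strategy not implemented [{0}]'
        raise SpackEnvironmentError(msg.format(self.unify))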
def _concretize_together(self, tests=False):
|
||||
"""Concretization strategy that concretizes all the specs
|
||||
@@ -1350,7 +1308,7 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
|
||||
concrete_spec: if provided, then it is assumed that it is the
|
||||
result of concretizing the provided ``user_spec``
|
||||
"""
|
||||
if self.unify is True:
|
||||
if self.concretization == 'together':
|
||||
msg = 'cannot install a single spec in an environment that is ' \
|
||||
'configured to be concretized together. Run instead:\n\n' \
|
||||
' $ spack add <spec>\n' \
|
||||
@@ -1612,10 +1570,9 @@ def install_specs(self, specs=None, **install_args):
|
||||
# ensure specs already installed are marked explicit
|
||||
all_specs = specs or [cs for _, cs in self.concretized_specs()]
|
||||
specs_installed = [s for s in all_specs if s.installed]
|
||||
if specs_installed:
|
||||
with spack.store.db.write_transaction(): # do all in one transaction
|
||||
for spec in specs_installed:
|
||||
spack.store.db.update_explicit(spec, True)
|
||||
with spack.store.db.write_transaction(): # do all in one transaction
|
||||
for spec in specs_installed:
|
||||
spack.store.db.update_explicit(spec, True)
|
||||
|
||||
if not specs_to_install:
|
||||
tty.msg('All of the packages are already installed')
|
||||
@@ -1654,14 +1611,7 @@ def all_specs(self):
|
||||
"""Return all specs, even those a user spec would shadow."""
|
||||
all_specs = set()
|
||||
for h in self.concretized_order:
|
||||
try:
|
||||
spec = self.specs_by_hash[h]
|
||||
except KeyError:
|
||||
tty.warn(
|
||||
'Environment %s appears to be corrupt: missing spec '
|
||||
'"%s"' % (self.name, h))
|
||||
continue
|
||||
all_specs.update(spec.traverse())
|
||||
all_specs.update(self.specs_by_hash[h].traverse())
|
||||
|
||||
return sorted(all_specs)
|
||||
|
||||
@@ -1919,15 +1869,17 @@ def write(self, regenerate=True):
|
||||
regenerate (bool): regenerate views and run post-write hooks as
|
||||
well as writing if True.
|
||||
"""
|
||||
# Warn that environments are not in the latest format.
|
||||
if not is_latest_format(self.manifest_path):
|
||||
ver = '.'.join(str(s) for s in spack.spack_version_info[:2])
|
||||
msg = ('The environment "{}" is written to disk in a deprecated format. '
|
||||
'Please update it using:\n\n'
|
||||
'\tspack env update {}\n\n'
|
||||
'Note that versions of Spack older than {} may not be able to '
|
||||
# Intercept environment not using the latest schema format and prevent
|
||||
# them from being modified
|
||||
manifest_exists = os.path.exists(self.manifest_path)
|
||||
if manifest_exists and not is_latest_format(self.manifest_path):
|
||||
msg = ('The environment "{0}" needs to be written to disk, but '
|
||||
'is currently using a deprecated format. Please update it '
|
||||
'using:\n\n'
|
||||
'\tspack env update {0}\n\n'
|
||||
'Note that previous versions of Spack will not be able to '
|
||||
'use the updated configuration.')
|
||||
tty.warn(msg.format(self.name, self.name, ver))
|
||||
raise RuntimeError(msg.format(self.name))
|
||||
|
||||
# ensure path in var/spack/environments
|
||||
fs.mkdirp(self.path)
|
||||
@@ -2279,16 +2231,14 @@ def _top_level_key(data):
|
||||
|
||||
|
||||
def is_latest_format(manifest):
|
||||
"""Return False if the manifest file exists and is not in the latest schema format.
|
||||
"""Return True if the manifest file is at the latest schema format,
|
||||
False otherwise.
|
||||
|
||||
Args:
|
||||
manifest (str): manifest file to be analyzed
|
||||
"""
|
||||
try:
|
||||
with open(manifest) as f:
|
||||
data = syaml.load(f)
|
||||
except (OSError, IOError):
|
||||
return True
|
||||
with open(manifest) as f:
|
||||
data = syaml.load(f)
|
||||
top_level_key = _top_level_key(data)
|
||||
changed = spack.schema.env.update(data[top_level_key])
|
||||
return not changed
|
||||
|
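Aside: the rewritten `is_latest_format` above treats a missing manifest as current and only reports False when the schema actually needs `spack env update`. A minimal sketch of how a caller might guard writes with it (helper name and warning text are illustrative, not part of this changeset):

    import os
    import warnings

    def ensure_manifest_is_current(manifest_path):
        # Hypothetical guard: warn instead of raising when an environment
        # manifest is stored in a deprecated schema format.
        if os.path.exists(manifest_path) and not is_latest_format(manifest_path):
            warnings.warn('environment manifest %s uses a deprecated format; '
                          'run `spack env update` first' % manifest_path)
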
@@ -327,7 +327,9 @@ def fetch(self):
continue

try:
self._fetch_from_url(url)
partial_file, save_file = self._fetch_from_url(url)
if save_file and (partial_file is not None):
llnl.util.filesystem.rename(partial_file, save_file)
break
except FailedDownloadError as e:
errors.append(str(e))
@@ -377,7 +379,9 @@ def _check_headers(self, headers):

@_needs_stage
def _fetch_urllib(self, url):
save_file = self.stage.save_filename
save_file = None
if self.stage.save_filename:
save_file = self.stage.save_filename
tty.msg('Fetching {0}'.format(url))

# Run urllib but grab the mime type from the http headers
@@ -387,18 +391,16 @@ def _fetch_urllib(self, url):
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
if os.path.lexists(save_file):
if save_file and os.path.exists(save_file):
os.remove(save_file)
msg = 'urllib failed to fetch with error {0}'.format(e)
raise FailedDownloadError(url, msg)

if os.path.lexists(save_file):
os.remove(save_file)

with open(save_file, 'wb') as _open_file:
shutil.copyfileobj(response, _open_file)

self._check_headers(str(headers))
return None, save_file

@_needs_stage
def _fetch_curl(self, url):
@@ -459,7 +461,7 @@ def _fetch_curl(self, url):
if self.archive_file:
os.remove(self.archive_file)

if partial_file and os.path.lexists(partial_file):
if partial_file and os.path.exists(partial_file):
os.remove(partial_file)

if curl.returncode == 22:
@@ -486,9 +488,7 @@ def _fetch_curl(self, url):
"Curl failed with error %d" % curl.returncode)

self._check_headers(headers)

if save_file and (partial_file is not None):
os.rename(partial_file, save_file)
return partial_file, save_file

@property # type: ignore # decorated properties unsupported in mypy
@_needs_stage
@@ -642,7 +642,7 @@ def fetch(self):

# remove old symlink if one is there.
filename = self.stage.save_filename
if os.path.lexists(filename):
if os.path.exists(filename):
os.remove(filename)

# Symlink to local cached archive.

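Aside: the fetch changes above thread a `(partial_file, save_file)` pair back to the caller so a finished download can be moved into place in one step. A sketch of the underlying download-then-rename pattern, with illustrative names:

    import os
    import shutil
    import urllib.request

    def download_atomically(url, save_file):
        # Write into a temporary ".part" file first; a crash mid-transfer
        # can then never leave a truncated file at save_file.
        partial_file = save_file + '.part'
        with urllib.request.urlopen(url) as response:
            with open(partial_file, 'wb') as f:
                shutil.copyfileobj(response, f)
        # atomic when source and destination share a filesystem
        os.rename(partial_file, save_file)
        return save_file
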
@@ -493,11 +493,9 @@ def write(self, spec, color=None, out=None):

# Replace node with its dependencies
self._frontier.pop(i)
edges = sorted(
node.edges_to_dependencies(deptype=self.deptype), reverse=True
)
if edges:
deps = [e.spec.dag_hash() for e in edges]
deps = node.dependencies(deptype=self.deptype)
if deps:
deps = sorted((d.dag_hash() for d in deps), reverse=True)
self._connect_deps(i, deps, "new-deps") # anywhere.

elif self._frontier:

@@ -350,7 +350,7 @@ def _process_external_package(pkg, explicit):


def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
mirrors_for_spec=None):
preferred_mirrors=None):
"""
Process the binary cache tarball.

@@ -360,17 +360,17 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False``
mirrors_for_spec (list): Optional list of concrete specs and mirrors
obtained by calling binary_distribution.get_mirrors_for_spec().
preferred_mirrors (list): Optional list of urls to prefer when
attempting to download the tarball

Return:
bool: ``True`` if the package was extracted from binary cache,
else ``False``
"""
download_result = binary_distribution.download_tarball(
binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec)
tarball = binary_distribution.download_tarball(
binary_spec, preferred_mirrors=preferred_mirrors)
# see #10063 : install from source if tarball doesn't exist
if download_result is None:
if tarball is None:
tty.msg('{0} exists in binary cache but with different hash'
.format(pkg.name))
return False
@@ -380,9 +380,9 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,

# don't print long padded paths while extracting/relocating binaries
with spack.util.path.filter_padding():
binary_distribution.extract_tarball(binary_spec, download_result,
allow_root=False, unsigned=unsigned,
force=False)
binary_distribution.extract_tarball(
binary_spec, tarball, allow_root=False, unsigned=unsigned, force=False
)

pkg.installed_from_binary_cache = True
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
@@ -406,8 +406,12 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
if not matches:
return False

return _process_binary_cache_tarball(pkg, pkg.spec, explicit, unsigned,
mirrors_for_spec=matches)
# In the absence of guidance from user or some other reason to prefer one
# mirror over another, any match will suffice, so just pick the first one.
preferred_mirrors = [match['mirror_url'] for match in matches]
binary_spec = matches[0]['spec']
return _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
preferred_mirrors=preferred_mirrors)


def clear_failures():

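Aside: in the second variant of `_try_install_from_binary_cache` above, the match list is flattened into mirror URLs before delegating. The selection step in isolation (the dictionary keys follow the `matches` entries shown in the hunk; everything else is illustrative):

    def pick_mirrors(matches):
        # Absent other guidance any match will do: collect every matching
        # mirror's URL and take the concrete spec from the first match.
        preferred_mirrors = [match['mirror_url'] for match in matches]
        binary_spec = matches[0]['spec']
        return binary_spec, preferred_mirrors
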
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform as py_platform
import re
from subprocess import check_output

from spack.version import Version

@@ -52,17 +51,6 @@ def __init__(self):

if 'ubuntu' in distname:
version = '.'.join(version[0:2])
# openSUSE Tumbleweed is a rolling release which can change
# more than once in a week, so set version to tumbleweed$GLIBVERS
elif 'opensuse-tumbleweed' in distname or 'opensusetumbleweed' in distname:
distname = 'opensuse'
output = check_output(["ldd", "--version"]).decode()
libcvers = re.findall(r'ldd \(GNU libc\) (.*)', output)
if len(libcvers) == 1:
version = 'tumbleweed' + libcvers[0]
else:
version = 'tumbleweed' + version[0]

else:
version = version[0]

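Aside: the Tumbleweed branch above derives the platform version from the glibc release, since the rolling distribution has no stable version number of its own. The probe as a standalone sketch (the fallback value is illustrative):

    import re
    from subprocess import check_output

    def tumbleweed_version(fallback='unknown'):
        # `ldd --version` prints e.g. "ldd (GNU libc) 2.35"; reuse that
        # glibc release as the distro version for openSUSE Tumbleweed.
        output = check_output(["ldd", "--version"]).decode()
        libcvers = re.findall(r'ldd \(GNU libc\) (.*)', output)
        return 'tumbleweed' + (libcvers[0] if len(libcvers) == 1 else fallback)
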
@@ -53,7 +53,6 @@
import spack.store
import spack.url
import spack.util.environment
import spack.util.path
import spack.util.web
from spack.filesystem_view import YamlFilesystemView
from spack.install_test import TestFailure, TestSuite
@@ -61,6 +60,7 @@
from spack.stage import ResourceStage, Stage, StageComposite, stage_prefix
from spack.util.executable import ProcessError, which
from spack.util.package_hash import package_hash
from spack.util.path import win_exe_ext
from spack.util.prefix import Prefix
from spack.version import Version

@@ -200,9 +200,9 @@ def __init__(cls, name, bases, attr_dict):
def platform_executables(self):
def to_windows_exe(exe):
if exe.endswith('$'):
exe = exe.replace('$', '%s$' % spack.util.path.win_exe_ext())
exe = exe.replace('$', '%s$' % win_exe_ext())
else:
exe += spack.util.path.win_exe_ext()
exe += win_exe_ext()
return exe
plat_exe = []
if hasattr(self, 'executables'):
@@ -438,11 +438,6 @@ def name(self):
self._name = self._name[self._name.rindex('.') + 1:]
return self._name

@property
def global_license_dir(self):
"""Returns the directory where license files for all packages are stored."""
return spack.util.path.canonicalize_path(spack.config.get('config:license_dir'))


def run_before(*phases):
"""Registers a method of a package to be run before a given phase"""
@@ -943,8 +938,9 @@ def name(self):

@property
def global_license_dir(self):
"""Returns the directory where global license files are stored."""
return type(self).global_license_dir
"""Returns the directory where global license files for all
packages are stored."""
return os.path.join(spack.paths.prefix, 'etc', 'spack', 'licenses')

@property
def global_license_file(self):
@@ -1718,10 +1714,7 @@ def content_hash(self, content=None):
hash_content.append(source_id.encode('utf-8'))

# patch sha256's
# Only include these if they've been assigned by the concretizer.
# We check spec._patches_assigned instead of spec.concrete because
# we have to call package_hash *before* marking specs concrete
if self.spec._patches_assigned():
if self.spec.concrete:
hash_content.extend(
':'.join((p.sha256, str(p.level))).encode('utf-8')
for p in self.spec.patches

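Aside: `to_windows_exe` above either splices the executable suffix into a `$`-anchored regex or appends it to a bare name. A self-contained sketch, assuming the suffix is '.exe':

    def to_windows_exe(exe, ext='.exe'):
        # Anchored patterns like "cl$" become "cl.exe$"; bare names like
        # "cl" simply get the suffix appended.
        if exe.endswith('$'):
            return exe.replace('$', '%s$' % ext)
        return exe + ext

    assert to_windows_exe('cl$') == 'cl.exe$'
    assert to_windows_exe('cl') == 'cl.exe'
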
@@ -123,11 +123,11 @@ def accept(self, id):

def next_token_error(self, message):
"""Raise an error about the next token in the stream."""
raise ParseError(message, self.text[0], self.token.end)
raise ParseError(message, self.text, self.token.end)

def last_token_error(self, message):
"""Raise an error about the previous token in the stream."""
raise ParseError(message, self.text[0], self.token.start)
raise ParseError(message, self.text, self.token.start)

def unexpected_token(self):
self.next_token_error("Unexpected token: '%s'" % self.next.value)

@@ -43,12 +43,8 @@
hooks_path = os.path.join(module_path, "hooks")
opt_path = os.path.join(prefix, "opt")
share_path = os.path.join(prefix, "share", "spack")
etc_path = os.path.join(prefix, "etc", "spack")
etc_path = os.path.join(prefix, "etc")

#
# Things in $spack/etc/spack
#
default_license_dir = os.path.join(etc_path, "licenses")

#
# Things in $spack/var/spack

@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from io import BufferedReader, IOBase
from io import BufferedReader

import six
import six.moves.urllib.error as urllib_error
@@ -23,15 +23,11 @@
# https://github.com/python/cpython/pull/3249
class WrapStream(BufferedReader):
def __init__(self, raw):
# In botocore >=1.23.47, StreamingBody inherits from IOBase, so we
# only add missing attributes in older versions.
# https://github.com/boto/botocore/commit/a624815eabac50442ed7404f3c4f2664cd0aa784
if not isinstance(raw, IOBase):
raw.readable = lambda: True
raw.writable = lambda: False
raw.seekable = lambda: False
raw.closed = False
raw.flush = lambda: None
raw.readable = lambda: True
raw.writable = lambda: False
raw.seekable = lambda: False
raw.closed = False
raw.flush = lambda: None
super(WrapStream, self).__init__(raw)

def detach(self):

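Aside: the `isinstance` guard above exists because newer botocore streaming bodies already behave like `io.IOBase`, so only older ones need file-like attributes stubbed in. A sketch of the duck-typing check (whether `BufferedReader` accepts the wrapped object depends on the stream, as in the original):

    from io import BufferedReader, IOBase

    def wrap_streaming_body(raw):
        # Stub in the attributes BufferedReader expects, but only when
        # they are not already inherited from IOBase (botocore >= 1.23.47).
        if not isinstance(raw, IOBase):
            raw.readable = lambda: True
            raw.writable = lambda: False
            raw.seekable = lambda: False
            raw.closed = False
            raw.flush = lambda: None
        return BufferedReader(raw)
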
@@ -4,8 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains jsonschema files for all of Spack's YAML formats."""

import warnings

import six

import llnl.util.lang
@@ -51,12 +49,10 @@ def _deprecated_properties(validator, deprecated, instance, schema):
msg = msg_str_or_func.format(properties=deprecated_properties)
else:
msg = msg_str_or_func(instance, deprecated_properties)
if msg is None:
return

is_error = deprecated['error']
if not is_error:
warnings.warn(msg)
llnl.util.tty.warn(msg)
else:
import jsonschema
yield jsonschema.ValidationError(msg)

@@ -9,10 +9,12 @@
'type': 'object',
'properties': {
'name': {'type': 'string'},
'metadata': {'type': 'string'}
'description': {'type': 'string'},
'type': {'type': 'string'},
'info': {'type': 'object'}
},
'additionalProperties': False,
'required': ['name', 'metadata']
'required': ['name', 'description', 'type']
}

properties = {

@@ -37,6 +37,5 @@
'hash': {'type': 'string'},
},
},
'buildcache_layout_version': {'type': 'number'}
},
}

@@ -25,12 +25,6 @@
}
}
},
'unify': {
'oneOf': [
{'type': 'boolean'},
{'type': 'string', 'enum': ['when_possible']}
]
}
}
}
}

@@ -56,7 +56,6 @@
'type': 'array',
'items': {'type': 'string'}
},
'license_dir': {'type': 'string'},
'source_cache': {'type': 'string'},
'misc_cache': {'type': 'string'},
'connect_timeout': {'type': 'integer', 'minimum': 0},

@@ -26,9 +26,6 @@
"cpe-version": {"type": "string", "minLength": 1},
"system-type": {"type": "string", "minLength": 1},
"schema-version": {"type": "string", "minLength": 1},
# Older schemas did not have "cpe-version", just the
# schema version; in that case it was just called "version"
"version": {"type": "string", "minLength": 1},
}
},
"compilers": {

@@ -16,24 +16,6 @@
import spack.schema.packages
import spack.schema.projections

warned_about_concretization = False


def deprecate_concretization(instance, props):
global warned_about_concretization
if warned_about_concretization:
return None
# Deprecate `spack:concretization` in favor of `spack:concretizer:unify`.
concretization_to_unify = {'together': 'true', 'separately': 'false'}
concretization = instance['concretization']
unify = concretization_to_unify[concretization]

return (
'concretization:{} is deprecated and will be removed in Spack 0.19 in favor of '
'the new concretizer:unify:{} config option.'.format(concretization, unify)
)


#: legal first keys in the schema
keys = ('spack', 'env')

@@ -79,11 +61,6 @@ def deprecate_concretization(instance, props):
'type': 'object',
'default': {},
'additionalProperties': False,
'deprecatedProperties': {
'properties': ['concretization'],
'message': deprecate_concretization,
'error': False
},
'properties': union_dicts(
# merged configuration scope schemas
spack.schema.merged.properties,
@@ -192,33 +169,11 @@ def update(data):
Returns:
True if data was changed, False otherwise
"""
updated = False
if 'include' in data:
msg = ("included configuration files should be updated manually"
" [files={0}]")
warnings.warn(msg.format(', '.join(data['include'])))

if 'packages' in data:
updated |= spack.schema.packages.update(data['packages'])

# Spack 0.19 drops support for `spack:concretization` in favor of
# `spack:concretizer:unify`. Here we provide an upgrade path that changes the former
# into the latter, or warns when there's an ambiguity. Note that Spack 0.17 is not
# forward compatible with `spack:concretizer:unify`.
if 'concretization' in data:
has_unify = 'unify' in data.get('concretizer', {})
to_unify = {'together': True, 'separately': False}
unify = to_unify[data['concretization']]

if has_unify and data['concretizer']['unify'] != unify:
warnings.warn(
'The following configuration conflicts: '
'`spack:concretization:{}` and `spack:concretizer:unify:{}`'
'. Please update manually.'.format(
data['concretization'], data['concretizer']['unify']))
else:
data.update({'concretizer': {'unify': unify}})
data.pop('concretization')
updated = True

return updated
return spack.schema.packages.update(data['packages'])
return False

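Aside: the `update` function above rewrites a deprecated `spack:concretization` key into `spack:concretizer:unify`, refusing to guess when both keys are present and disagree. The core migration, reduced to a sketch:

    import warnings

    def migrate_concretization(data):
        # `data` is a parsed spack: section; returns True if it was changed.
        if 'concretization' not in data:
            return False
        unify = {'together': True, 'separately': False}[data['concretization']]
        existing = data.get('concretizer', {}).get('unify')
        if existing is not None and existing != unify:
            warnings.warn('conflicting concretization and concretizer:unify '
                          'settings; please update manually')
            return False
        data.setdefault('concretizer', {})['unify'] = unify
        data.pop('concretization')
        return True
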
@@ -110,7 +110,6 @@
},
},
'service-job-attributes': runner_selector_schema,
'signing-job-attributes': runner_selector_schema,
'rebuild-index': {'type': 'boolean'},
'broken-specs-url': {'type': 'string'},
},

@@ -98,20 +98,6 @@ def getter(node):
# Below numbers are used to map names of criteria to the order
# they appear in the solution. See concretize.lp

# The space of possible priorities for optimization targets
# is partitioned in the following ranges:
#
# [0-100) Optimization criteria for software being reused
# [100-200) Fixed criteria that are higher priority than reuse, but lower than build
# [200-300) Optimization criteria for software being built
# [300-1000) High-priority fixed criteria
# [1000-inf) Error conditions
#
# Each optimization target is a minimization with optimal value 0.

#: High fixed priority offset for criteria that supersede all build criteria
high_fixed_priority_offset = 300

#: Priority offset for "build" criteria (regular criteria shifted to
#: higher priority for specs we have to build)
build_priority_offset = 200
@@ -126,7 +112,6 @@ def build_criteria_names(costs, tuples):
priorities_names = []

num_fixed = 0
num_high_fixed = 0
for pred, args in tuples:
if pred != "opt_criterion":
continue
@@ -143,8 +128,6 @@ def build_criteria_names(costs, tuples):
if priority < fixed_priority_offset:
build_priority = priority + build_priority_offset
priorities_names.append((build_priority, name))
elif priority >= high_fixed_priority_offset:
num_high_fixed += 1
else:
num_fixed += 1

@@ -158,26 +141,19 @@ def build_criteria_names(costs, tuples):

# split list into three parts: build criteria, fixed criteria, non-build criteria
num_criteria = len(priorities_names)
num_build = (num_criteria - num_fixed - num_high_fixed) // 2
num_build = (num_criteria - num_fixed) // 2

build_start_idx = num_high_fixed
fixed_start_idx = num_high_fixed + num_build
installed_start_idx = num_high_fixed + num_build + num_fixed

high_fixed = priorities_names[:build_start_idx]
build = priorities_names[build_start_idx:fixed_start_idx]
fixed = priorities_names[fixed_start_idx:installed_start_idx]
installed = priorities_names[installed_start_idx:]
build = priorities_names[:num_build]
fixed = priorities_names[num_build:num_build + num_fixed]
installed = priorities_names[num_build + num_fixed:]

# mapping from priority to index in cost list
indices = dict((p, i) for i, (p, n) in enumerate(priorities_names))

# make a list that has each name with its build and non-build costs
criteria = [(cost, None, name) for cost, (p, name) in
zip(costs[:build_start_idx], high_fixed)]
criteria += [(cost, None, name) for cost, (p, name) in
zip(costs[fixed_start_idx:installed_start_idx], fixed)]

criteria = [
(costs[p - fixed_priority_offset + num_build], None, name) for p, name in fixed
]
for (i, name), (b, _) in zip(installed, build):
criteria.append((costs[indices[i]], costs[indices[b]], name))

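Aside: given the priority ranges documented above, `build_criteria_names` can slice the sorted `(priority, name)` list purely by position. The bookkeeping in isolation (a sketch; the counts come from scanning the `opt_criterion` tuples):

    def split_criteria(priorities_names, num_high_fixed, num_fixed):
        # Build criteria appear twice (once shifted for built specs), so
        # the remainder after fixed criteria splits evenly in two.
        num_build = (len(priorities_names) - num_fixed - num_high_fixed) // 2
        build_start = num_high_fixed
        fixed_start = num_high_fixed + num_build
        installed_start = num_high_fixed + num_build + num_fixed
        return (priorities_names[:build_start],
                priorities_names[build_start:fixed_start],
                priorities_names[fixed_start:installed_start],
                priorities_names[installed_start:])
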
@@ -330,9 +306,7 @@ def __init__(self, specs, asp=None):
self.abstract_specs = specs

# Concrete specs
self._concrete_specs_by_input = None
self._concrete_specs = None
self._unsolved_specs = None

def format_core(self, core):
"""
@@ -429,32 +403,15 @@ def specs(self):
"""List of concretized specs satisfying the initial
abstract request.
"""
if self._concrete_specs is None:
self._compute_specs_from_answer_set()
return self._concrete_specs
# The specs were already computed, return them
if self._concrete_specs:
return self._concrete_specs

@property
def unsolved_specs(self):
"""List of abstract input specs that were not solved."""
if self._unsolved_specs is None:
self._compute_specs_from_answer_set()
return self._unsolved_specs
# Assert prerequisite
msg = 'cannot compute specs ["satisfiable" is not True ]'
assert self.satisfiable, msg

@property
def specs_by_input(self):
if self._concrete_specs_by_input is None:
self._compute_specs_from_answer_set()
return self._concrete_specs_by_input

def _compute_specs_from_answer_set(self):
if not self.satisfiable:
self._concrete_specs = []
self._unsolved_specs = self.abstract_specs
self._concrete_specs_by_input = {}
return

self._concrete_specs, self._unsolved_specs = [], []
self._concrete_specs_by_input = {}
self._concrete_specs = []
best = min(self.answers)
opt, _, answer = best
for input_spec in self.abstract_specs:
@@ -463,13 +420,10 @@ def _compute_specs_from_answer_set(self):
providers = [spec.name for spec in answer.values()
if spec.package.provides(key)]
key = providers[0]
candidate = answer.get(key)

if candidate and candidate.satisfies(input_spec):
self._concrete_specs.append(answer[key])
self._concrete_specs_by_input[input_spec] = answer[key]
else:
self._unsolved_specs.append(input_spec)
self._concrete_specs.append(answer[key])

return self._concrete_specs

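Aside: the `specs`, `unsolved_specs`, and `specs_by_input` properties above all share one memoized pass over the best answer set. The caching idiom, reduced to a toy sketch (names are illustrative):

    class LazyResult(object):
        # Compute once on first access, then serve both the solved and
        # unsolved lists from cached attributes.
        def __init__(self, inputs, is_solved):
            self._inputs, self._is_solved = inputs, is_solved
            self._solved = self._unsolved = None

        def _compute(self):
            self._solved = [s for s in self._inputs if self._is_solved(s)]
            self._unsolved = [s for s in self._inputs if not self._is_solved(s)]

        @property
        def solved(self):
            if self._solved is None:
                self._compute()
            return self._solved

        @property
        def unsolved(self):
            if self._unsolved is None:
                self._compute()
            return self._unsolved
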
def _normalize_packages_yaml(packages_yaml):
@@ -566,7 +520,6 @@ def solve(
setup,
specs,
nmodels=0,
reuse=None,
timers=False,
stats=False,
out=None,
@@ -577,8 +530,7 @@ def solve(
Arguments:
setup (SpackSolverSetup): An object to set up the ASP problem.
specs (list): List of ``Spec`` objects to solve for.
nmodels (int): Number of models to consider (default 0 for unlimited).
reuse (None or list): list of concrete specs that can be reused
nmodels (list): Number of models to consider (default 0 for unlimited).
timers (bool): Print out coarse timers for different solve phases.
stats (bool): Whether to output Clingo's internal solver statistics.
out: Optional output stream for the generated ASP program.
@@ -602,7 +554,7 @@ def solve(
self.assumptions = []
with self.control.backend() as backend:
self.backend = backend
setup.setup(self, specs, reuse=reuse)
setup.setup(self, specs)
timer.phase("setup")

# read in the main ASP program and display logic -- these are
@@ -621,7 +573,6 @@ def visit(node):
arg = ast_sym(ast_sym(term.atom).arguments[0])
self.fact(AspFunction(name)(arg.string))

self.h1("Error messages")
path = os.path.join(parent_dir, 'concretize.lp')
parse_files([path], visit)

@@ -631,7 +582,6 @@ def visit(node):

# Load the file itself
self.control.load(os.path.join(parent_dir, 'concretize.lp'))
self.control.load(os.path.join(parent_dir, "os_facts.lp"))
self.control.load(os.path.join(parent_dir, "display.lp"))
timer.phase("load")

@@ -672,7 +622,7 @@ def stringify(x):

if result.satisfiable:
# build spec from the best model
builder = SpecBuilder(specs, reuse=reuse)
builder = SpecBuilder(specs)
min_cost, best_model = min(models)
tuples = [
(sym.name, [stringify(a) for a in sym.arguments])
@@ -704,7 +654,7 @@ def stringify(x):
class SpackSolverSetup(object):
"""Class to set up and run a Spack concretization solve."""

def __init__(self, tests=False):
def __init__(self, reuse=False, tests=False):
self.gen = None # set by setup()

self.declared_versions = {}
@@ -717,7 +667,6 @@ def __init__(self, tests=False):
self.variant_values_from_specs = set()
self.version_constraints = set()
self.target_constraints = set()
self.default_targets = []
self.compiler_version_constraints = set()
self.post_facts = []

@@ -731,10 +680,10 @@ def __init__(self, tests=False):
self.target_specs_cache = None

# whether to add installed/binary hashes to the solve
self.tests = tests
self.reuse = reuse

# If False allows for input specs that are not solved
self.concretize_everything = True
# whether to add installed/binary hashes to the solve
self.tests = tests

def pkg_version_rules(self, pkg):
"""Output declared versions of a package.
@@ -749,13 +698,7 @@ def key_fn(version):

pkg = packagize(pkg)
declared_versions = self.declared_versions[pkg.name]
partially_sorted_versions = sorted(set(declared_versions), key=key_fn)

most_to_least_preferred = []
for _, group in itertools.groupby(partially_sorted_versions, key=key_fn):
most_to_least_preferred.extend(list(sorted(
group, reverse=True, key=lambda x: spack.version.ver(x.version)
)))
most_to_least_preferred = sorted(declared_versions, key=key_fn)

for weight, declared_version in enumerate(most_to_least_preferred):
self.gen.fact(fn.version_declared(
@@ -1172,26 +1115,24 @@ def preferred_variants(self, pkg_name):
pkg_name, variant.name, value
))

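Aside: the richer `pkg_version_rules` body above sorts versions by preference and then breaks ties newest-first within each equally-preferred group. The ordering as a sketch, with plain tuples standing in for Spack's version objects:

    import itertools

    def order_versions(declared, key_fn):
        # Sort by preference key, then, inside each group of equal
        # preference, put higher versions first.
        partially_sorted = sorted(set(declared), key=key_fn)
        ordered = []
        for _, group in itertools.groupby(partially_sorted, key=key_fn):
            ordered.extend(sorted(group, reverse=True))
        return ordered

    # order_versions({(1, 0), (2, 0), (1, 5)}, key_fn=lambda v: 0)
    # -> [(2, 0), (1, 5), (1, 0)]
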
def target_preferences(self, pkg_name):
def preferred_targets(self, pkg_name):
key_fn = spack.package_prefs.PackagePrefs(pkg_name, 'target')

if not self.target_specs_cache:
self.target_specs_cache = [
spack.spec.Spec('target={0}'.format(target_name))
for _, target_name in self.default_targets
for target_name in archspec.cpu.TARGETS
]

package_targets = self.target_specs_cache[:]
package_targets.sort(key=key_fn)
target_specs = self.target_specs_cache
preferred_targets = [x for x in target_specs if key_fn(x) < 0]
if not preferred_targets:
return

offset = 0
best_default = self.default_targets[0][1]
for i, preferred in enumerate(package_targets):
if str(preferred.architecture.target) == best_default and i != 0:
offset = 100
self.gen.fact(fn.target_weight(
pkg_name, str(preferred.architecture.target), i + offset
))
preferred = preferred_targets[0]
self.gen.fact(fn.package_target_weight(
str(preferred.architecture.target), pkg_name, -30
))

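Aside: `target_preferences` above emits one weight per candidate target and pushes the globally-best default out of slot 0 whenever a package prefers something else. The offset logic, sketched with illustrative weights:

    def target_weights(package_targets, best_default):
        # Preferred targets keep their rank; if the global default shows
        # up anywhere but first, penalize it so the preference wins.
        weights, offset = [], 0
        for i, target in enumerate(package_targets):
            if target == best_default and i != 0:
                offset = 100
            weights.append((target, i + offset))
        return weights
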
def flag_defaults(self):
self.gen.h2("Compiler flag defaults")
@@ -1582,7 +1523,7 @@ def target_defaults(self, specs):
compiler.name, compiler.version, uarch.family.name
))

i = 0 # TODO compute per-target offset?
i = 0
for target in candidate_targets:
self.gen.fact(fn.target(target.name))
self.gen.fact(fn.target_family(target.name, target.family.name))
@@ -1591,15 +1532,12 @@ def target_defaults(self, specs):

# prefer best possible targets; weight others poorly so
# they're not used unless set explicitly
# these are stored to be generated as facts later offset by the
# number of preferred targets
if target.name in best_targets:
self.default_targets.append((i, target.name))
self.gen.fact(fn.default_target_weight(target.name, i))
i += 1
else:
self.default_targets.append((100, target.name))
self.gen.fact(fn.default_target_weight(target.name, 100))

self.default_targets = list(sorted(set(self.default_targets)))
self.gen.newline()

def virtual_providers(self):
@@ -1799,7 +1737,32 @@ def define_concrete_input_specs(self, specs, possible):
if spec.concrete:
self._facts_from_concrete_spec(spec, possible)

def setup(self, driver, specs, reuse=None):
def define_installed_packages(self, specs, possible):
"""Add facts about all specs already in the database.

Arguments:
possible (dict): result of Package.possible_dependencies() for
specs in this solve.
"""
# Specs from local store
with spack.store.db.read_transaction():
for spec in spack.store.db.query(installed=True):
if not spec.satisfies('dev_path=*'):
self._facts_from_concrete_spec(spec, possible)

# Specs from configured buildcaches
try:
index = spack.binary_distribution.update_cache_and_get_specs()
for spec in index:
if not spec.satisfies('dev_path=*'):
self._facts_from_concrete_spec(spec, possible)
except (spack.binary_distribution.FetchCacheError, IndexError):
# this is raised when no mirrors had indices.
# TODO: update mirror configuration so it can indicate that the source cache
# TODO: (or any mirror really) doesn't have binaries.
pass

def setup(self, driver, specs):
"""Generate an ASP program with relevant constraints for specs.

This calls methods on the solve driver to set up the problem with
@@ -1807,9 +1770,7 @@ def setup(self, driver, specs, reuse=None):
specs, as well as constraints from the specs themselves.

Arguments:
driver (PyclingoDriver): driver instance of this solve
specs (list): list of Specs to solve
reuse (None or list): list of concrete specs that can be reused
"""
self._condition_id_counter = itertools.count()

@@ -1848,11 +1809,11 @@ def setup(self, driver, specs, reuse=None):
self.gen.h1("Concrete input spec definitions")
self.define_concrete_input_specs(specs, possible)

if reuse:
self.gen.h1("Reusable specs")
if self.reuse:
self.gen.h1("Installed packages")
self.gen.fact(fn.optimize_for_reuse())
for reusable_spec in reuse:
self._facts_from_concrete_spec(reusable_spec, possible)
self.gen.newline()
self.define_installed_packages(specs, possible)

self.gen.h1('General Constraints')
self.available_compilers()
@@ -1875,22 +1836,29 @@ def setup(self, driver, specs, reuse=None):
self.pkg_rules(pkg, tests=self.tests)
self.gen.h2('Package preferences: %s' % pkg)
self.preferred_variants(pkg)
self.target_preferences(pkg)
self.preferred_targets(pkg)

# Inject dev_path from environment
env = ev.active_environment()
if env:
for name, info in env.dev_specs.items():
dev_spec = spack.spec.Spec(info['spec'])
dev_spec.constrain(
'dev_path=%s' % spack.util.path.canonicalize_path(info['path'])
)

self.condition(spack.spec.Spec(name), dev_spec,
msg="%s is a develop spec" % name)
for spec in sorted(specs):
for dep in spec.traverse():
_develop_specs_from_env(dep, env)

self.gen.h1('Spec Constraints')
self.literal_specs(specs)
for spec in sorted(specs):
self.gen.h2('Spec: %s' % str(spec))
self.gen.fact(
fn.virtual_root(spec.name) if spec.virtual
else fn.root(spec.name)
)

for clause in self.spec_clauses(spec):
self.gen.fact(clause)
if clause.name == 'variant_set':
self.gen.fact(
fn.variant_default_value_from_cli(*clause.args)
)

self.gen.h1("Variant Values defined in specs")
self.define_variant_values()
@@ -1907,47 +1875,45 @@ def setup(self, driver, specs, reuse=None):
self.gen.h1("Target Constraints")
self.define_target_constraints()

def literal_specs(self, specs):
for idx, spec in enumerate(specs):
self.gen.h2('Spec: %s' % str(spec))
self.gen.fact(fn.literal(idx))

root_fn = fn.virtual_root(spec.name) if spec.virtual else fn.root(spec.name)
self.gen.fact(fn.literal(idx, root_fn.name, *root_fn.args))
for clause in self.spec_clauses(spec):
self.gen.fact(fn.literal(idx, clause.name, *clause.args))
if clause.name == 'variant_set':
self.gen.fact(fn.literal(
idx, "variant_default_value_from_cli", *clause.args
))

if self.concretize_everything:
self.gen.fact(fn.concretize_everything())

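Aside: `literal_specs` above encodes every input spec as numbered `literal(...)` facts so that concretize.lp can decide per-input whether to solve it. The encoding, sketched with tuples in place of the real ASP function objects:

    def encode_literals(specs, spec_clauses):
        # Each input spec gets an integer ID; every clause of the spec is
        # re-emitted as literal(ID, name, args...) for the program to gate.
        facts = []
        for idx, spec in enumerate(specs):
            facts.append(('literal', idx))
            for name, args in spec_clauses(spec):
                facts.append(('literal', idx, name) + tuple(args))
        return facts
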
class SpecBuilder(object):
"""Class with actions to rebuild a spec from ASP results."""
#: Attributes that don't need actions
ignored_attributes = ["opt_criterion"]

def __init__(self, specs, reuse=None):
def __init__(self, specs):
self._specs = {}
self._result = None
self._command_line_specs = specs
self._flag_sources = collections.defaultdict(lambda: set())
self._flag_compiler_defaults = set()

# Pass in as arguments reusable specs and plug them in
# from this dictionary during reconstruction
self._hash_lookup = {}
if reuse is not None:
for spec in reuse:
for node in spec.traverse():
self._hash_lookup.setdefault(node.dag_hash(), node)

def hash(self, pkg, h):
if pkg not in self._specs:
self._specs[pkg] = self._hash_lookup[h]
try:
# try to get the candidate from the store
concrete_spec = spack.store.db.get_by_hash(h)[0]
except TypeError:
# the dag hash was not in the DB, try buildcache
s = spack.binary_distribution.binary_index.find_by_hash(h)
if s:
concrete_spec = s[0]['spec']
else:
# last attempt: maybe the hash comes from a particular input spec
# this only occurs in tests (so far)
for clspec in self._command_line_specs:
for spec in clspec.traverse():
if spec.concrete and spec.dag_hash() == h:
concrete_spec = spec

assert concrete_spec, "Unable to look up concrete spec with hash %s" % h
self._specs[pkg] = concrete_spec
else:
# TODO: remove this code -- it's dead unless we decide that node() clauses
# should come before hashes.
# ensure that if it's already there, it's correct
spec = self._specs[pkg]
assert spec.dag_hash() == h

def node(self, pkg):
if pkg not in self._specs:
@@ -2180,9 +2146,8 @@ def build_specs(self, function_tuples):
for s in self._specs.values():
_develop_specs_from_env(s, ev.active_environment())

# mark concrete and assign hashes to all specs in the solve
for root in roots.values():
root._finalize_concretization()
for s in self._specs.values():
s._mark_concrete()

for s in self._specs.values():
spack.spec.Spec.ensure_no_deprecated(s)
@@ -2217,7 +2182,7 @@ def _develop_specs_from_env(spec, env):
class Solver(object):
"""This is the main external interface class for solving.

It manages solver configuration and preferences in one place. It sets up the solve
It manages solver configuration and preferences in once place. It sets up the solve
and passes the setup method to the driver, as well.

Properties of interest:
@@ -2233,42 +2198,6 @@ def __init__(self):
# by setting them directly as properties.
self.reuse = spack.config.get("concretizer:reuse", False)

@staticmethod
def _check_input_and_extract_concrete_specs(specs):
reusable = []
for root in specs:
for s in root.traverse():
if s.virtual:
continue
if s.concrete:
reusable.append(s)
spack.spec.Spec.ensure_valid_variants(s)
return reusable

def _reusable_specs(self):
reusable_specs = []
if self.reuse:
# Specs from the local Database
with spack.store.db.read_transaction():
reusable_specs.extend([
s for s in spack.store.db.query(installed=True)
if not s.satisfies('dev_path=*')
])

# Specs from buildcaches
try:
index = spack.binary_distribution.update_cache_and_get_specs()
reusable_specs.extend([
s for s in index if not s.satisfies('dev_path=*')
])

except (spack.binary_distribution.FetchCacheError, IndexError):
# this is raised when no mirrors had indices.
# TODO: update mirror configuration so it can indicate that the
# TODO: source cache (or any mirror really) doesn't have binaries.
pass
return reusable_specs

def solve(
self,
specs,
@@ -2292,78 +2221,23 @@ def solve(
setup_only (bool): if True, stop after setup and don't solve (default False).
"""
# Check upfront that the variants are admissible
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
reusable_specs.extend(self._reusable_specs())
setup = SpackSolverSetup(tests=tests)
for root in specs:
for s in root.traverse():
if s.virtual:
continue
spack.spec.Spec.ensure_valid_variants(s)

setup = SpackSolverSetup(reuse=self.reuse, tests=tests)
return self.driver.solve(
setup,
specs,
nmodels=models,
reuse=reusable_specs,
timers=timers,
stats=stats,
out=out,
setup_only=setup_only,
)

def solve_in_rounds(
self,
specs,
out=None,
models=0,
timers=False,
stats=False,
tests=False,
):
"""Solve for a stable model of specs in multiple rounds.

This relaxes the assumption of solve that everything must be consistent and
solvable in a single round. Each round tries to maximize the reuse of specs
from previous rounds.

The function is a generator that yields the result of each round.

Arguments:
specs (list): list of Specs to solve.
models (int): number of models to search (default: 0)
out: Optionally write the generated ASP program to a file-like object.
timers (bool): print timing if set to True
stats (bool): print internal statistics if set to True
tests (bool): add test dependencies to the solve
"""
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
reusable_specs.extend(self._reusable_specs())
setup = SpackSolverSetup(tests=tests)

# Tell clingo that we don't have to solve all the inputs at once
setup.concretize_everything = False

input_specs = specs
while True:
result = self.driver.solve(
setup,
input_specs,
nmodels=models,
reuse=reusable_specs,
timers=timers,
stats=stats,
out=out,
setup_only=False
)
yield result

# If we don't have unsolved specs we are done
if not result.unsolved_specs:
break

# This means we cannot progress with solving the input
if not result.satisfiable or not result.specs:
break

input_specs = result.unsolved_specs
for spec in result.specs:
reusable_specs.extend(spec.traverse())


class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
"""

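Aside: `solve_in_rounds` above is a generator, so a caller consumes it in a loop and the leftovers feed back in automatically. The intended consumption pattern (the `solver` and `specs` objects are assumed to exist):

    # Hypothetical driver loop for the generator above.
    for result in solver.solve_in_rounds(specs):
        if not result.satisfiable:
            break  # no model at all: stop rather than loop forever
        for spec in result.specs:
            print('concretized', spec)
    # the generator stops on its own once result.unsolved_specs is empty
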
@@ -7,52 +7,6 @@
% This logic program implements Spack's concretizer
%=============================================================================

%-----------------------------------------------------------------------------
% Map literal input specs to facts that drive the solve
%-----------------------------------------------------------------------------

% Give clingo the choice to solve an input spec or not
{ literal_solved(ID) } :- literal(ID).
literal_not_solved(ID) :- not literal_solved(ID), literal(ID).

% If concretize_everything() is a fact, then we cannot have unsolved specs
:- literal_not_solved(ID), concretize_everything.

% Make a problem with "zero literals solved" unsat. This is to trigger
% looking for solutions to the ASP problem with "errors", which results
% in better reporting for users. See #30669 for details.
1 { literal_solved(ID) : literal(ID) }.

opt_criterion(300, "number of input specs not concretized").
#minimize{ 0@300: #true }.
#minimize { 1@300,ID : literal_not_solved(ID) }.

% Map constraint on the literal ID to the correct PSID
attr(Name, A1) :- literal(LiteralID, Name, A1), literal_solved(LiteralID).
attr(Name, A1, A2) :- literal(LiteralID, Name, A1, A2), literal_solved(LiteralID).
attr(Name, A1, A2, A3) :- literal(LiteralID, Name, A1, A2, A3), literal_solved(LiteralID).

% For these two atoms we only need implications in one direction
root(Package) :- attr("root", Package).
virtual_root(Package) :- attr("virtual_root", Package).

node_platform_set(Package, Platform) :- attr("node_platform_set", Package, Platform).
node_os_set(Package, OS) :- attr("node_os_set", Package, OS).
node_target_set(Package, Target) :- attr("node_target_set", Package, Target).
node_flag_set(Package, Flag, Value) :- attr("node_flag_set", Package, Flag, Value).

node_compiler_version_set(Package, Compiler, Version)
:- attr("node_compiler_version_set", Package, Compiler, Version).

variant_default_value_from_cli(Package, Variant, Value)
:- attr("variant_default_value_from_cli", Package, Variant, Value).

#defined concretize_everything/0.
#defined literal/1.
#defined literal/3.
#defined literal/4.
#defined literal/5.

%-----------------------------------------------------------------------------
% Version semantics
%-----------------------------------------------------------------------------
@@ -107,28 +61,10 @@ possible_version_weight(Package, Weight)
:- version(Package, Version),
version_declared(Package, Version, Weight).

% we can't use the weight for an external version if we don't use the
% corresponding external spec.
:- version(Package, Version),
version_weight(Package, Weight),
version_declared(Package, Version, Weight, "external"),
not external(Package).

% we can't use a weight from an installed spec if we are building it
% and vice-versa
:- version(Package, Version),
version_weight(Package, Weight),
version_declared(Package, Version, Weight, "installed"),
build(Package).

:- version(Package, Version),
version_weight(Package, Weight),
not version_declared(Package, Version, Weight, "installed"),
not build(Package).

1 { version_weight(Package, Weight) : version_declared(Package, Version, Weight) } 1
version_weight(Package, Weight)
:- version(Package, Version),
node(Package).
node(Package),
Weight = #min{W : version_declared(Package, Version, W)}.

% node_version_satisfies implies that exactly one of the satisfying versions
% is the package's version, and vice versa.
@@ -138,11 +74,6 @@ possible_version_weight(Package, Weight)
{ version(Package, Version) : version_satisfies(Package, Constraint, Version) }
:- node_version_satisfies(Package, Constraint).

% If there is at least one version that satisfies the constraint, impose a lower
% bound on the choice rule to avoid false positives with the error below
1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) }
:- node_version_satisfies(Package, Constraint), version_satisfies(Package, Constraint, _).

% More specific error message if the version cannot satisfy some constraint
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
error(1, "No valid version for '{0}' satisfies '@{1}'", Package, Constraint)
@@ -188,11 +119,6 @@ attr(Name, A1, A2, A3) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A3).
variant_value(Package, Variant, Value),
not imposed_constraint(Hash, "variant_value", Package, Variant, Value).

% we cannot have additional flag values when we are working with concrete specs
:- node(Package), hash(Package, Hash),
node_flag(Package, FlagType, Flag),
not imposed_constraint(Hash, "node_flag", Package, FlagType, Flag).

#defined condition/2.
#defined condition_requirement/3.
#defined condition_requirement/4.
@@ -220,8 +146,6 @@ depends_on(Package, Dependency) :- depends_on(Package, Dependency, _).
dependency_holds(Package, Dependency, Type) :-
dependency_condition(ID, Package, Dependency),
dependency_type(ID, Type),
build(Package),
not external(Package),
condition_holds(ID).

% We cut off dependencies of externals (as we don't really know them).
@@ -504,13 +428,13 @@ variant(Package, Variant) :- variant_condition(ID, Package, Variant),
condition_holds(ID).

% a variant cannot be set if it is not a variant on the package
error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
:- variant_set(Package, Variant),
not variant(Package, Variant),
build(Package).

% a variant cannot take on a value if it is not a variant of the package
error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
:- variant_value(Package, Variant, _),
not variant(Package, Variant),
build(Package).
@@ -737,14 +661,9 @@ node_os_mismatch(Package, Dependency) :-
% every OS is compatible with itself. We can use `os_compatible` to declare
os_compatible(OS, OS) :- os(OS).

% Transitive compatibility among operating systems
os_compatible(OS1, OS3) :- os_compatible(OS1, OS2), os_compatible(OS2, OS3).

% We can select only operating systems compatible with the ones
% for which we can build software. We need a cardinality constraint
% since we might have more than one "buildable_os(OS)" fact.
:- not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) },
node_os(Package, ReusedOS).
% OS compatibility rules for reusing solves.
% catalina binaries can be used on bigsur. Direction is package -> dependency.
os_compatible("bigsur", "catalina").

% If an OS is set explicitly respect the value
node_os(Package, OS) :- node_os_set(Package, OS), node(Package).
@@ -807,6 +726,24 @@ target_compatible(Descendent, Ancestor)
#defined target_satisfies/2.
#defined target_parent/2.

% The target weight is either the default target weight
% or a more specific per-package weight if set
target_weight(Target, Package, Weight)
:- default_target_weight(Target, Weight),
node(Package),
not derive_target_from_parent(_, Package),
not package_target_weight(Target, Package, _).

% TODO: Need to account for the case of more than one parent
% TODO: each of which sets different targets
target_weight(Target, Dependency, Weight)
:- depends_on(Package, Dependency),
derive_target_from_parent(Package, Dependency),
target_weight(Target, Package, Weight).

target_weight(Target, Package, Weight)
:- package_target_weight(Target, Package, Weight).

% can't use targets on node if the compiler for the node doesn't support them
error(2, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
:- node_target(Package, Target),
@@ -823,7 +760,11 @@ node_target(Package, Target)
node_target_weight(Package, Weight)
:- node(Package),
node_target(Package, Target),
target_weight(Package, Target, Weight).
target_weight(Target, Package, Weight).

derive_target_from_parent(Parent, Package)
:- depends_on(Parent, Package),
not package_target_weight(_, Package, _).

% compatibility rules for targets among nodes
node_target_match(Parent, Dependency)
@@ -941,9 +882,6 @@ compiler_weight(Package, 100)
not node_compiler_preference(Package, Compiler, Version, _),
not default_compiler_preference(Compiler, Version, _).

% For the time being, be strict and reuse only if the compiler matches one we have on the system
:- node_compiler_version(Package, Compiler, Version), not compiler_version(Compiler, Version).

#defined node_compiler_preference/4.
#defined default_compiler_preference/3.

@@ -1247,12 +1185,11 @@ opt_criterion(1, "non-preferred targets").
%-----------------
#heuristic version(Package, Version) : version_declared(Package, Version, 0), node(Package). [10, true]
#heuristic version_weight(Package, 0) : version_declared(Package, Version, 0), node(Package). [10, true]
#heuristic node_target(Package, Target) : package_target_weight(Target, Package, 0), node(Package). [10, true]
#heuristic node_target(Package, Target) : default_target_weight(Target, 0), node(Package). [10, true]
#heuristic node_target_weight(Package, 0) : node(Package). [10, true]
#heuristic variant_value(Package, Variant, Value) : variant_default_value(Package, Variant, Value), node(Package). [10, true]
#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]
#heuristic node(Package) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]
#heuristic node_os(Package, OS) : buildable_os(OS). [10, true]

%-----------
% Notes

@@ -1,22 +0,0 @@
% Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

% OS compatibility rules for reusing solves.
% os_compatible(RecentOS, OlderOS)
% OlderOS binaries can be used on RecentOS

% macOS
os_compatible("monterey", "bigsur").
os_compatible("bigsur", "catalina").

% Ubuntu
os_compatible("ubuntu22.04", "ubuntu21.10").
os_compatible("ubuntu21.10", "ubuntu21.04").
os_compatible("ubuntu21.04", "ubuntu20.10").
os_compatible("ubuntu20.10", "ubuntu20.04").
os_compatible("ubuntu20.04", "ubuntu19.10").
os_compatible("ubuntu19.10", "ubuntu19.04").
os_compatible("ubuntu19.04", "ubuntu18.10").
os_compatible("ubuntu18.10", "ubuntu18.04").

@@ -183,13 +183,6 @@
default_format += '{%compiler.name}{@compiler.version}{compiler_flags}'
default_format += '{variants}{arch=architecture}'

#: Regular expression to pull spec contents out of clearsigned signature
#: file.
CLEARSIGN_FILE_REGEX = re.compile(
(r"^-----BEGIN PGP SIGNED MESSAGE-----"
r"\s+Hash:\s+[^\s]+\s+(.+)-----BEGIN PGP SIGNATURE-----"),
re.MULTILINE | re.DOTALL)

#: specfile format version. Must increase monotonically
specfile_format_version = 3

@@ -1783,7 +1776,7 @@ def spec_hash(self, hash):
json_text = sjson.dump(node_dict)
return spack.util.hash.b32_hash(json_text)

def _cached_hash(self, hash, length=None, force=False):
def _cached_hash(self, hash, length=None):
"""Helper function for storing a cached hash on the spec.

This will run spec_hash() with the deptype and package_hash
@@ -1792,8 +1785,6 @@ def _cached_hash(self, hash, length=None, force=False):

Arguments:
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
length (int): length of hash prefix to return (default is full hash string)
force (bool): cache the hash even if spec is not concrete (default False)
"""
if not hash.attr:
return self.spec_hash(hash)[:length]
@@ -1803,20 +1794,13 @@ def _cached_hash(self, hash, length=None, force=False):
return hash_string[:length]
else:
hash_string = self.spec_hash(hash)
if force or self.concrete:
if self.concrete:
setattr(self, hash.attr, hash_string)

return hash_string[:length]

def package_hash(self):
"""Compute the hash of the contents of the package for this node"""
# Concrete specs with the old DAG hash did not have the package hash, so we do
# not know what the package looked like at concretization time
if self.concrete and not self._package_hash:
raise ValueError(
"Cannot call package_hash() on concrete specs with the old dag_hash()"
)

return self._cached_hash(ht.package_hash)

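Aside: `_cached_hash` above memoizes a computed hash on the spec via `setattr` on the descriptor's attribute, persisting it only once the spec is concrete (or, before this change, when forced). The idiom in isolation (names illustrative):

    def cached_hash(spec, attr, compute, concrete):
        # Serve a previously stored hash when present; otherwise compute
        # it, persisting on the object only for concrete specs.
        value = getattr(spec, attr, None)
        if value is None:
            value = compute()
            if concrete:
                setattr(spec, attr, value)
        return value
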
def dag_hash(self, length=None):
|
||||
@@ -1939,13 +1923,8 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
if hasattr(variant, '_patches_in_order_of_appearance'):
|
||||
d['patches'] = variant._patches_in_order_of_appearance
|
||||
|
||||
if self._concrete and hash.package_hash and self._package_hash:
|
||||
# We use the attribute here instead of `self.package_hash()` because this
|
||||
# should *always* be assignhed at concretization time. We don't want to try
|
||||
# to compute a package hash for concrete spec where a) the package might not
|
||||
# exist, or b) the `dag_hash` didn't include the package hash when the spec
|
||||
# was concretized.
|
||||
package_hash = self._package_hash
|
||||
if self._concrete and hash.package_hash:
|
||||
package_hash = self.package_hash()
|
||||
|
||||
# Full hashes are in bytes
|
||||
if (not isinstance(package_hash, six.text_type)
|
||||
@@ -2402,8 +2381,8 @@ def spec_and_dependency_types(s):
|
||||
def from_dict(data):
|
||||
"""Construct a spec from JSON/YAML.
|
||||
|
||||
Args:
|
||||
data: a nested dict/list data structure read from YAML or JSON.
|
||||
Parameters:
|
||||
data -- a nested dict/list data structure read from YAML or JSON.
|
||||
"""
|
||||
|
||||
return _spec_from_dict(data)
|
||||
@@ -2412,8 +2391,8 @@ def from_dict(data):
|
||||
def from_yaml(stream):
|
||||
"""Construct a spec from YAML.
|
||||
|
||||
Args:
|
||||
stream: string or file object to read from.
|
||||
Parameters:
|
||||
stream -- string or file object to read from.
|
||||
"""
|
||||
try:
|
||||
data = yaml.load(stream)
|
||||
@@ -2428,8 +2407,8 @@ def from_yaml(stream):
|
||||
def from_json(stream):
|
||||
"""Construct a spec from JSON.
|
||||
|
||||
Args:
|
||||
stream: string or file object to read from.
|
||||
Parameters:
|
||||
stream -- string or file object to read from.
|
||||
"""
|
||||
try:
|
||||
data = sjson.load(stream)
|
||||
@@ -2440,27 +2419,6 @@ def from_json(stream):
|
||||
e,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def extract_json_from_clearsig(data):
|
||||
m = CLEARSIGN_FILE_REGEX.search(data)
|
||||
if m:
|
||||
return sjson.load(m.group(1))
|
||||
return sjson.load(data)
|
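A hedged usage sketch for this helper and from_signed_json below, assuming Spack is importable; the filename is illustrative:

from spack.spec import Spec

# Rebuild a Spec from a clearsigned .spec.json.sig produced by a buildcache
# (the path here is hypothetical):
with open("archive-files-2.0-l3vdiqvbobms.spec.json.sig") as f:
    s = Spec.from_signed_json(f)
print(s.dag_hash())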
||||
|
||||
@staticmethod
|
||||
def from_signed_json(stream):
|
||||
"""Construct a spec from clearsigned json spec file.
|
||||
|
||||
Args:
|
||||
stream: string or file object to read from.
|
||||
"""
|
||||
data = stream
|
||||
if hasattr(stream, 'read'):
|
||||
data = stream.read()
|
||||
|
||||
extracted_json = Spec.extract_json_from_clearsig(data)
|
||||
return Spec.from_dict(extracted_json)
|
||||
|
||||
@staticmethod
|
||||
def from_detection(spec_str, extra_attributes=None):
|
||||
"""Construct a spec from a spec string determined during external
|
||||
@@ -2736,8 +2694,8 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
|
||||
# TODO: or turn external_path into a lazy property
|
||||
Spec.ensure_external_path_if_external(s)
|
||||
|
||||
# assign hashes and mark concrete
|
||||
self._finalize_concretization()
|
||||
# Mark everything in the spec as concrete, as well.
|
||||
self._mark_concrete()
|
||||
|
||||
# If any spec in the DAG is deprecated, throw an error
|
||||
Spec.ensure_no_deprecated(self)
|
||||
@@ -2767,21 +2725,6 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
|
||||
# there are declared inconsistencies)
|
||||
self.architecture.target.optimization_flags(self.compiler)
|
||||
|
||||
def _patches_assigned(self):
|
||||
"""Whether patches have been assigned to this spec by the concretizer."""
|
||||
# FIXME: _patches_in_order_of_appearance is attached after concretization
|
||||
# FIXME: to store the order of patches.
|
||||
# FIXME: Probably needs to be refactored in a cleaner way.
|
||||
if "patches" not in self.variants:
|
||||
return False
|
||||
|
||||
# ensure that patch state is consistent
|
||||
patch_variant = self.variants["patches"]
|
||||
assert hasattr(patch_variant, "_patches_in_order_of_appearance"), \
|
||||
"patches should always be assigned with a patch variant."
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def inject_patches_variant(root):
|
||||
# This dictionary will store object IDs rather than Specs as keys
|
||||
@@ -2916,6 +2859,7 @@ def _new_concretize(self, tests=False):
|
||||
|
||||
concretized = answer[name]
|
||||
self._dup(concretized)
|
||||
self._mark_concrete()
|
||||
|
||||
def concretize(self, tests=False):
|
||||
"""Concretize the current spec.
|
||||
@@ -2952,52 +2896,6 @@ def _mark_concrete(self, value=True):
|
||||
s.clear_cached_hashes()
|
||||
s._mark_root_concrete(value)
|
||||
|
||||
def _finalize_concretization(self):
|
||||
"""Assign hashes to this spec, and mark it concrete.
|
||||
|
||||
There are special semantics to consider for `package_hash`, because we can't
|
||||
call it on *already* concrete specs, but we need to assign it *at concretization
|
||||
time* to just-concretized specs. So, the concretizer must assign the package
|
||||
hash *before* marking their specs concrete (so that we know which specs were
|
||||
already concrete before this latest concretization).
|
||||
|
||||
`dag_hash` is also tricky, since it cannot compute `package_hash()` lazily.
|
||||
Because `package_hash` needs to be assigned *at concretization time*,
|
||||
`to_node_dict()` can't just assume that it can compute `package_hash` itself
|
||||
-- it needs to either see or not see a `_package_hash` attribute.
|
||||
|
||||
Rules of thumb for `package_hash`:
|
||||
1. Old-style concrete specs from *before* `dag_hash` included `package_hash`
|
||||
will not have a `_package_hash` attribute at all.
|
||||
2. New-style concrete specs will have a `_package_hash` assigned at
|
||||
concretization time.
|
||||
3. Abstract specs will not have a `_package_hash` attribute at all.
|
||||
|
||||
"""
|
||||
for spec in self.traverse():
|
||||
# Already concrete specs either already have a package hash (new dag_hash())
|
||||
# or they never will b/c we can't know it (old dag_hash()). Skip them.
|
||||
#
|
||||
# We only assign package hash to not-yet-concrete specs, for which we know
|
||||
# we can compute the hash.
|
||||
if not spec.concrete:
|
||||
# we need force=True here because package hash assignment has to happen
|
||||
# before we mark concrete, so that we know what was *already* concrete.
|
||||
spec._cached_hash(ht.package_hash, force=True)
|
||||
|
||||
# keep this check here to ensure package hash is saved
|
||||
assert getattr(spec, ht.package_hash.attr)
|
||||
|
||||
# Mark everything in the spec as concrete
|
||||
self._mark_concrete()
|
||||
|
||||
# Assign dag_hash (this *could* be done lazily, but it's assigned anyway in
|
||||
# ensure_no_deprecated, and it's clearer to see explicitly where it happens).
|
||||
# Any specs that were concrete before finalization will already have a cached
|
||||
# DAG hash.
|
||||
for spec in self.traverse():
|
||||
spec._cached_hash(ht.dag_hash)
|
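Condensing the rules above, the ordering this method enforces looks roughly like the following. This is illustrative only, not the real implementation; ht is assumed to be spack.hash_types.

def finalize(root):
    for spec in root.traverse():
        if not spec.concrete:
            # package hashes first, and only for newly concretized specs,
            # so old-format already-concrete specs are left untouched
            spec._cached_hash(ht.package_hash, force=True)
    root._mark_concrete()  # now every node can be marked concrete
    for spec in root.traverse():
        # dag hashes last; caching is a no-op for previously concrete specs
        spec._cached_hash(ht.dag_hash)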
||||
|
||||
def concretized(self, tests=False):
|
||||
"""This is a non-destructive version of concretize().
|
||||
|
||||
@@ -3758,12 +3656,19 @@ def patches(self):
|
||||
TODO: this only checks in the package; it doesn't resurrect old
|
||||
patches from install directories, but it probably should.
|
||||
"""
|
||||
if not self.concrete:
|
||||
raise spack.error.SpecError("Spec is not concrete: " + str(self))
|
||||
|
||||
if not hasattr(self, "_patches"):
|
||||
self._patches = []
|
||||
if 'patches' in self.variants:
|
||||
# FIXME: _patches_in_order_of_appearance is attached after
|
||||
# FIXME: concretization to store the order of patches somewhere.
|
||||
# FIXME: Needs to be refactored in a cleaner way.
|
||||
|
||||
# translate patch sha256sums to patch objects by consulting the index
|
||||
if self._patches_assigned():
|
||||
for sha256 in self.variants["patches"]._patches_in_order_of_appearance:
|
||||
# translate patch sha256sums to patch objects by consulting the index
|
||||
self._patches = []
|
||||
for sha256 in self.variants['patches']._patches_in_order_of_appearance:
|
||||
index = spack.repo.path.patch_index
|
||||
patch = index.patch_for_package(sha256, self.package)
|
||||
self._patches.append(patch)
|
||||
@@ -5402,16 +5307,6 @@ def __init__(self, parse_error):
|
||||
self.string = parse_error.string
|
||||
self.pos = parse_error.pos
|
||||
|
||||
@property
|
||||
def long_message(self):
|
||||
return "\n".join(
|
||||
[
|
||||
" Encountered when parsing spec:",
|
||||
" %s" % self.string,
|
||||
" %s^" % (" " * self.pos),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class DuplicateDependencyError(spack.error.SpecError):
|
||||
"""Raised when the same dependency occurs in a spec twice."""
|
||||
|
@@ -78,13 +78,13 @@ def config_directory(tmpdir_factory):
|
||||
tmpdir = tmpdir_factory.mktemp('test_configs')
|
||||
# restore some sane defaults for packages and config
|
||||
config_path = py.path.local(spack.paths.etc_path)
|
||||
modules_yaml = config_path.join('defaults', 'modules.yaml')
|
||||
os_modules_yaml = config_path.join('defaults', '%s' %
|
||||
modules_yaml = config_path.join('spack', 'defaults', 'modules.yaml')
|
||||
os_modules_yaml = config_path.join('spack', 'defaults', '%s' %
|
||||
platform.system().lower(),
|
||||
'modules.yaml')
|
||||
packages_yaml = config_path.join('defaults', 'packages.yaml')
|
||||
config_yaml = config_path.join('defaults', 'config.yaml')
|
||||
repos_yaml = config_path.join('defaults', 'repos.yaml')
|
||||
packages_yaml = config_path.join('spack', 'defaults', 'packages.yaml')
|
||||
config_yaml = config_path.join('spack', 'defaults', 'config.yaml')
|
||||
repos_yaml = config_path.join('spack', 'defaults', 'repos.yaml')
|
||||
tmpdir.ensure('site', dir=True)
|
||||
tmpdir.ensure('user', dir=True)
|
||||
tmpdir.ensure('site/%s' % platform.system().lower(), dir=True)
|
||||
@@ -602,25 +602,6 @@ def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config,
|
||||
uninstall_cmd('-y', '/t5mczux3tfqpxwmg7egp7axy2jvyulqk')
|
||||
|
||||
|
||||
def test_install_legacy_buildcache_layout(install_mockery_mutable_config):
|
||||
"""Legacy buildcache layout involved a nested archive structure
|
||||
where the .spack file contained a repeated spec.json and another
|
||||
compressed archive file containing the install tree. This test
|
||||
makes sure we can still read that layout."""
|
||||
legacy_layout_dir = os.path.join(test_path, 'data', 'mirrors', 'legacy_layout')
|
||||
mirror_url = "file://{0}".format(legacy_layout_dir)
|
||||
filename = ("test-debian6-core2-gcc-4.5.0-archive-files-2.0-"
|
||||
"l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json")
|
||||
spec_json_path = os.path.join(legacy_layout_dir, 'build_cache', filename)
|
||||
mirror_cmd('add', '--scope', 'site', 'test-legacy-layout', mirror_url)
|
||||
output = install_cmd(
|
||||
'--no-check-signature', '--cache-only', '-f', spec_json_path, output=str)
|
||||
mirror_cmd('rm', '--scope=site', 'test-legacy-layout')
|
||||
expect_line = ("Extracting archive-files-2.0-"
|
||||
"l3vdiqvbobmspwyb4q2b62fz6nitd4hk from binary cache")
|
||||
assert(expect_line in output)
|
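For reference, the nested layout this test reads back looks roughly like this (file names are hypothetical):

archive-files-2.0-<hash>.spack          # outer archive fetched from the mirror
├── archive-files-2.0-<hash>.spec.json  # repeated spec metadata
└── archive-files-2.0-<hash>.tar.gz     # compressed install tree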
||||
|
||||
|
||||
def test_FetchCacheError_only_accepts_lists_of_errors():
|
||||
with pytest.raises(TypeError, match="list"):
|
||||
bindist.FetchCacheError("error")
|
||||
|
@@ -62,22 +62,6 @@ def test_raising_exception_if_bootstrap_disabled(mutable_config):
|
||||
spack.bootstrap.store_path()
|
||||
|
||||
|
||||
def test_raising_exception_module_importable():
|
||||
with pytest.raises(
|
||||
ImportError,
|
||||
match='cannot bootstrap the "asdf" Python module',
|
||||
):
|
||||
spack.bootstrap.ensure_module_importable_or_raise("asdf")
|
||||
|
||||
|
||||
def test_raising_exception_executables_in_path():
|
||||
with pytest.raises(
|
||||
RuntimeError,
|
||||
match="cannot bootstrap any of the asdf, fdsa executables",
|
||||
):
|
||||
spack.bootstrap.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python")
|
||||
|
||||
|
||||
@pytest.mark.regression('25603')
|
||||
def test_bootstrap_deactivates_environments(active_mock_environment):
|
||||
assert spack.environment.active_environment() == active_mock_environment
|
||||
@@ -180,20 +164,3 @@ def test_status_function_find_files(
|
||||
|
||||
_, missing = spack.bootstrap.status_message('optional')
|
||||
assert missing is expected_missing
|
||||
|
||||
|
||||
@pytest.mark.regression('31042')
|
||||
def test_source_is_disabled(mutable_config):
|
||||
# Get the configuration dictionary of the current bootstrapping source
|
||||
conf = next(iter(spack.bootstrap.bootstrapping_sources()))
|
||||
|
||||
# The source is not explicitly enabled or disabled, so the following
|
||||
# call should raise to skip using it for bootstrapping
|
||||
with pytest.raises(ValueError):
|
||||
spack.bootstrap.source_is_enabled_or_raise(conf)
|
||||
|
||||
# Try to explicitly disable the source and verify that the behavior
|
||||
# is the same as above
|
||||
spack.config.add('bootstrap:trusted:{0}:{1}'.format(conf['name'], False))
|
||||
with pytest.raises(ValueError):
|
||||
spack.bootstrap.source_is_enabled_or_raise(conf)
|
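A sketch of the check being exercised, assuming its behavior matches what the test asserts (this is not the real implementation):

def source_is_enabled_or_raise(conf):
    trusted = spack.config.get('bootstrap:trusted') or {}
    # a missing entry and an explicit False are treated the same way
    if not trusted.get(conf['name']):
        raise ValueError('source is not trusted: {0}'.format(conf['name']))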
||||
|
@@ -10,7 +10,6 @@
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
import spack.mirror
|
||||
from spack.util.path import convert_to_posix_path
|
||||
|
||||
_bootstrap = spack.main.SpackCommand('bootstrap')
|
||||
@@ -140,82 +139,13 @@ def test_trust_or_untrust_fails_with_no_method(mutable_config):
|
||||
def test_trust_or_untrust_fails_with_more_than_one_method(mutable_config):
|
||||
wrong_config = {'sources': [
|
||||
{'name': 'github-actions',
|
||||
'metadata': '$spack/share/spack/bootstrap/github-actions'},
|
||||
'type': 'buildcache',
|
||||
'description': ''},
|
||||
{'name': 'github-actions',
|
||||
'metadata': '$spack/share/spack/bootstrap/github-actions'}],
|
||||
'type': 'buildcache',
|
||||
'description': 'Another entry'}],
|
||||
'trusted': {}
|
||||
}
|
||||
with spack.config.override('bootstrap', wrong_config):
|
||||
with pytest.raises(RuntimeError, match='more than one'):
|
||||
_bootstrap('trust', 'github-actions')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('use_existing_dir', [True, False])
|
||||
def test_add_failures_for_non_existing_files(mutable_config, tmpdir, use_existing_dir):
|
||||
metadata_dir = str(tmpdir) if use_existing_dir else '/foo/doesnotexist'
|
||||
with pytest.raises(RuntimeError, match='does not exist'):
|
||||
_bootstrap('add', 'mock-mirror', metadata_dir)
|
||||
|
||||
|
||||
def test_add_failures_for_already_existing_name(mutable_config):
|
||||
with pytest.raises(RuntimeError, match='already exist'):
|
||||
_bootstrap('add', 'github-actions', 'some-place')
|
||||
|
||||
|
||||
def test_remove_failure_for_non_existing_names(mutable_config):
|
||||
with pytest.raises(RuntimeError, match='cannot find'):
|
||||
_bootstrap('remove', 'mock-mirror')
|
||||
|
||||
|
||||
def test_remove_and_add_a_source(mutable_config):
|
||||
# Check we start with a single bootstrapping source
|
||||
sources = spack.bootstrap.bootstrapping_sources()
|
||||
assert len(sources) == 1
|
||||
|
||||
# Remove it and check the result
|
||||
_bootstrap('remove', 'github-actions')
|
||||
sources = spack.bootstrap.bootstrapping_sources()
|
||||
assert not sources
|
||||
|
||||
# Add it back and check we restored the initial state
|
||||
_bootstrap(
|
||||
'add', 'github-actions', '$spack/share/spack/bootstrap/github-actions-v0.2'
|
||||
)
|
||||
sources = spack.bootstrap.bootstrapping_sources()
|
||||
assert len(sources) == 1
|
||||
|
||||
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.skipif(sys.platform == 'win32', reason="Not supported on Windows (yet)")
|
||||
def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir):
|
||||
"""Test that `spack bootstrap mirror` creates a folder that can be ingested by
|
||||
`spack bootstrap add`. Here we don't download data, since that would be an
|
||||
expensive operation for a unit test.
|
||||
"""
|
||||
old_create = spack.mirror.create
|
||||
monkeypatch.setattr(spack.mirror, 'create', lambda p, s: old_create(p, []))
|
||||
|
||||
# Create the mirror in a temporary folder
|
||||
compilers = [{
|
||||
'compiler': {
|
||||
'spec': 'gcc@12.0.1',
|
||||
'operating_system': '{0.name}{0.version}'.format(linux_os),
|
||||
'modules': [],
|
||||
'paths': {
|
||||
'cc': '/usr/bin',
|
||||
'cxx': '/usr/bin',
|
||||
'fc': '/usr/bin',
|
||||
'f77': '/usr/bin'
|
||||
}
|
||||
}
|
||||
}]
|
||||
with spack.config.override('compilers', compilers):
|
||||
_bootstrap('mirror', str(tmpdir))
|
||||
|
||||
# Register the mirror
|
||||
metadata_dir = tmpdir.join('metadata', 'sources')
|
||||
_bootstrap('add', '--trust', 'test-mirror', str(metadata_dir))
|
||||
|
||||
assert _bootstrap.returncode == 0
|
||||
assert any(m['name'] == 'test-mirror'
|
||||
for m in spack.bootstrap.bootstrapping_sources())
|
||||
|
@@ -25,6 +25,7 @@
|
||||
import spack.paths as spack_paths
|
||||
import spack.repo as repo
|
||||
import spack.util.gpg
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
from spack.schema.buildcache_spec import schema as specfile_schema
|
||||
@@ -635,6 +636,10 @@ def test_ci_generate_for_pr_pipeline(tmpdir, mutable_mock_env_path,
|
||||
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
|
||||
|
||||
with ev.read('test'):
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
|
||||
ci_cmd('generate', '--output-file', outputfile)
|
||||
|
||||
with open(outputfile) as f:
|
||||
@@ -679,6 +684,10 @@ def test_ci_generate_with_external_pkg(tmpdir, mutable_mock_env_path,
|
||||
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
|
||||
|
||||
with ev.read('test'):
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
|
||||
ci_cmd('generate', '--output-file', outputfile)
|
||||
|
||||
with open(outputfile) as f:
|
||||
@@ -912,82 +921,11 @@ def fake_dl_method(spec, *args, **kwargs):
|
||||
env_cmd('deactivate')
|
||||
|
||||
|
||||
def test_ci_generate_mirror_override(tmpdir, mutable_mock_env_path,
|
||||
install_mockery_mutable_config, mock_packages,
|
||||
mock_fetch, mock_stage, mock_binary_index,
|
||||
ci_base_environment):
|
||||
"""Ensure that protected pipelines using --buildcache-destination do not
|
||||
skip building specs that are not in the override mirror when they are
|
||||
found in the main mirror."""
|
||||
os.environ.update({
|
||||
'SPACK_PIPELINE_TYPE': 'spack_protected_branch',
|
||||
})
|
||||
|
||||
working_dir = tmpdir.join('working_dir')
|
||||
|
||||
mirror_dir = working_dir.join('mirror')
|
||||
mirror_url = 'file://{0}'.format(mirror_dir.strpath)
|
||||
|
||||
spack_yaml_contents = """
|
||||
spack:
|
||||
definitions:
|
||||
- packages: [patchelf]
|
||||
specs:
|
||||
- $packages
|
||||
mirrors:
|
||||
test-mirror: {0}
|
||||
gitlab-ci:
|
||||
mappings:
|
||||
- match:
|
||||
- patchelf
|
||||
runner-attributes:
|
||||
tags:
|
||||
- donotcare
|
||||
image: donotcare
|
||||
service-job-attributes:
|
||||
tags:
|
||||
- nonbuildtag
|
||||
image: basicimage
|
||||
""".format(mirror_url)
|
||||
|
||||
filename = str(tmpdir.join('spack.yaml'))
|
||||
with open(filename, 'w') as f:
|
||||
f.write(spack_yaml_contents)
|
||||
|
||||
with tmpdir.as_cwd():
|
||||
env_cmd('create', 'test', './spack.yaml')
|
||||
first_ci_yaml = str(tmpdir.join('.gitlab-ci-1.yml'))
|
||||
second_ci_yaml = str(tmpdir.join('.gitlab-ci-2.yml'))
|
||||
with ev.read('test'):
|
||||
install_cmd()
|
||||
buildcache_cmd('create', '-u', '--mirror-url', mirror_url, 'patchelf')
|
||||
buildcache_cmd('update-index', '--mirror-url', mirror_url, output=str)
|
||||
|
||||
# This generate should not trigger a rebuild of patchelf, since it's in
|
||||
# the main mirror referenced in the environment.
|
||||
ci_cmd('generate', '--check-index-only', '--output-file', first_ci_yaml)
|
||||
|
||||
# Because we used a mirror override (--buildcache-destination) on a
|
||||
# spack protected pipeline, we expect to only look in the override
|
||||
# mirror for the spec, and thus the patchelf job should be generated in
|
||||
# this pipeline
|
||||
ci_cmd('generate', '--check-index-only', '--output-file', second_ci_yaml,
|
||||
'--buildcache-destination', 'file:///mirror/not/exist')
|
||||
|
||||
with open(first_ci_yaml) as fd1:
|
||||
first_yaml = fd1.read()
|
||||
assert 'no-specs-to-rebuild' in first_yaml
|
||||
|
||||
with open(second_ci_yaml) as fd2:
|
||||
second_yaml = fd2.read()
|
||||
assert 'no-specs-to-rebuild' not in second_yaml
|
||||
|
||||
|
||||
@pytest.mark.disable_clean_stage_check
|
||||
def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
|
||||
install_mockery_mutable_config, mock_packages,
|
||||
mock_fetch, mock_stage, mock_gnupghome,
|
||||
ci_base_environment, mock_binary_index):
|
||||
ci_base_environment):
|
||||
working_dir = tmpdir.join('working_dir')
|
||||
|
||||
mirror_dir = working_dir.join('mirror')
|
||||
@@ -1100,10 +1038,10 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
|
||||
# Also test buildcache_spec schema
|
||||
bc_files_list = os.listdir(buildcache_path)
|
||||
for file_name in bc_files_list:
|
||||
if file_name.endswith('.spec.json.sig'):
|
||||
if file_name.endswith('.spec.json'):
|
||||
spec_json_path = os.path.join(buildcache_path, file_name)
|
||||
with open(spec_json_path) as json_fd:
|
||||
json_object = Spec.extract_json_from_clearsig(json_fd.read())
|
||||
json_object = sjson.load(json_fd)
|
||||
validate(json_object, specfile_schema)
|
||||
|
||||
logs_dir = working_dir.join('logs_dir')
|
||||
@@ -1214,6 +1152,10 @@ def test_ci_generate_override_runner_attrs(tmpdir, mutable_mock_env_path,
|
||||
with ev.read('test'):
|
||||
monkeypatch.setattr(
|
||||
spack.main, 'get_version', lambda: '0.15.3-416-12ad69eb1')
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
|
||||
ci_cmd('generate', '--output-file', outputfile)
|
||||
|
||||
with open(outputfile) as f:
|
||||
@@ -1315,6 +1257,10 @@ def test_ci_generate_with_workarounds(tmpdir, mutable_mock_env_path,
|
||||
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
|
||||
|
||||
with ev.read('test'):
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
|
||||
ci_cmd('generate', '--output-file', outputfile, '--dependencies')
|
||||
|
||||
with open(outputfile) as f:
|
||||
@@ -1472,6 +1418,11 @@ def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None,
|
||||
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
|
||||
|
||||
with ev.read('test'):
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
|
||||
|
||||
ci_cmd('generate', '--output-file', outputfile)
|
||||
|
||||
with open(outputfile) as of:
|
||||
@@ -1680,6 +1631,11 @@ def test_ci_generate_temp_storage_url(tmpdir, mutable_mock_env_path,
|
||||
env_cmd('create', 'test', './spack.yaml')
|
||||
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
|
||||
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
|
||||
monkeypatch.setattr(
|
||||
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
|
||||
|
||||
with ev.read('test'):
|
||||
ci_cmd('generate', '--output-file', outputfile)
|
||||
|
||||
@@ -1760,64 +1716,6 @@ def test_ci_generate_read_broken_specs_url(tmpdir, mutable_mock_env_path,
|
||||
assert(ex not in output)
|
||||
|
||||
|
||||
def test_ci_generate_external_signing_job(tmpdir, mutable_mock_env_path,
|
||||
install_mockery,
|
||||
mock_packages, monkeypatch,
|
||||
ci_base_environment):
|
||||
"""Verify that in external signing mode: 1) each rebuild jobs includes
|
||||
the location where the binary hash information is written and 2) we
|
||||
properly generate a final signing job in the pipeline."""
|
||||
os.environ.update({
|
||||
'SPACK_PIPELINE_TYPE': 'spack_protected_branch'
|
||||
})
|
||||
filename = str(tmpdir.join('spack.yaml'))
|
||||
with open(filename, 'w') as f:
|
||||
f.write("""\
|
||||
spack:
|
||||
specs:
|
||||
- archive-files
|
||||
mirrors:
|
||||
some-mirror: https://my.fake.mirror
|
||||
gitlab-ci:
|
||||
temporary-storage-url-prefix: file:///work/temp/mirror
|
||||
mappings:
|
||||
- match:
|
||||
- archive-files
|
||||
runner-attributes:
|
||||
tags:
|
||||
- donotcare
|
||||
image: donotcare
|
||||
signing-job-attributes:
|
||||
tags:
|
||||
- nonbuildtag
|
||||
- secretrunner
|
||||
image:
|
||||
name: customdockerimage
|
||||
entrypoint: []
|
||||
variables:
|
||||
IMPORTANT_INFO: avalue
|
||||
script:
|
||||
- echo hello
|
||||
""")
|
||||
|
||||
with tmpdir.as_cwd():
|
||||
env_cmd('create', 'test', './spack.yaml')
|
||||
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
|
||||
|
||||
with ev.read('test'):
|
||||
ci_cmd('generate', '--output-file', outputfile)
|
||||
|
||||
with open(outputfile) as of:
|
||||
pipeline_doc = syaml.load(of.read())
|
||||
|
||||
assert 'sign-pkgs' in pipeline_doc
|
||||
signing_job = pipeline_doc['sign-pkgs']
|
||||
assert 'tags' in signing_job
|
||||
signing_job_tags = signing_job['tags']
|
||||
for expected_tag in ['notary', 'protected', 'aws']:
|
||||
assert expected_tag in signing_job_tags
|
||||
|
||||
|
||||
def test_ci_reproduce(tmpdir, mutable_mock_env_path,
|
||||
install_mockery, mock_packages, monkeypatch,
|
||||
last_two_git_commits, ci_base_environment, mock_binary_index):
|
||||
|
@@ -45,22 +45,21 @@ def test_negative_integers_not_allowed_for_parallel_jobs(job_parser):
|
||||
assert 'expected a positive integer' in str(exc_info.value)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('specs,cflags,negated_variants', [
|
||||
(['coreutils cflags="-O3 -g"'], ['-O3', '-g'], []),
|
||||
(['coreutils', 'cflags=-O3 -g'], ['-O3'], ['g']),
|
||||
(['coreutils', 'cflags=-O3', '-g'], ['-O3'], ['g']),
|
||||
@pytest.mark.parametrize('specs,expected_variants,unexpected_variants', [
|
||||
(['coreutils', 'cflags=-O3 -g'], [], ['g']),
|
||||
(['coreutils', 'cflags=-O3', '-g'], ['g'], []),
|
||||
])
|
||||
@pytest.mark.regression('12951')
|
||||
def test_parse_spec_flags_with_spaces(specs, cflags, negated_variants):
|
||||
def test_parse_spec_flags_with_spaces(
|
||||
specs, expected_variants, unexpected_variants
|
||||
):
|
||||
spec_list = spack.cmd.parse_specs(specs)
|
||||
assert len(spec_list) == 1
|
||||
|
||||
s = spec_list.pop()
|
||||
|
||||
assert s.compiler_flags['cflags'] == cflags
|
||||
assert list(s.variants.keys()) == negated_variants
|
||||
for v in negated_variants:
|
||||
assert '~{0}'.format(v) in s
|
||||
assert all(x not in s.variants for x in unexpected_variants)
|
||||
assert all(x in s.variants for x in expected_variants)
|
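Spelled out, the behaviors the new parametrization pins down (hedged; running this requires an importable Spack with the mock packages):

import spack.cmd

# A quoted value keeps both tokens together as compiler flags:
(s,) = spack.cmd.parse_specs(['coreutils cflags="-O3 -g"'])
assert s.compiler_flags['cflags'] == ['-O3', '-g']

# An unquoted trailing '-g' is parsed as a variant named 'g' instead:
(s,) = spack.cmd.parse_specs(['coreutils', 'cflags=-O3', '-g'])
assert 'g' in s.variants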
||||
|
||||
|
||||
@pytest.mark.usefixtures('config')
|
||||
|
@@ -18,40 +18,37 @@
|
||||
concretize = SpackCommand('concretize')
|
||||
|
||||
|
||||
unification_strategies = [False, True, 'when_possible']
|
||||
|
||||
|
||||
@pytest.mark.parametrize('unify', unification_strategies)
|
||||
def test_concretize_all_test_dependencies(unify):
|
||||
@pytest.mark.parametrize('concretization', ['separately', 'together'])
|
||||
def test_concretize_all_test_dependencies(concretization):
|
||||
"""Check all test dependencies are concretized."""
|
||||
env('create', 'test')
|
||||
|
||||
with ev.read('test') as e:
|
||||
e.unify = unify
|
||||
e.concretization = concretization
|
||||
add('depb')
|
||||
concretize('--test', 'all')
|
||||
assert e.matching_spec('test-dependency')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('unify', unification_strategies)
|
||||
def test_concretize_root_test_dependencies_not_recursive(unify):
|
||||
@pytest.mark.parametrize('concretization', ['separately', 'together'])
|
||||
def test_concretize_root_test_dependencies_not_recursive(concretization):
|
||||
"""Check that test dependencies are not concretized recursively."""
|
||||
env('create', 'test')
|
||||
|
||||
with ev.read('test') as e:
|
||||
e.unify = unify
|
||||
e.concretization = concretization
|
||||
add('depb')
|
||||
concretize('--test', 'root')
|
||||
assert e.matching_spec('test-dependency') is None
|
||||
|
||||
|
||||
@pytest.mark.parametrize('unify', unification_strategies)
|
||||
def test_concretize_root_test_dependencies_are_concretized(unify):
|
||||
@pytest.mark.parametrize('concretization', ['separately', 'together'])
|
||||
def test_concretize_root_test_dependencies_are_concretized(concretization):
|
||||
"""Check that root test dependencies are concretized."""
|
||||
env('create', 'test')
|
||||
|
||||
with ev.read('test') as e:
|
||||
e.unify = unify
|
||||
e.concretization = concretization
|
||||
add('a')
|
||||
add('b')
|
||||
concretize('--test', 'root')
|
||||
|
@@ -486,7 +486,7 @@ def test_config_remove_from_env(mutable_empty_config, mutable_mock_env_path):
|
||||
config('rm', 'config:dirty')
|
||||
output = config('get')
|
||||
|
||||
expected = ev.default_manifest_yaml()
|
||||
expected = ev.default_manifest_yaml
|
||||
expected += """ config: {}
|
||||
|
||||
"""
|
||||
|
@@ -2197,7 +2197,7 @@ def test_env_activate_default_view_root_unconditional(mutable_mock_env_path):
|
||||
|
||||
def test_concretize_user_specs_together():
|
||||
e = ev.create('coconcretization')
|
||||
e.unify = True
|
||||
e.concretization = 'together'
|
||||
|
||||
# Concretize a first time using 'mpich' as the MPI provider
|
||||
e.add('mpileaks')
|
||||
@@ -2225,7 +2225,7 @@ def test_concretize_user_specs_together():
|
||||
|
||||
def test_cant_install_single_spec_when_concretizing_together():
|
||||
e = ev.create('coconcretization')
|
||||
e.unify = True
|
||||
e.concretization = 'together'
|
||||
|
||||
with pytest.raises(ev.SpackEnvironmentError, match=r'cannot install'):
|
||||
e.concretize_and_add('zlib')
|
||||
@@ -2234,7 +2234,7 @@ def test_cant_install_single_spec_when_concretizing_together():
|
||||
|
||||
def test_duplicate_packages_raise_when_concretizing_together():
|
||||
e = ev.create('coconcretization')
|
||||
e.unify = True
|
||||
e.concretization = 'together'
|
||||
|
||||
e.add('mpileaks+opt')
|
||||
e.add('mpileaks~opt')
|
||||
@@ -2251,7 +2251,7 @@ def test_env_write_only_non_default():
|
||||
with open(e.manifest_path, 'r') as f:
|
||||
yaml = f.read()
|
||||
|
||||
assert yaml == ev.default_manifest_yaml()
|
||||
assert yaml == ev.default_manifest_yaml
|
||||
|
||||
|
||||
@pytest.mark.regression('20526')
|
||||
@@ -2358,26 +2358,6 @@ def test_old_format_cant_be_updated_implicitly(packages_yaml_v015):
|
||||
add('hdf5')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('concretization,unify', [
|
||||
('together', 'true'),
|
||||
('separately', 'false')
|
||||
])
|
||||
def test_update_concretization_to_concretizer_unify(concretization, unify, tmpdir):
|
||||
spack_yaml = """\
|
||||
spack:
|
||||
concretization: {}
|
||||
""".format(concretization)
|
||||
tmpdir.join('spack.yaml').write(spack_yaml)
|
||||
# Update the environment
|
||||
env('update', '-y', str(tmpdir))
|
||||
with open(str(tmpdir.join('spack.yaml'))) as f:
|
||||
assert f.read() == """\
|
||||
spack:
|
||||
concretizer:
|
||||
unify: {}
|
||||
""".format(unify)
|
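In short, spack env update rewrites the legacy key into the new one. A hedged summary of the mapping the test checks:

# legacy manifest                    rewritten manifest
# spack:                             spack:
#   concretization: together    ->     concretizer:
#                                         unify: true
#   concretization: separately  ->     concretizer:
#                                         unify: false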
||||
|
||||
|
||||
@pytest.mark.regression('18147')
|
||||
def test_can_update_attributes_with_override(tmpdir):
|
||||
spack_yaml = """
|
||||
@@ -2411,8 +2391,7 @@ def test_newline_in_commented_sequence_is_not_an_issue(tmpdir):
|
||||
modules:
|
||||
- libelf/3.18.1
|
||||
|
||||
concretizer:
|
||||
unify: false
|
||||
concretization: together
|
||||
"""
|
||||
abspath = tmpdir.join('spack.yaml')
|
||||
abspath.write(spack_yaml)
|
||||
@@ -2556,7 +2535,7 @@ def test_custom_version_concretize_together(tmpdir):
|
||||
# Custom versions should be permitted in specs when
|
||||
# concretizing together
|
||||
e = ev.create('custom_version')
|
||||
e.unify = True
|
||||
e.concretization = 'together'
|
||||
|
||||
# Concretize a first time using 'mpich' as the MPI provider
|
||||
e.add('hdf5@myversion')
|
||||
@@ -2647,7 +2626,7 @@ def test_multiple_modules_post_env_hook(tmpdir, install_mockery, mock_fetch):
|
||||
def test_virtual_spec_concretize_together(tmpdir):
|
||||
# An environment should permit concretizing "mpi"
|
||||
e = ev.create('virtual_spec')
|
||||
e.unify = True
|
||||
e.concretization = 'together'
|
||||
|
||||
e.add('mpi')
|
||||
e.concretize()
|
||||
@@ -2989,19 +2968,3 @@ def test_environment_depfile_out(tmpdir, mock_packages):
|
||||
stdout = env('depfile', '-G', 'make')
|
||||
with open(makefile_path, 'r') as f:
|
||||
assert stdout == f.read()
|
||||
|
||||
|
||||
def test_unify_when_possible_works_around_conflicts():
|
||||
e = ev.create('coconcretization')
|
||||
e.unify = 'when_possible'
|
||||
|
||||
e.add('mpileaks+opt')
|
||||
e.add('mpileaks~opt')
|
||||
e.add('mpich')
|
||||
|
||||
e.concretize()
|
||||
|
||||
assert len([x for x in e.all_specs() if x.satisfies('mpileaks')]) == 2
|
||||
assert len([x for x in e.all_specs() if x.satisfies('mpileaks+opt')]) == 1
|
||||
assert len([x for x in e.all_specs() if x.satisfies('mpileaks~opt')]) == 1
|
||||
assert len([x for x in e.all_specs() if x.satisfies('mpich')]) == 1
|
||||
|
@@ -8,8 +8,6 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.util.filesystem import getuid, touch
|
||||
|
||||
import spack
|
||||
import spack.detection
|
||||
import spack.detection.path
|
||||
@@ -34,7 +32,7 @@ def _platform_executables(monkeypatch):
|
||||
def _win_exe_ext():
|
||||
return '.bat'
|
||||
|
||||
monkeypatch.setattr(spack.util.path, 'win_exe_ext', _win_exe_ext)
|
||||
monkeypatch.setattr(spack.package, 'win_exe_ext', _win_exe_ext)
|
||||
|
||||
|
||||
def define_plat_exe(exe):
|
||||
@@ -196,53 +194,6 @@ def test_find_external_empty_default_manifest_dir(
|
||||
external('find')
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == 'win32',
|
||||
reason="Can't chmod on Windows")
|
||||
@pytest.mark.skipif(getuid() == 0, reason='user is root')
|
||||
def test_find_external_manifest_with_bad_permissions(
|
||||
mutable_config, working_env, mock_executable, mutable_mock_repo,
|
||||
_platform_executables, tmpdir, monkeypatch):
|
||||
"""The user runs 'spack external find'; the default path for storing
|
||||
manifest files exists but with insufficient permissions. Check that
|
||||
the command does not fail.
|
||||
"""
|
||||
test_manifest_dir = str(tmpdir.mkdir('manifest_dir'))
|
||||
test_manifest_file_path = os.path.join(test_manifest_dir, 'badperms.json')
|
||||
touch(test_manifest_file_path)
|
||||
monkeypatch.setenv('PATH', '')
|
||||
monkeypatch.setattr(spack.cray_manifest, 'default_path',
|
||||
test_manifest_dir)
|
||||
try:
|
||||
os.chmod(test_manifest_file_path, 0)
|
||||
output = external('find')
|
||||
assert 'insufficient permissions' in output
|
||||
assert 'Skipping manifest and continuing' in output
|
||||
finally:
|
||||
os.chmod(test_manifest_file_path, 0o700)
|
||||
|
||||
|
||||
def test_find_external_manifest_failure(
|
||||
mutable_config, mutable_mock_repo, tmpdir, monkeypatch):
|
||||
"""The user runs 'spack external find'; the manifest parsing fails with
|
||||
some exception. Ensure that the command still succeeds (i.e. moves on
|
||||
to other external detection mechanisms).
|
||||
"""
|
||||
# First, create an empty manifest file (without a file to read, the
|
||||
# manifest parsing is skipped)
|
||||
test_manifest_dir = str(tmpdir.mkdir('manifest_dir'))
|
||||
test_manifest_file_path = os.path.join(test_manifest_dir, 'test.json')
|
||||
touch(test_manifest_file_path)
|
||||
|
||||
def fail():
|
||||
raise Exception()
|
||||
|
||||
monkeypatch.setattr(
|
||||
spack.cmd.external, '_collect_and_consume_cray_manifest_files', fail)
|
||||
monkeypatch.setenv('PATH', '')
|
||||
output = external('find')
|
||||
assert 'Skipping manifest and continuing' in output
|
||||
|
||||
|
||||
def test_find_external_nonempty_default_manifest_dir(
|
||||
mutable_database, mutable_mock_repo,
|
||||
_platform_executables, tmpdir, monkeypatch,
|
||||
|
@@ -4,12 +4,10 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import re
|
||||
from textwrap import dedent
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.spec
|
||||
import spack.store
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
@@ -57,54 +55,6 @@ def test_spec_concretizer_args(mutable_config, mutable_database):
|
||||
assert h in output
|
||||
|
||||
|
||||
def test_spec_parse_dependency_variant_value():
|
||||
"""Verify that we can provide multiple key=value variants to multiple separate
|
||||
packages within a spec string."""
|
||||
output = spec('multivalue-variant fee=barbaz ^ a foobar=baz')
|
||||
|
||||
assert 'fee=barbaz' in output
|
||||
assert 'foobar=baz' in output
|
||||
|
||||
|
||||
def test_spec_parse_cflags_quoting():
|
||||
"""Verify that compiler flags can be provided to a spec from the command line."""
|
||||
output = spec('--yaml', 'gcc cflags="-Os -pipe" cxxflags="-flto -Os"')
|
||||
gh_flagged = spack.spec.Spec.from_yaml(output)
|
||||
|
||||
assert ['-Os', '-pipe'] == gh_flagged.compiler_flags['cflags']
|
||||
assert ['-flto', '-Os'] == gh_flagged.compiler_flags['cxxflags']
|
||||
|
||||
|
||||
def test_spec_parse_unquoted_flags_report():
|
||||
"""Verify that a useful error message is produced if unquoted compiler flags are
|
||||
provided."""
|
||||
# This should fail during parsing, since /usr/include is interpreted as a spec hash.
|
||||
with pytest.raises(spack.error.SpackError) as cm:
|
||||
# We don't try to figure out how many following args were intended to be part of
|
||||
# cflags; we just explain how to fix it for the immediate next arg.
|
||||
spec('gcc cflags=-Os -pipe -other-arg-that-gets-ignored cflags=-I /usr/include')
|
||||
# Verify that the generated error message is nicely formatted.
|
||||
assert str(cm.value) == dedent('''\
|
||||
No installed spec matches the hash: 'usr'
|
||||
|
||||
Some compiler or linker flags were provided without quoting their arguments,
|
||||
which now causes spack to try to parse the *next* argument as a spec component
|
||||
such as a variant instead of an additional compiler or linker flag. If the
|
||||
intent was to set multiple flags, try quoting them together as described below.
|
||||
|
||||
Possible flag quotation errors (with the correctly-quoted version after the =>):
|
||||
(1) cflags=-Os -pipe => cflags="-Os -pipe"
|
||||
(2) cflags=-I /usr/include => cflags="-I /usr/include"''')
|
||||
|
||||
# Verify that the same unquoted cflags report is generated in the error message even
|
||||
# if it fails during concretization, not just during parsing.
|
||||
with pytest.raises(spack.error.SpackError) as cm:
|
||||
spec('gcc cflags=-Os -pipe')
|
||||
cm = str(cm.value)
|
||||
assert cm.startswith('trying to set variant "pipe" in package "gcc", but the package has no such variant [happened during concretization of gcc cflags="-Os" ~pipe]') # noqa: E501
|
||||
assert cm.endswith('(1) cflags=-Os -pipe => cflags="-Os -pipe"')
|
||||
|
||||
|
||||
def test_spec_yaml():
|
||||
output = spec('--yaml', 'mpileaks')
|
||||
|
||||
@@ -174,17 +124,6 @@ def test_spec_returncode():
|
||||
assert spec.returncode == 1
|
||||
|
||||
|
||||
def test_spec_parse_error():
|
||||
with pytest.raises(spack.error.SpackError) as e:
|
||||
spec("1.15:")
|
||||
|
||||
# make sure the error is formatted properly
|
||||
error_msg = """\
|
||||
1.15:
|
||||
^"""
|
||||
assert error_msg in str(e.value)
|
||||
|
||||
|
||||
def test_env_aware_spec(mutable_mock_env_path):
|
||||
env = ev.create('test')
|
||||
env.add('mpileaks')
|
||||
|
@@ -41,6 +41,7 @@ def check_stage_path(monkeypatch, tmpdir):
|
||||
|
||||
def fake_stage(pkg, mirror_only=False):
|
||||
assert pkg.path == expected_path
|
||||
assert os.path.isdir(expected_path), expected_path
|
||||
|
||||
monkeypatch.setattr(spack.package.PackageBase, 'do_stage', fake_stage)
|
||||
|
||||
|
@@ -1668,143 +1668,3 @@ def test_reuse_with_unknown_package_dont_raise(
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = Spec('c').concretized()
|
||||
assert s.namespace == 'builtin.mock'
|
||||
|
||||
@pytest.mark.parametrize('specs,expected', [
|
||||
(['libelf', 'libelf@0.8.10'], 1),
|
||||
(['libdwarf%gcc', 'libelf%clang'], 2),
|
||||
(['libdwarf%gcc', 'libdwarf%clang'], 4),
|
||||
(['libdwarf^libelf@0.8.12', 'libdwarf^libelf@0.8.13'], 4),
|
||||
(['hdf5', 'zmpi'], 3),
|
||||
(['hdf5', 'mpich'], 2),
|
||||
(['hdf5^zmpi', 'mpich'], 4),
|
||||
(['mpi', 'zmpi'], 2),
|
||||
(['mpi', 'mpich'], 1),
|
||||
])
|
||||
def test_best_effort_coconcretize(self, specs, expected):
|
||||
import spack.solver.asp
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip('Original concretizer cannot concretize in rounds')
|
||||
|
||||
specs = [spack.spec.Spec(s) for s in specs]
|
||||
solver = spack.solver.asp.Solver()
|
||||
solver.reuse = False
|
||||
concrete_specs = set()
|
||||
for result in solver.solve_in_rounds(specs):
|
||||
for s in result.specs:
|
||||
concrete_specs.update(s.traverse())
|
||||
|
||||
assert len(concrete_specs) == expected
|
||||
|
||||
@pytest.mark.parametrize('specs,expected_spec,occurrences', [
|
||||
# The algorithm is greedy, and it might decide to solve the "best"
|
||||
# spec early, in which case reuse is suboptimal. In this case the most
|
||||
# recent version of libdwarf is selected and concretized to libelf@0.8.13
|
||||
(['libdwarf@20111030^libelf@0.8.10',
|
||||
'libdwarf@20130207^libelf@0.8.12',
|
||||
'libdwarf@20130729'], 'libelf@0.8.12', 1),
|
||||
# Check we reuse the best libelf in the environment
|
||||
(['libdwarf@20130729^libelf@0.8.10',
|
||||
'libdwarf@20130207^libelf@0.8.12',
|
||||
'libdwarf@20111030'], 'libelf@0.8.12', 2),
|
||||
(['libdwarf@20130729',
|
||||
'libdwarf@20130207',
|
||||
'libdwarf@20111030'], 'libelf@0.8.13', 3),
|
||||
# We need to solve in 2 rounds and we expect mpich to be preferred to zmpi
|
||||
(['hdf5+mpi', 'zmpi', 'mpich'], 'mpich', 2)
|
||||
])
|
||||
def test_best_effort_coconcretize_preferences(
|
||||
self, specs, expected_spec, occurrences
|
||||
):
|
||||
"""Test package preferences during coconcretization."""
|
||||
import spack.solver.asp
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip('Original concretizer cannot concretize in rounds')
|
||||
|
||||
specs = [spack.spec.Spec(s) for s in specs]
|
||||
solver = spack.solver.asp.Solver()
|
||||
solver.reuse = False
|
||||
concrete_specs = {}
|
||||
for result in solver.solve_in_rounds(specs):
|
||||
concrete_specs.update(result.specs_by_input)
|
||||
|
||||
counter = 0
|
||||
for spec in concrete_specs.values():
|
||||
if expected_spec in spec:
|
||||
counter += 1
|
||||
assert counter == occurrences, concrete_specs
|
||||
|
||||
@pytest.mark.regression('30864')
|
||||
def test_misleading_error_message_on_version(self, mutable_database):
|
||||
# For this bug to be triggered we need a reusable dependency
|
||||
# that is not optimal in terms of optimization scores.
|
||||
# We pick an old version of "b"
|
||||
import spack.solver.asp
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip('Original concretizer cannot reuse')
|
||||
|
||||
reusable_specs = [
|
||||
spack.spec.Spec('non-existing-conditional-dep@1.0').concretized()
|
||||
]
|
||||
root_spec = spack.spec.Spec('non-existing-conditional-dep@2.0')
|
||||
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
solver = spack.solver.asp.Solver()
|
||||
setup = spack.solver.asp.SpackSolverSetup()
|
||||
with pytest.raises(spack.solver.asp.UnsatisfiableSpecError,
|
||||
match="'dep-with-variants' satisfies '@999'"):
|
||||
solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
|
||||
|
||||
@pytest.mark.regression('31148')
|
||||
def test_version_weight_and_provenance(self):
|
||||
"""Test package preferences during coconcretization."""
|
||||
import spack.solver.asp
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip('Original concretizer cannot reuse')
|
||||
|
||||
reusable_specs = [
|
||||
spack.spec.Spec(spec_str).concretized()
|
||||
for spec_str in ('b@0.9', 'b@1.0')
|
||||
]
|
||||
root_spec = spack.spec.Spec('a foobar=bar')
|
||||
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
solver = spack.solver.asp.Solver()
|
||||
setup = spack.solver.asp.SpackSolverSetup()
|
||||
result = solver.driver.solve(
|
||||
setup, [root_spec], reuse=reusable_specs, out=sys.stdout
|
||||
)
|
||||
# The result here should have a single spec to build ('a')
|
||||
# and it should be using b@1.0 with a version badness of 2
|
||||
# The provenance is:
|
||||
# version_declared("b","1.0",0,"package_py").
|
||||
# version_declared("b","0.9",1,"package_py").
|
||||
# version_declared("b","1.0",2,"installed").
|
||||
# version_declared("b","0.9",3,"installed").
|
||||
for criterion in [
|
||||
(1, None, 'number of packages to build (vs. reuse)'),
|
||||
(2, 0, 'version badness')
|
||||
]:
|
||||
assert criterion in result.criteria
|
||||
assert result.specs[0].satisfies('^b@1.0')
|
||||
|
||||
@pytest.mark.regression('31169')
|
||||
def test_not_reusing_incompatible_os_or_compiler(self):
|
||||
import spack.solver.asp
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip('Original concretizer cannot reuse')
|
||||
|
||||
root_spec = spack.spec.Spec('b')
|
||||
s = root_spec.concretized()
|
||||
wrong_compiler, wrong_os = s.copy(), s.copy()
|
||||
wrong_compiler.compiler = spack.spec.CompilerSpec('gcc@12.1.0')
|
||||
wrong_os.architecture = spack.spec.ArchSpec('test-ubuntu2204-x86_64')
|
||||
reusable_specs = [wrong_compiler, wrong_os]
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
solver = spack.solver.asp.Solver()
|
||||
setup = spack.solver.asp.SpackSolverSetup()
|
||||
result = solver.driver.solve(
|
||||
setup, [root_spec], reuse=reusable_specs, out=sys.stdout
|
||||
)
|
||||
concrete_spec = result.specs[0]
|
||||
assert concrete_spec.satisfies('%gcc@4.5.0')
|
||||
assert concrete_spec.satisfies('os=debian6')
|
||||
|
@@ -144,7 +144,7 @@ def test_preferred_target(self, mutable_mock_repo):
|
||||
|
||||
update_packages('mpileaks', 'target', [default])
|
||||
spec = concretize('mpileaks')
|
||||
assert str(spec['mpileaks'].target) == default
|
||||
assert str(spec['mpich'].target) == default
|
||||
assert str(spec['mpich'].target) == default
|
||||
|
||||
def test_preferred_versions(self):
|
||||
|
@@ -18,7 +18,6 @@
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.schema.compilers
|
||||
import spack.schema.config
|
||||
import spack.schema.env
|
||||
@@ -1158,19 +1157,6 @@ def test_bad_path_double_override(config):
|
||||
pass
|
||||
|
||||
|
||||
def test_license_dir_config(mutable_config, mock_packages):
|
||||
"""Ensure license directory is customizable"""
|
||||
assert spack.config.get("config:license_dir") == spack.paths.default_license_dir
|
||||
assert spack.package.Package.global_license_dir == spack.paths.default_license_dir
|
||||
assert spack.repo.get("a").global_license_dir == spack.paths.default_license_dir
|
||||
|
||||
rel_path = os.path.join(os.path.sep, "foo", "bar", "baz")
|
||||
spack.config.set("config:license_dir", rel_path)
|
||||
assert spack.config.get("config:license_dir") == rel_path
|
||||
assert spack.package.Package.global_license_dir == rel_path
|
||||
assert spack.repo.get("a").global_license_dir == rel_path
|
||||
|
||||
|
||||
@pytest.mark.regression('22547')
|
||||
def test_single_file_scope_cache_clearing(env_yaml):
|
||||
scope = spack.config.SingleFileScope(
|
||||
|
@@ -576,7 +576,7 @@ def default_config():
|
||||
|
||||
This ensures we can test the real default configuration without having
|
||||
tests fail when the user overrides the defaults that we test against."""
|
||||
defaults_path = os.path.join(spack.paths.etc_path, 'defaults')
|
||||
defaults_path = os.path.join(spack.paths.etc_path, 'spack', 'defaults')
|
||||
if is_windows:
|
||||
defaults_path = os.path.join(defaults_path, "windows")
|
||||
with spack.config.use_configuration(defaults_path) as defaults_config:
|
||||
|
@@ -10,7 +10,6 @@
|
||||
logic needs to consume all related specs in a single pass).
|
||||
"""
|
||||
import json
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -33,7 +32,7 @@
|
||||
},
|
||||
"compiler": {
|
||||
"name": "gcc",
|
||||
"version": "10.2.0.cray"
|
||||
"version": "10.2.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"packagey": {
|
||||
@@ -157,7 +156,7 @@ def spec_json(self):
|
||||
# Intended to match example_compiler_entry above
|
||||
_common_compiler = JsonCompilerEntry(
|
||||
name='gcc',
|
||||
version='10.2.0.cray',
|
||||
version='10.2.0',
|
||||
arch={
|
||||
"os": "centos8",
|
||||
"target": "x86_64"
|
||||
@@ -310,16 +309,8 @@ def test_failed_translate_compiler_name():
|
||||
|
||||
def create_manifest_content():
|
||||
return {
|
||||
# Note: the cray_manifest module doesn't use the _meta section right
|
||||
# now, but it is anticipated to be useful
|
||||
'_meta': {
|
||||
"file-type": "cray-pe-json",
|
||||
"system-type": "test",
|
||||
"schema-version": "1.3",
|
||||
"cpe-version": "22.06"
|
||||
},
|
||||
'specs': list(x.to_dict() for x in generate_openmpi_entries()),
|
||||
'compilers': [_common_compiler.compiler_json()]
|
||||
'compilers': []
|
||||
}
|
||||
|
||||
|
||||
@@ -345,45 +336,3 @@ def test_read_cray_manifest(
|
||||
' ^/openmpifakehasha'.split(),
|
||||
concretize=True)
|
||||
assert concretized_specs[0]['hwloc'].dag_hash() == 'hwlocfakehashaaa'
|
||||
|
||||
|
||||
def test_read_cray_manifest_twice_no_compiler_duplicates(
|
||||
tmpdir, mutable_config, mock_packages, mutable_database):
|
||||
if spack.config.get('config:concretizer') == 'clingo':
|
||||
pytest.skip("The ASP-based concretizer is currently picky about "
|
||||
" OS matching and will fail.")
|
||||
|
||||
with tmpdir.as_cwd():
|
||||
test_db_fname = 'external-db.json'
|
||||
with open(test_db_fname, 'w') as db_file:
|
||||
json.dump(create_manifest_content(), db_file)
|
||||
|
||||
# Read the manifest twice
|
||||
cray_manifest.read(test_db_fname, True)
|
||||
cray_manifest.read(test_db_fname, True)
|
||||
|
||||
compilers = spack.compilers.all_compilers()
|
||||
filtered = list(c for c in compilers if
|
||||
c.spec == spack.spec.CompilerSpec('gcc@10.2.0.cray'))
|
||||
assert(len(filtered) == 1)
|
||||
|
||||
|
||||
def test_read_old_manifest_v1_2(
|
||||
tmpdir, mutable_config, mock_packages, mutable_database):
|
||||
"""Test reading a file using the older format
|
||||
('version' instead of 'schema-version').
|
||||
"""
|
||||
manifest_dir = str(tmpdir.mkdir('manifest_dir'))
|
||||
manifest_file_path = os.path.join(manifest_dir, 'test.json')
|
||||
with open(manifest_file_path, 'w') as manifest_file:
|
||||
manifest_file.write("""\
|
||||
{
|
||||
"_meta": {
|
||||
"file-type": "cray-pe-json",
|
||||
"system-type": "EX",
|
||||
"version": "1.3"
|
||||
},
|
||||
"specs": []
|
||||
}
|
||||
""")
|
||||
cray_manifest.read(manifest_file_path, True)
|
||||
|
@@ -1,5 +1,12 @@
|
||||
bootstrap:
|
||||
sources:
|
||||
- name: 'github-actions'
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.2
|
||||
type: buildcache
|
||||
description: |
|
||||
Buildcache generated from a public workflow using Github Actions.
|
||||
The sha256 checksum of binaries is checked before installation.
|
||||
info:
|
||||
url: file:///home/spack/production/spack/mirrors/clingo
|
||||
homepage: https://github.com/alalazo/spack-bootstrap-mirrors
|
||||
releases: https://github.com/alalazo/spack-bootstrap-mirrors/releases
|
||||
trusted: {}
|
||||
|
@@ -1,54 +0,0 @@
|
||||
{
|
||||
"spec": {
|
||||
"_meta": {
|
||||
"version": 3
|
||||
},
|
||||
"nodes": [
|
||||
{
|
||||
"name": "archive-files",
|
||||
"version": "2.0",
|
||||
"arch": {
|
||||
"platform": "test",
|
||||
"platform_os": "debian6",
|
||||
"target": {
|
||||
"name": "core2",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"mmx",
|
||||
"sse",
|
||||
"sse2",
|
||||
"ssse3"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"nocona"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "gcc",
|
||||
"version": "4.5.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"package_hash": "ncv2pr4o2yemepsa4h7u4p4dsgieul5fxvh6s5am5fsb65ebugaa====",
|
||||
"hash": "l3vdiqvbobmspwyb4q2b62fz6nitd4hk"
|
||||
}
|
||||
]
|
||||
},
|
||||
"binary_cache_checksum": {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": "c226b51d88876746efd6f9737cc6dfdd349870b6c0b9c045d9bad0f2764a40b9"
|
||||
},
|
||||
"buildinfo": {
|
||||
"relative_prefix": "test-debian6-core2/gcc-4.5.0/archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk",
|
||||
"relative_rpaths": false
|
||||
}
|
||||
}
|
Binary file not shown.
@@ -1,93 +0,0 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA512
|
||||
|
||||
{
|
||||
"spec": {
|
||||
"_meta": {
|
||||
"version": 2
|
||||
},
|
||||
"nodes": [
|
||||
{
|
||||
"name": "zlib",
|
||||
"version": "1.2.12",
|
||||
"arch": {
|
||||
"platform": "linux",
|
||||
"platform_os": "ubuntu18.04",
|
||||
"target": {
|
||||
"name": "haswell",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"ivybridge",
|
||||
"x86_64_v3"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "gcc",
|
||||
"version": "8.4.0"
|
||||
},
|
||||
"namespace": "builtin",
|
||||
"parameters": {
|
||||
"optimize": true,
|
||||
"patches": [
|
||||
"0d38234384870bfd34dfcb738a9083952656f0c766a0f5990b1893076b084b76"
|
||||
],
|
||||
"pic": true,
|
||||
"shared": true,
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"patches": [
|
||||
"0d38234384870bfd34dfcb738a9083952656f0c766a0f5990b1893076b084b76"
|
||||
],
|
||||
"package_hash": "bm7rut622h3yt5mpm4kvf7pmh7tnmueezgk5yquhr2orbmixwxuq====",
|
||||
"hash": "g7otk5dra3hifqxej36m5qzm7uyghqgb",
|
||||
"full_hash": "fx2fyri7bv3vpz2rhke6g3l3dwxda4t6",
|
||||
"build_hash": "g7otk5dra3hifqxej36m5qzm7uyghqgb"
|
||||
}
|
||||
]
|
||||
},
|
||||
"binary_cache_checksum": {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": "5b9a180f14e0d04b17b1b0c2a26cf3beae448d77d1bda4279283ca4567d0be90"
|
||||
},
|
||||
"buildinfo": {
|
||||
"relative_prefix": "linux-ubuntu18.04-haswell/gcc-8.4.0/zlib-1.2.12-g7otk5dra3hifqxej36m5qzm7uyghqgb",
|
||||
"relative_rpaths": false
|
||||
}
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQEzBAEBCgAdFiEEz8AGj4zHZe4OaI2OQ0Tg92UAr50FAmJ5lD4ACgkQQ0Tg92UA
|
||||
r52LEggAl/wXlOlHDnjWvqBlqAn3gaJEZ5PDVPczk6k0w+SNfDGrHfWJnL2c23Oq
|
||||
CssbHylSgAFvaPT1frbiLfZj6L4j4Ym1qsxIlGNsVfW7Pbc4yNF0flqYMdWKXbgY
|
||||
2sQoPegIKK7EBtpjDf0+VRYfJTMqjsSgjT/o+nTkg9oAnvU23EqXI5uiY84Z5z6l
|
||||
CKLBm0GWg7MzI0u8NdiQMVNYVatvvZ8EQpblEUQ7jD4Bo0yoSr33Qdq8uvu4ZdlW
|
||||
bvbIgeY3pTPF13g9uNznHLxW4j9BWQnOtHFI5UKQnYRner504Yoz9k+YxhwuDlaY
|
||||
TrOxvHe9hG1ox1AP4tqQc+HsNpm5Kg==
|
||||
=gi2R
|
||||
-----END PGP SIGNATURE-----
|
@@ -104,7 +104,7 @@ def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch):
|
||||
/
|
||||
o dyninst
|
||||
|\
|
||||
| o libdwarf
|
||||
o | libdwarf
|
||||
|/
|
||||
o libelf
|
||||
'''
|
||||
|
@@ -215,7 +215,7 @@ def test_process_binary_cache_tarball_none(install_mockery, monkeypatch,
|
||||
|
||||
def test_process_binary_cache_tarball_tar(install_mockery, monkeypatch, capfd):
|
||||
"""Tests of _process_binary_cache_tarball with a tar file."""
|
||||
def _spec(spec, unsigned=False, mirrors_for_spec=None):
|
||||
def _spec(spec, preferred_mirrors=None):
|
||||
return spec
|
||||
|
||||
# Skip binary distribution functionality since assume tested elsewhere
|
||||
@@ -820,7 +820,7 @@ def test_setup_install_dir_grp(install_mockery, monkeypatch, capfd):
|
||||
def _get_group(spec):
|
||||
return mock_group
|
||||
|
||||
def _chgrp(path, group, follow_symlinks=True):
|
||||
def _chgrp(path, group):
|
||||
tty.msg(mock_chgrp_msg.format(path, group))
|
||||
|
||||
monkeypatch.setattr(prefs, 'get_package_group', _get_group)
|
||||
|
@@ -6,7 +6,6 @@
|
||||
import os.path
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
from textwrap import dedent
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -271,37 +270,3 @@ def f(*args, **kwargs):
def test_dedupe():
    assert [x for x in dedupe([1, 2, 1, 3, 2])] == [1, 2, 3]
    assert [x for x in dedupe([1, -2, 1, 3, 2], key=abs)] == [1, -2, 3]


def test_grouped_exception():
    h = llnl.util.lang.GroupedExceptionHandler()

    def inner():
        raise ValueError('wow!')

    with h.forward('inner method'):
        inner()

    with h.forward('top-level'):
        raise TypeError('ok')

    assert h.grouped_message(with_tracebacks=False) == dedent("""\
        due to the following failures:
        inner method raised ValueError: wow!
        top-level raised TypeError: ok""")

    assert h.grouped_message(with_tracebacks=True) == dedent("""\
        due to the following failures:
        inner method raised ValueError: wow!
          File "{0}", \
line 283, in test_grouped_exception
            inner()
          File "{0}", \
line 280, in inner
            raise ValueError('wow!')

        top-level raised TypeError: ok
          File "{0}", \
line 286, in test_grouped_exception
            raise TypeError('ok')
        """).format(__file__)
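The test above pins down the handler's contract: each forward() context swallows the exception it sees, records it under a label, and grouped_message() replays them all at once. A rough sketch of that idea (the real class lives in llnl.util.lang; this is an illustration, not its source):

import contextlib
import traceback

class GroupedHandler:
    def __init__(self):
        self.exceptions = []  # list of (label, exception, traceback string)

    @contextlib.contextmanager
    def forward(self, label):
        # Run the body, capturing any exception instead of propagating it.
        try:
            yield
        except Exception as e:
            self.exceptions.append((label, e, traceback.format_exc()))

    def grouped_message(self, with_tracebacks=True):
        # Summarize every captured failure in one message.
        lines = ['due to the following failures:']
        for label, e, tb in self.exceptions:
            lines.append('{0} raised {1}: {2}'.format(label, type(e).__name__, e))
            if with_tracebacks:
                lines.append(tb)
        return '\n'.join(lines)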
@@ -2,8 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import sys

import pytest

@@ -59,9 +59,6 @@ def test_repo_anonymous_pkg(mutable_mock_repo):


@pytest.mark.maybeslow
@pytest.mark.skipif(
    sys.version_info < (3, 5), reason="Test started failing spuriously on Python 2.7"
)
def test_repo_last_mtime():
    latest_mtime = max(os.path.getmtime(p.module.__file__)
                       for p in spack.repo.path.all_packages())
@@ -135,6 +135,8 @@ def _mock_installed(self):
    assert spack.version.Version('3') == a_spec[b][d].version
    assert spack.version.Version('3') == a_spec[d].version

    # TODO: with reuse, this will be different -- verify the reuse case


@pytest.mark.usefixtures('config')
def test_specify_preinstalled_dep():
@@ -15,7 +15,6 @@
    SpecFormatSigilError,
    SpecFormatStringError,
    UnconstrainableDependencySpecError,
    UnsupportedCompilerError,
)
from spack.variant import (
    InvalidVariantValueError,
@@ -1274,55 +1273,3 @@ def test_spec_installed(install_mockery, database):
    # 'a' is not in the mock DB and is not installed
    spec = Spec("a").concretized()
    assert not spec.installed


@pytest.mark.regression('30678')
def test_call_dag_hash_on_old_dag_hash_spec(mock_packages, config):
    # create a concrete spec
    a = Spec("a").concretized()
    dag_hashes = {
        spec.name: spec.dag_hash() for spec in a.traverse()
    }

    # make it look like an old DAG hash spec with no package hash on the spec.
    for spec in a.traverse():
        assert spec.concrete
        spec._package_hash = None

    for spec in a.traverse():
        assert dag_hashes[spec.name] == spec.dag_hash()

        with pytest.raises(ValueError, match='Cannot call package_hash()'):
            spec.package_hash()


@pytest.mark.regression('30861')
def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
    # create an "old" spec with no package hash
    bottom = Spec("dt-diamond-bottom").concretized()
    delattr(bottom, "_package_hash")

    dummy_hash = "zd4m26eis2wwbvtyfiliar27wkcv3ehk"
    bottom._hash = dummy_hash

    # add it to an abstract spec as a dependency
    top = Spec("dt-diamond")
    top.add_dependency_edge(bottom, ())

    # concretize with the already-concrete dependency
    top.concretize()

    for spec in top.traverse():
        assert spec.concrete

    # make sure dag_hash is untouched
    assert spec["dt-diamond-bottom"].dag_hash() == dummy_hash
    assert spec["dt-diamond-bottom"]._hash == dummy_hash

    # make sure package hash is NOT recomputed
    assert not getattr(spec["dt-diamond-bottom"], '_package_hash', None)


def test_unsupported_compiler():
    with pytest.raises(UnsupportedCompilerError):
        Spec('gcc%fake-compiler').validate_or_raise()
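Both removed tests hinge on one caching pattern: a hash computed once is stored on a private attribute and served from there afterwards, so a pre-set hash is never recomputed even when its inputs (the package hash) are gone. A stripped-down sketch of that idea; Node and its methods are illustrative, not Spec's actual code:

import hashlib

class Node:
    def __init__(self, name):
        self.name = name
        self._hash = None  # filled lazily; survives even if inputs go away

    def dag_hash(self):
        # Serve the memoized value; compute only on first call.
        if self._hash is None:
            self._hash = self._compute_hash()
        return self._hash

    def _compute_hash(self):
        return hashlib.sha256(self.name.encode()).hexdigest()[:32]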
@@ -125,6 +125,7 @@ def check_lex(self, tokens, spec):
    def _check_raises(self, exc_type, items):
        for item in items:
            with pytest.raises(exc_type):
                print("CHECKING: ", item, "=======================")
                Spec(item)

    # ========================================================================
@@ -8,10 +8,7 @@
The YAML and JSON formats preserve DAG information in the spec.

"""
from __future__ import print_function

import ast
import collections
import inspect
import os
@@ -20,7 +17,6 @@
from llnl.util.compat import Iterable, Mapping

import spack.hash_types as ht
import spack.paths
import spack.spec
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
@@ -49,27 +45,6 @@ def test_simple_spec():
    check_json_round_trip(spec)


def test_read_spec_from_signed_json():
    spec_dir = os.path.join(
        spack.paths.test_path, 'data', 'mirrors', 'signed_json')
    file_name = (
        'linux-ubuntu18.04-haswell-gcc-8.4.0-'
        'zlib-1.2.12-g7otk5dra3hifqxej36m5qzm7uyghqgb.spec.json.sig')
    spec_path = os.path.join(spec_dir, file_name)

    def check_spec(spec_to_check):
        assert(spec_to_check.name == 'zlib')
        assert(spec_to_check._hash == 'g7otk5dra3hifqxej36m5qzm7uyghqgb')

    with open(spec_path) as fd:
        s = Spec.from_signed_json(fd)
        check_spec(s)

    with open(spec_path) as fd:
        s = Spec.from_signed_json(fd.read())
        check_spec(s)


def test_normal_spec(mock_packages):
    spec = Spec('mpileaks+debug~opt')
    spec.normalize()
@@ -436,75 +411,3 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
    spec = Spec.from_yaml(yaml)
    concrete_spec = spec.concretized()
    assert concrete_spec.eq_dag(spec)


#: A well ordered Spec dictionary, using ``OrderedDict``.
#: Any operation that transforms Spec dictionaries should
#: preserve this order.
ordered_spec = collections.OrderedDict([
    ("arch", collections.OrderedDict([
        ("platform", "darwin"),
        ("platform_os", "bigsur"),
        ("target", collections.OrderedDict([
            ("features", [
                "adx",
                "aes",
                "avx",
                "avx2",
                "bmi1",
                "bmi2",
                "clflushopt",
                "f16c",
                "fma",
                "mmx",
                "movbe",
                "pclmulqdq",
                "popcnt",
                "rdrand",
                "rdseed",
                "sse",
                "sse2",
                "sse4_1",
                "sse4_2",
                "ssse3",
                "xsavec",
                "xsaveopt"
            ]),
            ("generation", 0),
            ("name", "skylake"),
            ("parents", ["broadwell"]),
            ("vendor", "GenuineIntel"),
        ])),
    ])),
    ("compiler", collections.OrderedDict([
        ("name", "apple-clang"),
        ("version", "13.0.0"),
    ])),
    ("name", "zlib"),
    ("namespace", "builtin"),
    ("parameters", collections.OrderedDict([
        ("cflags", []),
        ("cppflags", []),
        ("cxxflags", []),
        ("fflags", []),
        ("ldflags", []),
        ("ldlibs", []),
        ("optimize", True),
        ("pic", True),
        ("shared", True),
    ])),
    ("version", "1.2.11"),
])


@pytest.mark.regression("31092")
def test_strify_preserves_order():
    """Ensure that ``spack_json._strify()`` dumps dictionaries in the right order.

    ``_strify()`` is used in ``spack_json.dump()``, which is used in
    ``Spec.dag_hash()``, so if this goes wrong, ``Spec`` hashes can vary between python
    versions.

    """
    strified = sjson._strify(ordered_spec)
    assert list(ordered_spec.items()) == list(strified.items())
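The property being tested: converting keys and values to strings must not reorder mappings, or hashes over the dumped JSON would differ across interpreters. A sketch of an order-preserving helper in the spirit of _strify (illustrative; not spack_json's actual code):

def strify_keys(data):
    if isinstance(data, dict):
        # A dict comprehension preserves insertion order on Python 3.7+;
        # older interpreters would need an OrderedDict here instead.
        return {str(k): strify_keys(v) for k, v in data.items()}
    if isinstance(data, (list, tuple)):
        return [strify_keys(item) for item in data]
    return data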
@@ -55,12 +55,9 @@ def test_write_and_remove_cache_file(file_cache):

    file_cache.remove('test.yaml')

    # After removal the file should not exist
    # After removal both the file and the lock file should not exist
    assert not os.path.exists(file_cache.cache_path('test.yaml'))

    # Whether the lock file exists is more of an implementation detail, on Linux they
    # continue to exist, on Windows they don't.
    # assert os.path.exists(file_cache._lock_path('test.yaml'))
    assert not os.path.exists(file_cache._lock_path('test.yaml'))


@pytest.mark.skipif(sys.platform == 'win32',
@@ -97,10 +94,3 @@ def test_cache_write_readonly_cache_fails(file_cache):

    with pytest.raises(CacheError, match='Insufficient permissions to write'):
        file_cache.write_transaction(filename)


@pytest.mark.regression('31475')
def test_delete_is_idempotent(file_cache):
    """Deleting a non-existent key should be idempotent, to simplify life when
    running delete with multiple processes"""
    file_cache.remove('test.yaml')
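The idempotency contract being tested is the usual "ignore a missing file" pattern. A minimal sketch of a remove() with that behavior, assuming nothing about FileCache's internals (the function name is illustrative):

import contextlib
import os

def remove(path):
    # Deleting a path that is already gone is a no-op, not an error.
    with contextlib.suppress(FileNotFoundError):
        os.remove(path)

remove('test.yaml')  # first call deletes the file (if present)
remove('test.yaml')  # second call silently does nothing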
@@ -5,7 +5,6 @@

import os
import re
import sys
from itertools import product

from spack.util.executable import which
@@ -19,8 +18,6 @@
ALLOWED_ARCHIVE_TYPES = [".".join(ext) for ext in product(
    PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS

is_windows = sys.platform == 'win32'


def allowed_archive(path):
    return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
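How the product() above expands: every pre-extension is paired with every compression extension to form compound suffixes like "tar.gz". The concrete lists below are illustrative stand-ins; the real PRE_EXTS, EXTS, and NOTAR_EXTS are defined earlier in the file.

from itertools import product

PRE_EXTS = ['tar']          # illustrative values
EXTS = ['gz', 'bz2', 'xz']  # illustrative values
compound = ['.'.join(ext) for ext in product(PRE_EXTS, EXTS)]
print(compound)  # ['tar.gz', 'tar.bz2', 'tar.xz']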
@@ -51,14 +48,15 @@ def _unzip(archive_file):
    Args:
        archive_file (str): absolute path of the file to be decompressed
    """
    exe = 'unzip'
    arg = '-q'
    if is_windows:
        exe = 'tar'
        arg = '-xf'
    unzip = which(exe, required=True)
    unzip.add_default_arg(arg)
    unzip(archive_file)
    try:
        from zipfile import ZipFile
        destination_abspath = os.getcwd()
        with ZipFile(archive_file, 'r') as zf:
            zf.extractall(destination_abspath)
    except ImportError:
        unzip = which('unzip', required=True)
        unzip.add_default_arg('-q')
        return unzip


def decompressor_for(path, extension=None):
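Both variants of _unzip above work around the same portability problem: there is no 'unzip' executable on Windows. A self-contained sketch using only the standard library, which sidesteps external tools entirely (names are illustrative, not Spack's API):

import os
import zipfile

def unzip(archive_file, destination=None):
    """Extract a .zip archive into ``destination`` (default: current directory)."""
    destination = destination or os.getcwd()
    with zipfile.ZipFile(archive_file, 'r') as zf:
        zf.extractall(destination)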
@@ -178,7 +178,7 @@ def streamify(arg, mode):
    istream, close_istream = streamify(input, 'r')

    if not ignore_quotes:
        quoted_args = [arg for arg in args if re.search(r'^".*"$|^\'.*\'$', arg)]
        quoted_args = [arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)]
        if quoted_args:
            tty.warn(
                "Quotes in command arguments can confuse scripts like"
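The two patterns above differ in what they flag: the first matches only arguments fully wrapped in quotes, the second any argument with a quote at either end. A quick check of the difference:

import re

full = r'^".*"$|^\'.*\'$'   # whole argument quoted
loose = r'^"|^\'|"$|\'$'    # quote at either end

for arg in ['"all quoted"', '"left only', 'right only"', 'none']:
    print(arg, bool(re.search(full, arg)), bool(re.search(loose, arg)))
# '"all quoted"' matches both patterns; the one-sided cases match only the second.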