Compare commits
55 Commits
| SHA1 |
| --- |
| 912109da16 |
| 5e0eb1f887 |
| 7cd7b38cb8 |
| a2a403ae0a |
| 3b12a8b192 |
| e9896620e4 |
| 9d00894c5a |
| 1cc2b82408 |
| ad2c020848 |
| bd119927ff |
| 2a8fe3a5b0 |
| 3427e2c8cf |
| e8bb341536 |
| 08009ffd70 |
| d4f2326824 |
| 9edfd25134 |
| afb3f4ff20 |
| 9d8e411d76 |
| fbdcd7cbf1 |
| 5b0d4fe928 |
| deb9102b2d |
| e0be0d8683 |
| 1179623002 |
| ab5c02d538 |
| 1fd6fedba5 |
| 69cbf10a80 |
| 5b2d7445b8 |
| e974b44e86 |
| 69b8cddb1b |
| 8e659f512e |
| 5daf023aec |
| 87abda4cdd |
| fa5e186d4a |
| e1cc28a30a |
| 17edf1ae90 |
| 79fd1c5114 |
| 13e36c5457 |
| b2694013d4 |
| 8f3b025b55 |
| 37fbe30c4a |
| 314867e635 |
| 9345bf81b9 |
| adf4e91658 |
| 20ad47f9e1 |
| 7e5de95a30 |
| e9f7fb03c9 |
| 9d4291e590 |
| 8f98f1d182 |
| 654f6839eb |
| c8daa7218d |
| d862507bcf |
| 7c6b253d89 |
| abd418cc31 |
| 544826c825 |
| f92a2d688d |
**.github/workflows/bootstrap.yml** (vendored, 32 changes)
```diff
@@ -59,6 +59,14 @@ jobs:
             bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
             make patch unzip xz-utils python3 python3-dev tree \
             cmake bison
+      - name: Work around CVE-2022-24765
+        run: |
+          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
+          # a breaking behavior. See:
+          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
+          # - https://github.com/actions/checkout/issues/760
+          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
+          git config --global --add safe.directory /__w/spack/spack
       - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
       - name: Setup repo and non-root user
         run: |
@@ -171,7 +179,15 @@ jobs:
           apt-get install -y \
             bzip2 curl file g++ gcc patchelf gfortran git gzip \
             make patch unzip xz-utils python3 python3-dev tree
-      - uses: actions/checkout@v2
+      - name: Work around CVE-2022-24765
+        run: |
+          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
+          # a breaking behavior. See:
+          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
+          # - https://github.com/actions/checkout/issues/760
+          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
+          git config --global --add safe.directory /__w/spack/spack
+      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -200,7 +216,15 @@ jobs:
             bzip2 curl file g++ gcc patchelf gfortran git gzip \
             make patch unzip xz-utils python3 python3-dev tree \
             gawk
-      - uses: actions/checkout@v2
+      - name: Work around CVE-2022-24765
+        run: |
+          # Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
+          # a breaking behavior. See:
+          # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
+          # - https://github.com/actions/checkout/issues/760
+          # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
+          git config --global --add safe.directory /__w/spack/spack
+      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -225,7 +249,7 @@ jobs:
           brew install tree
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -241,7 +265,7 @@ jobs:
           brew install gawk tree
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
```
**.github/workflows/setup_git.sh** (vendored, 11 changes)
```diff
@@ -1,9 +1,8 @@
-#!/usr/bin/env sh
+#!/bin/bash -e
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
-# With fetch-depth: 0 we have a remote develop
-# but not a local branch. Don't do this on develop
-if [ "$(git branch --show-current)" != "develop" ]
-then
-  git branch develop origin/develop
+
+# create a local pr base branch
+if [[ -n $GITHUB_BASE_REF ]]; then
+  git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}"
 fi
```
**.github/workflows/unit_tests.yaml** (vendored, 2 changes)
```diff
@@ -211,6 +211,7 @@ jobs:
           git clone "${{ github.server_url }}/${{ github.repository }}.git" && cd spack
           git fetch origin "${{ github.ref }}:test-branch"
           git checkout test-branch
+          . .github/workflows/setup_git.sh
           bin/spack unit-test -x
       - name: Run unit tests (only package tests)
         if: ${{ needs.changes.outputs.with_coverage == 'false' }}
@@ -223,6 +224,7 @@ jobs:
           git clone "${{ github.server_url }}/${{ github.repository }}.git" && cd spack
           git fetch origin "${{ github.ref }}:test-branch"
           git checkout test-branch
+          . .github/workflows/setup_git.sh
           bin/spack unit-test -x -m "not maybeslow" -k "package_sanity"
 
   # Test RHEL8 UBI with platform Python. This job is run
```
**CHANGELOG.md** (41 changes)
```diff
@@ -1,3 +1,44 @@
+# v0.17.2 (2022-04-13)
+
+### Spack bugfixes
+* Fix --reuse with upstreams set in an environment (#29680)
+* config add: fix parsing of validator error to infer type from oneOf (#29475)
+* Fix spack -C command_line_scope used in conjunction with other flags (#28418)
+* Use Spec.constrain to construct spec lists for stacks (#28783)
+* Fix bug occurring when searching for inherited patches in packages (#29574)
+* Fixed a few bugs when manipulating symlinks (#28318, #29515, #29636)
+* Fixed a few minor bugs affecting command prompt, terminal title and argument completion (#28279, #28278, #28939, #29405, #29070, #29402)
+* Fixed a few bugs affecting the spack ci command (#29518, #29419)
+* Fix handling of Intel compiler environment (#29439)
+* Fix a few edge cases when reindexing the DB (#28764)
+* Remove "Known issues" from documentation (#29664)
+* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)
+
+# v0.17.1 (2021-12-23)
+
+### Spack Bugfixes
+* Allow locks to work under high contention (#27846)
+* Improve errors messages from clingo (#27707 #27970)
+* Respect package permissions for sbang (#25764)
+* Fix --enable-locks behavior (#24675)
+* Fix log-format reporter ignoring install errors (#25961)
+* Fix overloaded argparse keys (#27379)
+* Allow style commands to run with targets other than "develop" (#27472)
+* Log lock messages to debug level, instead of verbose level (#27408)
+* Handle invalid unicode while logging (#21447)
+* spack audit: fix API calls to variants (#27713)
+* Provide meaningful message for empty environment installs (#28031)
+* Added opensuse leap containers to spack containerize (#27837)
+* Revert "patches: make re-applied patches idempotent" (#27625)
+* MANPATH can use system defaults (#21682)
+* Add "setdefault" subcommand to `spack module tcl` (#14686)
+* Regenerate views when specs already installed (#28113)
+
+### Package bugfixes
+* Fix external package detection for OpenMPI (#27255)
+* Update the UPC++ package to 2021.9.0 (#26996)
+* Added py-vermin v1.3.2 (#28072)
+
 # v0.17.0 (2021-11-05)
 
 `v0.17.0` is a major feature release.
```
```diff
@@ -129,6 +129,9 @@ are currently supported are summarized in the table below:
    * - CentOS 7
      - ``centos:7``
      - ``spack/centos7``
+   * - openSUSE Leap
+     - ``opensuse/leap``
+     - ``spack/leap15``
 
 All the images are tagged with the corresponding release of Spack:
 
```
```diff
@@ -56,7 +56,6 @@ or refer to the full manual below.
    basic_usage
    workflows
    Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
-   known_issues
 
 .. toctree::
    :maxdepth: 2
```
```diff
@@ -1,77 +0,0 @@
-.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
-   Spack Project Developers. See the top-level COPYRIGHT file for details.
-
-   SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-============
-Known Issues
-============
-
-This is a list of known bugs in Spack. It provides ways of getting around these
-problems if you encounter them.
-
----------------------------------------------------
-Variants are not properly forwarded to dependencies
----------------------------------------------------
-
-**Status:** Expected to be fixed by Spack's new concretizer
-
-Sometimes, a variant of a package can also affect how its dependencies are
-built. For example, in order to build MPI support for a package, it may
-require that its dependencies are also built with MPI support. In the
-``package.py``, this looks like:
-
-.. code-block:: python
-
-   depends_on('hdf5~mpi', when='~mpi')
-   depends_on('hdf5+mpi', when='+mpi')
-
-Spack handles this situation properly for *immediate* dependencies, and
-builds ``hdf5`` with the same variant you used for the package that
-depends on it. However, for *indirect* dependencies (dependencies of
-dependencies), Spack does not backtrack up the DAG far enough to handle
-this. Users commonly run into this situation when trying to build R with
-X11 support:
-
-.. code-block:: console
-
-   $ spack install r+X
-   ...
-   ==> Error: Invalid spec: 'cairo@1.14.8%gcc@6.2.1+X arch=linux-fedora25-x86_64 ^bzip2@1.0.6%gcc@6.2.1+shared arch=linux-fedora25-x86_64 ^font-util@1.3.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^fontconfig@2.12.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^freetype@2.7.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^gettext@0.19.8.1%gcc@6.2.1+bzip2+curses+git~libunistring+libxml2+tar+xz arch=linux-fedora25-x86_64 ^glib@2.53.1%gcc@6.2.1~libmount arch=linux-fedora25-x86_64 ^inputproto@2.3.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^kbproto@1.0.7%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libffi@3.2.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpng@1.6.29%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpthread-stubs@0.4%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libx11@1.6.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxau@1.0.8%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxcb@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxdmcp@1.1.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxext@1.3.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxml2@2.9.4%gcc@6.2.1~python arch=linux-fedora25-x86_64 ^libxrender@0.9.10%gcc@6.2.1 arch=linux-fedora25-x86_64 ^ncurses@6.0%gcc@6.2.1~symlinks arch=linux-fedora25-x86_64 ^openssl@1.0.2k%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pcre@8.40%gcc@6.2.1+utf arch=linux-fedora25-x86_64 ^pixman@0.34.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pkg-config@0.29.2%gcc@6.2.1+internal_glib arch=linux-fedora25-x86_64 ^python@2.7.13%gcc@6.2.1+shared~tk~ucs4 arch=linux-fedora25-x86_64 ^readline@7.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^renderproto@0.11.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^sqlite@3.18.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^tar^util-macros@1.19.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xcb-proto@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xextproto@7.3.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xproto@7.0.31%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xtrans@1.3.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xz@5.2.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^zlib@1.2.11%gcc@6.2.1+pic+shared arch=linux-fedora25-x86_64'.
-   Package cairo requires variant ~X, but spec asked for +X
-
-A workaround is to explicitly activate the variants of dependencies as well:
-
-.. code-block:: console
-
-   $ spack install r+X ^cairo+X ^pango+X
-
-See https://github.com/spack/spack/issues/267 and
-https://github.com/spack/spack/issues/2546 for further details.
-
------------------------------------------------
-depends_on cannot handle recursive dependencies
------------------------------------------------
-
-**Status:** Not yet a work in progress
-
-Although ``depends_on`` can handle any aspect of Spack's spec syntax,
-it currently cannot handle recursive dependencies. If the ``^`` sigil
-appears in a ``depends_on`` statement, the concretizer will hang.
-For example, something like:
-
-.. code-block:: python
-
-   depends_on('mfem+cuda ^hypre+cuda', when='+cuda')
-
-
-should be rewritten as:
-
-.. code-block:: python
-
-   depends_on('mfem+cuda', when='+cuda')
-   depends_on('hypre+cuda', when='+cuda')
-
-
-See https://github.com/spack/spack/issues/17660 and
-https://github.com/spack/spack/issues/11160 for more details.
```
```diff
@@ -2943,7 +2943,7 @@ The package base class, usually specialized for a given build system, determines
 actual set of entities available for overriding.
 The classes that are currently provided by Spack are:
 
-+-------------------------=--------------------------------+----------------------------------+
++----------------------------------------------------------+----------------------------------+
 | **Base Class**                                           | **Purpose**                      |
 +==========================================================+==================================+
 | :class:`~spack.package.Package`                          | General base class not           |
```
```diff
@@ -141,7 +141,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
         file.
     """
     string = kwargs.get('string', False)
-    backup = kwargs.get('backup', True)
+    backup = kwargs.get('backup', False)
     ignore_absent = kwargs.get('ignore_absent', False)
     stop_at = kwargs.get('stop_at', None)
 
```
```diff
@@ -780,7 +780,12 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
         try:
             while line_count < 100:
                 # Handle output from the calling process.
-                line = _retry(in_pipe.readline)()
+                try:
+                    line = _retry(in_pipe.readline)()
+                except UnicodeDecodeError:
+                    # installs like --test=root gpgme produce non-UTF8 logs
+                    line = '<line lost: output was not encoded as UTF-8>\n'
+
                 if not line:
                     return
                 line_count += 1
```
```diff
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 #: major, minor, patch version for Spack, in a tuple
-spack_version_info = (0, 17, 0)
+spack_version_info = (0, 17, 2)
 
 #: String containing Spack version joined with .'s
 spack_version = '.'.join(str(v) for v in spack_version_info)
```
```diff
@@ -389,9 +389,8 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
             dependency_variants = dependency_edge.spec.variants
             for name, value in dependency_variants.items():
                 try:
-                    dependency_pkg.variants[name].validate_or_raise(
-                        value, pkg=dependency_pkg
-                    )
+                    v, _ = dependency_pkg.variants[name]
+                    v.validate_or_raise(value, pkg=dependency_pkg)
                 except Exception as e:
                     summary = (pkg_name + ": wrong variant used for a "
                                "dependency in a 'depends_on' directive")
@@ -419,7 +418,8 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
     errors = []
     for name, v in constraint.variants.items():
         try:
-            pkg.variants[name].validate_or_raise(v, pkg=pkg)
+            variant, _ = pkg.variants[name]
+            variant.validate_or_raise(v, pkg=pkg)
         except variant_exceptions as e:
             summary = pkg.name + ': wrong variant in "{0}" directive'
             summary = summary.format(directive)
```
```diff
@@ -685,9 +685,9 @@ def openmp_libs(self):
         # packages.yaml), specificially to provide the 'iomp5' libs.
 
         elif '%gcc' in self.spec:
-            gcc = Executable(self.compiler.cc)
-            omp_lib_path = gcc(
-                '--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
+            with self.compiler.compiler_environment():
+                omp_lib_path = Executable(self.compiler.cc)(
+                    '--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
             omp_libs = LibraryList(omp_lib_path.strip())
 
         if len(omp_libs) < 1:
@@ -728,8 +728,9 @@ def tbb_libs(self):
 
         # TODO: clang(?)
         gcc = self._gcc_executable    # must be gcc, not self.compiler.cc
-        cxx_lib_path = gcc(
-            '--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
+        with self.compiler.compiler_environment():
+            cxx_lib_path = gcc(
+                '--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
 
         libs = tbb_lib + LibraryList(cxx_lib_path.rstrip())
         debug_print(libs)
@@ -739,8 +740,9 @@ def tbb_libs(self):
     def _tbb_abi(self):
         '''Select the ABI needed for linking TBB'''
         gcc = self._gcc_executable
-        matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
-                            gcc('--version', output=str), re.I | re.M)
+        with self.compiler.compiler_environment():
+            matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
+                                gcc('--version', output=str), re.I | re.M)
         abi = ''
         if sys.platform == 'darwin':
             pass
```
```diff
@@ -1551,7 +1551,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
 
     # Next attempt to clone your local spack repo into the repro dir
     with fs.working_dir(repro_dir):
-        clone_out = git("clone", spack_git_path,
+        clone_out = git("clone", spack_git_path, "spack",
                         output=str, error=os.devnull,
                         fail_on_error=False)
 
```
```diff
@@ -362,8 +362,10 @@ def ci_rebuild(args):
     # Write information about spack into an artifact in the repro dir
     spack_info = spack_ci.get_spack_info()
     spack_info_file = os.path.join(repro_dir, 'spack_info.txt')
-    with open(spack_info_file, 'w') as fd:
-        fd.write('\n{0}\n'.format(spack_info))
+    with open(spack_info_file, 'wb') as fd:
+        fd.write(b'\n')
+        fd.write(spack_info.encode('utf8'))
+        fd.write(b'\n')
 
     # If we decided there should be a temporary storage mechanism, add that
     # mirror now so it's used when we check for a full hash match already
```
```diff
@@ -117,7 +117,7 @@ def format(self, cmd):
             'virtual': '_providers',
             'namespace': '_repos',
             'hash': '_all_resource_hashes',
-            'pytest': '_tests',
+            'pytest': '_unit_tests',
         }
 
 
```
```diff
@@ -411,8 +411,6 @@ def env_status(args):
 #
 def env_loads_setup_parser(subparser):
     """list modules for an installed environment '(see spack module loads)'"""
-    subparser.add_argument(
-        'env', nargs='?', help='name of env to generate loads file for')
     subparser.add_argument(
         '-n', '--module-set-name', default='default',
         help='module set for which to generate load operations')
@@ -448,19 +446,19 @@ def env_loads(args):
 def env_update_setup_parser(subparser):
     """update environments to the latest format"""
     subparser.add_argument(
-        metavar='env', dest='env',
+        metavar='env', dest='update_env',
         help='name or directory of the environment to activate'
     )
     spack.cmd.common.arguments.add_common_arguments(subparser, ['yes_to_all'])
 
 
 def env_update(args):
-    manifest_file = ev.manifest_file(args.env)
+    manifest_file = ev.manifest_file(args.update_env)
     backup_file = manifest_file + ".bkp"
     needs_update = not ev.is_latest_format(manifest_file)
 
     if not needs_update:
-        tty.msg('No update needed for the environment "{0}"'.format(args.env))
+        tty.msg('No update needed for the environment "{0}"'.format(args.update_env))
         return
 
     proceed = True
@@ -470,7 +468,7 @@ def env_update(args):
            'Spack that are older than this version may not be able to '
            'read it. Spack stores backups of the updated environment '
            'which can be retrieved with "spack env revert"')
-    tty.msg(msg.format(args.env))
+    tty.msg(msg.format(args.update_env))
     proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)
 
     if not proceed:
@@ -478,20 +476,20 @@ def env_update(args):
 
     ev.update_yaml(manifest_file, backup_file=backup_file)
     msg = 'Environment "{0}" has been updated [backup={1}]'
-    tty.msg(msg.format(args.env, backup_file))
+    tty.msg(msg.format(args.update_env, backup_file))
 
 
 def env_revert_setup_parser(subparser):
     """restore environments to their state before update"""
     subparser.add_argument(
-        metavar='env', dest='env',
+        metavar='env', dest='revert_env',
         help='name or directory of the environment to activate'
     )
     spack.cmd.common.arguments.add_common_arguments(subparser, ['yes_to_all'])
 
 
 def env_revert(args):
-    manifest_file = ev.manifest_file(args.env)
+    manifest_file = ev.manifest_file(args.revert_env)
     backup_file = manifest_file + ".bkp"
 
     # Check that both the spack.yaml and the backup exist, the inform user
```
```diff
@@ -38,7 +38,7 @@ def update_kwargs_from_args(args, kwargs):
         'keep_stage': args.keep_stage,
         'restage': not args.dont_restage,
         'install_source': args.install_source,
-        'verbose': args.verbose,
+        'verbose': args.verbose or args.install_verbose,
         'fake': args.fake,
         'dirty': args.dirty,
         'use_cache': args.use_cache,
@@ -130,7 +130,7 @@ def setup_parser(subparser):
         help="install source files in prefix")
     arguments.add_common_arguments(subparser, ['no_checksum', 'deprecated'])
     subparser.add_argument(
-        '-v', '--verbose', action='store_true',
+        '-v', '--verbose', action='store_true', dest='install_verbose',
         help="display verbose build output while installing")
     subparser.add_argument(
         '--fake', action='store_true',
@@ -285,6 +285,8 @@ def install_specs(cli_args, kwargs, specs):
 
 
 def install(parser, args, **kwargs):
+    # TODO: unify args.verbose?
+    tty.set_verbose(args.verbose or args.install_verbose)
 
     if args.help_cdash:
         parser = argparse.ArgumentParser(
@@ -346,17 +348,22 @@ def get_tests(specs):
             env.write(regenerate=False)
 
             specs = env.all_specs()
-            if not args.log_file and not reporter.filename:
-                reporter.filename = default_log_file(specs[0])
-            reporter.specs = specs
+            if specs:
+                if not args.log_file and not reporter.filename:
+                    reporter.filename = default_log_file(specs[0])
+                reporter.specs = specs
 
                 # Tell the monitor about the specs
                 if args.use_monitor and specs:
                     monitor.new_configuration(specs)
 
                 tty.msg("Installing environment {0}".format(env.name))
                 with reporter('build'):
                     env.install_all(**kwargs)
 
+            else:
+                msg = '{0} environment has no specs to install'.format(env.name)
+                tty.msg(msg)
+
             tty.debug("Regenerating environment views for {0}"
                       .format(env.name))
```
```diff
@@ -4,12 +4,11 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import functools
-import os
-
-import llnl.util.filesystem
 
 import spack.cmd.common.arguments
 import spack.cmd.modules
+import spack.config
+import spack.modules.lmod
 
 
 def add_command(parser, command_dict):
@@ -41,12 +40,19 @@ def setdefault(module_type, specs, args):
     # https://lmod.readthedocs.io/en/latest/060_locating.html#marking-a-version-as-default
     #
     spack.cmd.modules.one_spec_or_raise(specs)
-    writer = spack.modules.module_types['lmod'](
-        specs[0], args.module_set_name)
-
-    module_folder = os.path.dirname(writer.layout.filename)
-    module_basename = os.path.basename(writer.layout.filename)
-    with llnl.util.filesystem.working_dir(module_folder):
-        if os.path.exists('default') and os.path.islink('default'):
-            os.remove('default')
-        os.symlink(module_basename, 'default')
+    spec = specs[0]
+    data = {
+        'modules': {
+            args.module_set_name: {
+                'lmod': {
+                    'defaults': [str(spec)]
+                }
+            }
+        }
+    }
+    # Need to clear the cache if a SpackCommand is called during scripting
+    spack.modules.lmod.configuration_registry = {}
+    scope = spack.config.InternalConfigScope('lmod-setdefault', data)
+    with spack.config.override(scope):
+        writer = spack.modules.module_types['lmod'](spec, args.module_set_name)
+        writer.update_module_defaults()
```
```diff
@@ -2,18 +2,52 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import functools
 
+import spack.cmd.common.arguments
 import spack.cmd.modules
+import spack.config
+import spack.modules.tcl
 
 
 def add_command(parser, command_dict):
     tcl_parser = parser.add_parser(
         'tcl', help='manipulate non-hierarchical module files'
     )
-    spack.cmd.modules.setup_parser(tcl_parser)
+    sp = spack.cmd.modules.setup_parser(tcl_parser)
+
+    # Set default module file for a package
+    setdefault_parser = sp.add_parser(
+        'setdefault', help='set the default module file for a package'
+    )
+    spack.cmd.common.arguments.add_common_arguments(
+        setdefault_parser, ['constraint']
+    )
+
+    callbacks = dict(spack.cmd.modules.callbacks.items())
+    callbacks['setdefault'] = setdefault
 
     command_dict['tcl'] = functools.partial(
-        spack.cmd.modules.modules_cmd, module_type='tcl'
+        spack.cmd.modules.modules_cmd, module_type='tcl', callbacks=callbacks
     )
+
+
+def setdefault(module_type, specs, args):
+    """Set the default module file, when multiple are present"""
+    # Currently, accepts only a single matching spec
+    spack.cmd.modules.one_spec_or_raise(specs)
+    spec = specs[0]
+    data = {
+        'modules': {
+            args.module_set_name: {
+                'tcl': {
+                    'defaults': [str(spec)]
+                }
+            }
+        }
+    }
+    spack.modules.tcl.configuration_registry = {}
+    scope = spack.config.InternalConfigScope('tcl-setdefault', data)
+    with spack.config.override(scope):
+        writer = spack.modules.module_types['tcl'](spec, args.module_set_name)
+        writer.update_module_defaults()
```
```diff
@@ -23,7 +23,7 @@
 
 def setup_parser(subparser):
     subparser.add_argument(
-        '-V', '--version', action='store_true',
+        '-V', '--version', action='store_true', dest='python_version',
         help='print the Python version number and exit')
     subparser.add_argument(
         '-c', dest='python_command', help='command to execute')
@@ -42,7 +42,7 @@ def setup_parser(subparser):
 
 
 def python(parser, args, unknown_args):
-    if args.version:
+    if args.python_version:
         print('Python', platform.python_version())
         return
 
```
```diff
@@ -6,6 +6,7 @@
 from __future__ import print_function
 
 import os
+import sys
 
 import llnl.util.tty as tty
 
@@ -142,9 +143,10 @@ def repo_list(args):
         except spack.repo.RepoError:
             continue
 
-    msg = "%d package repositor" % len(repos)
-    msg += "y." if len(repos) == 1 else "ies."
-    tty.msg(msg)
+    if sys.stdout.isatty():
+        msg = "%d package repositor" % len(repos)
+        msg += "y." if len(repos) == 1 else "ies."
+        tty.msg(msg)
 
     if not repos:
         return
```
```diff
@@ -325,7 +325,7 @@ def accessible_exe(exe):
 
         # setup environment before verifying in case we have executable names
         # instead of absolute paths
-        with self._compiler_environment():
+        with self.compiler_environment():
             missing = [cmp for cmp in (self.cc, self.cxx, self.f77, self.fc)
                        if cmp and not accessible_exe(cmp)]
             if missing:
@@ -407,7 +407,7 @@ def _get_compiler_link_paths(self, paths):
             compiler_exe.add_default_arg(flag)
 
         output = ''
-        with self._compiler_environment():
+        with self.compiler_environment():
             output = str(compiler_exe(
                 self.verbose_flag, fin, '-o', fout,
                 output=str, error=str))  # str for py2
@@ -523,7 +523,7 @@ def get_real_version(self):
         modifications) to enable the compiler to run properly on any platform.
         """
         cc = spack.util.executable.Executable(self.cc)
-        with self._compiler_environment():
+        with self.compiler_environment():
             output = cc(self.version_argument,
                         output=str, error=str,
                         ignore_errors=tuple(self.ignore_version_errors))
@@ -597,7 +597,7 @@ def __str__(self):
                 str(self.operating_system)))))
 
     @contextlib.contextmanager
-    def _compiler_environment(self):
+    def compiler_environment(self):
         # store environment to replace later
         backup_env = os.environ.copy()
 
```
```diff
@@ -685,7 +685,7 @@ def find_spec(spec, condition, default=None):
         visited.add(id(relative))
 
     # Then search all other relatives in the DAG *except* spec
-    for relative in spec.root.traverse(deptypes=all):
+    for relative in spec.root.traverse(deptype='all'):
         if relative is spec:
             continue
         if id(relative) in visited:
```
```diff
@@ -1090,11 +1090,11 @@ def get_valid_type(path):
         jsonschema_error = e.validation_error
         if jsonschema_error.validator == 'type':
             return types[jsonschema_error.validator_value]()
-        elif jsonschema_error.validator == 'anyOf':
+        elif jsonschema_error.validator in ('anyOf', 'oneOf'):
             for subschema in jsonschema_error.validator_value:
-                anyof_type = subschema.get('type')
-                if anyof_type is not None:
-                    return types[anyof_type]()
+                schema_type = subschema.get('type')
+                if schema_type is not None:
+                    return types[schema_type]()
     else:
         return type(None)
     raise ConfigError("Cannot determine valid type for path '%s'." % path)
```
```diff
@@ -28,6 +28,19 @@
         "develop": "latest"
       }
     },
+    "opensuse/leap:15": {
+      "bootstrap": {
+        "template": "container/leap-15.dockerfile"
+      },
+      "os_package_manager": "zypper",
+      "build": "spack/leap15",
+      "build_tags": {
+        "develop": "latest"
+      },
+      "final": {
+        "image": "opensuse/leap:latest"
+      }
+    },
     "nvidia/cuda:11.2.1": {
       "bootstrap": {
         "template": "container/cuda_11_2_1.dockerfile",
@@ -85,6 +98,11 @@
       "update": "yum update -y && amazon-linux-extras install epel -y",
       "install": "yum install -y",
       "clean": "rm -rf /var/cache/yum && yum clean all"
+    },
+    "zypper": {
+      "update": "zypper update -y",
+      "install": "zypper install -y",
+      "clean": "rm -rf /var/cache/zypp && zypper clean -a"
     }
   }
 }
```
```diff
@@ -937,22 +937,15 @@ def _construct_from_directory_layout(self, directory_layout, old_data):
                 tty.debug(
                     'RECONSTRUCTING FROM OLD DB: {0}'.format(entry.spec))
                 try:
-                    layout = spack.store.layout
-                    if entry.spec.external:
-                        layout = None
-                        install_check = True
-                    else:
-                        install_check = layout.check_installed(entry.spec)
-
-                    if install_check:
-                        kwargs = {
-                            'spec': entry.spec,
-                            'directory_layout': layout,
-                            'explicit': entry.explicit,
-                            'installation_time': entry.installation_time  # noqa: E501
-                        }
-                        self._add(**kwargs)
-                        processed_specs.add(entry.spec)
+                    layout = None if entry.spec.external else spack.store.layout
+                    kwargs = {
+                        'spec': entry.spec,
+                        'directory_layout': layout,
+                        'explicit': entry.explicit,
+                        'installation_time': entry.installation_time  # noqa: E501
+                    }
+                    self._add(**kwargs)
+                    processed_specs.add(entry.spec)
                 except Exception as e:
                     # Something went wrong, so the spec was not restored
                     # from old data
```
|
|||||||
}
|
}
|
||||||
self._add(dep, directory_layout, **extra_args)
|
self._add(dep, directory_layout, **extra_args)
|
||||||
|
|
||||||
if key not in self._data:
|
# Make sure the directory layout agrees whether the spec is installed
|
||||||
installed = bool(spec.external)
|
if not spec.external and directory_layout:
|
||||||
path = None
|
path = directory_layout.path_for_spec(spec)
|
||||||
if not spec.external and directory_layout:
|
installed = False
|
||||||
path = directory_layout.path_for_spec(spec)
|
try:
|
||||||
if path in self._installed_prefixes:
|
directory_layout.ensure_installed(spec)
|
||||||
raise Exception("Install prefix collision.")
|
installed = True
|
||||||
try:
|
|
||||||
directory_layout.check_installed(spec)
|
|
||||||
installed = True
|
|
||||||
except DirectoryLayoutError as e:
|
|
||||||
tty.warn(
|
|
||||||
'Dependency missing: may be deprecated or corrupted:',
|
|
||||||
path, str(e))
|
|
||||||
self._installed_prefixes.add(path)
|
self._installed_prefixes.add(path)
|
||||||
elif spec.external_path:
|
except DirectoryLayoutError as e:
|
||||||
path = spec.external_path
|
msg = ("{0} is being {1} in the database with prefix {2}, "
|
||||||
|
"but this directory does not contain an installation of "
|
||||||
|
"the spec, due to: {3}")
|
||||||
|
action = "updated" if key in self._data else "registered"
|
||||||
|
tty.warn(msg.format(spec.short_spec, action, path, str(e)))
|
||||||
|
elif spec.external_path:
|
||||||
|
path = spec.external_path
|
||||||
|
installed = True
|
||||||
|
else:
|
||||||
|
path = None
|
||||||
|
installed = True
|
||||||
|
|
||||||
|
if key not in self._data:
|
||||||
# Create a new install record with no deps initially.
|
# Create a new install record with no deps initially.
|
||||||
new_spec = spec.copy(deps=False)
|
new_spec = spec.copy(deps=False)
|
||||||
extra_args = {
|
extra_args = {
|
||||||
@@ -1141,9 +1138,8 @@ def _add(
|
|||||||
new_spec._full_hash = spec._full_hash
|
new_spec._full_hash = spec._full_hash
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# If it is already there, mark it as installed and update
|
# It is already in the database
|
||||||
# installation time
|
self._data[key].installed = installed
|
||||||
self._data[key].installed = True
|
|
||||||
self._data[key].installation_time = _now()
|
self._data[key].installation_time = _now()
|
||||||
|
|
||||||
self._data[key].explicit = explicit
|
self._data[key].explicit = explicit
|
||||||
@@ -1210,7 +1206,7 @@ def _remove(self, spec):
|
|||||||
|
|
||||||
# This install prefix is now free for other specs to use, even if the
|
# This install prefix is now free for other specs to use, even if the
|
||||||
# spec is only marked uninstalled.
|
# spec is only marked uninstalled.
|
||||||
if not rec.spec.external:
|
if not rec.spec.external and rec.installed:
|
||||||
self._installed_prefixes.remove(rec.path)
|
self._installed_prefixes.remove(rec.path)
|
||||||
|
|
||||||
if rec.ref_count > 0:
|
if rec.ref_count > 0:
|
||||||
|
```diff
@@ -233,13 +233,20 @@ def create_install_directory(self, spec):
 
         self.write_spec(spec, self.spec_file_path(spec))
 
-    def check_installed(self, spec):
+    def ensure_installed(self, spec):
+        """
+        Throws DirectoryLayoutError if:
+        1. spec prefix does not exist
+        2. spec prefix does not contain a spec file
+        3. the spec file does not correspond to the spec
+        """
         _check_concrete(spec)
         path = self.path_for_spec(spec)
         spec_file_path = self.spec_file_path(spec)
 
         if not os.path.isdir(path):
-            return None
+            raise InconsistentInstallDirectoryError(
+                "Install prefix {0} does not exist.".format(path))
 
         if not os.path.isfile(spec_file_path):
             raise InconsistentInstallDirectoryError(
@@ -248,7 +255,7 @@ def check_installed(self, spec):
 
         installed_spec = self.read_spec(spec_file_path)
         if installed_spec == spec:
-            return path
+            return
 
         # DAG hashes currently do not include build dependencies.
         #
@@ -261,7 +268,7 @@ def check_installed(self, spec):
         # may be installed. This means for example that for two instances
         # that differ only in CMake version used to build, only one will
         # be installed.
-            return path
+            return
 
         if spec.dag_hash() == installed_spec.dag_hash():
             raise SpecHashCollisionError(spec, installed_spec)
```
```diff
@@ -144,10 +144,12 @@ def activate(env, use_env_repo=False):
 
     # Check if we need to reinitialize the store due to pushing the configuration
     # below.
-    store_before_pushing = spack.config.get('config:install_tree')
+    install_tree_before = spack.config.get('config:install_tree')
+    upstreams_before = spack.config.get('upstreams')
     prepare_config_scope(env)
-    store_after_pushing = spack.config.get('config:install_tree')
-    if store_before_pushing != store_after_pushing:
+    install_tree_after = spack.config.get('config:install_tree')
+    upstreams_after = spack.config.get('upstreams')
+    if install_tree_before != install_tree_after or upstreams_before != upstreams_after:
         # Hack to store the state of the store before activation
         env.store_token = spack.store.reinitialize()
 
@@ -489,8 +491,14 @@ def regenerate(self, all_specs, roots):
             raise SpackEnvironmentViewError(msg)
         os.rename(tmp_symlink_name, self.root)
 
-        # remove old_root
-        if old_root and os.path.exists(old_root):
+        # Remove the old root when it's in the same folder as the new root. This
+        # guards against removal of an arbitrary path when the original symlink in
+        # self.root was not created by the environment, but by the user.
+        if (
+            old_root and
+            os.path.exists(old_root) and
+            os.path.samefile(os.path.dirname(new_root), os.path.dirname(old_root))
+        ):
             try:
                 shutil.rmtree(old_root)
             except (IOError, OSError) as e:
@@ -1500,10 +1508,8 @@ def install_specs(self, specs=None, **install_args):
 
         if not specs_to_install:
             tty.msg('All of the packages are already installed')
-            return
-
-        tty.debug('Processing {0} uninstalled specs'.format(
-            len(specs_to_install)))
+        else:
+            tty.debug('Processing {0} uninstalled specs'.format(len(specs_to_install)))
 
         install_args['overwrite'] = install_args.get(
             'overwrite', []) + self._get_overwrite_specs()
```
```diff
@@ -39,7 +39,7 @@ def activate_header(env, shell, prompt=None):
     #
     else:
         if 'color' in os.getenv('TERM', '') and prompt:
-            prompt = colorize('@G{%s} ' % prompt, color=True)
+            prompt = colorize('@G{%s}' % prompt, color=True)
 
         cmds += 'export SPACK_ENV=%s;\n' % env.path
         cmds += "alias despacktivate='spack env deactivate';\n"
@@ -73,7 +73,7 @@ def deactivate_header(shell):
         cmds += 'if [ ! -z ${SPACK_ENV+x} ]; then\n'
         cmds += 'unset SPACK_ENV; export SPACK_ENV;\n'
         cmds += 'fi;\n'
-        cmds += 'unalias despacktivate;\n'
+        cmds += 'alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n'
         cmds += 'if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n'
         cmds += '    if [ "$SPACK_OLD_PS1" = \'$$$$\' ]; then\n'
         cmds += '        unset PS1; export PS1;\n'
```
```diff
@@ -123,18 +123,11 @@ class UnsatisfiableSpecError(SpecError):
     For original concretizer, provide the requirement that was violated when
     raising.
     """
-    def __init__(self, provided, required=None, constraint_type=None, conflicts=None):
-        # required is only set by the original concretizer.
-        # clingo concretizer handles error messages differently.
-        if required is not None:
-            assert not conflicts  # can't mix formats
-            super(UnsatisfiableSpecError, self).__init__(
-                "%s does not satisfy %s" % (provided, required))
-        else:
-            indented = ['  %s\n' % conflict for conflict in conflicts]
-            conflict_msg = ''.join(indented)
-            msg = '%s is unsatisfiable, conflicts are:\n%s' % (provided, conflict_msg)
-            super(UnsatisfiableSpecError, self).__init__(msg)
+    def __init__(self, provided, required, constraint_type):
+        # This is only the entrypoint for old concretizer errors
+        super(UnsatisfiableSpecError, self).__init__(
+            "%s does not satisfy %s" % (provided, required))
+
         self.provided = provided
         self.required = required
         self.constraint_type = constraint_type
```
@@ -4,6 +4,7 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
import filecmp
|
import filecmp
|
||||||
|
import grp
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
@@ -14,7 +15,9 @@
|
|||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
|
|
||||||
|
import spack.package_prefs
|
||||||
import spack.paths
|
import spack.paths
|
||||||
|
import spack.spec
|
||||||
import spack.store
|
import spack.store
|
||||||
|
|
||||||
#: OS-imposed character limit for shebang line: 127 for Linux; 511 for Mac.
|
#: OS-imposed character limit for shebang line: 127 for Linux; 511 for Mac.
|
||||||
@@ -187,11 +190,47 @@ def install_sbang():
|
|||||||
spack.paths.sbang_script, sbang_path):
|
spack.paths.sbang_script, sbang_path):
|
||||||
return
|
return
|
||||||
|
|
||||||
# make $install_tree/bin and copy in a new version of sbang if needed
|
# make $install_tree/bin
|
||||||
sbang_bin_dir = os.path.dirname(sbang_path)
|
sbang_bin_dir = os.path.dirname(sbang_path)
|
||||||
fs.mkdirp(sbang_bin_dir)
|
fs.mkdirp(sbang_bin_dir)
|
||||||
fs.install(spack.paths.sbang_script, sbang_path)
|
|
||||||
fs.set_install_permissions(sbang_bin_dir)
|
# get permissions for bin dir from configuration files
|
||||||
|
group_name = spack.package_prefs.get_package_group(spack.spec.Spec("all"))
|
||||||
|
config_mode = spack.package_prefs.get_package_dir_permissions(
|
||||||
|
spack.spec.Spec("all")
|
||||||
|
)
|
||||||
|
|
||||||
|
if group_name:
|
||||||
|
os.chmod(sbang_bin_dir, config_mode) # Use package directory permissions
|
||||||
|
else:
|
||||||
|
fs.set_install_permissions(sbang_bin_dir)
|
||||||
|
|
||||||
|
# set group on sbang_bin_dir if not already set (only if set in configuration)
|
||||||
|
if group_name and grp.getgrgid(os.stat(sbang_bin_dir).st_gid).gr_name != group_name:
|
||||||
|
os.chown(
|
||||||
|
sbang_bin_dir,
|
||||||
|
os.stat(sbang_bin_dir).st_uid,
|
||||||
|
grp.getgrnam(group_name).gr_gid
|
||||||
|
)
|
||||||
|
|
||||||
|
# copy over the fresh copy of `sbang`
|
||||||
|
sbang_tmp_path = os.path.join(
|
||||||
|
os.path.dirname(sbang_path),
|
||||||
|
".%s.tmp" % os.path.basename(sbang_path),
|
||||||
|
)
|
||||||
|
shutil.copy(spack.paths.sbang_script, sbang_tmp_path)
|
||||||
|
|
||||||
|
# set permissions on `sbang` (including group if set in configuration)
|
||||||
|
os.chmod(sbang_tmp_path, config_mode)
|
||||||
|
if group_name:
|
||||||
|
os.chown(
|
||||||
|
sbang_tmp_path,
|
||||||
|
os.stat(sbang_tmp_path).st_uid,
|
||||||
|
grp.getgrnam(group_name).gr_gid
|
||||||
|
)
|
||||||
|
|
||||||
|
# Finally, move the new `sbang` into place atomically
|
||||||
|
os.rename(sbang_tmp_path, sbang_path)
|
||||||
|
|
||||||
|
|
||||||
def post_install(spec):
|
def post_install(spec):
|
||||||
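The rewritten install_sbang above avoids clobbering a live sbang script: it stages a hidden temp copy, fixes mode and group, then renames it into place. A minimal standalone sketch of that copy-then-rename pattern (the function and file names here are hypothetical, not Spack's):

```python
import os
import shutil

def install_atomically(src, dest, mode=0o755):
    """Copy src next to dest, set permissions, then rename into place.

    os.rename is atomic within a filesystem, so readers never observe
    a half-written dest.
    """
    tmp = os.path.join(os.path.dirname(dest), ".%s.tmp" % os.path.basename(dest))
    shutil.copy(src, tmp)   # write the new content to a hidden temp file
    os.chmod(tmp, mode)     # fix permissions before the file becomes visible
    os.rename(tmp, dest)    # atomically swap the new file into place
```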
@@ -632,9 +632,14 @@ def __init__(self, pkg_count):
         # Counters used for showing status information in the terminal title
         self.pkg_num = 0
         self.pkg_count = pkg_count
+        self.pkg_ids = set()

-    def next_pkg(self):
-        self.pkg_num += 1
+    def next_pkg(self, pkg):
+        pkg_id = package_id(pkg)
+
+        if pkg_id not in self.pkg_ids:
+            self.pkg_num += 1
+            self.pkg_ids.add(pkg_id)

     def set(self, text):
         if not spack.config.get('config:terminal_title', False):
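Because tasks can be requeued, next_pkg now receives the package and counts each package id at most once, so the terminal-title progress counter no longer overshoots. A toy version of the same dedup counting (names hypothetical):

```python
class ProgressCounter:
    def __init__(self, total):
        self.total = total
        self.num = 0
        self.seen = set()

    def next_item(self, item_id):
        # Requeued items arrive more than once; count each id only once.
        if item_id not in self.seen:
            self.num += 1
            self.seen.add(item_id)

counter = ProgressCounter(total=3)
for item in ["a", "b", "a"]:   # "a" is requeued
    counter.next_item(item)
assert counter.num == 2
```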
@@ -794,10 +799,10 @@ def _check_deps_status(self, request):
|
|||||||
.format(dep_id, action)
|
.format(dep_id, action)
|
||||||
raise InstallError(err.format(request.pkg_id, msg))
|
raise InstallError(err.format(request.pkg_id, msg))
|
||||||
|
|
||||||
# Attempt to get a write lock to ensure another process does not
|
# Attempt to get a read lock to ensure another process does not
|
||||||
# uninstall the dependency while the requested spec is being
|
# uninstall the dependency while the requested spec is being
|
||||||
# installed
|
# installed
|
||||||
ltype, lock = self._ensure_locked('write', dep_pkg)
|
ltype, lock = self._ensure_locked('read', dep_pkg)
|
||||||
if lock is None:
|
if lock is None:
|
||||||
msg = '{0} is write locked by another process'.format(dep_id)
|
msg = '{0} is write locked by another process'.format(dep_id)
|
||||||
raise InstallError(err.format(request.pkg_id, msg))
|
raise InstallError(err.format(request.pkg_id, msg))
|
||||||
@@ -816,6 +821,8 @@ def _check_deps_status(self, request):
|
|||||||
tty.debug('Flagging {0} as installed per the database'
|
tty.debug('Flagging {0} as installed per the database'
|
||||||
.format(dep_id))
|
.format(dep_id))
|
||||||
self._flag_installed(dep_pkg)
|
self._flag_installed(dep_pkg)
|
||||||
|
else:
|
||||||
|
lock.release_read()
|
||||||
|
|
||||||
def _prepare_for_install(self, task):
|
def _prepare_for_install(self, task):
|
||||||
"""
|
"""
|
||||||
@@ -1022,12 +1029,12 @@ def _ensure_locked(self, lock_type, pkg):
|
|||||||
tty.debug(msg.format('Upgrading to', desc, pkg_id, timeout))
|
tty.debug(msg.format('Upgrading to', desc, pkg_id, timeout))
|
||||||
op = 'upgrade to'
|
op = 'upgrade to'
|
||||||
lock.upgrade_read_to_write(timeout)
|
lock.upgrade_read_to_write(timeout)
|
||||||
tty.verbose('{0} is now {1} locked'.format(pkg_id, lock_type))
|
tty.debug('{0} is now {1} locked'.format(pkg_id, lock_type))
|
||||||
|
|
||||||
except (lk.LockDowngradeError, lk.LockTimeoutError) as exc:
|
except (lk.LockDowngradeError, lk.LockTimeoutError) as exc:
|
||||||
tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__,
|
tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__,
|
||||||
str(exc)))
|
str(exc)))
|
||||||
lock = None
|
return (lock_type, None)
|
||||||
|
|
||||||
except (Exception, KeyboardInterrupt, SystemExit) as exc:
|
except (Exception, KeyboardInterrupt, SystemExit) as exc:
|
||||||
tty.error(err.format(op, desc, pkg_id, exc.__class__.__name__,
|
tty.error(err.format(op, desc, pkg_id, exc.__class__.__name__,
|
||||||
@@ -1254,7 +1261,7 @@ def _push_task(self, task):
|
|||||||
# Remove any associated build task since its sequence will change
|
# Remove any associated build task since its sequence will change
|
||||||
self._remove_task(task.pkg_id)
|
self._remove_task(task.pkg_id)
|
||||||
desc = 'Queueing' if task.attempts == 0 else 'Requeueing'
|
desc = 'Queueing' if task.attempts == 0 else 'Requeueing'
|
||||||
tty.verbose(msg.format(desc, task.pkg_id, task.status))
|
tty.debug(msg.format(desc, task.pkg_id, task.status))
|
||||||
|
|
||||||
# Now add the new task to the queue with a new sequence number to
|
# Now add the new task to the queue with a new sequence number to
|
||||||
# ensure it is the last entry popped with the same priority. This
|
# ensure it is the last entry popped with the same priority. This
|
||||||
@@ -1276,7 +1283,7 @@ def _release_lock(self, pkg_id):
|
|||||||
ltype, lock = self.locks[pkg_id]
|
ltype, lock = self.locks[pkg_id]
|
||||||
if lock is not None:
|
if lock is not None:
|
||||||
try:
|
try:
|
||||||
tty.verbose(msg.format(ltype, pkg_id))
|
tty.debug(msg.format(ltype, pkg_id))
|
||||||
if ltype == 'read':
|
if ltype == 'read':
|
||||||
lock.release_read()
|
lock.release_read()
|
||||||
else:
|
else:
|
||||||
@@ -1296,8 +1303,8 @@ def _remove_task(self, pkg_id):
|
|||||||
pkg_id (str): identifier for the package to be removed
|
pkg_id (str): identifier for the package to be removed
|
||||||
"""
|
"""
|
||||||
if pkg_id in self.build_tasks:
|
if pkg_id in self.build_tasks:
|
||||||
tty.verbose('Removing build task for {0} from list'
|
tty.debug('Removing build task for {0} from list'
|
||||||
.format(pkg_id))
|
.format(pkg_id))
|
||||||
task = self.build_tasks.pop(pkg_id)
|
task = self.build_tasks.pop(pkg_id)
|
||||||
task.status = STATUS_REMOVED
|
task.status = STATUS_REMOVED
|
||||||
return task
|
return task
|
||||||
@@ -1328,8 +1335,7 @@ def _setup_install_dir(self, pkg):
|
|||||||
pkg (spack.package.Package): the package to be built and installed
|
pkg (spack.package.Package): the package to be built and installed
|
||||||
"""
|
"""
|
||||||
if not os.path.exists(pkg.spec.prefix):
|
if not os.path.exists(pkg.spec.prefix):
|
||||||
tty.verbose('Creating the installation directory {0}'
|
tty.debug('Creating the installation directory {0}'.format(pkg.spec.prefix))
|
||||||
.format(pkg.spec.prefix))
|
|
||||||
spack.store.layout.create_install_directory(pkg.spec)
|
spack.store.layout.create_install_directory(pkg.spec)
|
||||||
else:
|
else:
|
||||||
# Set the proper group for the prefix
|
# Set the proper group for the prefix
|
||||||
@@ -1381,8 +1387,8 @@ def _update_failed(self, task, mark=False, exc=None):
|
|||||||
self._update_failed(dep_task, mark)
|
self._update_failed(dep_task, mark)
|
||||||
self._remove_task(dep_id)
|
self._remove_task(dep_id)
|
||||||
else:
|
else:
|
||||||
tty.verbose('No build task for {0} to skip since {1} failed'
|
tty.debug('No build task for {0} to skip since {1} failed'
|
||||||
.format(dep_id, pkg_id))
|
.format(dep_id, pkg_id))
|
||||||
|
|
||||||
def _update_installed(self, task):
|
def _update_installed(self, task):
|
||||||
"""
|
"""
|
||||||
@@ -1499,8 +1505,6 @@ def install(self):
|
|||||||
term_title = TermTitle(len(self.build_pq))
|
term_title = TermTitle(len(self.build_pq))
|
||||||
|
|
||||||
while self.build_pq:
|
while self.build_pq:
|
||||||
term_title.next_pkg()
|
|
||||||
|
|
||||||
task = self._pop_task()
|
task = self._pop_task()
|
||||||
if task is None:
|
if task is None:
|
||||||
continue
|
continue
|
||||||
@@ -1510,8 +1514,9 @@ def install(self):
|
|||||||
keep_prefix = install_args.get('keep_prefix')
|
keep_prefix = install_args.get('keep_prefix')
|
||||||
|
|
||||||
pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
|
pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
|
||||||
|
term_title.next_pkg(pkg)
|
||||||
term_title.set('Processing {0}'.format(pkg.name))
|
term_title.set('Processing {0}'.format(pkg.name))
|
||||||
tty.verbose('Processing {0}: task={1}'.format(pkg_id, task))
|
tty.debug('Processing {0}: task={1}'.format(pkg_id, task))
|
||||||
# Ensure that the current spec has NO uninstalled dependencies,
|
# Ensure that the current spec has NO uninstalled dependencies,
|
||||||
# which is assumed to be reflected directly in its priority.
|
# which is assumed to be reflected directly in its priority.
|
||||||
#
|
#
|
||||||
@@ -1627,6 +1632,7 @@ def install(self):
|
|||||||
# established by the other process -- failed, installed, or
|
# established by the other process -- failed, installed, or
|
||||||
# uninstalled -- on the next pass.
|
# uninstalled -- on the next pass.
|
||||||
if ltype == 'read':
|
if ltype == 'read':
|
||||||
|
lock.release_read()
|
||||||
self._requeue_task(task)
|
self._requeue_task(task)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
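The installer hunks above switch dependency locks from write to read: a shared lock is enough to keep another process from uninstalling a dependency mid-build, while still allowing concurrent installs of the same dependency tree. A rough POSIX illustration of shared vs. exclusive locks (lock path hypothetical; Spack actually uses its own llnl lock classes, not flock directly):

```python
import fcntl

# A dependency's prefix is guarded by a lock file. Concurrent installs can
# all hold the shared lock; an uninstall needs the exclusive lock and blocks
# (or fails with LOCK_NB) while any reader holds it.
with open("/tmp/dep.lock", "w") as lockfile:
    fcntl.flock(lockfile, fcntl.LOCK_SH)   # shared "read" lock
    # ... build against the dependency here ...
    fcntl.flock(lockfile, fcntl.LOCK_UN)

with open("/tmp/dep.lock", "w") as lockfile:
    # exclusive "write" lock, as an uninstall would take
    fcntl.flock(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
    fcntl.flock(lockfile, fcntl.LOCK_UN)
```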
@@ -41,6 +41,7 @@
 import spack.paths
 import spack.platforms
 import spack.repo
+import spack.solver.asp
 import spack.spec
 import spack.store
 import spack.util.debug
@@ -380,6 +381,13 @@ def make_argument_parser(**kwargs):
     # stat names in groups of 7, for nice wrapping.
     stat_lines = list(zip(*(iter(stat_names),) * 7))

+    # help message for --show-cores
+    show_cores_help = 'provide additional information on concretization failures\n'
+    show_cores_help += 'off (default): show only the violated rule\n'
+    show_cores_help += 'full: show raw unsat cores from clingo\n'
+    show_cores_help += 'minimized: show subset-minimal unsat cores '
+    show_cores_help += '(Warning: this may take hours for some specs)'
+
     parser.add_argument(
         '-h', '--help',
         dest='help', action='store_const', const='short', default=None,
@@ -403,6 +411,9 @@ def make_argument_parser(**kwargs):
         '-d', '--debug', action='count', default=0,
         help="write out debug messages "
              "(more d's for more verbosity: -d, -dd, -ddd, etc.)")
+    parser.add_argument(
+        '--show-cores', choices=["off", "full", "minimized"], default="off",
+        help=show_cores_help)
     parser.add_argument(
         '--timestamp', action='store_true',
         help="Add a timestamp to tty output")
@@ -486,13 +497,21 @@ def setup_main_options(args):
         spack.config.set('config:debug', True, scope='command_line')
         spack.util.environment.tracing_enabled = True

+    if args.show_cores != "off":
+        # minimize_cores defaults to true, turn it off if we're showing full core
+        # but don't want to wait to minimize it.
+        spack.solver.asp.full_cores = True
+        if args.show_cores == 'full':
+            spack.solver.asp.minimize_cores = False
+
     if args.timestamp:
         tty.set_timestamp(True)

     # override lock configuration if passed on command line
     if args.locks is not None:
-        spack.util.lock.check_lock_safety(spack.paths.prefix)
-        spack.config.set('config:locks', False, scope='command_line')
+        if args.locks is False:
+            spack.util.lock.check_lock_safety(spack.paths.prefix)
+        spack.config.set('config:locks', args.locks, scope='command_line')

     if args.mock:
         rp = spack.repo.RepoPath(spack.paths.mock_packages_path)
@@ -800,13 +819,13 @@ def _main(argv=None):
     # scopes, then environment configuration here.
     # ------------------------------------------------------------------------

-    # ensure options on spack command come before everything
-    setup_main_options(args)
-
     # make spack.config aware of any command line configuration scopes
     if args.config_scopes:
         spack.config.command_line_scopes = args.config_scopes

+    # ensure options on spack command come before everything
+    setup_main_options(args)
+
     # activate an environment if one was specified on the command line
     env_format_error = None
     if not args.no_env:
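The new --show-cores option drives the two module-level flags in spack.solver.asp. Restated as a small truth table, assuming the same semantics as the hunk above (core_flags is a hypothetical helper, not part of Spack):

```python
def core_flags(show_cores):
    """Map a --show-cores choice to (full_cores, minimize_cores)."""
    full_cores = show_cores != "off"        # any core display needs all facts
    minimize_cores = show_cores != "full"   # 'full' skips the slow minimization
    return full_cores, minimize_cores

assert core_flags("off") == (False, True)
assert core_flags("full") == (True, False)
assert core_flags("minimized") == (True, True)
```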
@@ -906,6 +906,9 @@ def write(self, overwrite=False):
             fp.set_permissions_by_spec(self.layout.filename, self.spec)

         # Symlink defaults if needed
+        self.update_module_defaults()
+
+    def update_module_defaults(self):
         if any(self.spec.satisfies(default) for default in self.conf.defaults):
             # This spec matches a default, it needs to be symlinked to default
             # Symlink to a tmp location first and move, so that existing
@@ -373,6 +373,21 @@ def fullname(self):
         """Name of this package, including the namespace"""
         return '%s.%s' % (self.namespace, self.name)

+    @property
+    def fullnames(self):
+        """
+        Fullnames for this package and any packages from which it inherits.
+        """
+        fullnames = []
+        for cls in inspect.getmro(self):
+            namespace = getattr(cls, 'namespace', None)
+            if namespace:
+                fullnames.append('%s.%s' % (namespace, self.name))
+            if namespace == 'builtin':
+                # builtin packages cannot inherit from other repos
+                break
+        return fullnames
+
     @property
     def name(self):
         """The name of this package.
@@ -450,7 +465,7 @@ def view_file_conflicts(self, view, merge_map):
         Alternative implementations may allow some of the files to exist in
         the view (in this case they would be omitted from the results).
         """
-        return set(dst for dst in merge_map.values() if os.path.exists(dst))
+        return set(dst for dst in merge_map.values() if os.path.lexists(dst))

     def add_files_to_view(self, view, merge_map):
         """Given a map of package files to destination paths in the view, add
@@ -459,7 +474,7 @@ def add_files_to_view(self, view, merge_map):
         linked into the view already include the file.
         """
         for src, dst in merge_map.items():
-            if not os.path.exists(dst):
+            if not os.path.lexists(dst):
                 view.link(src, dst, spec=self.spec)

     def remove_files_from_view(self, view, merge_map):
@@ -863,6 +878,10 @@ def fullname(self):
         """Name of this package, including namespace: namespace.name."""
         return type(self).fullname

+    @property
+    def fullnames(self):
+        return type(self).fullnames
+
     @property
     def name(self):
         """Name of this package (the module without parent modules)."""
@@ -1182,7 +1201,7 @@ def extendee_spec(self):
         name = next(iter(self.extendees))

         # If the extendee is in the spec's deps already, return that.
-        for dep in self.spec.traverse(deptypes=('link', 'run')):
+        for dep in self.spec.traverse(deptype=('link', 'run')):
             if name == dep.name:
                 return dep
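The fullnames property walks the MRO so a package that subclasses another repo's package is also known by the base package's namespaced name; patch_for_package (later in this diff) relies on that. A self-contained sketch of the same lookup with toy classes:

```python
import inspect

class BuiltinPkg:
    namespace = 'builtin'

class SitePkg(BuiltinPkg):
    namespace = 'site'

def fullnames(cls, name):
    names = []
    for c in inspect.getmro(cls):
        namespace = getattr(c, 'namespace', None)
        if namespace:
            names.append('%s.%s' % (namespace, name))
        if namespace == 'builtin':
            break  # builtin packages cannot inherit from other repos
    return names

assert fullnames(SitePkg, 'zlib') == ['site.zlib', 'builtin.zlib']
```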
@@ -7,7 +7,6 @@
 import inspect
 import os
 import os.path
-import sys

 import llnl.util.filesystem
 import llnl.util.lang
@@ -26,12 +25,6 @@
 def apply_patch(stage, patch_path, level=1, working_dir='.'):
     """Apply the patch at patch_path to code in the stage.

-    Spack runs ``patch`` with ``-N`` so that it does not reject already-applied
-    patches. This is useful for develop specs, so that the build does not fail
-    due to repeated application of patches, and for easing requirements on patch
-    specifications in packages -- packages won't stop working when patches we
-    previously had to apply land in upstream.
-
     Args:
         stage (spack.stage.Stage): stage with code that will be patched
         patch_path (str): filesystem location for the patch to apply
@@ -41,31 +34,10 @@ def apply_patch(stage, patch_path, level=1, working_dir='.'):
     """
     patch = which("patch", required=True)
     with llnl.util.filesystem.working_dir(stage.source_path):
-        output = patch(
-            '-N',  # don't reject already-applied patches
-            '-p', str(level),  # patch level (directory depth)
-            '-i', patch_path,  # input source is the patch file
-            '-d', working_dir,  # patch chdir's to here before patching
-            output=str,
-            fail_on_error=False,
-        )
-
-        if patch.returncode != 0:
-            # `patch` returns 1 both:
-            # a) when an error applying a patch, and
-            # b) when -N is supplied and the patch has already been applied
-            #
-            # It returns > 1 if there's something more serious wrong.
-            #
-            # So, the best we can do is to look for return code 1, look for output
-            # indicating that the patch was already applied, and ignore the error
-            # if we see it. Most implementations (BSD and GNU) seem to have the
-            # same messages, so we expect these checks to be reliable.
-            if patch.returncode > 1 or not any(
-                s in output for s in ("Skipping patch", "ignored")
-            ):
-                sys.stderr.write(output)
-                raise patch.error
+        patch('-s',
+              '-p', str(level),
+              '-i', patch_path,
+              '-d', working_dir)


 class Patch(object):
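For contrast, the branch removed above treated exit code 1 from `patch` as benign when the output says the hunk was already applied, which is what -N produces. A hedged standalone sketch of that idempotency check, using subprocess instead of Spack's Executable wrapper (function name hypothetical):

```python
import subprocess
import sys

def apply_patch_idempotently(patch_path, level=1, cwd="."):
    # -N makes `patch` skip already-applied hunks; it still exits 1 then.
    proc = subprocess.run(
        ["patch", "-N", "-p", str(level), "-i", patch_path, "-d", cwd],
        capture_output=True, text=True,
    )
    out = proc.stdout + proc.stderr
    already_applied = any(s in out for s in ("Skipping patch", "ignored"))
    # Exit code > 1 is a real failure; exit code 1 is fine only when the
    # output indicates the patch was already applied.
    if proc.returncode > 1 or (proc.returncode == 1 and not already_applied):
        sys.stderr.write(out)
        raise RuntimeError("patch failed with code %d" % proc.returncode)
```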
@@ -376,8 +348,12 @@ def patch_for_package(self, sha256, pkg):
                 "Couldn't find patch for package %s with sha256: %s"
                 % (pkg.fullname, sha256))

-        patch_dict = sha_index.get(pkg.fullname)
-        if not patch_dict:
+        # Find patches for this class or any class it inherits from
+        for fullname in pkg.fullnames:
+            patch_dict = sha_index.get(fullname)
+            if patch_dict:
+                break
+        else:
             raise NoSuchPatchError(
                 "Couldn't find patch for package %s with sha256: %s"
                 % (pkg.fullname, sha256))
@@ -151,6 +151,22 @@ def wrapper(instance, *args, **kwargs):
                 'installed_from_binary_cache': False
             }

+            # Append the package to the correct spec report. In some
+            # cases it may happen that a spec that is asked to be
+            # installed explicitly will also be installed as a
+            # dependency of another spec. In this case append to both
+            # spec reports.
+            for s in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]):
+                name = name_fmt.format(s.name, s.dag_hash(length=7))
+                try:
+                    item = next((
+                        x for x in self.specs
+                        if x['name'] == name
+                    ))
+                    item['packages'].append(package)
+                except StopIteration:
+                    pass
+
             start_time = time.time()
             value = None
             try:
@@ -170,6 +186,7 @@ def wrapper(instance, *args, **kwargs):
                 package['stdout'] = fetch_log(pkg, do_fn, self.dir)
                 package['stdout'] += package['message']
                 package['exception'] = e.traceback
+                raise

             except (Exception, BaseException) as e:
                 # Everything else is an error (the installation
@@ -178,26 +195,11 @@ def wrapper(instance, *args, **kwargs):
                 package['stdout'] = fetch_log(pkg, do_fn, self.dir)
                 package['message'] = str(e) or 'Unknown error'
                 package['exception'] = traceback.format_exc()
+                raise

             finally:
                 package['elapsed_time'] = time.time() - start_time

-                # Append the package to the correct spec report. In some
-                # cases it may happen that a spec that is asked to be
-                # installed explicitly will also be installed as a
-                # dependency of another spec. In this case append to both
-                # spec reports.
-                for s in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]):
-                    name = name_fmt.format(s.name, s.dag_hash(length=7))
-                    try:
-                        item = next((
-                            x for x in self.specs
-                            if x['name'] == name
-                        ))
-                        item['packages'].append(package)
-                    except StopIteration:
-                        pass
-
             return value

         return wrapper
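Moving the spec-report bookkeeping out of the finally block means a package is registered before the wrapped call runs, under both its own spec and the root spec, deduplicated so an explicit root is recorded only once. The dedupe-then-append shape in isolation (toy data, hypothetical record helper):

```python
def dedupe(seq):
    seen = set()
    return [x for x in seq if not (x in seen or seen.add(x))]

specs = [{'name': 'root', 'packages': []}, {'name': 'dep', 'packages': []}]

def record(package, root_name, spec_name):
    # Register under root and dependency reports; dedupe avoids double
    # counting when a spec is its own root.
    for name in dedupe([root_name, spec_name]):
        for item in specs:
            if item['name'] == name:
                item['packages'].append(package)

record({'id': 'dep-1'}, 'root', 'dep')
assert len(specs[0]['packages']) == 1 and len(specs[1]['packages']) == 1
record({'id': 'root-1'}, 'root', 'root')   # explicit root: counted once
assert len(specs[0]['packages']) == 2
```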
@@ -62,7 +62,6 @@ class CDash(Reporter):

     def __init__(self, args):
         Reporter.__init__(self, args)
-        tty.set_verbose(args.verbose)
         self.success = True
         self.template_dir = os.path.join('reports', 'cdash')
         self.cdash_upload_url = args.cdash_upload_url
@@ -59,6 +59,14 @@
 parse_files = None


+#: whether we should write ASP unsat cores quickly in debug mode when the cores
+#: may be very large or take the time (sometimes hours) to minimize them
+minimize_cores = True
+
+#: whether we should include all facts in the unsat cores or only error messages
+full_cores = False
+
+
 # backward compatibility functions for clingo ASTs
 def ast_getter(*names):
     def getter(node):
@@ -366,6 +374,19 @@ def format_minimal_cores(self):
             string_list.extend(self.format_core(core))
         return string_list

+    def format_cores(self):
+        """List of facts for each core
+
+        Separate cores are separated by an empty line
+        Cores are not minimized
+        """
+        string_list = []
+        for core in self.cores:
+            if string_list:
+                string_list.append('\n')
+            string_list.extend(self.format_core(core))
+        return string_list
+
     def raise_if_unsat(self):
         """
         Raise an appropriate error if the result is unsatisfiable.
@@ -379,9 +400,13 @@ def raise_if_unsat(self):
         constraints = self.abstract_specs
         if len(constraints) == 1:
             constraints = constraints[0]
-        conflicts = self.format_minimal_cores()

-        raise spack.error.UnsatisfiableSpecError(constraints, conflicts=conflicts)
+        if minimize_cores:
+            conflicts = self.format_minimal_cores()
+        else:
+            conflicts = self.format_cores()
+
+        raise UnsatisfiableSpecError(constraints, conflicts=conflicts)

     @property
     def specs(self):
@@ -496,7 +521,11 @@ def fact(self, head, assumption=False):
             self.out.write("%s.\n" % str(symbol))

         atom = self.backend.add_atom(symbol)
-        choice = self.cores and assumption
+
+        # with `--show-cores=full or --show-cores=minimized, make all facts
+        # choices/assumptions, otherwise only if assumption=True
+        choice = self.cores and (full_cores or assumption)
+
         self.backend.add_rule([atom], [], choice=choice)
         if choice:
             self.assumptions.append(atom)
@@ -2024,3 +2053,33 @@ def solve(specs, dump=(), models=0, timers=False, stats=False, tests=False,
     return driver.solve(
         setup, specs, dump, models, timers, stats, tests, reuse
     )
+
+
+class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
+    """
+    Subclass for new constructor signature for new concretizer
+    """
+    def __init__(self, provided, conflicts):
+        indented = ['    %s\n' % conflict for conflict in conflicts]
+        conflict_msg = ''.join(indented)
+        issue = 'conflicts' if full_cores else 'errors'
+        msg = '%s is unsatisfiable, %s are:\n%s' % (provided, issue, conflict_msg)
+
+        newline_indent = '\n    '
+        if not full_cores:
+            msg += newline_indent + 'To see full clingo unsat cores, '
+            msg += 're-run with `spack --show-cores=full`'
+        if not minimize_cores or not full_cores:
+            # not solver.minimalize_cores and not solver.full_cores impossible
+            msg += newline_indent + 'For full, subset-minimal unsat cores, '
+            msg += 're-run with `spack --show-cores=minimized'
+            msg += newline_indent
+            msg += 'Warning: This may take (up to) hours for some specs'
+
+        super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
+
+        self.provided = provided
+
+        # Add attribute expected of the superclass interface
+        self.required = None
+        self.constraint_type = None
@@ -3159,6 +3159,15 @@ def constrain(self, other, deps=True):
|
|||||||
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
||||||
|
|
||||||
changed = False
|
changed = False
|
||||||
|
|
||||||
|
if not self.name and other.name:
|
||||||
|
self.name = other.name
|
||||||
|
changed = True
|
||||||
|
|
||||||
|
if not self.namespace and other.namespace:
|
||||||
|
self.namespace = other.namespace
|
||||||
|
changed = True
|
||||||
|
|
||||||
if self.compiler is not None and other.compiler is not None:
|
if self.compiler is not None and other.compiler is not None:
|
||||||
changed |= self.compiler.constrain(other.compiler)
|
changed |= self.compiler.constrain(other.compiler)
|
||||||
elif self.compiler is None:
|
elif self.compiler is None:
|
||||||
@@ -4259,7 +4268,7 @@ def tree(self, **kwargs):
|
|||||||
|
|
||||||
out = ""
|
out = ""
|
||||||
for d, dep_spec in self.traverse_edges(
|
for d, dep_spec in self.traverse_edges(
|
||||||
order='pre', cover=cover, depth=True, deptypes=deptypes):
|
order='pre', cover=cover, depth=True, deptype=deptypes):
|
||||||
node = dep_spec.spec
|
node = dep_spec.spec
|
||||||
|
|
||||||
if prefix is not None:
|
if prefix is not None:
|
||||||
|
@@ -11,19 +11,6 @@
|
|||||||
from spack.spec import Spec
|
from spack.spec import Spec
|
||||||
|
|
||||||
|
|
||||||
def spec_ordering_key(s):
|
|
||||||
if s.startswith('^'):
|
|
||||||
return 5
|
|
||||||
elif s.startswith('/'):
|
|
||||||
return 4
|
|
||||||
elif s.startswith('%'):
|
|
||||||
return 3
|
|
||||||
elif any(s.startswith(c) for c in '~-+@') or '=' in s:
|
|
||||||
return 2
|
|
||||||
else:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
class SpecList(object):
|
class SpecList(object):
|
||||||
|
|
||||||
def __init__(self, name='specs', yaml_list=None, reference=None):
|
def __init__(self, name='specs', yaml_list=None, reference=None):
|
||||||
@@ -177,30 +164,36 @@ def __getitem__(self, key):
         return self.specs[key]


-def _expand_matrix_constraints(object, specify=True):
-    # recurse so we can handle nexted matrices
+def _expand_matrix_constraints(matrix_config):
+    # recurse so we can handle nested matrices
     expanded_rows = []
-    for row in object['matrix']:
+    for row in matrix_config['matrix']:
         new_row = []
         for r in row:
             if isinstance(r, dict):
+                # Flatten the nested matrix into a single row of constraints
                 new_row.extend(
-                    [[' '.join(c)]
-                     for c in _expand_matrix_constraints(r, specify=False)])
+                    [[' '.join([str(c) for c in expanded_constraint_list])]
+                     for expanded_constraint_list in _expand_matrix_constraints(r)]
+                )
             else:
                 new_row.append([r])
         expanded_rows.append(new_row)

-    excludes = object.get('exclude', [])  # only compute once
-    sigil = object.get('sigil', '')
+    excludes = matrix_config.get('exclude', [])  # only compute once
+    sigil = matrix_config.get('sigil', '')

     results = []
     for combo in itertools.product(*expanded_rows):
         # Construct a combined spec to test against excludes
-        flat_combo = [constraint for list in combo for constraint in list]
-        ordered_combo = sorted(flat_combo, key=spec_ordering_key)
-
-        test_spec = Spec(' '.join(ordered_combo))
+        flat_combo = [constraint for constraint_list in combo
+                      for constraint in constraint_list]
+        flat_combo = [Spec(x) for x in flat_combo]
+
+        test_spec = flat_combo[0].copy()
+        for constraint in flat_combo[1:]:
+            test_spec.constrain(constraint)
+
         # Abstract variants don't have normal satisfaction semantics
         # Convert all variants to concrete types.
         # This method is best effort, so all existing variants will be
@@ -214,14 +207,12 @@ def _expand_matrix_constraints(matrix_config):
         if any(test_spec.satisfies(x) for x in excludes):
             continue

-        if sigil:  # add sigil if necessary
-            ordered_combo[0] = sigil + ordered_combo[0]
+        if sigil:
+            flat_combo[0] = Spec(sigil + str(flat_combo[0]))

         # Add to list of constraints
-        if specify:
-            results.append([Spec(x) for x in ordered_combo])
-        else:
-            results.append(ordered_combo)
+        results.append(flat_combo)
     return results
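_expand_matrix_constraints is a Cartesian product over the matrix rows followed by an exclude filter. A toy version over plain strings shows the shape of the computation; the membership test here stands in for the real Spec.satisfies check:

```python
import itertools

def expand_matrix(matrix, exclude=()):
    results = []
    for combo in itertools.product(*matrix):
        flat = list(combo)                   # one constraint drawn per row
        if any(x in flat for x in exclude):  # stand-in for Spec.satisfies
            continue
        results.append(flat)
    return results

rows = [['zlib', 'libelf'], ['%gcc', '%clang']]
assert expand_matrix(rows, exclude=['libelf']) == [
    ['zlib', '%gcc'], ['zlib', '%clang']]
```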
@@ -533,7 +533,7 @@ def steal_source(self, dest):
         for entry in hidden_entries + entries:
             if os.path.isdir(entry):
                 d = os.path.join(dest, os.path.basename(entry))
-                shutil.copytree(entry, d)
+                shutil.copytree(entry, d, symlinks=True)
             else:
                 shutil.copy2(entry, dest)
@@ -694,8 +694,8 @@ def _add_to_root_stage(self):
             source_path = os.path.join(self.source_path, key)

             if not os.path.exists(destination_path):
-                tty.info('Moving resource stage\n\tsource : '
-                         '{stage}\n\tdestination : {destination}'.format(
+                tty.info('Moving resource stage\n\tsource: '
+                         '{stage}\n\tdestination: {destination}'.format(
                              stage=source_path, destination=destination_path
                          ))
@@ -239,6 +239,12 @@ def test_config_add_ordered_dict(mutable_empty_config):
     """


+def test_config_add_interpret_oneof(mutable_empty_config):
+    # Regression test for a bug that would raise a validation error
+    config('add', 'packages:all:target:[x86_64]')
+    config('add', 'packages:all:variants:~shared')
+
+
 def test_config_add_invalid_fails(mutable_empty_config):
     config('add', 'packages:all:variants:+debug')
     with pytest.raises(
@@ -868,7 +868,7 @@ def test_env_loads(install_mockery, mock_fetch):
         install('--fake')

     with ev.read('test'):
-        env('loads', 'test')
+        env('loads')

     e = ev.read('test')
@@ -2693,3 +2693,14 @@ def test_activate_temp(monkeypatch, tmpdir):
                          if ev.spack_env_var in line)
     assert str(tmpdir) in active_env_var
     assert ev.is_env_dir(str(tmpdir))
+
+
+def test_env_view_fail_if_symlink_points_elsewhere(tmpdir, install_mockery, mock_fetch):
+    view = str(tmpdir.join('view'))
+    # Put a symlink to an actual directory in view
+    non_view_dir = str(tmpdir.mkdir('dont-delete-me'))
+    os.symlink(non_view_dir, view)
+    with ev.create('env', with_view=view):
+        add('libelf')
+        install('--fake')
+    assert os.path.isdir(non_view_dir)
@@ -393,9 +393,14 @@ def test_junit_output_with_failures(tmpdir, exc_typename, msg):
         '--log-format=junit', '--log-file=test.xml',
         'raiser',
         'exc_type={0}'.format(exc_typename),
-        'msg="{0}"'.format(msg)
+        'msg="{0}"'.format(msg),
+        fail_on_error=False,
     )

+    assert isinstance(install.error, spack.build_environment.ChildError)
+    assert install.error.name == exc_typename
+    assert install.error.pkg.name == 'raiser'
+
     files = tmpdir.listdir()
     filename = tmpdir.join('test.xml')
     assert filename in files
@@ -407,18 +412,22 @@ def test_junit_output_with_failures(tmpdir, exc_typename, msg):
     assert 'failures="1"' in content
     assert 'errors="0"' in content

+    # Nothing should have succeeded
+    assert 'tests="0"' not in content
+    assert 'failures="0"' not in content
+
     # We want to have both stdout and stderr
     assert '<system-out>' in content
     assert msg in content


 @pytest.mark.disable_clean_stage_check
-@pytest.mark.parametrize('exc_typename,msg', [
-    ('RuntimeError', 'something weird happened'),
-    ('KeyboardInterrupt', 'Ctrl-C strikes again')
+@pytest.mark.parametrize('exc_typename,expected_exc,msg', [
+    ('RuntimeError', spack.installer.InstallError, 'something weird happened'),
+    ('KeyboardInterrupt', KeyboardInterrupt, 'Ctrl-C strikes again')
 ])
 def test_junit_output_with_errors(
-        exc_typename, msg,
+        exc_typename, expected_exc, msg,
         mock_packages, mock_archive, mock_fetch, install_mockery,
         config, tmpdir, monkeypatch):
@@ -429,11 +438,11 @@ def just_throw(*args, **kwargs):
     monkeypatch.setattr(spack.installer.PackageInstaller, '_install_task',
                         just_throw)

-    # TODO: Why does junit output capture appear to swallow the exception
-    # TODO: as evidenced by the two failing packages getting tagged as
-    # TODO: installed?
     with tmpdir.as_cwd():
-        install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
+        install('--log-format=junit', '--log-file=test.xml', 'libdwarf',
+                fail_on_error=False)
+
+    assert isinstance(install.error, expected_exc)

     files = tmpdir.listdir()
     filename = tmpdir.join('test.xml')
@@ -441,10 +450,14 @@ def just_throw(*args, **kwargs):

     content = filename.open().read()

-    # Count failures and errors correctly: libdwarf _and_ libelf
-    assert 'tests="2"' in content
+    # Only libelf error is reported (through libdwarf root spec). libdwarf
+    # install is skipped and it is not an error.
+    assert 'tests="1"' in content
     assert 'failures="0"' in content
-    assert 'errors="2"' in content
+    assert 'errors="1"' in content
+
+    # Nothing should have succeeded
+    assert 'errors="0"' not in content

     # We want to have both stdout and stderr
     assert '<system-out>' in content
@@ -877,7 +890,7 @@ def test_install_help_cdash(capsys):


 @pytest.mark.disable_clean_stage_check
-def test_cdash_auth_token(tmpdir, install_mockery, capfd):
+def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, capfd):
     # capfd interferes with Spack's capturing
     with tmpdir.as_cwd():
         with capfd.disabled():
@@ -1086,3 +1099,15 @@ def test_install_env_with_tests_root(tmpdir, mock_packages, mock_fetch,
         add('depb')
         install('--test', 'root')
         assert not os.path.exists(test_dep.prefix)
+
+
+def test_install_empty_env(tmpdir, mock_packages, mock_fetch,
+                           install_mockery, mutable_mock_env_path):
+    env_name = 'empty'
+    env('create', env_name)
+    with ev.read(env_name):
+        out = install(fail_on_error=False)
+
+    assert env_name in out
+    assert 'environment' in out
+    assert 'no specs to install' in out
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import re

 import pytest

@@ -16,6 +17,24 @@
 location = SpackCommand('location')


+def test_manpath_trailing_colon(install_mockery, mock_fetch, mock_archive,
+                                mock_packages, working_env):
+    """Test that the commands generated by load add the MANPATH prefix
+    inspections. Also test that Spack correctly preserves the default/existing
+    manpath search path via a trailing colon"""
+    install('mpileaks')
+
+    sh_out = load('--sh', '--only', 'package', 'mpileaks')
+    lines = sh_out.split('\n')
+    assert any(re.match(r'export MANPATH=.*:;', ln) for ln in lines)
+
+    os.environ['MANPATH'] = '/tmp/man:'
+
+    sh_out = load('--sh', '--only', 'package', 'mpileaks')
+    lines = sh_out.split('\n')
+    assert any(re.match(r'export MANPATH=.*:/tmp/man:;', ln) for ln in lines)
+
+
 def test_load(install_mockery, mock_fetch, mock_archive, mock_packages):
     """Test that the commands generated by load add the specified prefix
     inspections. Also test that Spack records loaded specs by hash in the
@@ -178,10 +178,18 @@ def test_loads_recursive_blacklisted(database, module_configuration):

 @pytest.mark.db
 def test_setdefault_command(
-    mutable_database, module_configuration
+    mutable_database, mutable_config
 ):
-    module_configuration('autoload_direct')
+    data = {
+        'default': {
+            'enable': ['lmod'],
+            'lmod': {
+                'core_compilers': ['clang@3.3'],
+                'hierarchy': ['mpi']
+            }
+        }
+    }
+    spack.config.set('modules', data)
     # Install two different versions of a package
     other_spec, preferred = 'a@1.0', 'a@2.0'

@@ -24,8 +24,19 @@

 style = spack.main.SpackCommand("style")


+def has_develop_branch():
+    git = which('git')
+    if not git:
+        return False
+    git("show-ref", "--verify", "--quiet",
+        "refs/heads/develop", fail_on_error=False)
+    return git.returncode == 0
+
+
 # spack style requires git to run -- skip the tests if it's not there
-pytestmark = pytest.mark.skipif(not which('git'), reason='requires git')
+pytestmark = pytest.mark.skipif(not has_develop_branch(),
+                                reason='requires git with develop branch')

 # The style tools have requirements to use newer Python versions. We simplify by
 # requiring Python 3.6 or higher to run spack style.
@@ -133,7 +133,8 @@ def test_junit_output_with_failures(tmpdir, mock_test_stage, pkg_name, msgs):
     with tmpdir.as_cwd():
         spack_test('run',
                    '--log-format=junit', '--log-file=test.xml',
-                   pkg_name)
+                   pkg_name,
+                   fail_on_error=False)

     files = tmpdir.listdir()
     filename = tmpdir.join('test.xml')
@@ -160,7 +161,8 @@ def test_cdash_output_test_error(
     spack_test('run',
                '--log-format=cdash',
                '--log-file=cdash_reports',
-               'test-error')
+               'test-error',
+               fail_on_error=False)
     report_dir = tmpdir.join('cdash_reports')
     print(tmpdir.listdir())
     assert report_dir in tmpdir.listdir()
@@ -557,6 +557,19 @@ def test_conflicts_in_spec(self, conflict_spec):
|
|||||||
with pytest.raises(spack.error.SpackError):
|
with pytest.raises(spack.error.SpackError):
|
||||||
s.concretize()
|
s.concretize()
|
||||||
|
|
||||||
|
def test_conflicts_show_cores(self, conflict_spec, monkeypatch):
|
||||||
|
if spack.config.get('config:concretizer') == 'original':
|
||||||
|
pytest.skip('Testing debug statements specific to new concretizer')
|
||||||
|
|
||||||
|
monkeypatch.setattr(spack.solver.asp, 'full_cores', True)
|
||||||
|
monkeypatch.setattr(spack.solver.asp, 'minimize_cores', False)
|
||||||
|
|
||||||
|
s = Spec(conflict_spec)
|
||||||
|
with pytest.raises(spack.error.SpackError) as e:
|
||||||
|
s.concretize()
|
||||||
|
|
||||||
|
assert "conflict_trigger(" in e.value.message
|
||||||
|
|
||||||
def test_conflict_in_all_directives_true(self):
|
def test_conflict_in_all_directives_true(self):
|
||||||
s = Spec('when-directives-true')
|
s = Spec('when-directives-true')
|
||||||
with pytest.raises(spack.error.SpackError):
|
with pytest.raises(spack.error.SpackError):
|
||||||
|
@@ -876,8 +876,8 @@ def __init__(self, root):
|
|||||||
def path_for_spec(self, spec):
|
def path_for_spec(self, spec):
|
||||||
return '/'.join([self.root, spec.name + '-' + spec.dag_hash()])
|
return '/'.join([self.root, spec.name + '-' + spec.dag_hash()])
|
||||||
|
|
||||||
def check_installed(self, spec):
|
def ensure_installed(self, spec):
|
||||||
return True
|
pass
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture()
|
@pytest.fixture()
|
||||||
|
@@ -11,6 +11,7 @@
|
|||||||
import functools
|
import functools
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
@@ -909,3 +910,51 @@ def test_database_works_with_empty_dir(tmpdir):
|
|||||||
db.query()
|
db.query()
|
||||||
# Check that reading an empty directory didn't create a new index.json
|
# Check that reading an empty directory didn't create a new index.json
|
||||||
assert not os.path.exists(db._index_path)
|
assert not os.path.exists(db._index_path)
|
||||||
|
|
||||||
|
|
||||||
|
def test_reindex_removed_prefix_is_not_installed(mutable_database, mock_store, capfd):
|
||||||
|
"""When a prefix of a dependency is removed and the database is reindexed,
|
||||||
|
the spec should still be added through the dependent, but should be listed as
|
||||||
|
not installed."""
|
||||||
|
|
||||||
|
# Remove libelf from the filesystem
|
||||||
|
prefix = mutable_database.query_one('libelf').prefix
|
||||||
|
assert prefix.startswith(str(mock_store))
|
||||||
|
shutil.rmtree(prefix)
|
||||||
|
|
||||||
|
# Reindex should pick up libelf as a dependency of libdwarf
|
||||||
|
spack.store.store.reindex()
|
||||||
|
|
||||||
|
# Reindexing should warn about libelf not being found on the filesystem
|
||||||
|
err = capfd.readouterr()[1]
|
||||||
|
assert 'this directory does not contain an installation of the spec' in err
|
||||||
|
|
||||||
|
# And we should still have libelf in the database, but not installed.
|
||||||
|
assert not mutable_database.query_one('libelf', installed=True)
|
||||||
|
assert mutable_database.query_one('libelf', installed=False)
|
||||||
|
|
||||||
|
|
||||||
|
def test_reindex_when_all_prefixes_are_removed(mutable_database, mock_store):
|
||||||
|
# Remove all non-external installations from the filesystem
|
||||||
|
for spec in spack.store.db.query_local():
|
||||||
|
if not spec.external:
|
||||||
|
assert spec.prefix.startswith(str(mock_store))
|
||||||
|
shutil.rmtree(spec.prefix)
|
||||||
|
|
||||||
|
# Make sure we have some explicitly installed specs
|
||||||
|
num = len(mutable_database.query_local(installed=True, explicit=True))
|
||||||
|
assert num > 0
|
||||||
|
|
||||||
|
# Reindex uses the current index to repopulate itself
|
||||||
|
spack.store.store.reindex()
|
||||||
|
|
||||||
|
# Make sure all explicit specs are still there, but are now uninstalled.
|
||||||
|
specs = mutable_database.query_local(installed=False, explicit=True)
|
||||||
|
assert len(specs) == num
|
||||||
|
|
||||||
|
# And make sure they can be removed from the database (covers the case where
|
||||||
|
# `ref_count == 0 and not installed`, which hits some obscure branches.
|
||||||
|
for s in specs:
|
||||||
|
mutable_database.remove(s)
|
||||||
|
|
||||||
|
assert len(mutable_database.query_local(installed=False, explicit=True)) == 0
|
||||||
@@ -206,8 +206,12 @@ def test_prs_update_old_api():
     """Ensures that every package modified in a PR doesn't contain
     deprecated calls to any method.
    """
+    ref = os.getenv("GITHUB_BASE_REF")
+    if not ref:
+        pytest.skip("No base ref found")
+
     changed_package_files = [
-        x for x in style.changed_files() if style.is_package(x)
+        x for x in style.changed_files(base=ref) if style.is_package(x)
     ]
     failing = []
     for file in changed_package_files:
@@ -15,9 +15,8 @@
 import spack.paths
 import spack.repo
 import spack.util.compression
-import spack.util.crypto
 from spack.spec import Spec
-from spack.stage import DIYStage, Stage
+from spack.stage import Stage
 from spack.util.executable import Executable

 # various sha256 sums (using variables for legibility)
@@ -34,43 +33,6 @@
 url2_archive_sha256 = 'abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd'


-# some simple files for patch tests
-file_to_patch = """\
-first line
-second line
-"""
-
-patch_file = """\
-diff a/foo.txt b/foo-expected.txt
---- a/foo.txt
-+++ b/foo-expected.txt
-@@ -1,2 +1,3 @@
-+zeroth line
- first line
--second line
-+third line
-"""
-
-expected_patch_result = """\
-zeroth line
-first line
-third line
-"""
-
-file_patch_cant_apply_to = """\
-this file
-is completely different
-from anything in the files
-or patch above
-"""
-
-
-def write_file(filename, contents):
-    """Helper function for setting up tests."""
-    with open(filename, 'w') as f:
-        f.write(contents)
-
-
 @pytest.fixture()
 def mock_patch_stage(tmpdir_factory, monkeypatch):
     # Don't disrupt the spack install directory with tests.
@@ -105,9 +67,19 @@ def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256):

     mkdirp(stage.source_path)
     with working_dir(stage.source_path):
-        write_file("foo.txt", file_to_patch)
-        write_file("foo-expected.txt", expected_patch_result)
+        # write a file to be patched
+        with open('foo.txt', 'w') as f:
+            f.write("""\
+first line
+second line
+""")
+        # write the expected result of patching.
+        with open('foo-expected.txt', 'w') as f:
+            f.write("""\
+zeroth line
+first line
+third line
+""")
     # apply the patch and compare files
     patch.fetch()
     patch.apply(stage)
@@ -117,47 +89,6 @@ def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256):
         assert filecmp.cmp('foo.txt', 'foo-expected.txt')


-def test_apply_patch_twice(mock_patch_stage, tmpdir):
-    """Ensure that patch doesn't fail if applied twice."""
-
-    stage = DIYStage(str(tmpdir))
-    with tmpdir.as_cwd():
-        write_file("foo.txt", file_to_patch)
-        write_file("foo-expected.txt", expected_patch_result)
-        write_file("foo.patch", patch_file)
-
-    FakePackage = collections.namedtuple(
-        'FakePackage', ['name', 'namespace', 'fullname'])
-    fake_pkg = FakePackage('fake-package', 'test', 'fake-package')
-
-    def make_patch(filename):
-        path = os.path.realpath(str(tmpdir.join(filename)))
-        url = 'file://' + path
-        sha256 = spack.util.crypto.checksum("sha256", path)
-        return spack.patch.UrlPatch(fake_pkg, url, sha256=sha256)
-
-    # apply the first time
-    patch = make_patch('foo.patch')
-    patch.fetch()
-
-    patch.apply(stage)
-    with working_dir(stage.source_path):
-        assert filecmp.cmp('foo.txt', 'foo-expected.txt')
-
-    # ensure apply() is idempotent
-    patch.apply(stage)
-    with working_dir(stage.source_path):
-        assert filecmp.cmp('foo.txt', 'foo-expected.txt')
-
-    # now write a file that can't be patched
-    with working_dir(stage.source_path):
-        write_file("foo.txt", file_patch_cant_apply_to)
-
-    # this application should fail with a real error
-    with pytest.raises(spack.util.executable.ProcessError):
-        patch.apply(stage)
-
-
 def test_patch_in_spec(mock_packages, config):
     """Test whether patches in a package appear in the spec."""
     spec = Spec('patch')
|
@@ -7,6 +7,7 @@
 Test that Spack's shebang filtering works correctly.
 """
 import filecmp
+import grp
 import os
 import shutil
 import stat
@@ -19,6 +20,7 @@
 import spack.hooks.sbang as sbang
 import spack.paths
 import spack.store
+import spack.util.spack_yaml as syaml
 from spack.util.executable import which
 
 too_long = sbang.system_shebang_limit + 1
@@ -256,7 +258,34 @@ def test_shebang_handles_non_writable_files(script_dir, sbang_line):
     assert oct(not_writable_mode) == oct(st.st_mode)
 
 
-def check_sbang_installation():
+@pytest.fixture(scope='function')
+def configure_group_perms():
+    conf = syaml.load_config("""\
+all:
+  permissions:
+    read: world
+    write: group
+    group: {0}
+""".format(grp.getgrgid(os.getegid()).gr_name))
+    spack.config.set('packages', conf, scope='user')
+
+    yield
+
+
+@pytest.fixture(scope='function')
+def configure_user_perms():
+    conf = syaml.load_config("""\
+all:
+  permissions:
+    read: world
+    write: user
+""")
+    spack.config.set('packages', conf, scope='user')
+
+    yield
+
+
+def check_sbang_installation(group=False):
     sbang_path = sbang.sbang_install_path()
     sbang_bin_dir = os.path.dirname(sbang_path)
     assert sbang_path.startswith(spack.store.store.unpadded_root)
@@ -264,14 +293,22 @@ def check_sbang_installation():
     assert os.path.exists(sbang_path)
     assert fs.is_exe(sbang_path)
 
-    status = os.stat(sbang_path)
-    assert (status.st_mode & 0o777) == 0o755
-
     status = os.stat(sbang_bin_dir)
-    assert (status.st_mode & 0o777) == 0o755
+    mode = (status.st_mode & 0o777)
+    if group:
+        assert mode == 0o775, 'Unexpected {0}'.format(oct(mode))
+    else:
+        assert mode == 0o755, 'Unexpected {0}'.format(oct(mode))
+
+    status = os.stat(sbang_path)
+    mode = (status.st_mode & 0o777)
+    if group:
+        assert mode == 0o775, 'Unexpected {0}'.format(oct(mode))
+    else:
+        assert mode == 0o755, 'Unexpected {0}'.format(oct(mode))
 
 
-def test_install_sbang(install_mockery):
+def run_test_install_sbang(group):
     sbang_path = sbang.sbang_install_path()
     sbang_bin_dir = os.path.dirname(sbang_path)
 
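Note on the mode assertions above: masking `st_mode` with 0o777 isolates the permission bits, and the expected value flips between 0o755 and group-writable 0o775. A minimal standalone sketch of the same check (`assert_mode` and its arguments are illustrative names, not Spack API):

```python
import os
import stat


def assert_mode(path, expect_group_write=False):
    """Mirror the check above: mask the permission bits and compare
    against 0o775 (group-writable) or 0o755."""
    mode = stat.S_IMODE(os.stat(path).st_mode)  # same as st_mode & 0o777
    expected = 0o775 if expect_group_write else 0o755
    assert mode == expected, 'Unexpected {0}'.format(oct(mode))
```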
@@ -279,7 +316,7 @@ def test_install_sbang(install_mockery):
     assert not os.path.exists(sbang_bin_dir)
 
     sbang.install_sbang()
-    check_sbang_installation()
+    check_sbang_installation(group)
 
     # put an invalid file in for sbang
     fs.mkdirp(sbang_bin_dir)
@@ -287,11 +324,19 @@ def test_install_sbang(install_mockery):
         f.write("foo")
 
     sbang.install_sbang()
-    check_sbang_installation()
+    check_sbang_installation(group)
 
     # install again and make sure sbang is still fine
     sbang.install_sbang()
-    check_sbang_installation()
+    check_sbang_installation(group)
+
+
+def test_install_group_sbang(install_mockery, configure_group_perms):
+    run_test_install_sbang(True)
+
+
+def test_install_user_sbang(install_mockery, configure_user_perms):
+    run_test_install_sbang(False)
 
 
 def test_install_sbang_too_long(tmpdir):
@@ -974,7 +974,6 @@ def test_canonical_deptype(self):
             canonical_deptype(('foo',))
 
-
     def test_invalid_literal_spec(self):
         # Can't give type 'build' to a top-level spec
         with pytest.raises(spack.spec.SpecParseError):
             Spec.from_literal({'foo:build': None})
@@ -982,3 +981,11 @@ def test_invalid_literal_spec(self):
         # Can't use more than one ':' separator
         with pytest.raises(KeyError):
             Spec.from_literal({'foo': {'bar:build:link': None}})
+
+    def test_spec_tree_respect_deptypes(self):
+        # Version-test-root uses version-test-pkg as a build dependency
+        s = Spec('version-test-root').concretized()
+        out = s.tree(deptypes='all')
+        assert 'version-test-pkg' in out
+        out = s.tree(deptypes=('link', 'run'))
+        assert 'version-test-pkg' not in out
@@ -45,24 +45,34 @@ def test_spec_list_expansions(self):
         assert speclist.specs_as_constraints == self.default_constraints
         assert speclist.specs == self.default_specs
 
-    def test_spec_list_constraint_ordering(self):
-        specs = [{'matrix': [
+    @pytest.mark.regression('28749')
+    @pytest.mark.parametrize('specs,expected', [
+        # Constraints are ordered randomly
+        ([{'matrix': [
             ['^zmpi'],
             ['%gcc@4.5.0'],
             ['hypre', 'libelf'],
             ['~shared'],
             ['cflags=-O3', 'cflags="-g -O0"'],
             ['^foo']
-        ]}]
+        ]}], [
+            'hypre cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi',
+            'hypre cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
+            'libelf cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi',
+            'libelf cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
+        ]),
+        # A constraint affects both the root and a dependency
+        ([{'matrix': [
+            ['gromacs'],
+            ['%gcc'],
+            ['+plumed ^plumed%gcc']
+        ]}], [
+            'gromacs+plumed%gcc ^plumed%gcc'
+        ])
+    ])
+    def test_spec_list_constraint_ordering(self, specs, expected):
         speclist = SpecList('specs', specs)
-        expected_specs = [
-            Spec('hypre cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi'),
-            Spec('hypre cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi'),
-            Spec('libelf cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi'),
-            Spec('libelf cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi'),
-        ]
+        expected_specs = [Spec(x) for x in expected]
         assert speclist.specs == expected_specs
 
     def test_spec_list_add(self):
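The 'matrix' syntax exercised by the parametrized test expands to the cross-product of its rows, one entry picked per row. A toy model of that expansion (illustrative only; Spack's real SpecList also merges the picks into Spec objects):

```python
import itertools


def expand_matrix(rows):
    """Cross-product of constraint rows: pick one entry from each row
    and join the picks into a single constraint string."""
    return [' '.join(picks) for picks in itertools.product(*rows)]


# Two packages x two cflags choices -> four combined constraints,
# matching the four expected specs in the first test case above.
print(expand_matrix([['hypre', 'libelf'],
                     ['cflags=-O3', 'cflags="-g -O0"']]))
```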
@@ -1228,3 +1228,15 @@ def test_merge_abstract_anonymous_specs(specs, expected):
     specs = [Spec(x) for x in specs]
     result = spack.spec.merge_abstract_anonymous_specs(*specs)
     assert result == Spec(expected)
+
+
+@pytest.mark.parametrize('anonymous,named,expected', [
+    ('+plumed', 'gromacs', 'gromacs+plumed'),
+    ('+plumed ^plumed%gcc', 'gromacs', 'gromacs+plumed ^plumed%gcc'),
+    ('+plumed', 'builtin.gromacs', 'builtin.gromacs+plumed')
+])
+def test_merge_anonymous_spec_with_named_spec(anonymous, named, expected):
+    s = Spec(anonymous)
+    changed = s.constrain(named)
+    assert changed
+    assert s == Spec(expected)
@@ -22,6 +22,8 @@ def prepare_environment_for_tests():
 def test_is_system_path():
     assert(envutil.is_system_path('/usr/bin'))
     assert(not envutil.is_system_path('/nonsense_path/bin'))
+    assert(not envutil.is_system_path(''))
+    assert(not envutil.is_system_path(None))
 
 
 test_paths = ['/usr/bin',
@@ -630,6 +630,14 @@ def test_version_wrong_idx_type():
         v['0:']
 
 
+@pytest.mark.regression('29170')
+def test_version_range_satisfies_means_nonempty_intersection():
+    x = VersionRange('3.7.0', '3')
+    y = VersionRange('3.6.0', '3.6.0')
+    assert not x.satisfies(y)
+    assert not y.satisfies(x)
+
+
 @pytest.mark.regression('26482')
 def test_version_list_with_range_included_in_concrete_version_interpreted_as_range():
     # Note: this test only tests whether we can construct a version list of a range
@@ -92,11 +92,6 @@ def checksum(hashlib_algo, filename, **kwargs):
     """Returns a hex digest of the filename generated using an
     algorithm from hashlib.
     """
-    if isinstance(hashlib_algo, str):
-        if hashlib_algo not in hashes:
-            raise ValueError("Invalid hash algorithm: ", hashlib_algo)
-        hashlib_algo = hash_fun_for_algo(hashlib_algo)
-
     block_size = kwargs.get('block_size', 2**20)
     hasher = hashlib_algo()
     with open(filename, 'rb') as file:
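With the string-to-constructor shim deleted on the right-hand side, callers of `checksum()` must pass a hashlib constructor directly rather than a name like `"sha256"`. For reference, the same blockwise-digest pattern as a self-contained function (the name `file_sha256` is illustrative):

```python
import hashlib


def file_sha256(filename, block_size=2 ** 20):
    """Hex digest of a file, read in 1 MiB blocks so arbitrarily large
    files never have to fit in memory -- the pattern checksum() keeps."""
    hasher = hashlib.sha256()
    with open(filename, 'rb') as f:
        for block in iter(lambda: f.read(block_size), b''):
            hasher.update(block)
    return hasher.hexdigest()
```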
@@ -59,7 +59,7 @@ def is_system_path(path):
     Returns:
         True or False
     """
-    return os.path.normpath(path) in system_dirs
+    return path and os.path.normpath(path) in system_dirs
 
 
 def filter_system_paths(paths):
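The added `path and ...` guard short-circuits on `''` and `None`, which the old code fed straight into `os.path.normpath()` (a `TypeError` for `None`). Note the expression returns the falsy operand itself rather than `False`, which is why the new tests assert with `not` instead of `is False`. A minimal illustration with a stand-in `system_dirs`:

```python
import os.path

system_dirs = ['/usr/bin', '/usr/lib']  # illustrative stand-in


def is_system_path(path):
    # '' and None short-circuit before normpath() can choke on them;
    # the falsy operand itself is returned, so callers test truthiness.
    return path and os.path.normpath(path) in system_dirs


assert is_system_path('/usr/bin/') is True  # normpath strips the slash
assert not is_system_path('')               # returns '', not False
assert not is_system_path(None)             # returns None, not False
```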
@@ -611,7 +611,6 @@ def apply_modifications(self, env=None):
     def shell_modifications(self, shell='sh', explicit=False, env=None):
         """Return shell code to apply the modifications and clears the list."""
         modifications = self.group_by_name()
-        new_env = os.environ.copy()
 
         if env is None:
             env = os.environ
@@ -622,6 +621,9 @@ def shell_modifications(self, shell='sh', explicit=False, env=None):
         for x in actions:
             x.execute(new_env)
 
+        if 'MANPATH' in new_env and not new_env.get('MANPATH').endswith(':'):
+            new_env['MANPATH'] += ':'
+
         cmds = ''
 
         for name in sorted(set(modifications)):
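Why the trailing colon: on most systems, `man(1)` treats an empty entry in MANPATH as 'insert the default search path here', so terminating the variable with ':' keeps system man pages reachable after Spack prepends its own directories. The same guard as a standalone sketch (`keep_system_manpath` is an illustrative name):

```python
def keep_system_manpath(env):
    """Append a trailing ':' to MANPATH unless one is already present,
    so the system default man path stays in the search list."""
    manpath = env.get('MANPATH')
    if manpath and not manpath.endswith(':'):
        env['MANPATH'] = manpath + ':'
    return env


print(keep_system_manpath({'MANPATH': '/opt/view/share/man'}))
```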
@@ -27,7 +27,6 @@ def __init__(self, name):
         from spack.util.environment import EnvironmentModifications  # no cycle
         self.default_envmod = EnvironmentModifications()
         self.returncode = None
-        self.error = None  # saved ProcessError when fail_on_error
 
         if not self.exe:
             raise ProcessError("Cannot construct executable for '%s'" % name)
@@ -91,8 +90,7 @@ def __call__(self, *args, **kwargs):
                 the environment (neither requires nor precludes env)
             fail_on_error (bool): Raise an exception if the subprocess returns
                 an error. Default is True. The return code is available as
-                ``exe.returncode``, and a saved ``ProcessError`` that would
-                have been raised is in ``exe.error``.
+                ``exe.returncode``
             ignore_errors (int or list): A list of error codes to ignore.
                 If these codes are returned, this process will not raise
                 an exception even if ``fail_on_error`` is set to ``True``
@@ -215,7 +213,7 @@ def streamify(arg, mode):
                 sys.stderr.write(errstr)
 
             rc = self.returncode = proc.returncode
-            if rc != 0:
+            if fail_on_error and rc != 0 and (rc not in ignore_errors):
                 long_msg = cmd_line_string
                 if result:
                     # If the output is not captured in the result, it will have
@@ -224,11 +222,8 @@ def streamify(arg, mode):
                     # stdout/stderr (e.g. if 'output' is not specified)
                     long_msg += '\n' + result
 
-                self.error = ProcessError(
-                    'Command exited with status %d:' % proc.returncode, long_msg
-                )
-                if fail_on_error and (rc not in ignore_errors):
-                    raise self.error
+                raise ProcessError('Command exited with status %d:' %
+                                   proc.returncode, long_msg)
 
             return result
 
@@ -237,15 +232,10 @@ def streamify(arg, mode):
                 '%s: %s' % (self.exe[0], e.strerror), 'Command: ' + cmd_line_string)
 
         except subprocess.CalledProcessError as e:
-            self.error = ProcessError(
-                str(e),
-                '\nExit status %d when invoking command: %s' % (
-                    proc.returncode,
-                    cmd_line_string,
-                ),
-            )
             if fail_on_error:
-                raise self.error
+                raise ProcessError(
+                    str(e), '\nExit status %d when invoking command: %s' %
+                    (proc.returncode, cmd_line_string))
 
         finally:
             if close_ostream:
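Net effect of these hunks: a `ProcessError` is now constructed only when it is actually raised, and the saved `exe.error` attribute is gone; `fail_on_error` and `ignore_errors` alone decide whether a non-zero exit raises. A sketch of the caller-side behavior under the documented keyword arguments (assumes a `grep` binary on PATH):

```python
from spack.util.executable import ProcessError, which

grep = which('grep', required=True)

# Default: a non-zero exit status raises ProcessError.
try:
    grep('no-such-pattern', '/etc/hostname')
except ProcessError:
    print('grep exited with', grep.returncode)

# grep exits 1 on "no match"; treat that particular status as success.
grep('no-such-pattern', '/etc/hostname', ignore_errors=[1])

# Or suppress raising entirely and inspect returncode afterwards.
grep('no-such-pattern', '/etc/hostname', fail_on_error=False)
print('status:', grep.returncode)
```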
@@ -586,35 +586,23 @@ def __contains__(self, other):
 
     @coerced
     def satisfies(self, other):
-        """A VersionRange satisfies another if some version in this range
-        would satisfy some version in the other range. To do this it must
-        either:
-
-        a) Overlap with the other range
-        b) The start of this range satisfies the end of the other range.
-
-        This is essentially the same as overlaps(), but overlaps assumes
-        that its arguments are specific. That is, 4.7 is interpreted as
-        4.7.0.0.0.0... . This function assumes that 4.7 would be satisfied
-        by 4.7.3.5, etc.
-
-        Rationale:
-
-        If a user asks for gcc@4.5:4.7, and a package is only compatible with
-        gcc@4.7.3:4.8, then that package should be able to build under the
-        constraints. Just using overlaps() would not work here.
-
-        Note that we don't need to check whether the end of this range
-        would satisfy the start of the other range, because overlaps()
-        already covers that case.
-
-        Note further that overlaps() is a symmetric operation, while
-        satisfies() is not.
         """
-        return (self.overlaps(other) or
-                # if either self.start or other.end are None, then this can't
-                # satisfy, or overlaps() would've taken care of it.
-                self.start and other.end and self.start.satisfies(other.end))
+        x.satisfies(y) in general means that x and y have a
+        non-zero intersection. For VersionRange this means they overlap.
+
+        `satisfies` is a commutative binary operator, meaning that
+        x.satisfies(y) if and only if y.satisfies(x).
+
+        Note: in some cases we have the keyword x.satisfies(y, strict=True)
+        to mean strict set inclusion, which is not commutative. However, this
+        lacks in VersionRange for unknown reasons.
+
+        Examples
+        - 1:3 satisfies 2:4, as their intersection is 2:3.
+        - 1:2 does not satisfy 3:4, as their intersection is empty.
+        - 4.5:4.7 satisfies 4.7.2:4.8, as their intersection is 4.7.2:4.7
+        """
+        return self.overlaps(other)
 
     @coerced
     def overlaps(self, other):
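The rewritten docstring reduces `satisfies()` to a symmetric overlap test. The same idea over plain closed numeric intervals (a toy model, not Spack's Version machinery):

```python
def overlaps(a, b):
    """Closed intervals (lo, hi) intersect iff neither one ends before
    the other begins -- the symmetric test satisfies() now delegates to."""
    return a[0] <= b[1] and b[0] <= a[1]


assert overlaps((1, 3), (2, 4))      # intersection is [2, 3]
assert not overlaps((1, 2), (3, 4))  # empty intersection
assert overlaps((2, 4), (1, 3)) == overlaps((1, 3), (2, 4))  # commutative
```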
@@ -123,7 +123,7 @@ _bash_completion_spack() {
     # If the cursor is in the middle of the line, like:
     #     `spack -d [] install`
     # COMP_WORDS will not contain the empty character, so we have to add it.
-    if [[ "${COMP_LINE:$COMP_POINT:1}" == " " ]]
+    if [[ "${COMP_LINE:$COMP_POINT-1:1}" == " " ]]
     then
         cur=""
     fi
@@ -208,10 +208,10 @@ _repos() {
     SPACK_COMPREPLY="$SPACK_REPOS"
 }
 
-_tests() {
+_unit_tests() {
     if [[ -z "${SPACK_TESTS:-}" ]]
     then
-        SPACK_TESTS="$(spack test -l)"
+        SPACK_TESTS="$(spack unit-test -l)"
     fi
     SPACK_COMPREPLY="$SPACK_TESTS"
 }
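The `$COMP_POINT-1` fix makes the script inspect the character immediately to the left of the cursor (bash's COMP_POINT is the cursor index, so the last character typed sits at `COMP_POINT - 1`) instead of the character under the cursor. The same indexing, modeled in Python (names and sample line are illustrative):

```python
def char_before_cursor(line, point):
    """COMP_POINT is the cursor index; the character the user just
    typed is at point - 1, which is what the completion must test."""
    return line[point - 1:point]


line = 'spack -d install'
point = 9  # cursor sits just after the space following '-d'
assert char_before_cursor(line, point) == ' '
assert line[point:point + 1] == 'i'  # the old check looked here instead
```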
@@ -2,7 +2,7 @@ FROM opensuse/leap:15.3
 MAINTAINER Christian Goll <cgoll@suse.com>
 
 ENV DOCKERFILE_BASE=opensuse \
-    DOCKERFILE_DISTRO=opensuse_leap \
+    DOCKERFILE_DISTRO=leap \
     DOCKERFILE_DISTRO_VERSION=15.3 \
     SPACK_ROOT=/opt/spack \
     DEBIAN_FRONTEND=noninteractive \
@@ -11,10 +11,21 @@ ENV DOCKERFILE_BASE=opensuse \
 
 RUN zypper ref && \
     zypper up -y && \
-    zypper in -y python3-base python3-boto3 \
-        xz gzip tar bzip2 curl patch patchelf file \
-        gcc-c++ gcc-fortran make cmake automake && \
-    zypper clean
+    zypper in -y \
+        bzip2\
+        curl\
+        file\
+        gcc-c++\
+        gcc-fortran\
+        make\
+        gzip\
+        patch\
+        patchelf\
+        python3-base \
+        python3-boto3\
+        tar\
+        xz\
+    && zypper clean
 
 # clean up manpages
 RUN rm -rf /var/cache/zypp/* \
@@ -70,6 +70,16 @@ _test_debug_functions() {
         emulate -L sh
     fi
 
+    # Test whether `spack install --verb[] spec` completes to `spack install --verbose spec`
+    COMP_LINE='spack install --verb spec'
+    COMP_POINT=20
+    COMP_WORDS=(spack install --verb spec)
+    COMP_CWORD=2
+    COMP_KEY=9
+    COMP_TYPE=64
+    _bash_completion_spack
+    contains "--verbose" echo "${COMPREPLY[@]}"
+
     # This is a particularly tricky case that involves the following situation:
     #     `spack -d [] install `
     # Here, [] represents the cursor, which is in the middle of the line.
@@ -14,15 +14,17 @@
 # Usage:
 #     run-flake8-tests
 #
-. "$(dirname $0)/setup.sh"
+. "$(dirname "$0")/setup.sh"
 
-BASE=""
-if [ -n "$GITHUB_BASE_REF" ]; then
-    BASE="--base ${GITHUB_BASE_REF}"
+args=()
+if [[ -n $GITHUB_BASE_REF ]]; then
+    args+=("--base" "${GITHUB_BASE_REF}")
+else
+    args+=("--base" "${GITHUB_REF_NAME}")
 fi
 
 # verify that the code style is correct
-spack style --root-relative $BASE
+spack style --root-relative "${args[@]}"
 
 # verify that the license headers are present
 spack license verify
@@ -123,7 +123,7 @@ _bash_completion_spack() {
     # If the cursor is in the middle of the line, like:
    #     `spack -d [] install`
     # COMP_WORDS will not contain the empty character, so we have to add it.
-    if [[ "${COMP_LINE:$COMP_POINT:1}" == " " ]]
+    if [[ "${COMP_LINE:$COMP_POINT-1:1}" == " " ]]
     then
         cur=""
     fi
@@ -208,10 +208,10 @@ _repos() {
     SPACK_COMPREPLY="$SPACK_REPOS"
 }
 
-_tests() {
+_unit_tests() {
     if [[ -z "${SPACK_TESTS:-}" ]]
     then
-        SPACK_TESTS="$(spack test -l)"
+        SPACK_TESTS="$(spack unit-test -l)"
     fi
     SPACK_COMPREPLY="$SPACK_TESTS"
 }
@@ -335,7 +335,7 @@ _spacktivate() {
 _spack() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
+        SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --show-cores --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
     else
         SPACK_COMPREPLY="activate add analyze arch audit blame bootstrap build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module monitor patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
     fi
@@ -969,12 +969,7 @@ _spack_env_st() {
 }
 
 _spack_env_loads() {
-    if $list_options
-    then
-        SPACK_COMPREPLY="-h --help -n --module-set-name -m --module-type --input-only -p --prefix -x --exclude -r --dependencies"
-    else
-        _environments
-    fi
+    SPACK_COMPREPLY="-h --help -n --module-set-name -m --module-type --input-only -p --prefix -x --exclude -r --dependencies"
 }
 
 _spack_env_view() {
@@ -1387,7 +1382,7 @@ _spack_module_tcl() {
     then
         SPACK_COMPREPLY="-h --help -n --name"
     else
-        SPACK_COMPREPLY="refresh find rm loads"
+        SPACK_COMPREPLY="refresh find rm loads setdefault"
     fi
 }
 
@@ -1427,6 +1422,15 @@ _spack_module_tcl_loads() {
     fi
 }
 
+_spack_module_tcl_setdefault() {
+    if $list_options
+    then
+        SPACK_COMPREPLY="-h --help"
+    else
+        _installed_packages
+    fi
+}
+
 _spack_monitor() {
     SPACK_COMPREPLY="-h --help --monitor --monitor-save-local --monitor-no-auth --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix"
 }
@@ -1776,7 +1780,7 @@ _spack_unit_test() {
     then
         SPACK_COMPREPLY="-h --help -H --pytest-help -l --list -L --list-long -N --list-names --extension -s -k --showlocals"
     else
-        _tests
+        _unit_tests
    fi
 }
share/spack/templates/container/leap-15.dockerfile (new file, 21 lines)
@@ -0,0 +1,21 @@
+{% extends "container/bootstrap-base.dockerfile" %}
+{% block install_os_packages %}
+RUN zypper ref && \
+    zypper up -y && \
+    zypper in -y \
+        bzip2\
+        curl\
+        file\
+        gcc-c++\
+        gcc-fortran\
+        make\
+        git\
+        gzip\
+        patch\
+        patchelf\
+        python3-base \
+        python3-boto3\
+        tar\
+        xz\
+    && zypper clean
+{% endblock %}
@@ -28,6 +28,7 @@ class Mpileaks(Package):
 
     def install(self, spec, prefix):
         touch(prefix.mpileaks)
+        mkdirp(prefix.man)
 
     def setup_environment(self, senv, renv):
         renv.set('FOOBAR', self.name)
@@ -22,6 +22,7 @@ class Elfutils(AutotoolsPackage, SourcewarePackage):
     list_url = "https://sourceware.org/elfutils/ftp"
     list_depth = 1
 
+    version('0.186', sha256='7f6fb9149b1673d38d9178a0d3e0fb8a1ec4f53a9f4c2ff89469609879641177')
     version('0.185', sha256='dc8d3e74ab209465e7f568e1b3bb9a5a142f8656e2b57d10049a73da2ae6b5a6')
     version('0.184', sha256='87e7d1d7f0333815dd1f62135d047a4dc4082068f361452f357997c11360644b')
     version('0.183', sha256='c3637c208d309d58714a51e61e63f1958808fead882e9b607506a29e5474f2c5')
@@ -8,7 +8,7 @@ def error(self, message):
         pass
 
     def __init__(self):
-        super().__init__()
+        super(HTMLParser, self).__init__()
         self.state = 0
         self.processes = []
 
@@ -1058,7 +1058,7 @@ def is_enabled(text):
     # This code gets all the fabric names from the variants list
     # Idea taken from the AutotoolsPackage source.
     def get_options_from_variant(self, name):
-        values = self.variants[name].values
+        values = self.variants[name][0].values
         if getattr(values, 'feature_values', None):
             values = values.feature_values
         return values
@@ -66,7 +66,7 @@ def setup_build_environment(self, env):
 
     def install_args(self, spec, prefix):
         # Have the parent class version set prefix
-        args = super().install_args(spec, prefix)
+        args = super(PythonPackage, self).install_args(spec, prefix)
         if '+mpi' in spec:
             args.append('--mpi')
         return args
@@ -91,7 +91,7 @@ class PyPandas(PythonPackage):
 
     @property
     def import_modules(self):
-        modules = super().import_modules
+        modules = super(PyPandas, self).import_modules()
 
         ignored_imports = ["pandas.tests", "pandas.plotting._matplotlib"]
 
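These `super()` hunks all swap the zero-argument form, which exists only on Python 3, for the explicit two-argument form that Python 2 also accepts. A minimal sketch of the two spellings (class names are illustrative):

```python
class Base(object):
    def greet(self):
        return 'hello'


class Child(Base):
    def greet(self):
        # Python 3 only:  super().greet()
        # Python 2 and 3: the explicit two-argument form below.
        return super(Child, self).greet() + ' from Child'


print(Child().greet())
```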
@@ -8,10 +8,11 @@ class PyVermin(PythonPackage):
     """Concurrently detect the minimum Python versions needed to run code."""
 
     homepage = "https://github.com/netromdk/vermin"
-    url = "https://github.com/netromdk/vermin/archive/v1.3.1.tar.gz"
+    url = "https://github.com/netromdk/vermin/archive/v1.3.2.tar.gz"
 
     maintainers = ['netromdk']
 
+    version('1.3.2', sha256='2818eaea24c5be5dae1f374ddb2377e9cfaad04d0a3372ad129cffc46cec5404')
     version('1.3.1', sha256='ddcdaad5a708a483af192075f5d2eaaaf3aa4661b5101ddafa40d7837eeb5368')
     version('1.3.0', sha256='adf2b6ea34c01c3a81fc4fa78c2e5fa6c8dd6d35327a8e5a4caeeaef7ec21668')
     version('1.2.2', sha256='d0343b2a78d7e4de67dfd2d882eeaf8b241db724f7e67f83bdd4111edb97f1e2')
@@ -3,11 +3,18 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
+import os
+
 from spack import *
 
 
+def is_CrayXC():
+    return (spack.platforms.host().name == 'cray') and \
+        (os.environ.get('CRAYPE_NETWORK_TARGET') == "aries")
+
+
 def cross_detect():
-    if spack.platforms.host().name == 'cray':
+    if is_CrayXC():
         if which('srun'):
             return 'cray-aries-slurm'
         if which('aprun'):
@@ -24,6 +31,7 @@ class Upcxx(Package):
 
     homepage = "https://upcxx.lbl.gov"
     maintainers = ['bonachea']
+    url = "https://bitbucket.org/berkeleylab/upcxx/downloads/upcxx-2021.3.0.tar.gz"
     git = 'https://bitbucket.org/berkeleylab/upcxx.git'
 
     tags = ['e4s']
@@ -31,13 +39,15 @@ class Upcxx(Package):
     version('develop', branch='develop')
     version('master', branch='master')
 
+    version('2021.9.0', sha256='9299e17602bcc8c05542cdc339897a9c2dba5b5c3838d6ef2df7a02250f42177')
     version('2021.3.0', sha256='3433714cd4162ffd8aad9a727c12dbf1c207b7d6664879fc41259a4b351595b7')
     version('2020.11.0', sha256='f6f212760a485a9f346ca11bb4751e7095bbe748b8e5b2389ff9238e9e321317',
             url='https://bitbucket.org/berkeleylab/upcxx/downloads/upcxx-2020.11.0-memory_kinds_prototype.tar.gz')
     version('2020.10.0', sha256='623e074b512bf8cad770a04040272e1cc660d2749760398b311f9bcc9d381a37')
+    version('2020.3.2', sha256='978adc315d21089c739d5efda764b77fc9a2a7c5860f169fe5cd2ca1d840620f')
     version('2020.3.0', sha256='01be35bef4c0cfd24e9b3d50c88866521b9cac3ad4cbb5b1fc97aea55078810f')
-    version('2019.9.0', sha256='7d67ccbeeefb59de9f403acc719f52127a30801a2c2b9774a1df03f850f8f1d4')
-    version('2019.3.2', sha256='dcb0b337c05a0feb2ed5386f5da6c60342412b49cab10f282f461e74411018ad')
+    # Do NOT add older versions here.
+    # UPC++ releases over 2 years old are not supported.
 
     variant('mpi', default=False,
             description='Enables MPI-based spawners and mpi-conduit')
@@ -48,8 +58,8 @@ class Upcxx(Package):
     variant('cross', default=cross_detect(),
             description="UPC++ cross-compile target (autodetect by default)")
 
-    conflicts('cross=none', when='platform=cray',
-              msg='cross=none is unacceptable on Cray.' +
+    conflicts('cross=none', when=is_CrayXC(),
+              msg='cross=none is unacceptable on Cray XC.' +
              'Please specify an appropriate "cross" value')
 
     # UPC++ always relies on GASNet-EX.
@@ -61,47 +71,15 @@ class Upcxx(Package):
 
     depends_on('mpi', when='+mpi')
     depends_on('cuda', when='+cuda')
-    # Require Python2 2.7.5+ up to v2019.9.0
-    depends_on('python@2.7.5:2',
-               type=("build", "run"), when='@:2019.9.0')
-    # v2020.3.0 and later also permit Python3
-    depends_on('python@2.7.5:', type=("build", "run"), when='@2020.3.0:')
+    depends_on('python@2.7.5:', type=("build", "run"))
 
     # All flags should be passed to the build-env in autoconf-like vars
     flag_handler = env_flags
 
-    def url_for_version(self, version):
-        if version > Version('2019.3.2'):
-            url = "https://bitbucket.org/berkeleylab/upcxx/downloads/upcxx-{0}.tar.gz"
-        else:
-            url = "https://bitbucket.org/berkeleylab/upcxx/downloads/upcxx-{0}-offline.tar.gz"
-        return url.format(version)
-
-    def setup_build_environment(self, env):
-        # ensure we use the correct python
-        env.set('UPCXX_PYTHON', self.spec['python'].command.path)
-
-        if '+mpi' in self.spec:
-            env.set('GASNET_CONFIGURE_ARGS',
-                    '--enable-mpi --enable-mpi-compat')
-        else:
-            env.set('GASNET_CONFIGURE_ARGS', '--without-mpicc')
-
-        if 'cross=none' not in self.spec:
-            env.set('CROSS', self.spec.variants['cross'].value)
-
-        if '+cuda' in self.spec:
-            env.set('UPCXX_CUDA', '1')
-            env.set('UPCXX_CUDA_NVCC', self.spec['cuda'].prefix.bin.nvcc)
-
     def setup_run_environment(self, env):
-        # ensure we use the correct python
-        env.set('UPCXX_PYTHON', self.spec['python'].command.path)
-
         env.set('UPCXX_INSTALL', self.prefix)
         env.set('UPCXX', self.prefix.bin.upcxx)
-        if 'platform=cray' in self.spec:
-            env.set('UPCXX_GASNET_CONDUIT', 'aries')
+        if is_CrayXC():
             env.set('UPCXX_NETWORK', 'aries')
 
     def setup_dependent_package(self, module, dep_spec):
@@ -110,97 +88,91 @@ def setup_dependent_package(self, module, dep_spec):
     def setup_dependent_build_environment(self, env, dependent_spec):
         env.set('UPCXX_INSTALL', self.prefix)
         env.set('UPCXX', self.prefix.bin.upcxx)
-        if 'platform=cray' in self.spec:
-            env.set('UPCXX_GASNET_CONDUIT', 'aries')
+        if is_CrayXC():
             env.set('UPCXX_NETWORK', 'aries')
 
     def install(self, spec, prefix):
+        env = os.environ
         # UPC++ follows autoconf naming convention for LDLIBS, which is 'LIBS'
         if (env.get('LDLIBS')):
             env['LIBS'] = env['LDLIBS']
 
-        if spec.version <= Version('2019.9.0'):
-            env['CC'] = self.compiler.cc
-            if '+mpi' in self.spec:
-                if 'platform=cray' in self.spec:
-                    env['GASNET_CONFIGURE_ARGS'] += \
-                        " --with-mpicc=" + self.compiler.cc
-                else:
-                    env['CXX'] = spec['mpi'].mpicxx
-            else:
-                env['CXX'] = self.compiler.cxx
-            if '+gasnet' in self.spec:
-                env['GASNET'] = spec['gasnet'].prefix.src
-            installsh = Executable("./install")
-            installsh(prefix)
+        options = ["--prefix=%s" % prefix]
+
+        if 'cross=none' in spec:
+            options.append('--without-cross')
         else:
-            if 'platform=cray' in self.spec:
-                # Spack loads the cray-libsci module incorrectly on ALCF theta,
-                # breaking the Cray compiler wrappers
-                # cray-libsci is irrelevant to our build, so disable it
-                for var in ['PE_PKGCONFIG_PRODUCTS', 'PE_PKGCONFIG_LIBS']:
-                    env[var] = ":".join(
-                        filter(lambda x: "libsci" not in x.lower(),
-                               env[var].split(":")))
-                # Undo spack compiler wrappers:
-                # the C/C++ compilers must work post-install
-                # hack above no longer works after the fix to UPC++ issue #287
-                real_cc = join_path(env['CRAYPE_DIR'], 'bin', 'cc')
-                real_cxx = join_path(env['CRAYPE_DIR'], 'bin', 'CC')
-                # workaround a bug in the UPC++ installer: (issue #346)
-                env['GASNET_CONFIGURE_ARGS'] += \
-                    " --with-cc=" + real_cc + " --with-cxx=" + real_cxx
-                if '+mpi' in self.spec:
-                    env['GASNET_CONFIGURE_ARGS'] += " --with-mpicc=" + real_cc
-            else:
-                real_cc = self.compiler.cc
-                real_cxx = self.compiler.cxx
-                if '+mpi' in self.spec:
-                    real_cxx = spec['mpi'].mpicxx
-
-            env['CC'] = real_cc
-            env['CXX'] = real_cxx
-
-            options = ["--prefix=%s" % prefix]
-
-            if '+gasnet' in self.spec:
-                options.append('--with-gasnet=' + spec['gasnet'].prefix.src)
-
-            configure(*options)
-
-            make()
-
-            make('install')
+            options.append('--with-cross=' + spec.variants['cross'].value)
+
+        if is_CrayXC():
+            # Spack loads the cray-libsci module incorrectly on ALCF theta,
+            # breaking the Cray compiler wrappers
+            # cray-libsci is irrelevant to our build, so disable it
+            for var in ['PE_PKGCONFIG_PRODUCTS', 'PE_PKGCONFIG_LIBS']:
+                env[var] = ":".join(
+                    filter(lambda x: "libsci" not in x.lower(),
+                           env[var].split(":")))
+            # Undo spack compiler wrappers:
+            # the C/C++ compilers must work post-install
+            real_cc = join_path(env['CRAYPE_DIR'], 'bin', 'cc')
+            real_cxx = join_path(env['CRAYPE_DIR'], 'bin', 'CC')
+            # workaround a bug in the UPC++ installer: (issue #346)
+            if (env.get('GASNET_CONFIGURE_ARGS') is None):
+                env['GASNET_CONFIGURE_ARGS'] = ''
+            env['GASNET_CONFIGURE_ARGS'] += \
+                " --with-cc=" + real_cc + " --with-cxx=" + real_cxx
+            if '+mpi' in spec:
+                env['GASNET_CONFIGURE_ARGS'] += " --with-mpicc=" + real_cc
+        else:
+            real_cc = self.compiler.cc
+            real_cxx = self.compiler.cxx
+            if '+mpi' in spec:
+                real_cxx = spec['mpi'].mpicxx
+
+        options.append('--with-cc=' + real_cc)
+        options.append('--with-cxx=' + real_cxx)
+
+        if '+gasnet' in spec:
+            options.append('--with-gasnet=' + spec['gasnet'].prefix.src)
+
+        options.append('--with-python=' + spec['python'].command.path)
+
+        if '+mpi' in spec:
+            options.append('--enable-mpi')
+            options.append('--enable-mpi-compat')
+        else:
+            options.append('--without-mpicc')
+
+        if '+cuda' in spec:
+            options.append('--with-cuda')
+            options.append('--with-nvcc=' + spec['cuda'].prefix.bin.nvcc)
+
+        configure(*options)
+
+        make()
+
+        make('install')
 
         install_tree('example', prefix.example)
 
     @run_after('install')
     @on_package_attributes(run_tests=True)
     def test_install(self):
-        if self.spec.version <= Version('2019.9.0'):
-            spack.main.send_warning_to_tty(
-                "run_tests not supported in UPC++ version " +
-                self.spec.version.string + " -- SKIPPED")
-        else:
-            # enable testing of unofficial conduits (mpi)
-            test_networks = 'NETWORKS=$(CONDUITS)'
-            # build hello world against installed tree in all configurations
-            make('test_install', test_networks)
-            make('tests-clean')  # cleanup
-            # build all tests for all networks in debug mode
-            make('tests', test_networks)
-            if 'cross=none' in self.spec:
-                make('run-tests', 'NETWORKS=smp')  # runs tests for smp backend
-                make('tests-clean')  # cleanup
+        # enable testing of unofficial conduits (mpi)
+        test_networks = 'NETWORKS=$(CONDUITS)'
+        # build hello world against installed tree in all configurations
+        make('test_install', test_networks)
+        make('tests-clean')  # cleanup
+        # build all tests for all networks in debug mode
+        make('tests', test_networks)
+        if 'cross=none' in self.spec:
+            make('run-tests', 'NETWORKS=smp')  # runs tests for smp backend
+            make('tests-clean')  # cleanup
 
     def test(self):
-        if self.spec.version <= Version('2019.9.0'):
-            spack.main.send_warning_to_tty(
-                "post-install tests not supported in UPC++ version " +
-                self.spec.version.string + " -- SKIPPED")
-        else:  # run post-install smoke test:
-            test_install = join_path(self.prefix.bin, 'test-upcxx-install.sh')
-            self.run_test(test_install, expected=['SUCCESS'], status=0,
-                          installed=True,
-                          purpose='Checking UPC++ compile+link ' +
-                                  'for all installed backends')
+        # run post-install smoke test:
+        test_install = join_path(self.prefix.bin, 'test-upcxx-install.sh')
+        self.run_test(test_install, expected=['SUCCESS'], status=0,
+                      installed=True,
+                      purpose='Checking UPC++ compile+link ' +
+                              'for all installed backends')