Compare commits
37 Commits
develop-20 ... releases/v

- 22db992b5c
- 0b1be57e08
- 9d04632cee
- f374fe8a62
- 6193d59d5d
- fa0ac5f24b
- 5cea962e8e
- b9fa27ac65
- 96535dc39f
- b74b175673
- 912109da16
- 5e0eb1f887
- 7cd7b38cb8
- a2a403ae0a
- 3b12a8b192
- e9896620e4
- 9d00894c5a
- 1cc2b82408
- ad2c020848
- bd119927ff
- 2a8fe3a5b0
- 3427e2c8cf
- e8bb341536
- 08009ffd70
- d4f2326824
- 9edfd25134
- afb3f4ff20
- 9d8e411d76
- fbdcd7cbf1
- 5b0d4fe928
- deb9102b2d
- e0be0d8683
- 1179623002
- ab5c02d538
- 1fd6fedba5
- 69cbf10a80
- 5b2d7445b8
.github/workflows/bootstrap.yml (vendored, 92 changed lines)
@@ -29,14 +29,20 @@ jobs:
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Setup repo and non-root user
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
@@ -59,14 +65,20 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Setup repo and non-root user
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
useradd -m spack-test
chown -R spack-test .
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
@@ -87,9 +99,12 @@ jobs:
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Setup repo and non-root user
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup repo
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
@@ -107,7 +122,8 @@ jobs:
- name: Install dependencies
run: |
brew install cmake bison@2.7 tree
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
@@ -126,8 +142,9 @@ jobs:
- name: Install dependencies
run: |
brew install tree
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: ${{ matrix.python-version }}
- name: Bootstrap clingo
@@ -143,11 +160,12 @@ jobs:
matrix:
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
steps:
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
- uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
with:
python-version: ${{ matrix.python-version }}
- name: Setup repo and non-root user
- name: Setup repo
run: |
git --version
git fetch --unshallow
@@ -171,14 +189,20 @@ jobs:
apt-get install -y \
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- uses: actions/checkout@v2
- name: Setup repo and non-root user
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
useradd -m spack-test
chown -R spack-test .
- name: Bootstrap GnuPG
shell: runuser -u spack-test -- bash {0}
run: |
@@ -200,14 +224,20 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree \
gawk
- uses: actions/checkout@v2
- name: Setup repo and non-root user
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
git fetch --unshallow
. .github/workflows/setup_git.sh
useradd -m spack-test
chown -R spack-test .
- name: Bootstrap GnuPG
shell: runuser -u spack-test -- bash {0}
run: |
@@ -225,7 +255,8 @@ jobs:
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- uses: actions/checkout@v2
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -241,7 +272,8 @@ jobs:
brew install gawk tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- uses: actions/checkout@v2
- name: Checkout
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -249,3 +281,11 @@ jobs:
spack bootstrap untrust github-actions
spack -d gpg list
tree ~/.spack/bootstrap/store/
# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
# introduce breaking behaviorso we have to set `safe.directory` in gitconfig ourselves.
# See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
CHANGELOG.md (26 changed lines)
@@ -1,3 +1,29 @@
# v0.17.3 (2022-07-14)
### Spack bugfixes
* Fix missing chgrp on symlinks in package installations (#30743)
* Allow having non-existing upstreams (#30744, #30746)
* Fix `spack stage` with custom paths (#30448)
* Fix failing call for `spack buildcache save-specfile` (#30637)
* Fix globbing in compiler wrapper (#30699)
# v0.17.2 (2022-04-13)
### Spack bugfixes
* Fix --reuse with upstreams set in an environment (#29680)
* config add: fix parsing of validator error to infer type from oneOf (#29475)
* Fix spack -C command_line_scope used in conjunction with other flags (#28418)
* Use Spec.constrain to construct spec lists for stacks (#28783)
* Fix bug occurring when searching for inherited patches in packages (#29574)
* Fixed a few bugs when manipulating symlinks (#28318, #29515, #29636)
* Fixed a few minor bugs affecting command prompt, terminal title and argument completion (#28279, #28278, #28939, #29405, #29070, #29402)
* Fixed a few bugs affecting the spack ci command (#29518, #29419)
* Fix handling of Intel compiler environment (#29439)
* Fix a few edge cases when reindexing the DB (#28764)
* Remove "Known issues" from documentation (#29664)
* Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)
# v0.17.1 (2021-12-23)
### Spack Bugfixes
@@ -56,7 +56,6 @@ or refer to the full manual below.
basic_usage
workflows
Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
known_issues
.. toctree::
:maxdepth: 2
@@ -1,77 +0,0 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
============
Known Issues
============
This is a list of known bugs in Spack. It provides ways of getting around these
problems if you encounter them.
---------------------------------------------------
Variants are not properly forwarded to dependencies
---------------------------------------------------
**Status:** Expected to be fixed by Spack's new concretizer
Sometimes, a variant of a package can also affect how its dependencies are
built. For example, in order to build MPI support for a package, it may
require that its dependencies are also built with MPI support. In the
``package.py``, this looks like:
.. code-block:: python
depends_on('hdf5~mpi', when='~mpi')
depends_on('hdf5+mpi', when='+mpi')
Spack handles this situation properly for *immediate* dependencies, and
builds ``hdf5`` with the same variant you used for the package that
depends on it. However, for *indirect* dependencies (dependencies of
dependencies), Spack does not backtrack up the DAG far enough to handle
this. Users commonly run into this situation when trying to build R with
X11 support:
.. code-block:: console
$ spack install r+X
...
==> Error: Invalid spec: 'cairo@1.14.8%gcc@6.2.1+X arch=linux-fedora25-x86_64 ^bzip2@1.0.6%gcc@6.2.1+shared arch=linux-fedora25-x86_64 ^font-util@1.3.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^fontconfig@2.12.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^freetype@2.7.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^gettext@0.19.8.1%gcc@6.2.1+bzip2+curses+git~libunistring+libxml2+tar+xz arch=linux-fedora25-x86_64 ^glib@2.53.1%gcc@6.2.1~libmount arch=linux-fedora25-x86_64 ^inputproto@2.3.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^kbproto@1.0.7%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libffi@3.2.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpng@1.6.29%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpthread-stubs@0.4%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libx11@1.6.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxau@1.0.8%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxcb@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxdmcp@1.1.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxext@1.3.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxml2@2.9.4%gcc@6.2.1~python arch=linux-fedora25-x86_64 ^libxrender@0.9.10%gcc@6.2.1 arch=linux-fedora25-x86_64 ^ncurses@6.0%gcc@6.2.1~symlinks arch=linux-fedora25-x86_64 ^openssl@1.0.2k%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pcre@8.40%gcc@6.2.1+utf arch=linux-fedora25-x86_64 ^pixman@0.34.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pkg-config@0.29.2%gcc@6.2.1+internal_glib arch=linux-fedora25-x86_64 ^python@2.7.13%gcc@6.2.1+shared~tk~ucs4 arch=linux-fedora25-x86_64 ^readline@7.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^renderproto@0.11.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^sqlite@3.18.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^tar^util-macros@1.19.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xcb-proto@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xextproto@7.3.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xproto@7.0.31%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xtrans@1.3.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xz@5.2.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^zlib@1.2.11%gcc@6.2.1+pic+shared arch=linux-fedora25-x86_64'.
Package cairo requires variant ~X, but spec asked for +X
A workaround is to explicitly activate the variants of dependencies as well:
.. code-block:: console
$ spack install r+X ^cairo+X ^pango+X
See https://github.com/spack/spack/issues/267 and
https://github.com/spack/spack/issues/2546 for further details.
-----------------------------------------------
depends_on cannot handle recursive dependencies
-----------------------------------------------
**Status:** Not yet a work in progress
Although ``depends_on`` can handle any aspect of Spack's spec syntax,
it currently cannot handle recursive dependencies. If the ``^`` sigil
appears in a ``depends_on`` statement, the concretizer will hang.
For example, something like:
.. code-block:: python
depends_on('mfem+cuda ^hypre+cuda', when='+cuda')
should be rewritten as:
.. code-block:: python
depends_on('mfem+cuda', when='+cuda')
depends_on('hypre+cuda', when='+cuda')
See https://github.com/spack/spack/issues/17660 and
https://github.com/spack/spack/issues/11160 for more details.
lib/spack/env/cc (vendored, 6 changed lines)
@@ -1,4 +1,4 @@
#!/bin/sh
#!/bin/sh -f
# shellcheck disable=SC2034 # evals in this script fool shellcheck
#
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
@@ -768,7 +768,9 @@ if [ "$SPACK_DEBUG" = TRUE ]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
echo "[$mode] $command $input_command" >> "$input_log"
echo "[$mode] ${full_command_list}" >> "$output_log"
IFS="$lsep"
echo "[$mode] "$full_command_list >> "$output_log"
unset IFS
fi
# Execute the full command, preserving spaces with IFS set
@@ -302,13 +302,16 @@ def group_ids(uid=None):
return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]
def chgrp(path, group):
def chgrp(path, group, follow_symlinks=True):
"""Implement the bash chgrp function on a single path"""
if isinstance(group, six.string_types):
gid = grp.getgrnam(group).gr_gid
else:
gid = group
os.chown(path, -1, gid)
if follow_symlinks:
os.chown(path, -1, gid)
else:
os.lchown(path, -1, gid)
def chmod_x(entry, perms):
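The `chgrp` hunk above is the core of the symlink fix listed in the changelog (#30743): when `follow_symlinks` is false, the group is changed with `os.lchown` so the link itself, rather than its target, is modified. A minimal self-contained sketch of that behavior (the helper name here is illustrative, not Spack's API):

```python
import grp
import os


def chgrp_link_aware(path, group, follow_symlinks=True):
    # Accept either a group name or a numeric gid, mirroring the hunk above.
    gid = grp.getgrnam(group).gr_gid if isinstance(group, str) else group
    if follow_symlinks:
        os.chown(path, -1, gid)   # -1 leaves the owner untouched
    else:
        os.lchown(path, -1, gid)  # operate on the symlink itself
```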
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 17, 1)
spack_version_info = (0, 17, 3)
#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)
@@ -685,9 +685,9 @@ def openmp_libs(self):
# packages.yaml), specificially to provide the 'iomp5' libs.
elif '%gcc' in self.spec:
gcc = Executable(self.compiler.cc)
omp_lib_path = gcc(
'--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
with self.compiler.compiler_environment():
omp_lib_path = Executable(self.compiler.cc)(
'--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
omp_libs = LibraryList(omp_lib_path.strip())
if len(omp_libs) < 1:
@@ -728,8 +728,9 @@ def tbb_libs(self):
# TODO: clang(?)
gcc = self._gcc_executable # must be gcc, not self.compiler.cc
cxx_lib_path = gcc(
'--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
with self.compiler.compiler_environment():
cxx_lib_path = gcc(
'--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
libs = tbb_lib + LibraryList(cxx_lib_path.rstrip())
debug_print(libs)
@@ -739,8 +740,9 @@ def tbb_libs(self):
def _tbb_abi(self):
'''Select the ABI needed for linking TBB'''
gcc = self._gcc_executable
matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
gcc('--version', output=str), re.I | re.M)
with self.compiler.compiler_environment():
matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
gcc('--version', output=str), re.I | re.M)
abi = ''
if sys.platform == 'darwin':
pass
@@ -1551,7 +1551,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
# Next attempt to clone your local spack repo into the repro dir
with fs.working_dir(repro_dir):
clone_out = git("clone", spack_git_path,
clone_out = git("clone", spack_git_path, "spack",
output=str, error=os.devnull,
fail_on_error=False)
@@ -754,11 +754,12 @@ def save_specfiles(args):
if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
spec_format = 'json'
save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
@@ -362,8 +362,10 @@ def ci_rebuild(args):
# Write information about spack into an artifact in the repro dir
spack_info = spack_ci.get_spack_info()
spack_info_file = os.path.join(repro_dir, 'spack_info.txt')
with open(spack_info_file, 'w') as fd:
fd.write('\n{0}\n'.format(spack_info))
with open(spack_info_file, 'wb') as fd:
fd.write(b'\n')
fd.write(spack_info.encode('utf8'))
fd.write(b'\n')
# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a full hash match already
@@ -117,7 +117,7 @@ def format(self, cmd):
'virtual': '_providers',
'namespace': '_repos',
'hash': '_all_resource_hashes',
'pytest': '_tests',
'pytest': '_unit_tests',
}
@@ -6,6 +6,7 @@
from __future__ import print_function
import os
import sys
import llnl.util.tty as tty
@@ -142,9 +143,10 @@ def repo_list(args):
except spack.repo.RepoError:
continue
msg = "%d package repositor" % len(repos)
msg += "y." if len(repos) == 1 else "ies."
tty.msg(msg)
if sys.stdout.isatty():
msg = "%d package repositor" % len(repos)
msg += "y." if len(repos) == 1 else "ies."
tty.msg(msg)
if not repos:
return
@@ -27,12 +27,6 @@ def setup_parser(subparser):
def stage(parser, args):
# We temporarily modify the working directory when setting up a stage, so we need to
# convert this to an absolute path here in order for it to remain valid later.
custom_path = os.path.abspath(args.path) if args.path else None
if custom_path:
spack.stage.create_stage_root(custom_path)
if not args.specs:
env = ev.active_environment()
if env:
@@ -54,6 +48,10 @@ def stage(parser, args):
specs = spack.cmd.parse_specs(args.specs, concretize=False)
# We temporarily modify the working directory when setting up a stage, so we need to
# convert this to an absolute path here in order for it to remain valid later.
custom_path = os.path.abspath(args.path) if args.path else None
# prevent multiple specs from extracting in the same folder
if len(specs) > 1 and custom_path:
tty.die("`--path` requires a single spec, but multiple were provided")
@@ -325,7 +325,7 @@ def accessible_exe(exe):
# setup environment before verifying in case we have executable names
# instead of absolute paths
with self._compiler_environment():
with self.compiler_environment():
missing = [cmp for cmp in (self.cc, self.cxx, self.f77, self.fc)
if cmp and not accessible_exe(cmp)]
if missing:
@@ -407,7 +407,7 @@ def _get_compiler_link_paths(self, paths):
compiler_exe.add_default_arg(flag)
output = ''
with self._compiler_environment():
with self.compiler_environment():
output = str(compiler_exe(
self.verbose_flag, fin, '-o', fout,
output=str, error=str)) # str for py2
@@ -523,7 +523,7 @@ def get_real_version(self):
modifications) to enable the compiler to run properly on any platform.
"""
cc = spack.util.executable.Executable(self.cc)
with self._compiler_environment():
with self.compiler_environment():
output = cc(self.version_argument,
output=str, error=str,
ignore_errors=tuple(self.ignore_version_errors))
@@ -597,7 +597,7 @@ def __str__(self):
str(self.operating_system)))))
@contextlib.contextmanager
def _compiler_environment(self):
def compiler_environment(self):
# store environment to replace later
backup_env = os.environ.copy()
@@ -685,7 +685,7 @@ def find_spec(spec, condition, default=None):
visited.add(id(relative))
# Then search all other relatives in the DAG *except* spec
for relative in spec.root.traverse(deptypes=all):
for relative in spec.root.traverse(deptype='all'):
if relative is spec:
continue
if id(relative) in visited:
@@ -1090,11 +1090,11 @@ def get_valid_type(path):
jsonschema_error = e.validation_error
if jsonschema_error.validator == 'type':
return types[jsonschema_error.validator_value]()
elif jsonschema_error.validator == 'anyOf':
elif jsonschema_error.validator in ('anyOf', 'oneOf'):
for subschema in jsonschema_error.validator_value:
anyof_type = subschema.get('type')
if anyof_type is not None:
return types[anyof_type]()
schema_type = subschema.get('type')
if schema_type is not None:
return types[schema_type]()
else:
return type(None)
raise ConfigError("Cannot determine valid type for path '%s'." % path)
@@ -355,10 +355,10 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
self.prefix_fail_path = os.path.join(self._db_dir, 'prefix_failures')
# Create needed directories and files
if not os.path.exists(self._db_dir):
if not is_upstream and not os.path.exists(self._db_dir):
fs.mkdirp(self._db_dir)
if not os.path.exists(self._failure_dir) and not is_upstream:
if not is_upstream and not os.path.exists(self._failure_dir):
fs.mkdirp(self._failure_dir)
self.is_upstream = is_upstream
@@ -937,22 +937,15 @@ def _construct_from_directory_layout(self, directory_layout, old_data):
tty.debug(
'RECONSTRUCTING FROM OLD DB: {0}'.format(entry.spec))
try:
layout = spack.store.layout
if entry.spec.external:
layout = None
install_check = True
else:
install_check = layout.check_installed(entry.spec)
if install_check:
kwargs = {
'spec': entry.spec,
'directory_layout': layout,
'explicit': entry.explicit,
'installation_time': entry.installation_time # noqa: E501
}
self._add(**kwargs)
processed_specs.add(entry.spec)
layout = None if entry.spec.external else spack.store.layout
kwargs = {
'spec': entry.spec,
'directory_layout': layout,
'explicit': entry.explicit,
'installation_time': entry.installation_time # noqa: E501
}
self._add(**kwargs)
processed_specs.add(entry.spec)
except Exception as e:
# Something went wrong, so the spec was not restored
# from old data
@@ -1040,9 +1033,7 @@ def _read(self):
self._read_from_file(self._index_path)
return
elif self.is_upstream:
raise UpstreamDatabaseLockingError(
"No database index file is present, and upstream"
" databases cannot generate an index file")
tty.warn('upstream not found: {0}'.format(self._index_path))
def _add(
self,
@@ -1096,24 +1087,28 @@ def _add(
}
self._add(dep, directory_layout, **extra_args)
if key not in self._data:
installed = bool(spec.external)
path = None
if not spec.external and directory_layout:
path = directory_layout.path_for_spec(spec)
if path in self._installed_prefixes:
raise Exception("Install prefix collision.")
try:
directory_layout.check_installed(spec)
installed = True
except DirectoryLayoutError as e:
tty.warn(
'Dependency missing: may be deprecated or corrupted:',
path, str(e))
# Make sure the directory layout agrees whether the spec is installed
if not spec.external and directory_layout:
path = directory_layout.path_for_spec(spec)
installed = False
try:
directory_layout.ensure_installed(spec)
installed = True
self._installed_prefixes.add(path)
elif spec.external_path:
path = spec.external_path
except DirectoryLayoutError as e:
msg = ("{0} is being {1} in the database with prefix {2}, "
"but this directory does not contain an installation of "
"the spec, due to: {3}")
action = "updated" if key in self._data else "registered"
tty.warn(msg.format(spec.short_spec, action, path, str(e)))
elif spec.external_path:
path = spec.external_path
installed = True
else:
path = None
installed = True
if key not in self._data:
# Create a new install record with no deps initially.
new_spec = spec.copy(deps=False)
extra_args = {
@@ -1141,9 +1136,8 @@ def _add(
new_spec._full_hash = spec._full_hash
else:
# If it is already there, mark it as installed and update
# installation time
self._data[key].installed = True
# It is already in the database
self._data[key].installed = installed
self._data[key].installation_time = _now()
self._data[key].explicit = explicit
@@ -1210,7 +1204,7 @@ def _remove(self, spec):
# This install prefix is now free for other specs to use, even if the
# spec is only marked uninstalled.
if not rec.spec.external:
if not rec.spec.external and rec.installed:
self._installed_prefixes.remove(rec.path)
if rec.ref_count > 0:
@@ -233,13 +233,20 @@ def create_install_directory(self, spec):
self.write_spec(spec, self.spec_file_path(spec))
def check_installed(self, spec):
def ensure_installed(self, spec):
"""
Throws DirectoryLayoutError if:
1. spec prefix does not exist
2. spec prefix does not contain a spec file
3. the spec file does not correspond to the spec
"""
_check_concrete(spec)
path = self.path_for_spec(spec)
spec_file_path = self.spec_file_path(spec)
if not os.path.isdir(path):
return None
raise InconsistentInstallDirectoryError(
"Install prefix {0} does not exist.".format(path))
if not os.path.isfile(spec_file_path):
raise InconsistentInstallDirectoryError(
@@ -248,7 +255,7 @@ def check_installed(self, spec):
installed_spec = self.read_spec(spec_file_path)
if installed_spec == spec:
return path
return
# DAG hashes currently do not include build dependencies.
#
@@ -261,7 +268,7 @@ def check_installed(self, spec):
# may be installed. This means for example that for two instances
# that differ only in CMake version used to build, only one will
# be installed.
return path
return
if spec.dag_hash() == installed_spec.dag_hash():
raise SpecHashCollisionError(spec, installed_spec)
@@ -144,10 +144,12 @@ def activate(env, use_env_repo=False):
# Check if we need to reinitialize the store due to pushing the configuration
# below.
store_before_pushing = spack.config.get('config:install_tree')
install_tree_before = spack.config.get('config:install_tree')
upstreams_before = spack.config.get('upstreams')
prepare_config_scope(env)
store_after_pushing = spack.config.get('config:install_tree')
if store_before_pushing != store_after_pushing:
install_tree_after = spack.config.get('config:install_tree')
upstreams_after = spack.config.get('upstreams')
if install_tree_before != install_tree_after or upstreams_before != upstreams_after:
# Hack to store the state of the store before activation
env.store_token = spack.store.reinitialize()
@@ -489,8 +491,14 @@ def regenerate(self, all_specs, roots):
raise SpackEnvironmentViewError(msg)
os.rename(tmp_symlink_name, self.root)
# remove old_root
if old_root and os.path.exists(old_root):
# Remove the old root when it's in the same folder as the new root. This
# guards against removal of an arbitrary path when the original symlink in
# self.root was not created by the environment, but by the user.
if (
old_root and
os.path.exists(old_root) and
os.path.samefile(os.path.dirname(new_root), os.path.dirname(old_root))
):
try:
shutil.rmtree(old_root)
except (IOError, OSError) as e:
@@ -39,7 +39,7 @@ def activate_header(env, shell, prompt=None):
#
else:
if 'color' in os.getenv('TERM', '') and prompt:
prompt = colorize('@G{%s} ' % prompt, color=True)
prompt = colorize('@G{%s}' % prompt, color=True)
cmds += 'export SPACK_ENV=%s;\n' % env.path
cmds += "alias despacktivate='spack env deactivate';\n"
@@ -73,7 +73,7 @@ def deactivate_header(shell):
cmds += 'if [ ! -z ${SPACK_ENV+x} ]; then\n'
cmds += 'unset SPACK_ENV; export SPACK_ENV;\n'
cmds += 'fi;\n'
cmds += 'unalias despacktivate;\n'
cmds += 'alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n'
cmds += 'if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n'
cmds += ' if [ "$SPACK_OLD_PS1" = \'$$$$\' ]; then\n'
cmds += ' unset PS1; export PS1;\n'
@@ -632,9 +632,14 @@ def __init__(self, pkg_count):
# Counters used for showing status information in the terminal title
self.pkg_num = 0
self.pkg_count = pkg_count
self.pkg_ids = set()
def next_pkg(self):
self.pkg_num += 1
def next_pkg(self, pkg):
pkg_id = package_id(pkg)
if pkg_id not in self.pkg_ids:
self.pkg_num += 1
self.pkg_ids.add(pkg_id)
def set(self, text):
if not spack.config.get('config:terminal_title', False):
@@ -1500,8 +1505,6 @@ def install(self):
term_title = TermTitle(len(self.build_pq))
while self.build_pq:
term_title.next_pkg()
task = self._pop_task()
if task is None:
continue
@@ -1511,6 +1514,7 @@ def install(self):
keep_prefix = install_args.get('keep_prefix')
pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
term_title.next_pkg(pkg)
term_title.set('Processing {0}'.format(pkg.name))
tty.debug('Processing {0}: task={1}'.format(pkg_id, task))
# Ensure that the current spec has NO uninstalled dependencies,
@@ -819,13 +819,13 @@ def _main(argv=None):
# scopes, then environment configuration here.
# ------------------------------------------------------------------------
# ensure options on spack command come before everything
setup_main_options(args)
# make spack.config aware of any command line configuration scopes
if args.config_scopes:
spack.config.command_line_scopes = args.config_scopes
# ensure options on spack command come before everything
setup_main_options(args)
# activate an environment if one was specified on the command line
env_format_error = None
if not args.no_env:
@@ -373,6 +373,21 @@ def fullname(self):
"""Name of this package, including the namespace"""
return '%s.%s' % (self.namespace, self.name)
@property
def fullnames(self):
"""
Fullnames for this package and any packages from which it inherits.
"""
fullnames = []
for cls in inspect.getmro(self):
namespace = getattr(cls, 'namespace', None)
if namespace:
fullnames.append('%s.%s' % (namespace, self.name))
if namespace == 'builtin':
# builtin packages cannot inherit from other repos
break
return fullnames
@property
def name(self):
"""The name of this package.
@@ -450,7 +465,7 @@ def view_file_conflicts(self, view, merge_map):
Alternative implementations may allow some of the files to exist in
the view (in this case they would be omitted from the results).
"""
return set(dst for dst in merge_map.values() if os.path.exists(dst))
return set(dst for dst in merge_map.values() if os.path.lexists(dst))
def add_files_to_view(self, view, merge_map):
"""Given a map of package files to destination paths in the view, add
@@ -459,7 +474,7 @@ def add_files_to_view(self, view, merge_map):
linked into the view already include the file.
"""
for src, dst in merge_map.items():
if not os.path.exists(dst):
if not os.path.lexists(dst):
view.link(src, dst, spec=self.spec)
def remove_files_from_view(self, view, merge_map):
@@ -863,6 +878,10 @@ def fullname(self):
"""Name of this package, including namespace: namespace.name."""
return type(self).fullname
@property
def fullnames(self):
return type(self).fullnames
@property
def name(self):
"""Name of this package (the module without parent modules)."""
@@ -1182,7 +1201,7 @@ def extendee_spec(self):
name = next(iter(self.extendees))
# If the extendee is in the spec's deps already, return that.
for dep in self.spec.traverse(deptypes=('link', 'run')):
for dep in self.spec.traverse(deptype=('link', 'run')):
if name == dep.name:
return dep
@@ -348,8 +348,12 @@ def patch_for_package(self, sha256, pkg):
"Couldn't find patch for package %s with sha256: %s"
% (pkg.fullname, sha256))
patch_dict = sha_index.get(pkg.fullname)
if not patch_dict:
# Find patches for this class or any class it inherits from
for fullname in pkg.fullnames:
patch_dict = sha_index.get(fullname)
if patch_dict:
break
else:
raise NoSuchPatchError(
"Couldn't find patch for package %s with sha256: %s"
% (pkg.fullname, sha256))
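The patch lookup above walks `pkg.fullnames` (the package plus every package it inherits from) and relies on Python's `for`/`else`: the `else` branch runs only when the loop finishes without `break`, i.e. when no inherited name had a matching patch. A small generic sketch of that control flow, with made-up names rather than Spack's:

```python
def find_first(index, keys):
    """Return the first truthy value keyed by any entry of keys."""
    for key in keys:
        value = index.get(key)
        if value:
            break
    else:
        # Reached only if the loop never hit `break`.
        raise KeyError('none of {0} found'.format(keys))
    return value


print(find_first({'builtin.foo': 'patch-a'}, ['myrepo.foo', 'builtin.foo']))  # patch-a
```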
@@ -3159,6 +3159,15 @@ def constrain(self, other, deps=True):
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
changed = False
if not self.name and other.name:
self.name = other.name
changed = True
if not self.namespace and other.namespace:
self.namespace = other.namespace
changed = True
if self.compiler is not None and other.compiler is not None:
changed |= self.compiler.constrain(other.compiler)
elif self.compiler is None:
@@ -4259,7 +4268,7 @@ def tree(self, **kwargs):
out = ""
for d, dep_spec in self.traverse_edges(
order='pre', cover=cover, depth=True, deptypes=deptypes):
order='pre', cover=cover, depth=True, deptype=deptypes):
node = dep_spec.spec
if prefix is not None:
@@ -11,19 +11,6 @@
from spack.spec import Spec
def spec_ordering_key(s):
if s.startswith('^'):
return 5
elif s.startswith('/'):
return 4
elif s.startswith('%'):
return 3
elif any(s.startswith(c) for c in '~-+@') or '=' in s:
return 2
else:
return 1
class SpecList(object):
def __init__(self, name='specs', yaml_list=None, reference=None):
@@ -177,30 +164,36 @@ def __getitem__(self, key):
return self.specs[key]
def _expand_matrix_constraints(object, specify=True):
# recurse so we can handle nexted matrices
def _expand_matrix_constraints(matrix_config):
# recurse so we can handle nested matrices
expanded_rows = []
for row in object['matrix']:
for row in matrix_config['matrix']:
new_row = []
for r in row:
if isinstance(r, dict):
# Flatten the nested matrix into a single row of constraints
new_row.extend(
[[' '.join(c)]
for c in _expand_matrix_constraints(r, specify=False)])
[[' '.join([str(c) for c in expanded_constraint_list])]
for expanded_constraint_list in _expand_matrix_constraints(r)]
)
else:
new_row.append([r])
expanded_rows.append(new_row)
excludes = object.get('exclude', []) # only compute once
sigil = object.get('sigil', '')
excludes = matrix_config.get('exclude', []) # only compute once
sigil = matrix_config.get('sigil', '')
results = []
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
flat_combo = [constraint for list in combo for constraint in list]
ordered_combo = sorted(flat_combo, key=spec_ordering_key)
flat_combo = [constraint for constraint_list in combo
for constraint in constraint_list]
flat_combo = [Spec(x) for x in flat_combo]
test_spec = flat_combo[0].copy()
for constraint in flat_combo[1:]:
test_spec.constrain(constraint)
test_spec = Spec(' '.join(ordered_combo))
# Abstract variants don't have normal satisfaction semantics
# Convert all variants to concrete types.
# This method is best effort, so all existing variants will be
@@ -214,14 +207,12 @@ def _expand_matrix_constraints(object, specify=True):
if any(test_spec.satisfies(x) for x in excludes):
continue
if sigil:
flat_combo[0] = Spec(sigil + str(flat_combo[0]))
if sigil: # add sigil if necessary
ordered_combo[0] = sigil + ordered_combo[0]
# Add to list of constraints
if specify:
results.append([Spec(x) for x in ordered_combo])
else:
results.append(ordered_combo)
results.append(flat_combo)
return results
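The rewritten `_expand_matrix_constraints` above combines one entry from each matrix row with `itertools.product` and then merges the pieces with `Spec` and `constrain` instead of sorting raw strings. A rough sketch of just the row-combination step, using plain strings so it does not depend on Spack's `Spec` class:

```python
import itertools

# Each row lists alternative constraints; a combination picks one entry per row.
matrix = [
    ['hypre', 'libelf'],   # roots
    ['~shared'],           # variant applied to every combination
    ['%gcc@4.5.0'],        # compiler applied to every combination
]

combos = [' '.join(choice) for choice in itertools.product(*matrix)]
print(combos)
# ['hypre ~shared %gcc@4.5.0', 'libelf ~shared %gcc@4.5.0']
```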
@@ -533,7 +533,7 @@ def steal_source(self, dest):
for entry in hidden_entries + entries:
if os.path.isdir(entry):
d = os.path.join(dest, os.path.basename(entry))
shutil.copytree(entry, d)
shutil.copytree(entry, d, symlinks=True)
else:
shutil.copy2(entry, dest)
@@ -694,8 +694,8 @@ def _add_to_root_stage(self):
source_path = os.path.join(self.source_path, key)
if not os.path.exists(destination_path):
tty.info('Moving resource stage\n\tsource : '
'{stage}\n\tdestination : {destination}'.format(
tty.info('Moving resource stage\n\tsource: '
'{stage}\n\tdestination: {destination}'.format(
stage=source_path, destination=destination_path
))
@@ -239,6 +239,12 @@ def test_config_add_ordered_dict(mutable_empty_config):
"""
def test_config_add_interpret_oneof(mutable_empty_config):
# Regression test for a bug that would raise a validation error
config('add', 'packages:all:target:[x86_64]')
config('add', 'packages:all:variants:~shared')
def test_config_add_invalid_fails(mutable_empty_config):
config('add', 'packages:all:variants:+debug')
with pytest.raises(
@@ -2693,3 +2693,14 @@ def test_activate_temp(monkeypatch, tmpdir):
if ev.spack_env_var in line)
assert str(tmpdir) in active_env_var
assert ev.is_env_dir(str(tmpdir))
def test_env_view_fail_if_symlink_points_elsewhere(tmpdir, install_mockery, mock_fetch):
view = str(tmpdir.join('view'))
# Put a symlink to an actual directory in view
non_view_dir = str(tmpdir.mkdir('dont-delete-me'))
os.symlink(non_view_dir, view)
with ev.create('env', with_view=view):
add('libelf')
install('--fake')
assert os.path.isdir(non_view_dir)
@@ -39,7 +39,6 @@ def check_stage_path(monkeypatch, tmpdir):
def fake_stage(pkg, mirror_only=False):
assert pkg.path == expected_path
assert os.path.isdir(expected_path), expected_path
monkeypatch.setattr(spack.package.PackageBase, 'do_stage', fake_stage)
@@ -876,8 +876,8 @@ def __init__(self, root):
def path_for_spec(self, spec):
return '/'.join([self.root, spec.name + '-' + spec.dag_hash()])
def check_installed(self, spec):
return True
def ensure_installed(self, spec):
pass
@pytest.fixture()
@@ -11,6 +11,7 @@
import functools
import json
import os
import shutil
import pytest
@@ -909,3 +910,51 @@ def test_database_works_with_empty_dir(tmpdir):
db.query()
# Check that reading an empty directory didn't create a new index.json
assert not os.path.exists(db._index_path)
def test_reindex_removed_prefix_is_not_installed(mutable_database, mock_store, capfd):
"""When a prefix of a dependency is removed and the database is reindexed,
the spec should still be added through the dependent, but should be listed as
not installed."""
# Remove libelf from the filesystem
prefix = mutable_database.query_one('libelf').prefix
assert prefix.startswith(str(mock_store))
shutil.rmtree(prefix)
# Reindex should pick up libelf as a dependency of libdwarf
spack.store.store.reindex()
# Reindexing should warn about libelf not being found on the filesystem
err = capfd.readouterr()[1]
assert 'this directory does not contain an installation of the spec' in err
# And we should still have libelf in the database, but not installed.
assert not mutable_database.query_one('libelf', installed=True)
assert mutable_database.query_one('libelf', installed=False)
def test_reindex_when_all_prefixes_are_removed(mutable_database, mock_store):
# Remove all non-external installations from the filesystem
for spec in spack.store.db.query_local():
if not spec.external:
assert spec.prefix.startswith(str(mock_store))
shutil.rmtree(spec.prefix)
# Make sure we have some explicitly installed specs
num = len(mutable_database.query_local(installed=True, explicit=True))
assert num > 0
# Reindex uses the current index to repopulate itself
spack.store.store.reindex()
# Make sure all explicit specs are still there, but are now uninstalled.
specs = mutable_database.query_local(installed=False, explicit=True)
assert len(specs) == num
# And make sure they can be removed from the database (covers the case where
# `ref_count == 0 and not installed`, which hits some obscure branches.
for s in specs:
mutable_database.remove(s)
assert len(mutable_database.query_local(installed=False, explicit=True)) == 0
@@ -834,7 +834,7 @@ def test_setup_install_dir_grp(install_mockery, monkeypatch, capfd):
def _get_group(spec):
return mock_group
def _chgrp(path, group):
def _chgrp(path, group, follow_symlinks=True):
tty.msg(mock_chgrp_msg.format(path, group))
monkeypatch.setattr(prefs, 'get_package_group', _get_group)
@@ -974,7 +974,6 @@ def test_canonical_deptype(self):
canonical_deptype(('foo',))
def test_invalid_literal_spec(self):
# Can't give type 'build' to a top-level spec
with pytest.raises(spack.spec.SpecParseError):
Spec.from_literal({'foo:build': None})
@@ -982,3 +981,11 @@ def test_invalid_literal_spec(self):
# Can't use more than one ':' separator
with pytest.raises(KeyError):
Spec.from_literal({'foo': {'bar:build:link': None}})
def test_spec_tree_respect_deptypes(self):
# Version-test-root uses version-test-pkg as a build dependency
s = Spec('version-test-root').concretized()
out = s.tree(deptypes='all')
assert 'version-test-pkg' in out
out = s.tree(deptypes=('link', 'run'))
assert 'version-test-pkg' not in out
@@ -45,24 +45,34 @@ def test_spec_list_expansions(self):
assert speclist.specs_as_constraints == self.default_constraints
assert speclist.specs == self.default_specs
def test_spec_list_constraint_ordering(self):
specs = [{'matrix': [
@pytest.mark.regression('28749')
@pytest.mark.parametrize('specs,expected', [
# Constraints are ordered randomly
([{'matrix': [
['^zmpi'],
['%gcc@4.5.0'],
['hypre', 'libelf'],
['~shared'],
['cflags=-O3', 'cflags="-g -O0"'],
['^foo']
]}]
]}], [
'hypre cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi',
'hypre cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
'libelf cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi',
'libelf cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
]),
# A constraint affects both the root and a dependency
([{'matrix': [
['gromacs'],
['%gcc'],
['+plumed ^plumed%gcc']
]}], [
'gromacs+plumed%gcc ^plumed%gcc'
])
])
def test_spec_list_constraint_ordering(self, specs, expected):
speclist = SpecList('specs', specs)
expected_specs = [
Spec('hypre cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi'),
Spec('hypre cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi'),
Spec('libelf cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi'),
Spec('libelf cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi'),
]
expected_specs = [Spec(x) for x in expected]
assert speclist.specs == expected_specs
def test_spec_list_add(self):
@@ -1228,3 +1228,15 @@ def test_merge_abstract_anonymous_specs(specs, expected):
specs = [Spec(x) for x in specs]
result = spack.spec.merge_abstract_anonymous_specs(*specs)
assert result == Spec(expected)
@pytest.mark.parametrize('anonymous,named,expected', [
('+plumed', 'gromacs', 'gromacs+plumed'),
('+plumed ^plumed%gcc', 'gromacs', 'gromacs+plumed ^plumed%gcc'),
('+plumed', 'builtin.gromacs', 'builtin.gromacs+plumed')
])
def test_merge_anonymous_spec_with_named_spec(anonymous, named, expected):
s = Spec(anonymous)
changed = s.constrain(named)
assert changed
assert s == Spec(expected)
@@ -22,6 +22,8 @@ def prepare_environment_for_tests():
def test_is_system_path():
assert(envutil.is_system_path('/usr/bin'))
assert(not envutil.is_system_path('/nonsense_path/bin'))
assert(not envutil.is_system_path(''))
assert(not envutil.is_system_path(None))
test_paths = ['/usr/bin',
@@ -630,6 +630,14 @@ def test_version_wrong_idx_type():
v['0:']
@pytest.mark.regression('29170')
def test_version_range_satisfies_means_nonempty_intersection():
x = VersionRange('3.7.0', '3')
y = VersionRange('3.6.0', '3.6.0')
assert not x.satisfies(y)
assert not y.satisfies(x)
@pytest.mark.regression('26482')
def test_version_list_with_range_included_in_concrete_version_interpreted_as_range():
# Note: this test only tests whether we can construct a version list of a range
@@ -59,7 +59,7 @@ def is_system_path(path):
Returns:
True or False
"""
return os.path.normpath(path) in system_dirs
return path and os.path.normpath(path) in system_dirs
def filter_system_paths(paths):
@@ -44,7 +44,7 @@ def set_permissions(path, perms, group=None):
fs.chmod_x(path, perms)
if group:
fs.chgrp(path, group)
fs.chgrp(path, group, follow_symlinks=False)
class InvalidPermissionsError(SpackError):
@@ -586,35 +586,23 @@ def __contains__(self, other):
@coerced
def satisfies(self, other):
"""A VersionRange satisfies another if some version in this range
would satisfy some version in the other range. To do this it must
either:
a) Overlap with the other range
b) The start of this range satisfies the end of the other range.
This is essentially the same as overlaps(), but overlaps assumes
that its arguments are specific. That is, 4.7 is interpreted as
4.7.0.0.0.0... . This function assumes that 4.7 would be satisfied
by 4.7.3.5, etc.
Rationale:
If a user asks for gcc@4.5:4.7, and a package is only compatible with
gcc@4.7.3:4.8, then that package should be able to build under the
constraints. Just using overlaps() would not work here.
Note that we don't need to check whether the end of this range
would satisfy the start of the other range, because overlaps()
already covers that case.
Note further that overlaps() is a symmetric operation, while
satisfies() is not.
"""
return (self.overlaps(other) or
# if either self.start or other.end are None, then this can't
# satisfy, or overlaps() would've taken care of it.
self.start and other.end and self.start.satisfies(other.end))
x.satisfies(y) in general means that x and y have a
non-zero intersection. For VersionRange this means they overlap.
`satisfies` is a commutative binary operator, meaning that
x.satisfies(y) if and only if y.satisfies(x).
Note: in some cases we have the keyword x.satisfies(y, strict=True)
to mean strict set inclusion, which is not commutative. However, this
lacks in VersionRange for unknown reasons.
Examples
- 1:3 satisfies 2:4, as their intersection is 2:3.
- 1:2 does not satisfy 3:4, as their intersection is empty.
- 4.5:4.7 satisfies 4.7.2:4.8, as their intersection is 4.7.2:4.7
"""
return self.overlaps(other)
@coerced
def overlaps(self, other):
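The new docstring above defines `satisfies` for version ranges as "non-empty intersection", which is symmetric. A tiny sketch of that idea with plain numeric intervals (not Spack's version types):

```python
def intervals_intersect(a, b):
    """True when closed intervals a = (lo, hi) and b = (lo, hi) share a point."""
    return max(a[0], b[0]) <= min(a[1], b[1])


assert intervals_intersect((1, 3), (2, 4))      # intersection is [2, 3]
assert not intervals_intersect((1, 2), (3, 4))  # empty intersection
assert intervals_intersect((2, 4), (1, 3))      # symmetric, as the docstring states
```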
@@ -123,7 +123,7 @@ _bash_completion_spack() {
# If the cursor is in the middle of the line, like:
# `spack -d [] install`
# COMP_WORDS will not contain the empty character, so we have to add it.
if [[ "${COMP_LINE:$COMP_POINT:1}" == " " ]]
if [[ "${COMP_LINE:$COMP_POINT-1:1}" == " " ]]
then
cur=""
fi
@@ -208,10 +208,10 @@ _repos() {
SPACK_COMPREPLY="$SPACK_REPOS"
}
_tests() {
_unit_tests() {
if [[ -z "${SPACK_TESTS:-}" ]]
then
SPACK_TESTS="$(spack test -l)"
SPACK_TESTS="$(spack unit-test -l)"
fi
SPACK_COMPREPLY="$SPACK_TESTS"
}
@@ -70,6 +70,16 @@ _test_debug_functions() {
emulate -L sh
fi
# Test whether `spack install --verb[] spec` completes to `spack install --verbose spec`
COMP_LINE='spack install --verb spec'
COMP_POINT=20
COMP_WORDS=(spack install --verb spec)
COMP_CWORD=2
COMP_KEY=9
COMP_TYPE=64
_bash_completion_spack
contains "--verbose" echo "${COMPREPLY[@]}"
# This is a particularly tricky case that involves the following situation:
# `spack -d [] install `
# Here, [] represents the cursor, which is in the middle of the line.
@@ -123,7 +123,7 @@ _bash_completion_spack() {
# If the cursor is in the middle of the line, like:
# `spack -d [] install`
# COMP_WORDS will not contain the empty character, so we have to add it.
if [[ "${COMP_LINE:$COMP_POINT:1}" == " " ]]
if [[ "${COMP_LINE:$COMP_POINT-1:1}" == " " ]]
then
cur=""
fi
@@ -208,10 +208,10 @@ _repos() {
SPACK_COMPREPLY="$SPACK_REPOS"
}
_tests() {
_unit_tests() {
if [[ -z "${SPACK_TESTS:-}" ]]
then
SPACK_TESTS="$(spack test -l)"
SPACK_TESTS="$(spack unit-test -l)"
fi
SPACK_COMPREPLY="$SPACK_TESTS"
}
@@ -1780,7 +1780,7 @@ _spack_unit_test() {
then
SPACK_COMPREPLY="-h --help -H --pytest-help -l --list -L --list-long -N --list-names --extension -s -k --showlocals"
else
_tests
_unit_tests
fi
}