Compare commits
69 commits: v0.16.2...features/i
| Author | SHA1 | Date |
|---|---|---|
| | 9479be5618 | |
| | 4dd854d31b | |
| | 25fd25a77d | |
| | 842867dd89 | |
| | 49866c9013 | |
| | 983fb11dee | |
| | b33969598a | |
| | 8b2c7a6c65 | |
| | f40492b7d4 | |
| | 164fc4ee95 | |
| | 408824f365 | |
| | f6549849e5 | |
| | 348cbe143c | |
| | 87689f7cc8 | |
| | d9f6ef9df4 | |
| | 2c0091df3f | |
| | 28a3b30c53 | |
| | 4e35df4b61 | |
| | c468d6bed2 | |
| | 3f0984e5e1 | |
| | fd07decd27 | |
| | aa8dd782cd | |
| | 617f2ac714 | |
| | aee3b4a1e8 | |
| | 01c9f3edc3 | |
| | 5c623b03b1 | |
| | cb4a08b3e0 | |
| | fa66d683e4 | |
| | fb2ac2077d | |
| | b62401ec8f | |
| | 8a54817d4e | |
| | 5088d799eb | |
| | c25f15b7d5 | |
| | d7db6068c5 | |
| | 236796577d | |
| | b490d65f28 | |
| | 92d540fde7 | |
| | 4609a126ba | |
| | f30aeb35ae | |
| | 439b329c38 | |
| | f613e10f24 | |
| | f92e52cdc8 | |
| | 16d5cc2c99 | |
| | d6e44b94d6 | |
| | 5015635506 | |
| | c417827954 | |
| | e75b76f433 | |
| | 1522d1fac6 | |
| | 5129d84304 | |
| | 14a9359395 | |
| | 8f3594564c | |
| | 1b7a5e53a6 | |
| | dd54cb4c7a | |
| | db9b7a509a | |
| | a2801a1384 | |
| | cb22bcf6f1 | |
| | c9aac3e221 | |
| | a680df8453 | |
| | 932f128bc8 | |
| | 95f5419502 | |
| | bc5c475909 | |
| | 10f784338b | |
| | 3b9155239b | |
| | 676d68a979 | |
| | 3069631f37 | |
| | eca1370abc | |
| | b1dc3e787b | |
| | 8b431d1774 | |
| | a0a15b5cd0 | |
20  .github/workflows/linux_unit_tests.yaml  (vendored)
@@ -15,7 +15,6 @@ jobs:
    strategy:
      matrix:
        python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
        concretizer: ['original', 'clingo']

    steps:
    - uses: actions/checkout@v2
@@ -51,23 +50,16 @@ jobs:
        mkdir -p ${KCOV_ROOT}/build
        cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
        make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
    - name: Bootstrap clingo from sources
      if: ${{ matrix.concretizer == 'clingo' }}
      run: |
        . share/spack/setup-env.sh
        spack external find --not-buildable cmake bison
        spack -v solve zlib
    - name: Run unit tests
      env:
        COVERAGE: true
        SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
      run: |
        share/spack/qa/run-unit-tests
        coverage combine
        coverage xml
    - uses: codecov/codecov-action@v1
      with:
        flags: unittests,linux,${{ matrix.concretizer }}
        flags: unittests,linux
  shell:
    runs-on: ubuntu-latest
    steps:
@@ -111,7 +103,6 @@ jobs:
    - uses: codecov/codecov-action@v1
      with:
        flags: shelltests,linux

  centos6:
    # Test for Python2.6 run on Centos 6
    runs-on: ubuntu-latest
@@ -126,16 +117,15 @@ jobs:
        git fetch origin ${{ github.ref }}:test-branch
        git checkout test-branch
        share/spack/qa/run-unit-tests

  clingo-cffi:
    # Test for the clingo based solver (using clingo-cffi)
  clingo:
    # Test for the clingo based solver
    runs-on: ubuntu-latest
    container: spack/github-actions:clingo-cffi
    container: spack/github-actions:clingo
    steps:
    - name: Run unit tests
      run: |
        whoami && echo PWD=$PWD && echo HOME=$HOME && echo SPACK_TEST_SOLVER=$SPACK_TEST_SOLVER
        python3 -c "import clingo; print(hasattr(clingo.Symbol, '_rep'), clingo.__version__)"
        which clingo && clingo --version
        git clone https://github.com/spack/spack.git && cd spack
        git fetch origin ${{ github.ref }}:test-branch
        git checkout test-branch
56  CHANGELOG.md
@@ -1,59 +1,3 @@
# v0.16.2 (2021-05-22)

* Major performance improvement for `spack load` and other commands. (#23661)
* `spack fetch` is now environment-aware. (#19166)
* Numerous fixes for the new, `clingo`-based concretizer. (#23016, #23307,
  #23090, #22896, #22534, #20644, #20537, #21148)
* Support for automatically bootstrapping `clingo` from source. (#20652, #20657,
  #21364, #21446, #21913, #22354, #22444, #22460, #22489, #22610, #22631)
* Python 3.10 support: `collections.abc` (#20441)
* Fix import issues by using `__import__` instead of the Spack package importer.
  (#23288, #23290)
* Bugfixes and `--source-dir` argument for `spack location`. (#22755, #22348,
  #22321)
* Better support for externals in shared prefixes. (#22653)
* `spack build-env` now prefers specs defined in the active environment.
  (#21642)
* Remove erroneous warnings about quotes in `from_sourcing_files`. (#22767)
* Fix clearing cache of `InternalConfigScope`. (#22609)
* Bugfix for "active when pkg is already active" error. (#22587)
* Make `SingleFileScope` able to repopulate the cache after clearing it.
  (#22559)
* Channelflow: Fix the package. (#22483)
* More descriptive error message for bugs in `package.py` (#21811)
* Use package-supplied `autogen.sh`. (#20319)
* Respect `-k/verify-ssl-false` in `_existing_url` method. (#21864)


# v0.16.1 (2021-02-22)

This minor release includes a new feature and associated fixes:
* intel-oneapi support through new packages (#20411, #20686, #20693, #20717,
  #20732, #20808, #21377, #21448)

This release also contains bug fixes/enhancements for:
* HIP/ROCm support (#19715, #20095)
* concretization (#19988, #20020, #20082, #20086, #20099, #20102, #20128,
  #20182, #20193, #20194, #20196, #20203, #20247, #20259, #20307, #20362,
  #20383, #20423, #20473, #20506, #20507, #20604, #20638, #20649, #20677,
  #20680, #20790)
* environment install reporting fix (#20004)
* avoid import in ABI compatibility info (#20236)
* restore ability of dev-build to skip patches (#20351)
* spack find -d spec grouping (#20028)
* spack smoke test support (#19987, #20298)
* macOS fixes (#20038, #21662)
* abstract spec comparisons (#20341)
* continuous integration (#17563)
* performance improvements for binary relocation (#19690, #20768)
* additional sanity checks for variants in builtin packages (#20373)
* do not pollute auto-generated configuration files with empty lists or
  dicts (#20526)

plus assorted documentation (#20021, #20174) and package bug fixes/enhancements
(#19617, #19933, #19986, #20006, #20097, #20198, #20794, #20906, #21411).


# v0.16.0 (2020-11-18)

`v0.16.0` is a major feature release.
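The `collections.abc` item above (#20441) is the pattern behind most of the vendored-library diffs further down in this compare. A minimal sketch of the compatibility import it introduces (`Mapping` is just an example; the same shape is used for `Sequence`, `MutableMapping`, `Hashable`, and friends):

```python
# Python 3.3+ exposes the ABCs in collections.abc, and Python 3.10 drops
# the old aliases from collections. Trying collections.abc first keeps
# the import working across Python 2.6 through 3.10+.
try:
    from collections.abc import Mapping
except ImportError:
    from collections import Mapping
```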
@@ -324,21 +324,21 @@ mentions that Python 3 is required, this can be specified as:

.. code-block:: python

   depends_on('python@3:', type=('build', 'run'))
   depends_on('python@3:', type=('build', 'run')


If Python 2 is required, this would look like:

.. code-block:: python

   depends_on('python@:2', type=('build', 'run'))
   depends_on('python@:2', type=('build', 'run')


If Python 2.7 is the only version that works, you can use:

.. code-block:: python

   depends_on('python@2.7:2.8', type=('build', 'run'))
   depends_on('python@2.7:2.8', type=('build', 'run')


The documentation may not always specify supported Python versions.
@@ -103,53 +103,6 @@ environment*, especially for ``PATH``. Only software that comes with
the system, or that you know you wish to use with Spack, should be
included. This procedure will avoid many strange build errors.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Optional: Bootstrapping clingo
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack supports using clingo as an external solver to compute which software
needs to be installed. If you have a default compiler supporting C++14, Spack
can automatically bootstrap this tool from sources the first time it is
needed:

.. code-block:: console

   $ spack solve zlib
   [+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
   [+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
   [+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
   ==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
   [ ... ]
   ==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0]
   zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell

If you want to speed up bootstrapping, you may try to search for ``cmake`` and ``bison``
on your system:

.. code-block:: console

   $ spack external find cmake bison
   ==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml
   bison@3.0.4  cmake@3.19.4

All the tools Spack needs for its own functioning are installed in a separate store, which lives
under the ``${HOME}/.spack`` directory. The software installed there can be queried with:

.. code-block:: console

   $ spack find --bootstrap
   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
   ==> 3 installed packages
   -- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
   clingo-bootstrap@spack  python@3.6.9  re2c@1.2.1

In case it's needed, the bootstrap store can also be cleaned with:

.. code-block:: console

   $ spack clean -b
   ==> Removing software in "/home/spack/.spack/bootstrap/store"

^^^^^^^^^^^^^^^^^^^^^^^^^^
Optional: Alternate Prefix
^^^^^^^^^^^^^^^^^^^^^^^^^^
7  lib/spack/external/_pytest/assertion/util.py  (vendored)
@@ -5,12 +5,9 @@
import _pytest._code
import py
try:
    from collections.abc import Sequence
    from collections import Sequence
except ImportError:
    try:
        from collections import Sequence
    except ImportError:
        Sequence = list
    Sequence = list


u = py.builtin._totext
7  lib/spack/external/_pytest/main.py  (vendored)
@@ -10,12 +10,9 @@
import _pytest._code
import py
try:
    from collections.abc import MutableMapping as MappingMixin
    from collections import MutableMapping as MappingMixin
except ImportError:
    try:
        from collections import MutableMapping as MappingMixin
    except ImportError:
        from UserDict import DictMixin as MappingMixin
    from UserDict import DictMixin as MappingMixin

from _pytest.config import directory_arg, UsageError, hookimpl
from _pytest.outcomes import exit
5  lib/spack/external/_pytest/python_api.py  (vendored)
@@ -398,10 +398,7 @@ def approx(expected, rel=None, abs=None, nan_ok=False):
    __ https://docs.python.org/3/reference/datamodel.html#object.__ge__
    """

    if sys.version_info >= (3, 3):
        from collections.abc import Mapping, Sequence
    else:
        from collections import Mapping, Sequence
    from collections import Mapping, Sequence
    from _pytest.compat import STRING_TYPES as String

    # Delegate the comparison to a class that knows how to deal with the type
8  lib/spack/external/jinja2/runtime.py  (vendored)
@@ -315,14 +315,10 @@ def __repr__(self):

# register the context as mapping if possible
try:
    from collections.abc import Mapping
    from collections import Mapping
    Mapping.register(Context)
except ImportError:
    try:
        from collections import Mapping
        Mapping.register(Context)
    except ImportError:
        pass
    pass


class BlockReference(object):
12  lib/spack/external/jinja2/sandbox.py  (vendored)
@@ -14,7 +14,7 @@
"""
import types
import operator
import sys
from collections import Mapping
from jinja2.environment import Environment
from jinja2.exceptions import SecurityError
from jinja2._compat import string_types, PY2
@@ -23,11 +23,6 @@
from markupsafe import EscapeFormatter
from string import Formatter

if sys.version_info >= (3, 3):
    from collections.abc import Mapping
else:
    from collections import Mapping


#: maximum number of items a range may produce
MAX_RANGE = 100000
@@ -84,10 +79,7 @@
    pass

#: register Python 2.6 abstract base classes
if sys.version_info >= (3, 3):
    from collections.abc import MutableSet, MutableMapping, MutableSequence
else:
    from collections import MutableSet, MutableMapping, MutableSequence
from collections import MutableSet, MutableMapping, MutableSequence
_mutable_set_types += (MutableSet,)
_mutable_mapping_types += (MutableMapping,)
_mutable_sequence_types += (MutableSequence,)
7  lib/spack/external/jinja2/tests.py  (vendored)
@@ -10,16 +10,11 @@
"""
import operator
import re
import sys
from collections import Mapping
from jinja2.runtime import Undefined
from jinja2._compat import text_type, string_types, integer_types
import decimal

if sys.version_info >= (3, 3):
    from collections.abc import Mapping
else:
    from collections import Mapping

number_re = re.compile(r'^-?\d+(\.\d+)?$')
regex_type = type(number_re)
8  lib/spack/external/jinja2/utils.py  (vendored)
@@ -482,14 +482,10 @@ def __reversed__(self):

# register the LRU cache as mutable mapping if possible
try:
    from collections.abc import MutableMapping
    from collections import MutableMapping
    MutableMapping.register(LRUCache)
except ImportError:
    try:
        from collections import MutableMapping
        MutableMapping.register(LRUCache)
    except ImportError:
        pass
    pass


def select_autoescape(enabled_extensions=('html', 'htm', 'xml'),
7  lib/spack/external/markupsafe/__init__.py  (vendored)
@@ -10,15 +10,10 @@
"""
import re
import string
import sys
from collections import Mapping
from markupsafe._compat import text_type, string_types, int_types, \
    unichr, iteritems, PY2

if sys.version_info >= (3, 3):
    from collections.abc import Mapping
else:
    from collections import Mapping

__version__ = "1.0"

__all__ = ['Markup', 'soft_unicode', 'escape', 'escape_silent']
7  lib/spack/external/ruamel/yaml/comments.py  (vendored)
@@ -9,12 +9,7 @@
a separate base
"""

import sys

if sys.version_info >= (3, 3):
    from collections.abc import MutableSet
else:
    from collections import MutableSet
from collections import MutableSet

__all__ = ["CommentedSeq", "CommentedMap", "CommentedOrderedMap",
           "CommentedSet", 'comment_attrib', 'merge_attrib']
7  lib/spack/external/ruamel/yaml/compat.py  (vendored)
@@ -12,12 +12,9 @@
    from ruamel.ordereddict import ordereddict
except:
    try:
        from collections.abc import OrderedDict
        from collections import OrderedDict
    except ImportError:
        try:
            from collections import OrderedDict
        except ImportError:
            from ordereddict import OrderedDict
        from ordereddict import OrderedDict
    # to get the right name import ... as ordereddict doesn't do that

    class ordereddict(OrderedDict):
19  lib/spack/external/ruamel/yaml/constructor.py  (vendored)
@@ -3,6 +3,7 @@
from __future__ import absolute_import
from __future__ import print_function

import collections
import datetime
import base64
import binascii
@@ -25,12 +26,6 @@
from ruamel.yaml.scalarstring import *  # NOQA


if sys.version_info >= (3, 3):
    from collections.abc import Hashable
else:
    from collections import Hashable


__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
           'ConstructorError', 'RoundTripConstructor']

@@ -168,7 +163,7 @@ def construct_mapping(self, node, deep=False):
            # keys can be list -> deep
            key = self.construct_object(key_node, deep=True)
            # lists are not hashable, but tuples are
            if not isinstance(key, Hashable):
            if not isinstance(key, collections.Hashable):
                if isinstance(key, list):
                    key = tuple(key)
        if PY2:
@@ -180,7 +175,7 @@ def construct_mapping(self, node, deep=False):
                    "found unacceptable key (%s)" %
                    exc, key_node.start_mark)
        else:
            if not isinstance(key, Hashable):
            if not isinstance(key, collections.Hashable):
                raise ConstructorError(
                    "while constructing a mapping", node.start_mark,
                    "found unhashable key", key_node.start_mark)
@@ -964,7 +959,7 @@ def construct_mapping(self, node, maptyp, deep=False):
            # keys can be list -> deep
            key = self.construct_object(key_node, deep=True)
            # lists are not hashable, but tuples are
            if not isinstance(key, Hashable):
            if not isinstance(key, collections.Hashable):
                if isinstance(key, list):
                    key = tuple(key)
        if PY2:
@@ -976,7 +971,7 @@ def construct_mapping(self, node, maptyp, deep=False):
                    "found unacceptable key (%s)" %
                    exc, key_node.start_mark)
        else:
            if not isinstance(key, Hashable):
            if not isinstance(key, collections.Hashable):
                raise ConstructorError(
                    "while constructing a mapping", node.start_mark,
                    "found unhashable key", key_node.start_mark)
@@ -1008,7 +1003,7 @@ def construct_setting(self, node, typ, deep=False):
            # keys can be list -> deep
            key = self.construct_object(key_node, deep=True)
            # lists are not hashable, but tuples are
            if not isinstance(key, Hashable):
            if not isinstance(key, collections.Hashable):
                if isinstance(key, list):
                    key = tuple(key)
        if PY2:
@@ -1020,7 +1015,7 @@ def construct_setting(self, node, typ, deep=False):
                    "found unacceptable key (%s)" %
                    exc, key_node.start_mark)
        else:
            if not isinstance(key, Hashable):
            if not isinstance(key, collections.Hashable):
                raise ConstructorError(
                    "while constructing a mapping", node.start_mark,
                    "found unhashable key", key_node.start_mark)
@@ -23,12 +23,6 @@
from llnl.util.lang import dedupe, memoized
from spack.util.executable import Executable


if sys.version_info >= (3, 3):
    from collections.abc import Sequence  # novm
else:
    from collections import Sequence

__all__ = [
    'FileFilter',
    'FileList',
@@ -1111,7 +1105,7 @@ def find(root, files, recursive=True):

    Parameters:
        root (str): The root directory to start searching from
        files (str or Sequence): Library name(s) to search for
        files (str or collections.Sequence): Library name(s) to search for
        recurse (bool, optional): if False search only root folder,
            if True descends top-down from the root. Defaults to True.

@@ -1174,7 +1168,7 @@ def _find_non_recursive(root, search_files):
# Utilities for libraries and headers


class FileList(Sequence):
class FileList(collections.Sequence):
    """Sequence of absolute paths to files.

    Provides a few convenience methods to manipulate file paths.

@@ -1417,7 +1411,7 @@ def find_headers(headers, root, recursive=False):
    """
    if isinstance(headers, six.string_types):
        headers = [headers]
    elif not isinstance(headers, Sequence):
    elif not isinstance(headers, collections.Sequence):
        message = '{0} expects a string or sequence of strings as the '
        message += 'first argument [got {1} instead]'
        message = message.format(find_headers.__name__, type(headers))
@@ -1572,7 +1566,7 @@ def find_system_libraries(libraries, shared=True):
    """
    if isinstance(libraries, six.string_types):
        libraries = [libraries]
    elif not isinstance(libraries, Sequence):
    elif not isinstance(libraries, collections.Sequence):
        message = '{0} expects a string or sequence of strings as the '
        message += 'first argument [got {1} instead]'
        message = message.format(find_system_libraries.__name__,
@@ -1626,7 +1620,7 @@ def find_libraries(libraries, root, shared=True, recursive=False):
    """
    if isinstance(libraries, six.string_types):
        libraries = [libraries]
    elif not isinstance(libraries, Sequence):
    elif not isinstance(libraries, collections.Sequence):
        message = '{0} expects a string or sequence of strings as the '
        message += 'first argument [got {1} instead]'
        message = message.format(find_libraries.__name__, type(libraries))
@@ -9,18 +9,13 @@
import os
import re
import functools
import collections
import inspect
from datetime import datetime, timedelta
from six import string_types
import sys


if sys.version_info >= (3, 3):
    from collections.abc import Hashable, MutableMapping  # novm
else:
    from collections import Hashable, MutableMapping


# Ignore emacs backups when listing modules
ignore_modules = [r'^\.#', '~$']

@@ -194,7 +189,7 @@ def memoized(func):

    @functools.wraps(func)
    def _memoized_function(*args):
        if not isinstance(args, Hashable):
        if not isinstance(args, collections.Hashable):
            # Not hashable, so just call the function.
            return func(*args)

@@ -269,7 +264,7 @@ def setter(name, value):


@key_ordering
class HashableMap(MutableMapping):
class HashableMap(collections.MutableMapping):
    """This is a hashable, comparable dictionary. Hash is performed on
    a tuple of the values in the dictionary."""

@@ -678,13 +673,6 @@ def uniq(sequence):
    return uniq_list


def star(func):
    """Unpacks arguments for use with Multiprocessing mapping functions"""
    def _wrapper(args):
        return func(*args)
    return _wrapper


class Devnull(object):
    """Null stream with less overhead than ``os.devnull``.
@@ -5,7 +5,7 @@

#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 16, 2)
spack_version_info = (0, 16, 0)

#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)
@@ -8,6 +8,7 @@
from llnl.util.lang import memoized

import spack.spec
from spack.build_environment import dso_suffix
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError
from spack.compilers.clang import Clang
@@ -29,7 +30,6 @@ def architecture_compatible(self, target, constraint):
    def _gcc_get_libstdcxx_version(self, version):
        """Returns gcc ABI compatibility info by getting the library version of
        a compiler's libstdc++ or libgcc_s"""
        from spack.build_environment import dso_suffix
        spec = CompilerSpec("gcc", version)
        compilers = spack.compilers.compilers_for_spec(spec)
        if not compilers:
@@ -56,7 +56,6 @@
attributes front_os and back_os. The operating system as described earlier,
will be responsible for compiler detection.
"""
import contextlib
import functools
import inspect
import warnings
@@ -68,8 +67,6 @@
from llnl.util.lang import memoized, list_modules, key_ordering

import spack.compiler
import spack.compilers
import spack.config
import spack.paths
import spack.error as serr
import spack.util.executable
@@ -494,7 +491,7 @@ def arch_for_spec(arch_spec):


@memoized
def _all_platforms():
def all_platforms():
    classes = []
    mod_path = spack.paths.platform_path
    parent_module = "spack.platforms"
@@ -515,7 +512,7 @@ def _all_platforms():


@memoized
def _platform():
def platform():
    """Detects the platform for this machine.

    Gather a list of all available subclasses of platforms.
@@ -524,7 +521,7 @@ def _platform():
    a file path (/opt/cray...)
    """
    # Try to create a Platform object using the config file FIRST
    platform_list = _all_platforms()
    platform_list = all_platforms()
    platform_list.sort(key=lambda a: a.priority)

    for platform_cls in platform_list:
@@ -532,19 +529,6 @@ def _platform():
        return platform_cls()


#: The "real" platform of the host running Spack. This should not be changed
#: by any method and is here as a convenient way to refer to the host platform.
real_platform = _platform

#: The current platform used by Spack. May be swapped by the use_platform
#: context manager.
platform = _platform

#: The list of all platform classes. May be swapped by the use_platform
#: context manager.
all_platforms = _all_platforms


@memoized
def default_arch():
    """Default ``Arch`` object for this machine.
@@ -579,39 +563,3 @@ def compatible_sys_types():
    arch = Arch(platform(), 'default_os', target)
    compatible_archs.append(str(arch))
    return compatible_archs


class _PickleableCallable(object):
    """Class used to pickle a callable that may substitute either
    _platform or _all_platforms. Lambda or nested functions are
    not pickleable.
    """
    def __init__(self, return_value):
        self.return_value = return_value

    def __call__(self):
        return self.return_value


@contextlib.contextmanager
def use_platform(new_platform):
    global platform, all_platforms

    msg = '"{0}" must be an instance of Platform'
    assert isinstance(new_platform, Platform), msg.format(new_platform)

    original_platform_fn, original_all_platforms_fn = platform, all_platforms
    platform = _PickleableCallable(new_platform)
    all_platforms = _PickleableCallable([type(new_platform)])

    # Clear configuration and compiler caches
    spack.config.config.clear_caches()
    spack.compilers._cache_config_files = []

    yield new_platform

    platform, all_platforms = original_platform_fn, original_all_platforms_fn

    # Clear configuration and compiler caches
    spack.config.config.clear_caches()
    spack.compilers._cache_config_files = []
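The `use_platform` context manager removed above swaps the module-level `platform` and `all_platforms` callables for pickleable stand-ins and restores them on exit. A hypothetical usage sketch, assuming a `Platform` instance is available (how that instance is constructed is not shown in this diff):

```python
# Hypothetical usage of the use_platform context manager shown above.
# `test_platform` is a stand-in; real callers pass an instance of a
# concrete Platform subclass.
import spack.architecture as architecture

test_platform = ...  # some Platform instance

with architecture.use_platform(test_platform):
    # Inside the block, platform() returns the swapped-in instance
    # (via _PickleableCallable), and caches have been cleared.
    assert architecture.platform() is test_platform

# On exit, the original platform() and all_platforms() are restored.
```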
@@ -12,7 +12,6 @@
import tempfile
import hashlib
import glob
from ordereddict_backport import OrderedDict

from contextlib import closing
import ruamel.yaml as yaml
@@ -599,9 +598,7 @@ def write_buildinfo_file(spec, workdir, rel=False):
        text_to_relocate.append(rel_path_name)

    # Create buildinfo data and write it to disk
    import spack.hooks.sbang as sbang
    buildinfo = {}
    buildinfo['sbang_install_path'] = sbang.sbang_install_path()
    buildinfo['relative_rpaths'] = rel
    buildinfo['buildpath'] = spack.store.layout.root
    buildinfo['spackprefix'] = spack.paths.prefix
@@ -1087,10 +1084,6 @@ def relocate_package(spec, allow_root):
    new_prefix = str(spec.prefix)
    new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
    new_spack_prefix = str(spack.paths.prefix)

    old_sbang_install_path = None
    if 'sbang_install_path' in buildinfo:
        old_sbang_install_path = str(buildinfo['sbang_install_path'])
    old_layout_root = str(buildinfo['buildpath'])
    old_spack_prefix = str(buildinfo.get('spackprefix'))
    old_rel_prefix = buildinfo.get('relative_prefix')
@@ -1112,32 +1105,11 @@ def relocate_package(spec, allow_root):
    new_deps = spack.build_environment.get_rpath_deps(spec.package)
    for d in new_deps:
        hash_to_prefix[d.format('{hash}')] = str(d.prefix)
    # Spurious replacements (e.g. sbang) will cause issues with binaries
    # For example, the new sbang can be longer than the old one.
    # Hence 2 dictionaries are maintained here.
    prefix_to_prefix_text = OrderedDict({})
    prefix_to_prefix_bin = OrderedDict({})

    if old_sbang_install_path:
        import spack.hooks.sbang as sbang
        prefix_to_prefix_text[old_sbang_install_path] = \
            sbang.sbang_install_path()

    prefix_to_prefix_text[old_prefix] = new_prefix
    prefix_to_prefix_bin[old_prefix] = new_prefix
    prefix_to_prefix_text[old_layout_root] = new_layout_root
    prefix_to_prefix_bin[old_layout_root] = new_layout_root
    prefix_to_prefix = dict()
    for orig_prefix, hash in prefix_to_hash.items():
        prefix_to_prefix_text[orig_prefix] = hash_to_prefix.get(hash, None)
        prefix_to_prefix_bin[orig_prefix] = hash_to_prefix.get(hash, None)
    # This is vestigial code for the *old* location of sbang. Previously,
    # sbang was a bash script, and it lived in the spack prefix. It is
    # now a POSIX script that lives in the install prefix. Old packages
    # will have the old sbang location in their shebangs.
    import spack.hooks.sbang as sbang
    orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(old_spack_prefix)
    new_sbang = sbang.sbang_shebang_line()
    prefix_to_prefix_text[orig_sbang] = new_sbang
        prefix_to_prefix[orig_prefix] = hash_to_prefix.get(hash, None)
    prefix_to_prefix[old_prefix] = new_prefix
    prefix_to_prefix[old_layout_root] = new_layout_root

    tty.debug("Relocating package from",
              "%s to %s." % (old_layout_root, new_layout_root))
@@ -1165,14 +1137,15 @@ def is_backup_file(file):
        relocate.relocate_macho_binaries(files_to_relocate,
                                         old_layout_root,
                                         new_layout_root,
                                         prefix_to_prefix_bin, rel,
                                         prefix_to_prefix, rel,
                                         old_prefix,
                                         new_prefix)

    if 'elf' in platform.binary_formats:
        relocate.relocate_elf_binaries(files_to_relocate,
                                       old_layout_root,
                                       new_layout_root,
                                       prefix_to_prefix_bin, rel,
                                       prefix_to_prefix, rel,
                                       old_prefix,
                                       new_prefix)
    # Relocate links to the new install prefix
@@ -1183,7 +1156,12 @@ def is_backup_file(file):

    # For all buildcaches
    # relocate the install prefixes in text files including dependencies
    relocate.relocate_text(text_names, prefix_to_prefix_text)
    relocate.relocate_text(text_names,
                           old_layout_root, new_layout_root,
                           old_prefix, new_prefix,
                           old_spack_prefix,
                           new_spack_prefix,
                           prefix_to_prefix)

    paths_to_relocate = [old_prefix, old_layout_root]
    paths_to_relocate.extend(prefix_to_hash.keys())
@@ -1193,13 +1171,22 @@ def is_backup_file(file):
        map(lambda filename: os.path.join(workdir, filename),
            buildinfo['relocate_binaries'])))
    # relocate the install prefixes in binary files including dependencies
    relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
    relocate.relocate_text_bin(files_to_relocate,
                               old_prefix, new_prefix,
                               old_spack_prefix,
                               new_spack_prefix,
                               prefix_to_prefix)

    # If we are installing back to the same location
    # relocate the sbang location if the spack directory changed
    # If we are installing back to the same location
    # relocate the sbang location if the spack directory changed
    else:
        if old_spack_prefix != new_spack_prefix:
            relocate.relocate_text(text_names, prefix_to_prefix_text)
            relocate.relocate_text(text_names,
                                   old_layout_root, new_layout_root,
                                   old_prefix, new_prefix,
                                   old_spack_prefix,
                                   new_spack_prefix,
                                   prefix_to_prefix)


def extract_tarball(spec, filename, allow_root=False, unsigned=False,
@@ -1,252 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import os
import sys
try:
    import sysconfig  # novm
except ImportError:
    # Not supported on Python 2.6
    pass

import archspec.cpu

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.architecture
import spack.config
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.user_environment as uenv
import spack.util.executable
from spack.util.environment import EnvironmentModifications


def spec_for_current_python():
    """For bootstrapping purposes we are just interested in the Python
    minor version (all patches are ABI compatible with the same minor)
    and on whether ucs4 support has been enabled for Python 2.7

    See:
      https://www.python.org/dev/peps/pep-0513/
      https://stackoverflow.com/a/35801395/771663
    """
    version_str = '.'.join(str(x) for x in sys.version_info[:2])
    variant_str = ''
    if sys.version_info[0] == 2 and sys.version_info[1] == 7:
        unicode_size = sysconfig.get_config_var('Py_UNICODE_SIZE')
        variant_str = '+ucs4' if unicode_size == 4 else '~ucs4'

    spec_fmt = 'python@{0} {1}'
    return spec_fmt.format(version_str, variant_str)


@contextlib.contextmanager
def spack_python_interpreter():
    """Override the current configuration to set the interpreter under
    which Spack is currently running as the only Python external spec
    available.
    """
    python_prefix = os.path.dirname(os.path.dirname(sys.executable))
    external_python = spec_for_current_python()

    entry = {
        'buildable': False,
        'externals': [
            {'prefix': python_prefix, 'spec': str(external_python)}
        ]
    }

    with spack.config.override('packages:python::', entry):
        yield


def make_module_available(module, spec=None, install=False):
    """Ensure module is importable"""
    # If we already can import it, that's great
    try:
        __import__(module)
        return
    except ImportError:
        pass

    # If it's already installed, use it
    # Search by spec
    spec = spack.spec.Spec(spec or module)

    # We have to run as part of this python
    # We can constrain by a shortened version in place of a version range
    # because this spec is only used for querying or as a placeholder to be
    # replaced by an external that already has a concrete version. This syntax
    # is not sufficient when concretizing without an external, as it will
    # concretize to python@X.Y instead of python@X.Y.Z
    python_requirement = '^' + spec_for_current_python()
    spec.constrain(python_requirement)
    installed_specs = spack.store.db.query(spec, installed=True)

    for ispec in installed_specs:
        # TODO: make sure run-environment is appropriate
        module_path = os.path.join(ispec.prefix,
                                   ispec['python'].package.site_packages_dir)
        module_path_64 = module_path.replace('/lib/', '/lib64/')
        try:
            sys.path.append(module_path)
            sys.path.append(module_path_64)
            __import__(module)
            return
        except ImportError:
            tty.warn("Spec %s did not provide module %s" % (ispec, module))
            sys.path = sys.path[:-2]

    def _raise_error(module_name, module_spec):
        error_msg = 'cannot import module "{0}"'.format(module_name)
        if module_spec:
            error_msg += ' from spec "{0}'.format(module_spec)
        raise ImportError(error_msg)

    if not install:
        _raise_error(module, spec)

    with spack_python_interpreter():
        # We will install for ourselves, using this python if needed
        # Concretize the spec
        spec.concretize()
        spec.package.do_install()

    module_path = os.path.join(spec.prefix,
                               spec['python'].package.site_packages_dir)
    module_path_64 = module_path.replace('/lib/', '/lib64/')
    try:
        sys.path.append(module_path)
        sys.path.append(module_path_64)
        __import__(module)
        return
    except ImportError:
        sys.path = sys.path[:-2]
        _raise_error(module, spec)


def get_executable(exe, spec=None, install=False):
    """Find an executable named exe, either in PATH or in Spack

    Args:
        exe (str): needed executable name
        spec (Spec or str): spec to search for exe in (default exe)
        install (bool): install spec if not available

    When ``install`` is True, Spack will use the python used to run Spack as an
    external. The ``install`` option should only be used with packages that
    install quickly (when using external python) or are guaranteed by Spack
    organization to be in a binary mirror (clingo)."""
    # Search the system first
    runner = spack.util.executable.which(exe)
    if runner:
        return runner

    # Check whether it's already installed
    spec = spack.spec.Spec(spec or exe)
    installed_specs = spack.store.db.query(spec, installed=True)
    for ispec in installed_specs:
        # filter out directories of the same name as the executable
        exe_path = [exe_p for exe_p in fs.find(ispec.prefix, exe)
                    if fs.is_exe(exe_p)]
        if exe_path:
            ret = spack.util.executable.Executable(exe_path[0])
            envmod = EnvironmentModifications()
            for dep in ispec.traverse(root=True, order='post'):
                envmod.extend(uenv.environment_modifications_for_spec(dep))
            ret.add_default_envmod(envmod)
            return ret
        else:
            tty.warn('Exe %s not found in prefix %s' % (exe, ispec.prefix))

    def _raise_error(executable, exe_spec):
        error_msg = 'cannot find the executable "{0}"'.format(executable)
        if exe_spec:
            error_msg += ' from spec "{0}'.format(exe_spec)
        raise RuntimeError(error_msg)

    # If we're not allowed to install this for ourselves, we can't find it
    if not install:
        _raise_error(exe, spec)

    with spack_python_interpreter():
        # We will install for ourselves, using this python if needed
        # Concretize the spec
        spec.concretize()

        spec.package.do_install()
        # filter out directories of the same name as the executable
        exe_path = [exe_p for exe_p in fs.find(spec.prefix, exe)
                    if fs.is_exe(exe_p)]
        if exe_path:
            ret = spack.util.executable.Executable(exe_path[0])
            envmod = EnvironmentModifications()
            for dep in spec.traverse(root=True, order='post'):
                envmod.extend(uenv.environment_modifications_for_spec(dep))
            ret.add_default_envmod(envmod)
            return ret

    _raise_error(exe, spec)


def _bootstrap_config_scopes():
    tty.debug('[BOOTSTRAP CONFIG SCOPE] name=_builtin')
    config_scopes = [
        spack.config.InternalConfigScope(
            '_builtin', spack.config.config_defaults
        )
    ]
    for name, path in spack.config.configuration_paths:
        platform = spack.architecture.platform().name
        platform_scope = spack.config.ConfigScope(
            '/'.join([name, platform]), os.path.join(path, platform)
        )
        generic_scope = spack.config.ConfigScope(name, path)
        config_scopes.extend([generic_scope, platform_scope])
        msg = '[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}'
        tty.debug(msg.format(generic_scope.name, generic_scope.path))
        tty.debug(msg.format(platform_scope.name, platform_scope.path))
    return config_scopes


@contextlib.contextmanager
def ensure_bootstrap_configuration():
    with spack.architecture.use_platform(spack.architecture.real_platform()):
        with spack.repo.use_repositories(spack.paths.packages_path):
            with spack.store.use_store(spack.paths.user_bootstrap_store):
                # Default configuration scopes excluding command line
                # and builtin but accounting for platform specific scopes
                config_scopes = _bootstrap_config_scopes()
                with spack.config.use_configuration(*config_scopes):
                    with spack_python_interpreter():
                        yield


def clingo_root_spec():
    # Construct the root spec that will be used to bootstrap clingo
    spec_str = 'clingo-bootstrap@spack+python'

    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but MacOS.
    if str(spack.architecture.platform()) == 'darwin':
        spec_str += ' %apple-clang'
    else:
        spec_str += ' %gcc'

    # Add hint to use frontend operating system on Cray
    if str(spack.architecture.platform()) == 'cray':
        spec_str += ' os=fe'

    # Add the generic target
    generic_target = archspec.cpu.host().family
    spec_str += ' target={0}'.format(str(generic_target))

    tty.debug('[BOOTSTRAP ROOT SPEC] clingo: {0}'.format(spec_str))

    return spack.spec.Spec(spec_str)
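The removed file above wires together the bootstrap flow described in the docs hunk earlier. A hypothetical sketch of how its helpers fit together, assuming the file lived at lib/spack/spack/bootstrap.py and is importable as `spack.bootstrap` (that path is an assumption, not shown in this diff):

```python
# Hypothetical sketch: make the `clingo` module importable, installing it
# into the bootstrap store if needed, using the functions removed above.
import spack.bootstrap as bootstrap  # assumed module path

with bootstrap.ensure_bootstrap_configuration():
    # Constrain the search/install to the clingo bootstrap root spec
    bootstrap.make_module_available(
        'clingo', spec=bootstrap.clingo_root_spec(), install=True)

import clingo  # now importable from the bootstrap store
```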
@@ -302,19 +302,6 @@ def set_compiler_environment_variables(pkg, env):
    return env


def _place_externals_last(spec_container):
    """
    For a (possibly unordered) container of specs, return an ordered list
    where all external specs are at the end of the list. External packages
    may be installed in merged prefixes with other packages, and so
    they should be deprioritized for any search order (i.e. in PATH, or
    for a set of -L entries in a compiler invocation).
    """
    first = list(x for x in spec_container if not x.external)
    second = list(x for x in spec_container if x.external)
    return first + second
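A minimal sketch of the ordering this removed helper implements; the stand-in spec class is hypothetical (the real function operates on spack.spec.Spec objects):

```python
# Minimal sketch of _place_externals_last: non-external specs keep their
# relative order, externals move to the end of the search order.
class FakeSpec:
    def __init__(self, name, external):
        self.name = name
        self.external = external


def _place_externals_last(spec_container):
    first = [x for x in spec_container if not x.external]
    second = [x for x in spec_container if x.external]
    return first + second


specs = [FakeSpec('openssl', True), FakeSpec('zlib', False)]
print([s.name for s in _place_externals_last(specs)])  # ['zlib', 'openssl']
```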
def set_build_environment_variables(pkg, env, dirty):
    """Ensure a clean install environment when we build packages.

@@ -332,29 +319,6 @@ def set_build_environment_variables(pkg, env, dirty):
    link_deps = set(pkg.spec.traverse(root=False, deptype=('link')))
    build_link_deps = build_deps | link_deps
    rpath_deps = get_rpath_deps(pkg)
    # This includes all build dependencies and any other dependencies that
    # should be added to PATH (e.g. supporting executables run by build
    # dependencies)
    build_and_supporting_deps = set()
    for build_dep in build_deps:
        build_and_supporting_deps.update(build_dep.traverse(deptype='run'))

    # Establish an arbitrary but fixed ordering of specs so that resulting
    # environment variable values are stable
    def _order(specs):
        return sorted(specs, key=lambda x: x.name)

    # External packages may be installed in a prefix which contains many other
    # package installs. To avoid having those installations override
    # Spack-installed packages, they are placed at the end of search paths.
    # System prefixes are removed entirely later on since they are already
    # searched.
    build_deps = _place_externals_last(_order(build_deps))
    link_deps = _place_externals_last(_order(link_deps))
    build_link_deps = _place_externals_last(_order(build_link_deps))
    rpath_deps = _place_externals_last(_order(rpath_deps))
    build_and_supporting_deps = _place_externals_last(
        _order(build_and_supporting_deps))

    link_dirs = []
    include_dirs = []
@@ -401,10 +365,21 @@ def _order(specs):
    env.set(SPACK_INCLUDE_DIRS, ':'.join(include_dirs))
    env.set(SPACK_RPATH_DIRS, ':'.join(rpath_dirs))

    build_and_supporting_prefixes = filter_system_paths(
        x.prefix for x in build_and_supporting_deps)
    build_link_prefixes = filter_system_paths(
        x.prefix for x in build_link_deps)
    build_prefixes = [dep.prefix for dep in build_deps]
    build_link_prefixes = [dep.prefix for dep in build_link_deps]

    # add run-time dependencies of direct build-time dependencies:
    for build_dep in build_deps:
        for run_dep in build_dep.traverse(deptype='run'):
            build_prefixes.append(run_dep.prefix)

    # Filter out system paths: ['/', '/usr', '/usr/local']
    # These paths can be introduced into the build when an external package
    # is added as a dependency. The problem with these paths is that they often
    # contain hundreds of other packages installed in the same directory.
    # If these paths come first, they can overshadow Spack installations.
    build_prefixes = filter_system_paths(build_prefixes)
    build_link_prefixes = filter_system_paths(build_link_prefixes)

    # Add dependencies to CMAKE_PREFIX_PATH
    env.set_path('CMAKE_PREFIX_PATH', build_link_prefixes)
@@ -419,10 +394,7 @@ def _order(specs):
    env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)

    # Add bin directories from dependencies to the PATH for the build.
    # These directories are added to the beginning of the search path, and in
    # the order given by 'build_and_supporting_prefixes' (the iteration order
    # is reversed because each entry is prepended)
    for prefix in reversed(build_and_supporting_prefixes):
    for prefix in build_prefixes:
        for dirname in ['bin', 'bin64']:
            bin_dir = os.path.join(prefix, dirname)
            if os.path.isdir(bin_dir):
@@ -440,8 +412,7 @@ def _order(specs):
    # directory. Add that to the path too.
    env_paths = []
    compiler_specific = os.path.join(
        spack.paths.build_env_path,
        os.path.dirname(pkg.compiler.link_paths['cc']))
        spack.paths.build_env_path, pkg.compiler.name)
    for item in [spack.paths.build_env_path, compiler_specific]:
        env_paths.append(item)
        ci = os.path.join(item, 'case-insensitive')
@@ -467,7 +438,7 @@ def _order(specs):
        env.set(SPACK_CCACHE_BINARY, ccache)

    # Add any pkgconfig directories to PKG_CONFIG_PATH
    for prefix in reversed(build_link_prefixes):
    for prefix in build_link_prefixes:
        for directory in ('lib', 'lib64', 'share'):
            pcdir = os.path.join(prefix, directory, 'pkgconfig')
            if os.path.isdir(pcdir):
@@ -779,9 +750,6 @@ def setup_package(pkg, dirty, context='build'):
    elif context == 'test':
        import spack.user_environment as uenv  # avoid circular import
        env.extend(uenv.environment_modifications_for_spec(pkg.spec))
        env.extend(
            modifications_from_dependencies(pkg.spec, context=context)
        )
        set_module_variables_for_package(pkg)
        env.prepend_path('PATH', '.')

@@ -846,8 +814,7 @@ def modifications_from_dependencies(spec, context):
    }
    deptype, method = deptype_and_method[context]

    root = context == 'test'
    for dspec in spec.traverse(order='post', root=root, deptype=deptype):
    for dspec in spec.traverse(order='post', root=False, deptype=deptype):
        dpkg = dspec.package
        set_module_variables_for_package(dpkg)
        # Allow dependencies to modify the module
@@ -19,7 +19,7 @@ class CudaPackage(PackageBase):
    # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
    # https://developer.nvidia.com/cuda-gpus
    # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
    cuda_arch_values = (
    cuda_arch_values = [
        '10', '11', '12', '13',
        '20', '21',
        '30', '32', '35', '37',
@@ -27,7 +27,7 @@ class CudaPackage(PackageBase):
        '60', '61', '62',
        '70', '72', '75',
        '80', '86'
    )
    ]

    # FIXME: keep cuda and cuda_arch separate to make usage easier until
    # Spack has depends_on(cuda, when='cuda_arch!=None') or alike
@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# Troubleshooting advice for +rocm builds:
# Troubleshooting advice for +hip builds:
#
# 1. When building with clang, go your compilers.yaml,
#    add an entry for the amd version of clang, as below.
@@ -73,11 +73,9 @@
from spack.package import PackageBase
from spack.directives import depends_on, variant, conflicts

import spack.variant


class ROCmPackage(PackageBase):
    """Auxiliary class which contains ROCm variant, dependencies and conflicts
class HipPackage(PackageBase):
    """Auxiliary class which contains HIP variant, dependencies and conflicts
    and is meant to unify and facilitate its usage. Closely mimics CudaPackage.

    Maintainers: dtaller
@@ -88,26 +86,24 @@ class ROCmPackage(PackageBase):
    amdgpu_targets = (
        'gfx701', 'gfx801', 'gfx802', 'gfx803',
        'gfx900', 'gfx906', 'gfx908', 'gfx1010',
        'gfx1011', 'gfx1012'
        'gfx1011', 'gfx1012', 'none'
    )

    variant('rocm', default=False, description='Enable ROCm support')
    variant('hip', default=False, description='Enable HIP support')

    # possible amd gpu targets for rocm builds
    variant('amdgpu_target',
            description='AMD GPU architecture',
            values=spack.variant.any_combination_of(*amdgpu_targets))
    # possible amd gpu targets for hip builds
    variant('amdgpu_target', default='none', values=amdgpu_targets)

    depends_on('llvm-amdgpu', when='+rocm')
    depends_on('hsa-rocr-dev', when='+rocm')
    depends_on('hip', when='+rocm')
    depends_on('llvm-amdgpu', when='+hip')
    depends_on('hsa-rocr-dev', when='+hip')
    depends_on('hip', when='+hip')

    # need amd gpu type for rocm builds
    conflicts('amdgpu_target=none', when='+rocm')
    # need amd gpu type for hip builds
    conflicts('amdgpu_target=none', when='+hip')

    # Make sure amdgpu_targets cannot be used without +rocm
    for value in amdgpu_targets:
        conflicts('~rocm', when='amdgpu_target=' + value)
    # Make sure non-'none' amdgpu_targets cannot be used without +hip
    for value in amdgpu_targets[:-1]:
        conflicts('~hip', when='amdgpu_target=' + value)

    # https://github.com/ROCm-Developer-Tools/HIP/blob/master/bin/hipcc
    # It seems that hip-clang does not (yet?) accept this flag, in which case
@@ -115,8 +111,17 @@ class ROCmPackage(PackageBase):
    # hip package file. But I will leave this here for future development.
    @staticmethod
    def hip_flags(amdgpu_target):
        archs = ",".join(amdgpu_target)
        return '--amdgpu-target={0}'.format(archs)
        return '--amdgpu-target={0}'.format(amdgpu_target)

    # https://llvm.org/docs/AMDGPUUsage.html
    # Possible architectures (not including 'none' option)
    @staticmethod
    def amd_gputargets_list():
        return (
            'gfx701', 'gfx801', 'gfx802', 'gfx803',
            'gfx900', 'gfx906', 'gfx908', 'gfx1010',
            'gfx1011', 'gfx1012'
        )

    # HIP version vs Architecture
@@ -1,80 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Common utilities for managing intel oneapi packages.

"""

from os.path import dirname, isdir

from spack.package import Package
from spack.util.executable import Executable

from llnl.util.filesystem import find_headers, find_libraries


class IntelOneApiPackage(Package):
    """Base class for Intel oneAPI packages."""

    homepage = 'https://software.intel.com/oneapi'

    phases = ['install']

    def component_info(self,
                       dir_name,
                       components,
                       releases,
                       url_name):
        self._dir_name = dir_name
        self._components = components
        self._releases = releases
        self._url_name = url_name

    def url_for_version(self, version):
        release = self._release(version)
        return 'https://registrationcenter-download.intel.com/akdlm/irc_nas/%s/%s' % (
            release['irc_id'], self._oneapi_file(version, release))

    def install(self, spec, prefix):
        bash = Executable('bash')

        # Installer writes files in ~/intel set HOME so it goes to prefix
        bash.add_default_env('HOME', prefix)

        version = spec.versions.lowest()
        release = self._release(version)
        bash('./%s' % self._oneapi_file(version, release),
             '-s', '-a', '-s', '--action', 'install',
             '--eula', 'accept',
             '--components',
             self._components,
             '--install-dir', prefix)

    #
    # Helper functions
    #

    def _release(self, version):
        return self._releases[str(version)]

    def _oneapi_file(self, version, release):
        return 'l_%s_p_%s.%s_offline.sh' % (
            self._url_name, version, release['build'])


class IntelOneApiLibraryPackage(IntelOneApiPackage):
    """Base class for Intel oneAPI library packages."""

    @property
    def headers(self):
        include_path = '%s/%s/latest/include' % (
            self.prefix, self._dir_name)
        return find_headers('*', include_path, recursive=True)

    @property
    def libs(self):
        lib_path = '%s/%s/latest/lib/intel64' % (self.prefix, self._dir_name)
        lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
        return find_libraries('*', root=lib_path, shared=True, recursive=True)
@@ -233,28 +233,7 @@ def install_args(self, spec, prefix):
        if ('py-setuptools' == spec.name or          # this is setuptools, or
                'py-setuptools' in spec._dependencies and  # it's an immediate dep
                'build' in spec._dependencies['py-setuptools'].deptypes):
            args += ['--single-version-externally-managed']

        # Get all relative paths since we set the root to `prefix`
        # We query the python with which these will be used for the lib and inc
        # directories. This ensures we use `lib`/`lib64` as expected by python.
        python = spec['python'].package.command
        command_start = 'print(distutils.sysconfig.'
        commands = ';'.join([
            'import distutils.sysconfig',
            command_start + 'get_python_lib(plat_specific=False, prefix=""))',
            command_start + 'get_python_lib(plat_specific=True, prefix=""))',
            command_start + 'get_python_inc(plat_specific=True, prefix=""))'])
        pure_site_packages_dir, plat_site_packages_dir, inc_dir = python(
            '-c', commands, output=str, error=str).strip().split('\n')

        args += ['--root=%s' % prefix,
                 '--install-purelib=%s' % pure_site_packages_dir,
                 '--install-platlib=%s' % plat_site_packages_dir,
                 '--install-scripts=bin',
                 '--install-data=""',
                 '--install-headers=%s' % inc_dir
                 ]
        args += ['--single-version-externally-managed', '--root=/']

        return args
@@ -181,19 +181,6 @@ def parse_specs(args, **kwargs):
        raise spack.error.SpackError(msg)


def matching_spec_from_env(spec):
    """
    Returns a concrete spec, matching what is available in the environment.
    If no matching spec is found in the environment (or if no environment is
    active), this will return the given spec but concretized.
    """
    env = spack.environment.get_env({}, cmd_name)
    if env:
        return env.matching_spec(spec) or spec.concretized()
    else:
        return spec.concretized()

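A minimal sketch of the intended call pattern, assuming a command context; the spec name is arbitrary:

# Sketch only: resolve a user-provided spec against the active environment,
# falling back to a fresh concretization when nothing in the environment matches.
specs = spack.cmd.parse_specs(['zlib'], concretize=False)
spec = spack.cmd.matching_spec_from_env(specs[0])
print(spec.format('{name}{@version}'))  # spec is guaranteed concrete here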
def elide_list(line_list, max_num=10):
    """Takes a long list and limits it to a smaller number of elements,
    replacing intervening elements with '...'. For example::
@@ -448,7 +435,7 @@ def format_list(specs):
    out = ''
    if groups:
        for specs in iter_groups(specs, indent, all_headers):
            output.write(format_list(specs))
            out += format_list(specs)
    else:
        out = format_list(sorted(specs))

@@ -10,12 +10,11 @@
import llnl.util.tty as tty

import spack.caches
import spack.config
import spack.cmd.test
import spack.cmd.common.arguments as arguments
import spack.main
import spack.repo
import spack.stage
import spack.config
from spack.paths import lib_path, var_path

@@ -27,7 +26,7 @@
class AllClean(argparse.Action):
    """Activates flags -s -d -f -m and -p simultaneously"""
    def __call__(self, parser, namespace, values, option_string=None):
        parser.parse_args(['-sdfmpb'], namespace=namespace)
        parser.parse_args(['-sdfmp'], namespace=namespace)


def setup_parser(subparser):
@@ -47,10 +46,7 @@ def setup_parser(subparser):
        '-p', '--python-cache', action='store_true',
        help="remove .pyc, .pyo files and __pycache__ folders")
    subparser.add_argument(
        '-b', '--bootstrap', action='store_true',
        help="remove software needed to bootstrap Spack")
    subparser.add_argument(
        '-a', '--all', action=AllClean, help="equivalent to -sdfmpb", nargs=0
        '-a', '--all', action=AllClean, help="equivalent to -sdfmp", nargs=0
    )
    arguments.add_common_arguments(subparser, ['specs'])

@@ -58,7 +54,7 @@ def setup_parser(subparser):
def clean(parser, args):
    # If nothing was set, activate the default
    if not any([args.specs, args.stage, args.downloads, args.failures,
                args.misc_cache, args.python_cache, args.bootstrap]):
                args.misc_cache, args.python_cache]):
        args.stage = True

    # Then do the cleaning falling through the cases
@@ -100,10 +96,3 @@ def clean(parser, args):
            dname = os.path.join(root, d)
            tty.debug('Removing {0}'.format(dname))
            shutil.rmtree(dname)

    if args.bootstrap:
        msg = 'Removing software in "{0}"'
        tty.msg(msg.format(spack.paths.user_bootstrap_store))
        with spack.store.use_store(spack.paths.user_bootstrap_store):
            uninstall = spack.main.SpackCommand('uninstall')
            uninstall('-a', '-y')

@@ -53,13 +53,11 @@ def emulate_env_utility(cmd_name, context, args):
    spec = args.spec[0]
    cmd = args.spec[1:]

    specs = spack.cmd.parse_specs(spec, concretize=False)
    specs = spack.cmd.parse_specs(spec, concretize=True)
    if len(specs) > 1:
        tty.die("spack %s only takes one spec." % cmd_name)
    spec = specs[0]

    spec = spack.cmd.matching_spec_from_env(spec)

    build_environment.setup_package(spec.package, args.dirty, context)

    if args.dump:

@@ -15,6 +15,7 @@
from llnl.util.filesystem import working_dir

import spack.architecture as architecture
import spack.config
import spack.paths
from spack.main import get_version
from spack.util.executable import which
@@ -89,6 +90,7 @@ def report(args):
    print('* **Python:**', platform.python_version())
    print('* **Platform:**', architecture.Arch(
        architecture.platform(), 'frontend', 'frontend'))
    print('* **Concretizer:**', spack.config.get('config:concretizer'))


def debug(parser, args):

@@ -112,7 +112,6 @@ def dev_build(self, args):
        verbose=not args.quiet,
        dirty=args.dirty,
        stop_before=args.before,
        skip_patch=args.skip_patch,
        stop_at=args.until)

    # drop into the build environment of the package?

@@ -8,7 +8,6 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.repo

description = "fetch archives for packages"
@@ -19,51 +18,22 @@
def setup_parser(subparser):
    arguments.add_common_arguments(subparser, ['no_checksum'])
    subparser.add_argument(
        "-m",
        "--missing",
        action="store_true",
        help="fetch only missing (not yet installed) dependencies",
    )
        '-m', '--missing', action='store_true',
        help="fetch only missing (not yet installed) dependencies")
    subparser.add_argument(
        "-D",
        "--dependencies",
        action="store_true",
        help="also fetch all dependencies",
    )
    arguments.add_common_arguments(subparser, ["specs"])
    subparser.epilog = (
        "With an active environment, the specs "
        "parameter can be omitted. In this case all (uninstalled"
        ", in case of --missing) specs from the environment are fetched"
    )
        '-D', '--dependencies', action='store_true',
        help="also fetch all dependencies")
    arguments.add_common_arguments(subparser, ['specs'])


def fetch(parser, args):
    if args.specs:
        specs = spack.cmd.parse_specs(args.specs, concretize=True)
    else:
        # No specs were given explicitly, check if we are in an
        # environment. If yes, check the missing argument, if yes
        # fetch all uninstalled specs from it otherwise fetch all.
        # If we are also not in an environment, complain to the
        # user that we don't know what to do.
        env = ev.get_env(args, "fetch")
        if env:
            if args.missing:
                specs = env.uninstalled_specs()
            else:
                specs = env.all_specs()
            if specs == []:
                tty.die(
                    "No uninstalled specs in environment. Did you "
                    "run `spack concretize` yet?"
                )
        else:
            tty.die("fetch requires at least one spec argument")
    if not args.specs:
        tty.die("fetch requires at least one package argument")

    if args.no_checksum:
        spack.config.set("config:checksum", False, scope="command_line")
        spack.config.set('config:checksum', False, scope='command_line')

    specs = spack.cmd.parse_specs(args.specs, concretize=True)
    for spec in specs:
        if args.missing or args.dependencies:
            for s in spec.traverse():

@@ -109,10 +109,6 @@ def setup_parser(subparser):
    subparser.add_argument(
        '--end-date', help='latest date of installation [YYYY-MM-DD]'
    )
    subparser.add_argument(
        '-b', '--bootstrap', action='store_true',
        help='show software in the internal bootstrap store'
    )

    arguments.add_common_arguments(subparser, ['constraint'])

@@ -205,14 +201,7 @@ def display_env(env, args, decorator):

def find(parser, args):
    q_args = query_arguments(args)
    # Query the current store or the internal bootstrap store if required
    if args.bootstrap:
        msg = 'Showing internal bootstrap store at "{0}"'
        tty.msg(msg.format(spack.paths.user_bootstrap_store))
        with spack.store.use_store(spack.paths.user_bootstrap_store):
            results = args.specs(**q_args)
    else:
        results = args.specs(**q_args)
    results = args.specs(**q_args)

    decorator = lambda s, f: f
    added = set()

@@ -255,7 +255,7 @@ def install(parser, args, **kwargs):
        reporter.specs = specs

        tty.msg("Installing environment {0}".format(env.name))
        with reporter('build'):
        with reporter:
            env.install_all(args, **kwargs)

        tty.debug("Regenerating environment views for {0}"

@@ -47,13 +47,9 @@ def setup_parser(subparser):
    directories.add_argument(
        '-S', '--stages', action='store_true',
        help="top level stage directory")
    directories.add_argument(
        '--source-dir', action='store_true',
        help="source directory for a spec "
             "(requires it to be staged first)")
    directories.add_argument(
        '-b', '--build-dir', action='store_true',
        help="build directory for a spec "
        help="checked out or expanded source directory for a spec "
             "(requires it to be staged first)")
    directories.add_argument(
        '-e', '--env', action='store',
@@ -65,77 +61,53 @@ def setup_parser(subparser):
def location(parser, args):
    if args.module_dir:
        print(spack.paths.module_path)
        return

    if args.spack_root:
    elif args.spack_root:
        print(spack.paths.prefix)
        return

    if args.env:
    elif args.env:
        path = spack.environment.root(args.env)
        if not os.path.isdir(path):
            tty.die("no such environment: '%s'" % args.env)
        print(path)
        return

    if args.packages:
    elif args.packages:
        print(spack.repo.path.first_repo().root)
        return

    if args.stages:
    elif args.stages:
        print(spack.stage.get_stage_root())
        return

    specs = spack.cmd.parse_specs(args.spec)
    else:
        specs = spack.cmd.parse_specs(args.spec)
        if not specs:
            tty.die("You must supply a spec.")
        if len(specs) != 1:
            tty.die("Too many specs. Supply only one.")

    if not specs:
        tty.die("You must supply a spec.")
        if args.install_dir:
            # install_dir command matches against installed specs.
            env = ev.get_env(args, 'location')
            spec = spack.cmd.disambiguate_spec(specs[0], env)
            print(spec.prefix)

    if len(specs) != 1:
        tty.die("Too many specs. Supply only one.")
        else:
            spec = specs[0]

    # install_dir command matches against installed specs.
    if args.install_dir:
        env = ev.get_env(args, 'location')
        spec = spack.cmd.disambiguate_spec(specs[0], env)
        print(spec.prefix)
        return
            if args.package_dir:
                # This one just needs the spec name.
                print(spack.repo.path.dirname_for_package_name(spec.name))

    spec = specs[0]
            else:
                # These versions need concretized specs.
                spec.concretize()
                pkg = spack.repo.get(spec)

    # Package dir just needs the spec name
    if args.package_dir:
        print(spack.repo.path.dirname_for_package_name(spec.name))
        return
                if args.stage_dir:
                    print(pkg.stage.path)

    # Either concretize or filter from already concretized environment
    spec = spack.cmd.matching_spec_from_env(spec)
    pkg = spec.package

    if args.stage_dir:
        print(pkg.stage.path)
        return

    if args.build_dir:
        # Out of source builds have build_directory defined
        if hasattr(pkg, 'build_directory'):
            # build_directory can be either absolute or relative to the
            # stage path in either case os.path.join makes it absolute
            print(os.path.normpath(os.path.join(
                pkg.stage.path,
                pkg.build_directory
            )))
            return

        # Otherwise assume in-source builds
        print(pkg.stage.source_path)
        return

    # source dir remains, which requires the spec to be staged
    if not pkg.stage.expanded:
        tty.die("Source directory does not exist yet. "
                "Run this to create it:",
                "spack stage " + " ".join(args.spec))

    # Default to source dir.
    print(pkg.stage.source_path)
                else:  # args.build_dir is the default.
                    if not pkg.stage.expanded:
                        tty.die("Build directory does not exist yet. "
                                "Run this to create it:",
                                "spack stage " + " ".join(args.spec))
                    print(pkg.stage.source_path)

@@ -25,8 +25,8 @@


# tutorial configuration parameters
tutorial_branch = "releases/v0.16"
tutorial_mirror = "s3://spack-binaries-prs/tutorial/ecp21/mirror"
tutorial_branch = "releases/v0.15"
tutorial_mirror = "s3://spack-tutorial-container/mirror/"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

# configs to remove

@@ -22,10 +22,11 @@
import spack.spec
import spack.config
import spack.architecture

import spack.util.imp as simp
from spack.util.environment import get_path
from spack.util.naming import mod_to_class

_imported_compilers_module = 'spack.compilers'
_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
_flags_instance_vars = ['cflags', 'cppflags', 'cxxflags', 'fflags']
_other_instance_vars = ['modules', 'operating_system', 'environment',
@@ -38,8 +39,7 @@
_compiler_cache = {}

_compiler_to_pkg = {
    'clang': 'llvm+clang',
    'oneapi': 'intel-oneapi-compilers'
    'clang': 'llvm+clang'
}

@@ -469,17 +469,17 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
    """Given a compiler module name, get the corresponding Compiler class."""
    assert supported(compiler_name)
    assert(supported(compiler_name))

    # Hack to be able to call the compiler `apple-clang` while still
    # using a valid python name for the module
    submodule_name = compiler_name
    module_name = compiler_name
    if compiler_name == 'apple-clang':
        submodule_name = compiler_name.replace('-', '_')
        module_name = compiler_name.replace('-', '_')

    module_name = '.'.join(['spack', 'compilers', submodule_name])
    module_obj = __import__(module_name, fromlist=[None])
    cls = getattr(module_obj, mod_to_class(compiler_name))
    file_path = os.path.join(spack.paths.compilers_path, module_name + ".py")
    compiler_mod = simp.load_source(_imported_compilers_module, file_path)
    cls = getattr(compiler_mod, mod_to_class(compiler_name))

    # make a note of the name in the module so we can get to it easily.
    cls.name = compiler_name

@@ -159,11 +159,11 @@ def extract_version_from_output(cls, output):

        match = re.search(
            # Normal clang compiler versions are left as-is
            r'clang version ([^ )]+)-svn[~.\w\d-]*|'
            r'clang version ([^ )\n]+)-svn[~.\w\d-]*|'
            # Don't include hyphenated patch numbers in the version
            # (see https://github.com/spack/spack/pull/14365 for details)
            r'clang version ([^ )]+?)-[~.\w\d-]*|'
            r'clang version ([^ )]+)',
            r'clang version ([^ )\n]+?)-[~.\w\d-]*|'
            r'clang version ([^ )\n]+)',
            output
        )
        if match:

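A quick check of what the newline-excluding variant of this pattern extracts; the output string is a toy example and only the two relevant alternatives are reproduced:

# Toy check of the hyphen-stripping branch of the regex above.
import re

out = 'clang version 11.0.0-2ubuntu1\nTarget: x86_64'
match = re.search(
    r'clang version ([^ )\n]+?)-[~.\w\d-]*|'
    r'clang version ([^ )\n]+)',
    out
)
print(match.group(match.lastindex))  # -> 11.0.0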
@@ -34,9 +34,13 @@ class Fj(spack.compiler.Compiler):
    def verbose_flag(self):
        return "-v"

    @property
    def debug_flags(self):
        return "-g"

    @property
    def opt_flags(self):
        return ['-O', '-O0', '-O1', '-O2', '-O3', '-O4']
        return ['-O0', '-O1', '-O2', '-O3', '-Ofast']

    @property
    def openmp_flag(self):
@@ -54,6 +58,10 @@ def cxx11_flag(self):
    def cxx14_flag(self):
        return "-std=c++14"

    @property
    def cxx17_flag(self):
        return "-std=c++17"

    @property
    def c99_flag(self):
        return "-std=c99"

@@ -29,14 +29,13 @@ class Oneapi(Compiler):
    PrgEnv_compiler = 'oneapi'

    version_argument = '--version'
    version_regex = r'(?:(?:oneAPI DPC\+\+ Compiler)|(?:ifx \(IFORT\))) (\S+)'
    version_regex = r'\((?:IFORT|ICC)\)|DPC\+\+ [^ ]+ [^ ]+ [^ ]+ \(([^ ]+)\)'

    @property
    def verbose_flag(self):
        return "-v"

    required_libs = ['libirc', 'libifcore', 'libifcoremt', 'libirng',
                     'libsvml', 'libintlc', 'libimf']
    required_libs = ['libirc', 'libifcore', 'libifcoremt', 'libirng']

    @property
    def debug_flags(self):

@@ -253,7 +253,8 @@ def concretize_architecture(self, spec):
        if spec.architecture is None:
            spec.architecture = spack.spec.ArchSpec()

        if spec.architecture.concrete:
        if spec.architecture.platform and \
                (spec.architecture.os and spec.architecture.target):
            return False

        # Get platform of nearest spec with a platform, including spec
@@ -293,58 +294,22 @@ def concretize_architecture(self, spec):

        # Get the nearest spec with relevant platform and a target
        # Generally, same algorithm as finding os
        curr_target = None
        if spec.architecture.target:
            curr_target = spec.architecture.target
        if spec.architecture.target and spec.architecture.target_concrete:
            new_target = spec.architecture.target
        else:
            new_target_spec = find_spec(
                spec, lambda x: (x.architecture and
                                 x.architecture.platform == str(new_plat) and
                                 x.architecture.target and
                                 x.architecture.target != curr_target)
                                 x.architecture.target)
            )
            if new_target_spec:
                if curr_target:
                    # constrain one target by the other
                    new_target_arch = spack.spec.ArchSpec(
                        (None, None, new_target_spec.architecture.target))
                    curr_target_arch = spack.spec.ArchSpec(
                        (None, None, curr_target))
                    curr_target_arch.constrain(new_target_arch)
                    new_target = curr_target_arch.target
                else:
                    new_target = new_target_spec.architecture.target
                new_target = new_target_spec.architecture.target
            else:
                # To get default platform, consider package prefs
                if PackagePrefs.has_preferred_targets(spec.name):
                    new_target = self.target_from_package_preferences(spec)
                else:
                    new_target = new_plat.target('default_target')
                if curr_target:
                    # convert to ArchSpec to compare satisfaction
                    new_target_arch = spack.spec.ArchSpec(
                        (None, None, str(new_target)))
                    curr_target_arch = spack.spec.ArchSpec(
                        (None, None, str(curr_target)))

                    if not new_target_arch.satisfies(curr_target_arch):
                        # new_target is an incorrect guess based on preferences
                        # and/or default
                        valid_target_ranges = str(curr_target).split(',')
                        for target_range in valid_target_ranges:
                            t_min, t_sep, t_max = target_range.partition(':')
                            if not t_sep:
                                new_target = t_min
                                break
                            elif t_max:
                                new_target = t_max
                                break
                            elif t_min:
                                # TODO: something better than picking first
                                new_target = t_min
                                break

        # Construct new architecture, compute whether spec changed
        arch_spec = (str(new_plat), str(new_os), str(new_target))
@@ -419,7 +384,7 @@ def concretize_compiler(self, spec):
        """
        # Pass on concretizing the compiler if the target or operating system
        # is not yet determined
        if not spec.architecture.concrete:
        if not (spec.architecture.os and spec.architecture.target):
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
@@ -517,7 +482,7 @@ def concretize_compiler_flags(self, spec):
        """
        # Pass on concretizing the compiler flags if the target or operating
        # system is not set.
        if not spec.architecture.concrete:
        if not (spec.architecture.os and spec.architecture.target):
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
@@ -578,14 +543,10 @@ def adjust_target(self, spec):
            True if spec was modified, False otherwise
        """
        # To minimize the impact on performance this function will attempt
        # to adjust the target only at the very first call once necessary
        # information is set. It will just return False on subsequent calls.
        # The way this is achieved is by initializing a generator and making
        # this function return the next answer.
        if not (spec.architecture and spec.architecture.concrete):
            # Not ready, but keep going because we have work to do later
            return True

        # to adjust the target only at the very first call. It will just
        # return False on subsequent calls. The way this is achieved is by
        # initializing a generator and making this function return the next
        # answer.
        def _make_only_one_call(spec):
            yield self._adjust_target(spec)
            while True:
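The single-call trick above can be seen in isolation in this toy sketch; the names and return strings are invented:

# Toy version of the "answer once, then no-op" generator pattern above.
def _make_only_one_call():
    yield 'adjusted'       # real work happens only on the first call
    while True:
        yield 'no-op'      # every later call is answered without doing work

gen = _make_only_one_call()
print(next(gen))  # -> adjusted
print(next(gen))  # -> no-op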
@@ -623,10 +584,9 @@ def _adjust_target(self, spec):
        if PackagePrefs.has_preferred_targets(spec.name):
            default_target = self.target_from_package_preferences(spec)

        if current_target != default_target or (
                self.abstract_spec and
                self.abstract_spec.architecture and
                self.abstract_spec.architecture.concrete):
        if current_target != default_target or \
                (self.abstract_spec.architecture is not None and
                 self.abstract_spec.architecture.target is not None):
            return False

        try:
@@ -714,7 +674,7 @@ def _compiler_concretization_failure(compiler_spec, arch):
    raise UnavailableCompilerVersionError(compiler_spec, arch)


def concretize_specs_together(*abstract_specs, **kwargs):
def concretize_specs_together(*abstract_specs):
    """Given a number of specs as input, tries to concretize them together.

    Args:
@@ -724,24 +684,6 @@ def concretize_specs_together(*abstract_specs, **kwargs):
    Returns:
        List of concretized specs
    """
    if spack.config.get('config:concretizer') == 'original':
        return _concretize_specs_together_original(*abstract_specs, **kwargs)
    return _concretize_specs_together_new(*abstract_specs, **kwargs)


def _concretize_specs_together_new(*abstract_specs, **kwargs):
    import spack.solver.asp
    result = spack.solver.asp.solve(abstract_specs)

    if not result.satisfiable:
        result.print_cores()
        tty.die("Unsatisfiable spec.")

    opt, i, answer = min(result.answers)
    return [answer[s.name].copy() for s in abstract_specs]


def _concretize_specs_together_original(*abstract_specs, **kwargs):
def make_concretization_repository(abstract_specs):
    """Returns the path to a temporary repository created to contain
    a fake package that depends on all of the abstract specs.

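A minimal usage sketch of the function above, assuming it is reachable as `spack.concretize.concretize_specs_together`; the package names are arbitrary examples:

# Sketch only: co-concretizing two abstract specs.
import spack.concretize
import spack.spec

zlib, bzip2 = spack.concretize.concretize_specs_together(
    spack.spec.Spec('zlib'), spack.spec.Spec('bzip2'))
print(zlib.concrete, bzip2.concrete)  # -> True True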
@@ -29,7 +29,6 @@

"""
import collections
import contextlib
import copy
import functools
import os
@@ -49,7 +48,6 @@

import spack.paths
import spack.architecture
import spack.compilers
import spack.schema
import spack.schema.compilers
import spack.schema.mirrors
@@ -240,18 +238,11 @@ def get_section(self, section):
        #     }
        #   }
        # }

        # This bit ensures we have read the file and have
        # the raw data in memory
        if self._raw_data is None:
            self._raw_data = read_config_file(self.path, self.schema)
            if self._raw_data is None:
                return None

        # Here we know we have the raw data and ensure we
        # populate the sections dictionary, which may be
        # cleared by the clear() method
        if not self.sections:
            section_data = self._raw_data
            for key in self.yaml_path:
                if section_data is None:
@@ -260,7 +251,6 @@ def get_section(self, section):

            for section_key, data in section_data.items():
                self.sections[section_key] = {section_key: data}

        return self.sections.get(section, None)

    def _write_section(self, section):
@@ -361,10 +351,6 @@ def _write_section(self, section):
    def __repr__(self):
        return '<InternalConfigScope: %s>' % self.name

    def clear(self):
        # no cache to clear here.
        pass

    @staticmethod
    def _process_dict_keyname_overrides(data):
        """Turn a trailing `:' in a key name into an override attribute."""
@@ -817,6 +803,22 @@ def _config():
config = llnl.util.lang.Singleton(_config)


def replace_config(configuration):
    """Replace the current global configuration with the instance passed as
    argument.

    Args:
        configuration (Configuration): the new configuration to be used.

    Returns:
        The old configuration that has been removed
    """
    global config
    config.clear_caches(), configuration.clear_caches()
    old_config, config = config, configuration
    return old_config


def get(path, default=None, scope=None):
    """Module-level wrapper for ``Configuration.get()``."""
    return config.get(path, default, scope)
@@ -1131,55 +1133,6 @@ def ensure_latest_format_fn(section):
    return update_fn


@contextlib.contextmanager
def use_configuration(*scopes_or_paths):
    """Use the configuration scopes passed as arguments within the
    context manager.

    Args:
        *scopes_or_paths: scope objects or paths to be used

    Returns:
        Configuration object associated with the scopes passed as arguments
    """
    global config

    # Normalize input and construct a Configuration object
    configuration = _config_from(scopes_or_paths)
    config.clear_caches(), configuration.clear_caches()

    # Save and clear the current compiler cache
    saved_compiler_cache = spack.compilers._cache_config_file
    spack.compilers._cache_config_file = []

    saved_config, config = config, configuration

    yield configuration

    # Restore previous config files
    spack.compilers._cache_config_file = saved_compiler_cache
    config = saved_config

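A minimal sketch of how the context manager above is meant to be used; the directory path is invented and, per the assertion in `_config_from`, must exist:

# Sketch only: layering a throwaway configuration directory for one block.
import spack.config

with spack.config.use_configuration('/tmp/extra-config-scope') as cfg:
    print(cfg.get('config:checksum'))
# the previous global configuration is restored when the block exits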
@llnl.util.lang.memoized
def _config_from(scopes_or_paths):
    scopes = []
    for scope_or_path in scopes_or_paths:
        # If we have a config scope we are already done
        if isinstance(scope_or_path, ConfigScope):
            scopes.append(scope_or_path)
            continue

        # Otherwise we need to construct it
        path = os.path.normpath(scope_or_path)
        assert os.path.isdir(path), '"{0}" must be a directory'.format(path)
        name = os.path.basename(path)
        scopes.append(ConfigScope(name, path))

    configuration = Configuration(*scopes)
    return configuration


class ConfigError(SpackError):
    """Superclass for all Spack config related errors."""

@@ -28,11 +28,10 @@ class OpenMpi(Package):

"""

import collections
import functools
import os.path
import re
import sys

from six import string_types

import llnl.util.lang
@@ -48,13 +47,6 @@ class OpenMpi(Package):
from spack.resource import Resource
from spack.version import Version, VersionChecksumError


if sys.version_info >= (3, 3):
    from collections.abc import Sequence  # novm
else:
    from collections import Sequence


__all__ = []

#: These are variant names used by Spack internally; packages can't use them
@@ -210,7 +202,7 @@ class Foo(Package):

        if isinstance(dicts, string_types):
            dicts = (dicts, )
        if not isinstance(dicts, Sequence):
        if not isinstance(dicts, collections.Sequence):
            message = "dicts arg must be list, tuple, or string. Found {0}"
            raise TypeError(message.format(type(dicts)))
        # Add the dictionary names if not already there
@@ -251,7 +243,7 @@ def remove_directives(arg):

            # ...so if it is not a sequence make it so
            values = result
            if not isinstance(values, Sequence):
            if not isinstance(values, collections.Sequence):
                values = (values, )

            DirectiveMeta._directives_to_be_executed.extend(values)

@@ -685,7 +685,7 @@ def _read_manifest(self, f, raw_yaml=None):
            else:
                self.spec_lists[name] = user_specs

        spec_list = config_dict(self.yaml).get(user_speclist_name, [])
        spec_list = config_dict(self.yaml).get(user_speclist_name)
        user_specs = SpecList(user_speclist_name, [s for s in spec_list if s],
                              self.spec_lists.copy())
        self.spec_lists[user_speclist_name] = user_specs
@@ -707,11 +707,10 @@ def _read_manifest(self, f, raw_yaml=None):
            self.views = {}
        # Retrieve the current concretization strategy
        configuration = config_dict(self.yaml)
        # default concretization to separately
        self.concretization = configuration.get('concretization', 'separately')
        self.concretization = configuration.get('concretization')

        # Retrieve dev-build packages:
        self.dev_specs = configuration.get('develop', {})
        self.dev_specs = configuration['develop']
        for name, entry in self.dev_specs.items():
            # spec must include a concrete version
            assert Spec(entry['spec']).version.concrete
@@ -1387,21 +1386,6 @@ def _install_log_links(self, spec):
                os.remove(build_log_link)
            os.symlink(spec.package.build_log_path, build_log_link)

    def uninstalled_specs(self):
        """Return a list of all uninstalled (and non-dev) specs."""
        # Do the installed check across all specs within a single
        # DB read transaction to reduce time spent in lock acquisition.
        uninstalled_specs = []
        with spack.store.db.read_transaction():
            for concretized_hash in self.concretized_order:
                spec = self.specs_by_hash[concretized_hash]
                if not spec.package.installed or (
                        spec.satisfies('dev_path=*') or
                        spec.satisfies('^dev_path=*')
                ):
                    uninstalled_specs.append(spec)
        return uninstalled_specs

    def install_all(self, args=None, **install_args):
        """Install all concretized specs in an environment.

@@ -1412,13 +1396,22 @@ def install_all(self, args=None, **install_args):
            args (Namespace): argparse namespace with command arguments
            install_args (dict): keyword install arguments
        """
        tty.debug('Assessing installation status of environment packages')
        # If "spack install" is invoked repeatedly for a large environment
        # where all specs are already installed, the operation can take
        # a large amount of time due to repeatedly acquiring and releasing
        # locks; this does an initial check across all specs within a single
        # DB read transaction to reduce time spent in this case.
        specs_to_install = self.uninstalled_specs()
        tty.debug('Assessing installation status of environment packages')
        specs_to_install = []
        with spack.store.db.read_transaction():
            for concretized_hash in self.concretized_order:
                spec = self.specs_by_hash[concretized_hash]
                if not spec.package.installed or (
                        spec.satisfies('dev_path=*') or
                        spec.satisfies('^dev_path=*')
                ):
                    # If it's a dev build it could need to be reinstalled
                    specs_to_install.append(spec)

        if not specs_to_install:
            tty.msg('All of the packages are already installed')

@@ -1505,67 +1498,6 @@ def concretized_specs(self):
        for s, h in zip(self.concretized_user_specs, self.concretized_order):
            yield (s, self.specs_by_hash[h])

    def matching_spec(self, spec):
        """
        Given a spec (likely not concretized), find a matching concretized
        spec in the environment.

        The matching spec does not have to be installed in the environment,
        but must be concrete (specs added with `spack add` without an
        intervening `spack concretize` will not be matched).

        If there is a single root spec that matches the provided spec or a
        single dependency spec that matches the provided spec, then the
        concretized instance of that spec will be returned.

        If multiple root specs match the provided spec, or no root specs match
        and multiple dependency specs match, then this raises an error
        and reports all matching specs.
        """
        # Root specs will be keyed by concrete spec, value abstract
        # Dependency-only specs will have value None
        matches = {}

        for user_spec, concretized_user_spec in self.concretized_specs():
            if concretized_user_spec.satisfies(spec):
                matches[concretized_user_spec] = user_spec
            for dep_spec in concretized_user_spec.traverse(root=False):
                if dep_spec.satisfies(spec):
                    # Don't overwrite the abstract spec if present
                    # If not present already, set to None
                    matches[dep_spec] = matches.get(dep_spec, None)

        if not matches:
            return None
        elif len(matches) == 1:
            return list(matches.keys())[0]

        root_matches = dict((concrete, abstract)
                            for concrete, abstract in matches.items()
                            if abstract)

        if len(root_matches) == 1:
            return root_matches[0][1]

        # More than one spec matched, and either multiple roots matched or
        # none of the matches were roots
        # If multiple root specs match, it is assumed that the abstract
        # spec will most-succinctly summarize the difference between them
        # (and the user can enter one of these to disambiguate)
        match_strings = []
        fmt_str = '{hash:7} ' + spack.spec.default_format
        for concrete, abstract in matches.items():
            if abstract:
                s = 'Root spec %s\n %s' % (abstract, concrete.format(fmt_str))
            else:
                s = 'Dependency spec\n %s' % concrete.format(fmt_str)
            match_strings.append(s)
        matches_str = '\n'.join(match_strings)

        msg = ("{0} matches multiple specs in the environment {1}: \n"
               "{2}".format(str(spec), self.name, matches_str))
        raise SpackEnvironmentError(msg)

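A rough usage sketch of the method above, mirroring how `matching_spec_from_env` consumes it earlier in this diff; the spec name is arbitrary and `args` is assumed to come from argparse:

# Sketch only: resolving an ambiguous user spec against an environment.
env = ev.get_env(args, 'location')
match = env.matching_spec(Spec('hdf5'))
if match is None:
    tty.die('hdf5 is not concretized in this environment')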
    def removed_specs(self):
        """Tuples of (user spec, concrete spec) for all specs that will be
        removed on next concretize."""

@@ -324,8 +324,6 @@ def _existing_url(self, url):
        # Telling curl to fetch the first byte (-r 0-0) is supposed to be
        # portable.
        curl_args = ['--stderr', '-', '-s', '-f', '-r', '0-0', url]
        if not spack.config.get('config:verify_ssl'):
            curl_args.append('-k')
        _ = curl(*curl_args, fail_on_error=False, output=os.devnull)
        return curl.returncode == 0

@@ -8,7 +8,6 @@
import re
import shutil
import sys
from ordereddict_backport import OrderedDict

from llnl.util.link_tree import LinkTree, MergeConflictError
from llnl.util import tty
@@ -66,35 +65,32 @@ def view_copy(src, dst, view, spec=None):
        # Not metadata, we have to relocate it

        # Get information on where to relocate from/to

        # This is vestigial code for the *old* location of sbang. Previously,
        # sbang was a bash script, and it lived in the spack prefix. It is
        # now a POSIX script that lives in the install prefix. Old packages
        # will have the old sbang location in their shebangs.
        # TODO: Not sure which one to use...
        import spack.hooks.sbang as sbang
        orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(spack.paths.spack_root)
        new_sbang = sbang.sbang_shebang_line()

        prefix_to_projection = OrderedDict({
            spec.prefix: view.get_projection_for_spec(spec),
            spack.paths.spack_root: view._root})

        for dep in spec.traverse():
            prefix_to_projection[dep.prefix] = \
                view.get_projection_for_spec(dep)
        prefix_to_projection = dict(
            (dep.prefix, view.get_projection_for_spec(dep))
            for dep in spec.traverse()
        )

        if spack.relocate.is_binary(dst):
            # relocate binaries
            spack.relocate.relocate_text_bin(
                binaries=[dst],
                prefixes=prefix_to_projection
                orig_install_prefix=spec.prefix,
                new_install_prefix=view.get_projection_for_spec(spec),
                orig_spack=spack.paths.spack_root,
                new_spack=view._root,
                new_prefixes=prefix_to_projection
            )
        else:
            prefix_to_projection[spack.store.layout.root] = view._root
            prefix_to_projection[orig_sbang] = new_sbang
            # relocate text
            spack.relocate.relocate_text(
                files=[dst],
                prefixes=prefix_to_projection
                orig_layout_root=spack.store.layout.root,
                new_layout_root=view._root,
                orig_install_prefix=spec.prefix,
                new_install_prefix=view.get_projection_for_spec(spec),
                orig_spack=spack.paths.spack_root,
                new_spack=view._root,
                new_prefixes=prefix_to_projection
            )


@@ -2,8 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This package contains modules with hooks for various stages in the

"""This package contains modules with hooks for various stages in the
Spack install process. You can add modules here and they'll be
executed by package at various times during the package lifecycle.

@@ -21,55 +21,46 @@
    systems (e.g. modules, lmod, etc.) or to add other custom
    features.
"""
import llnl.util.lang
import os.path

import spack.paths
import spack.util.imp as simp
from llnl.util.lang import memoized, list_modules


class _HookRunner(object):
    #: Stores all hooks on first call, shared among
    #: all HookRunner objects
    _hooks = None
@memoized
def all_hook_modules():
    modules = []
    for name in list_modules(spack.paths.hooks_path):
        mod_name = __name__ + '.' + name
        path = os.path.join(spack.paths.hooks_path, name) + ".py"
        mod = simp.load_source(mod_name, path)

        if name == 'write_install_manifest':
            last_mod = mod
        else:
            modules.append(mod)

    # put `write_install_manifest` as the last hook to run
    modules.append(last_mod)
    return modules


class HookRunner(object):

    def __init__(self, hook_name):
        self.hook_name = hook_name

    @classmethod
    def _populate_hooks(cls):
        # Lazily populate the list of hooks
        cls._hooks = []
        relative_names = list(llnl.util.lang.list_modules(
            spack.paths.hooks_path
        ))

        # We want this hook to be the last registered
        relative_names.sort(key=lambda x: x == 'write_install_manifest')
        assert relative_names[-1] == 'write_install_manifest'

        for name in relative_names:
            module_name = __name__ + '.' + name
            # When importing a module from a package, __import__('A.B', ...)
            # returns package A when 'fromlist' is empty. If fromlist is not
            # empty it returns the submodule B instead
            # See: https://stackoverflow.com/a/2725668/771663
            module_obj = __import__(module_name, fromlist=[None])
            cls._hooks.append((module_name, module_obj))

    @property
    def hooks(self):
        if not self._hooks:
            self._populate_hooks()
        return self._hooks

    def __call__(self, *args, **kwargs):
        for _, module in self.hooks:
        for module in all_hook_modules():
            if hasattr(module, self.hook_name):
                hook = getattr(module, self.hook_name)
                if hasattr(hook, '__call__'):
                    hook(*args, **kwargs)


pre_install = _HookRunner('pre_install')
post_install = _HookRunner('post_install')
pre_install = HookRunner('pre_install')
post_install = HookRunner('post_install')

pre_uninstall = _HookRunner('pre_uninstall')
post_uninstall = _HookRunner('post_uninstall')
pre_uninstall = HookRunner('pre_uninstall')
post_uninstall = HookRunner('post_uninstall')

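As a rough usage sketch: firing one of the runners defined above invokes the matching function in every module under spack/hooks/. The `spec` variable is assumed to be a concrete Spec supplied by the caller.

# Sketch only: each hooks module that defines a callable post_install(...)
# gets called in registration order when the runner is invoked.
import spack.hooks
spack.hooks.post_install(spec)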
@@ -182,7 +182,7 @@ def _do_fake_install(pkg):
    dump_packages(pkg.spec, packages_dir)


def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
def _packages_needed_to_bootstrap_compiler(pkg):
    """
    Return a list of packages required to bootstrap `pkg`s compiler

@@ -190,11 +190,7 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
    matches the package spec.

    Args:
        compiler (CompilerSpec): the compiler to bootstrap
        architecture (ArchSpec): the architecture for which to bootstrap the
            compiler
        pkgs (list of PackageBase): the packages that may need their compiler
            installed
        pkg (Package): the package that may need its compiler installed

    Return:
        (list) list of tuples, (PackageBase, bool), for concretized compiler-
@@ -203,27 +199,21 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
        (``True``) or one of its dependencies (``False``). The list
        will be empty if there are no compilers.
    """
    tty.debug('Bootstrapping {0} compiler'.format(compiler))
    tty.debug('Bootstrapping {0} compiler for {1}'
              .format(pkg.spec.compiler, package_id(pkg)))
    compilers = spack.compilers.compilers_for_spec(
        compiler, arch_spec=architecture)
        pkg.spec.compiler, arch_spec=pkg.spec.architecture)
    if compilers:
        return []

    dep = spack.compilers.pkg_spec_for_compiler(compiler)

    # Set the architecture for the compiler package in a way that allows the
    # concretizer to back off if needed for the older bootstrapping compiler
    dep.constrain('platform=%s' % str(architecture.platform))
    dep.constrain('os=%s' % str(architecture.os))
    dep.constrain('target=%s:' %
                  architecture.target.microarchitecture.family.name)
    dep = spack.compilers.pkg_spec_for_compiler(pkg.spec.compiler)
    dep.architecture = pkg.spec.architecture
    # concrete CompilerSpec has less info than concrete Spec
    # concretize as Spec to add that information
    dep.concretize()
    # mark compiler as depended-on by the packages that use it
    for pkg in pkgs:
        dep._dependents[pkg.name] = spack.spec.DependencySpec(
            pkg.spec, dep, ('build',))
    # mark compiler as depended-on by the package that uses it
    dep._dependents[pkg.name] = spack.spec.DependencySpec(
        pkg.spec, dep, ('build',))
    packages = [(s.package, False) for
                s in dep.traverse(order='post', root=False)]
    packages.append((dep.package, True))
@@ -657,21 +647,17 @@ def __str__(self):
        return '{0}: {1}; {2}; {3}; {4}'.format(
            self.pid, requests, tasks, installed, failed)

    def _add_bootstrap_compilers(
            self, compiler, architecture, pkgs, request, all_deps):
    def _add_bootstrap_compilers(self, pkg, request, all_deps):
        """
        Add bootstrap compilers and dependencies to the build queue.

        Args:
            compiler: the compiler to bootstrap
            architecture: the architecture for which to bootstrap the compiler
            pkgs (PackageBase): the package with possible compiler dependencies
            pkg (PackageBase): the package with possible compiler dependencies
            request (BuildRequest): the associated install request
            all_deps (defaultdict(set)): dictionary of all dependencies and
                associated dependents
        """
        packages = _packages_needed_to_bootstrap_compiler(
            compiler, architecture, pkgs)
        packages = _packages_needed_to_bootstrap_compiler(pkg)
        for (comp_pkg, is_compiler) in packages:
            if package_id(comp_pkg) not in self.build_tasks:
                self._add_init_task(comp_pkg, request, is_compiler, all_deps)
@@ -1011,42 +997,14 @@ def _add_tasks(self, request, all_deps):
            'config:install_missing_compilers', False)

        install_deps = request.install_args.get('install_deps')
        # Bootstrap compilers first
        if install_deps and install_compilers:
            packages_per_compiler = {}

            for dep in request.traverse_dependencies():
                dep_pkg = dep.package
                compiler = dep_pkg.spec.compiler
                arch = dep_pkg.spec.architecture
                if compiler not in packages_per_compiler:
                    packages_per_compiler[compiler] = {}

                if arch not in packages_per_compiler[compiler]:
                    packages_per_compiler[compiler][arch] = []

                packages_per_compiler[compiler][arch].append(dep_pkg)

            compiler = request.pkg.spec.compiler
            arch = request.pkg.spec.architecture

            if compiler not in packages_per_compiler:
                packages_per_compiler[compiler] = {}

            if arch not in packages_per_compiler[compiler]:
                packages_per_compiler[compiler][arch] = []

            packages_per_compiler[compiler][arch].append(request.pkg)

            for compiler, archs in packages_per_compiler.items():
                for arch, packages in archs.items():
                    self._add_bootstrap_compilers(
                        compiler, arch, packages, request, all_deps)

        if install_deps:
            for dep in request.traverse_dependencies():
                dep_pkg = dep.package

                # First push any missing compilers (if requested)
                if install_compilers:
                    self._add_bootstrap_compilers(dep_pkg, request, all_deps)

                dep_id = package_id(dep_pkg)
                if dep_id not in self.build_tasks:
                    self._add_init_task(dep_pkg, request, False, all_deps)
@@ -1056,9 +1014,13 @@ def _add_tasks(self, request, all_deps):
                # of the spec.
                spack.store.db.clear_failure(dep, force=False)

            # Push any missing compilers (if requested) as part of the
            # package dependencies.
            if install_compilers:
                self._add_bootstrap_compilers(request.pkg, request, all_deps)

        install_package = request.install_args.get('install_package')
        if install_package and request.pkg_id not in self.build_tasks:

            # Be sure to clear any previous failure
            spack.store.db.clear_failure(request.spec, force=True)

@@ -1790,11 +1752,6 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
        # to support tracking of parallel, multi-spec, environment installs.
        self.dependents = set(get_dependent_ids(self.pkg.spec))

        tty.debug(
            'Pkg id {0} has the following dependents:'.format(self.pkg_id))
        for dep_id in self.dependents:
            tty.debug('- {0}'.format(dep_id))

        # Set of dependencies
        #
        # Be consistent wrt use of dependents and dependencies. That is,
@@ -1815,10 +1772,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
                    arch_spec=arch_spec):
                # The compiler is in the queue, identify it as dependency
                dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
                dep.constrain('platform=%s' % str(arch_spec.platform))
                dep.constrain('os=%s' % str(arch_spec.os))
                dep.constrain('target=%s:' %
                              arch_spec.target.microarchitecture.family.name)
                dep.architecture = arch_spec
                dep.concretize()
                dep_id = package_id(dep.package)
                self.dependencies.add(dep_id)

@@ -2284,13 +2284,8 @@ def do_activate(self, view=None, with_dependencies=True, verbose=True):

        extensions_layout = view.extensions_layout

        try:
            extensions_layout.check_extension_conflict(
                self.extendee_spec, self.spec)
        except spack.directory_layout.ExtensionAlreadyInstalledError as e:
            # already installed, let caller know
            tty.msg(e.message)
            return
        extensions_layout.check_extension_conflict(
            self.extendee_spec, self.spec)

        # Activate any package dependencies that are also extensions.
        if with_dependencies:

@@ -50,8 +50,7 @@

#: User configuration location
user_config_path = os.path.expanduser('~/.spack')
user_bootstrap_path = os.path.join(user_config_path, 'bootstrap')
user_bootstrap_store = os.path.join(user_bootstrap_path, 'store')


opt_path = os.path.join(prefix, "opt")
etc_path = os.path.join(prefix, "etc")

@@ -20,9 +20,7 @@
from spack.build_systems.autotools import AutotoolsPackage
from spack.build_systems.cmake import CMakePackage
from spack.build_systems.cuda import CudaPackage
from spack.build_systems.oneapi import IntelOneApiPackage
from spack.build_systems.oneapi import IntelOneApiLibraryPackage
from spack.build_systems.rocm import ROCmPackage
from spack.build_systems.hip import HipPackage
from spack.build_systems.qmake import QMakePackage
from spack.build_systems.maven import MavenPackage
from spack.build_systems.scons import SConsPackage

||||
@@ -6,8 +6,6 @@
import platform
import re
import shutil
import multiprocessing.pool
from ordereddict_backport import OrderedDict

import llnl.util.lang
import llnl.util.tty as tty
@@ -451,26 +449,36 @@ def needs_text_relocation(m_type, m_subtype):
    return m_type == 'text'


def _replace_prefix_text(filename, compiled_prefixes):
def _replace_prefix_text(filename, old_dir, new_dir):
    """Replace all the occurrences of the old install prefix with a
    new install prefix in text files that are utf-8 encoded.

    Args:
        filename (str): target text file (utf-8 encoded)
        compiled_prefixes (OrderedDict): OrderedDictionary where the keys are
            precompiled regexes of the old prefixes and the values are the
            new prefixes (utf-8 encoded)
        old_dir (str): directory to be searched in the file
        new_dir (str): substitute for the old directory
    """
    # TODO: cache regexes globally to speedup computation
    with open(filename, 'rb+') as f:
        data = f.read()
        f.seek(0)
        for orig_prefix_rexp, new_bytes in compiled_prefixes.items():
            data = orig_prefix_rexp.sub(new_bytes, data)
        f.write(data)
        # Replace old_dir with new_dir if it appears at the beginning of a path
        # Negative lookbehind for a character legal in a path
        # Then a match group for any characters legal in a compiler flag
        # Then old_dir
        # Then characters legal in a path
        # Ensures we only match the old_dir if it's preceded by a flag or by
        # characters not legal in a path, but not if it's preceded by other
        # components of a path.
        old_bytes = old_dir.encode('utf-8')
        pat = b'(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)' % old_bytes
        repl = b'\\1%s\\2' % new_dir.encode('utf-8')
        ndata = re.sub(pat, repl, data)
        f.write(ndata)
        f.truncate()

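For intuition, here is a toy run of the lookbehind pattern used above; the paths and data are invented:

# Toy demonstration of the prefix-matching pattern above.
import re

old, new = b'/opt/spack/old', b'/opt/new'
pat = b'(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)' % old
data = b'-I/opt/spack/old/include -L/opt/spack/old/lib'
print(re.sub(pat, b'\\1%s\\2' % new, data))
# -> b'-I/opt/new/include -L/opt/new/lib'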
def _replace_prefix_bin(filename, byte_prefixes):
def _replace_prefix_bin(filename, old_dir, new_dir):
    """Replace all the occurrences of the old install prefix with a
    new install prefix in binary files.

@@ -479,34 +487,33 @@ def _replace_prefix_bin(filename, byte_prefixes):

    Args:
        filename (str): target binary file
        byte_prefixes (OrderedDict): OrderedDictionary where the keys are
            precompiled regexes of the old prefixes and the values are the
            new prefixes (utf-8 encoded)
        old_dir (str): directory to be searched in the file
        new_dir (str): substitute for the old directory
    """
    def replace(match):
        occurrences = match.group().count(old_dir.encode('utf-8'))
        olen = len(old_dir.encode('utf-8'))
        nlen = len(new_dir.encode('utf-8'))
        padding = (olen - nlen) * occurrences
        if padding < 0:
            return data
        return match.group().replace(
            old_dir.encode('utf-8'),
            os.sep.encode('utf-8') * padding + new_dir.encode('utf-8')
        )

    with open(filename, 'rb+') as f:
        data = f.read()
        f.seek(0)
        for orig_bytes, new_bytes in byte_prefixes.items():
            original_data_len = len(data)
            # Skip this hassle if not found
            if orig_bytes not in data:
                continue
            # We only care about this problem if we are about to replace
            length_compatible = len(new_bytes) <= len(orig_bytes)
            if not length_compatible:
                raise BinaryTextReplaceError(orig_bytes, new_bytes)
            pad_length = len(orig_bytes) - len(new_bytes)
            padding = os.sep * pad_length
            padding = padding.encode('utf-8')
            data = data.replace(orig_bytes, new_bytes + padding)
            # Really needs to be the same length
            if not len(data) == original_data_len:
                print('Length of pad:', pad_length, 'should be', len(padding))
                print(new_bytes, 'was to replace', orig_bytes)
                raise BinaryStringReplacementError(
                    filename, original_data_len, len(data))
        f.write(data)
        original_data_len = len(data)
        pat = re.compile(old_dir.encode('utf-8'))
        if not pat.search(data):
            return
        ndata = pat.sub(replace, data)
        if not len(ndata) == original_data_len:
            raise BinaryStringReplacementError(
                filename, original_data_len, len(ndata))
        f.write(ndata)
        f.truncate()

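A tiny worked example of the padding invariant enforced above; the byte strings are invented. As in the `replace()` helper, the shorter new prefix is padded with `os.sep` so the rewritten null-terminated string keeps its original length:

# Toy illustration of the length-preserving padding rule.
import os

old = b'/spack/opt/very/long/prefix'
new = b'/opt/short'
padded = os.sep.encode('utf-8') * (len(old) - len(new)) + new
assert len(padded) == len(old)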
@@ -779,88 +786,86 @@ def relocate_links(links, orig_layout_root,
|
||||
tty.warn(msg.format(link_target, abs_link, new_install_prefix))
|
||||
|
||||
|
||||
def relocate_text(files, prefixes, concurrency=32):
|
||||
"""Relocate text file from the original installation prefix to the
|
||||
new prefix.
|
||||
def relocate_text(
|
||||
files, orig_layout_root, new_layout_root, orig_install_prefix,
|
||||
new_install_prefix, orig_spack, new_spack, new_prefixes
|
||||
):
|
||||
"""Relocate text file from the original ``install_tree`` to the new one.
|
||||
|
||||
Relocation also affects the the path in Spack's sbang script.
|
||||
This also handles relocating Spack's sbang scripts to point at the
|
||||
new install tree.
|
||||
|
||||
Args:
|
||||
files (list): text files to be relocated
|
||||
orig_layout_root (str): original layout root
|
||||
new_layout_root (str): new layout root
|
||||
orig_install_prefix (str): install prefix of the original installation
|
||||
new_install_prefix (str): install prefix where we want to relocate
|
||||
orig_spack (str): path to the original Spack
|
||||
new_spack (str): path to the new Spack
|
||||
new_prefixes (dict): dictionary that maps the original prefixes to
|
||||
where they should be relocated
|
||||
|
||||
Args:
|
||||
files (list): Text files to be relocated
|
||||
prefixes (OrderedDict): String prefixes which need to be changed
|
||||
concurrency (int): Preferred degree of parallelism
|
||||
"""
|
||||
# TODO: reduce the number of arguments (8 seems too much)
|
||||
|
||||
# This now needs to be handled by the caller in all cases
|
||||
# orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
|
||||
# new_sbang = '#!/bin/bash {0}/bin/sbang'.format(new_spack)
|
||||
|
||||
compiled_prefixes = OrderedDict({})
|
||||
|
||||
for orig_prefix, new_prefix in prefixes.items():
|
||||
if orig_prefix != new_prefix:
|
||||
orig_bytes = orig_prefix.encode('utf-8')
|
||||
orig_prefix_rexp = re.compile(
|
||||
b'(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)' % orig_bytes)
|
||||
new_bytes = b'\\1%s\\2' % new_prefix.encode('utf-8')
|
||||
compiled_prefixes[orig_prefix_rexp] = new_bytes
|
||||
# This is vestigial code for the *old* location of sbang. Previously,
|
||||
# sbang was a bash script, and it lived in the spack prefix. It is
|
||||
# now a POSIX script that lives in the install prefix. Old packages
|
||||
# will have the old sbang location in their shebangs.
|
||||
import spack.hooks.sbang as sbang
|
||||
orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
|
||||
new_sbang = sbang.sbang_shebang_line()
|
||||
|
||||
# Do relocations on text that refers to the install tree
|
||||
# multiprocesing.ThreadPool.map requires single argument
|
||||
|
||||
args = []
|
||||
for filename in files:
|
||||
args.append((filename, compiled_prefixes))
|
||||
_replace_prefix_text(filename, orig_install_prefix, new_install_prefix)
|
||||
for orig_dep_prefix, new_dep_prefix in new_prefixes.items():
|
||||
_replace_prefix_text(filename, orig_dep_prefix, new_dep_prefix)
|
||||
_replace_prefix_text(filename, orig_layout_root, new_layout_root)
|
||||
|
||||
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
|
||||
try:
|
||||
tp.map(llnl.util.lang.star(_replace_prefix_text), args)
|
||||
finally:
|
||||
tp.terminate()
|
||||
tp.join()
|
||||
# Point old packages at the new sbang location. Packages that
|
||||
# already use the new sbang location will already have been
|
||||
# handled by the prior call to _replace_prefix_text
|
||||
_replace_prefix_text(filename, orig_sbang, new_sbang)
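A hedged usage sketch of the newer two-argument form above (paths are illustrative):

from collections import OrderedDict

import spack.relocate

# Order matters: longer, more specific prefixes should come first.
prefixes = OrderedDict([
    ('/old/opt/spack/zlib-1.2.11', '/new/opt/spack/zlib-1.2.11'),
    ('/old/opt/spack', '/new/opt/spack'),
])
spack.relocate.relocate_text(['/new/opt/spack/zlib-1.2.11/bin/zlib-config'],
                             prefixes, concurrency=8)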


def relocate_text_bin(binaries, prefixes, concurrency=32):
def relocate_text_bin(
        binaries, orig_install_prefix, new_install_prefix,
        orig_spack, new_spack, new_prefixes
):
    """Replace null terminated path strings hard coded into binaries.

    The new install prefix must be shorter than the original one.

    Args:
        binaries (list): binaries to be relocated
        prefixes (OrderedDict): String prefixes which need to be changed.
        concurrency (int): Desired degree of parallelism.
        orig_install_prefix (str): install prefix of the original installation
        new_install_prefix (str): install prefix where we want to relocate
        orig_spack (str): path to the original Spack
        new_spack (str): path to the new Spack
        new_prefixes (dict): dictionary that maps the original prefixes to
            where they should be relocated

    Raises:
        BinaryTextReplaceError: when the new path is longer than the old path
    """
    byte_prefixes = OrderedDict({})

    for orig_prefix, new_prefix in prefixes.items():
        if orig_prefix != new_prefix:
            if isinstance(orig_prefix, bytes):
                orig_bytes = orig_prefix
            else:
                orig_bytes = orig_prefix.encode('utf-8')
            if isinstance(new_prefix, bytes):
                new_bytes = new_prefix
            else:
                new_bytes = new_prefix.encode('utf-8')
            byte_prefixes[orig_bytes] = new_bytes

    # Do relocations on text in binaries that refers to the install tree
    # multiprocessing.ThreadPool.map requires a single argument
    args = []
    # Raise if the new install prefix is longer than the
    # original one, since it means we can't change the original
    # binary to relocate it
    new_prefix_is_shorter = len(new_install_prefix) <= len(orig_install_prefix)
    if not new_prefix_is_shorter and len(binaries) > 0:
        raise BinaryTextReplaceError(orig_install_prefix, new_install_prefix)

    for binary in binaries:
        args.append((binary, byte_prefixes))
        for old_dep_prefix, new_dep_prefix in new_prefixes.items():
            if len(new_dep_prefix) <= len(old_dep_prefix):
                _replace_prefix_bin(binary, old_dep_prefix, new_dep_prefix)
        _replace_prefix_bin(binary, orig_install_prefix, new_install_prefix)

    tp = multiprocessing.pool.ThreadPool(processes=concurrency)

    try:
        tp.map(llnl.util.lang.star(_replace_prefix_bin), args)
    finally:
        tp.terminate()
        tp.join()
    # Note: Replacement of spack directory should not be done. This causes
    # an incorrect replacement path in the case where the install root is a
    # subdirectory of the spack directory.
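Because binaries cannot grow, callers typically guarantee up front that every mapped prefix is equal or shorter, for example by padding the new root with separators. A small sketch of that convention (paths illustrative):

import os
from collections import OrderedDict

old_root = '/old/opt/spack'
new_root = '/new'
# Pad the new root so the mapped prefix never gets longer.
padded_new_root = new_root + os.sep * (len(old_root) - len(new_root))

byte_prefixes = OrderedDict([
    (old_root.encode('utf-8'), padded_new_root.encode('utf-8')),
])
for old, new in byte_prefixes.items():
    assert len(new) <= len(old)  # mirrors the length check above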


def is_relocatable(spec):

@@ -914,12 +914,8 @@ def _read_config(self):
    @autospec
    def get(self, spec):
        """Returns the package associated with the supplied spec."""
        # NOTE: we only check whether the package is None here, not whether it
        # actually exists, because we have to load it anyway, and that ends up
        # checking for existence. We avoid constructing FastPackageChecker,
        # which will stat all packages.
        if spec.name is None:
            raise UnknownPackageError(None, self)
        if not self.exists(spec.name):
            raise UnknownPackageError(spec.name)

        if spec.namespace and spec.namespace != self.namespace:
            raise UnknownPackageError(spec.name, self.namespace)
@@ -1064,16 +1060,7 @@ def all_package_classes(self):

    def exists(self, pkg_name):
        """Whether a package with the supplied name exists."""
        if pkg_name is None:
            return False

        # if the FastPackageChecker is already constructed, use it
        if self._fast_package_checker:
            return pkg_name in self._pkg_checker

        # if not, check for the package.py file
        path = self.filename_for_package_name(pkg_name)
        return os.path.exists(path)
        return pkg_name in self._pkg_checker

    def last_mtime(self):
        """Time a package file in this repo was last updated."""
@@ -1268,6 +1255,23 @@ def set_path(repo):
    return append


@contextlib.contextmanager
def swap(repo_path):
    """Temporarily use another RepoPath."""
    global path

    # swap out _path for repo_path
    saved = path
    remove_from_meta = set_path(repo_path)

    yield

    # restore _path and sys.meta_path
    if remove_from_meta:
        sys.meta_path.remove(repo_path)
    path = saved


@contextlib.contextmanager
def additional_repository(repository):
    """Temporarily adds a repository to the default one.

@@ -1280,34 +1284,6 @@ def additional_repository(repository):
    path.remove(repository)


@contextlib.contextmanager
def use_repositories(*paths_and_repos):
    """Use the repositories passed as arguments within the context manager.

    Args:
        *paths_and_repos: paths to the repositories to be used, or
            already constructed Repo objects

    Returns:
        Corresponding RepoPath object
    """
    global path

    # Construct a temporary RepoPath object from the arguments
    temporary_repositories = RepoPath(*paths_and_repos)

    # Swap the current repository out
    saved = path
    remove_from_meta = set_path(temporary_repositories)

    yield temporary_repositories

    # Restore _path and sys.meta_path
    if remove_from_meta:
        sys.meta_path.remove(temporary_repositories)
    path = saved
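A usage sketch for the new context manager (repository path and package name are illustrative):

import spack.repo

with spack.repo.use_repositories('/path/to/my/repo') as repo_path:
    # repo_path is the temporary RepoPath that backs spack.repo.path here.
    print(repo_path.exists('mypackage'))
# On exit, the previously configured repositories are restored.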


class RepoError(spack.error.SpackError):
    """Superclass for repository-related errors."""

@@ -1340,7 +1316,7 @@ def __init__(self, name, repo=None):
        long_msg = None
        if name:
            if repo:
                msg = "Package '{0}' not found in repository '{1.root}'"
                msg = "Package '{0}' not found in repository '{1}'"
                msg = msg.format(name, repo)
            else:
                msg = "Package '{0}' not found.".format(name)

@@ -4,6 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains jsonschema files for all of Spack's YAML formats."""

import copy
import re

import six

import llnl.util.lang
@@ -15,6 +18,45 @@
# and increases the start-up time
def _make_validator():
    import jsonschema
    _validate_properties = jsonschema.Draft4Validator.VALIDATORS["properties"]
    _validate_pattern_properties = jsonschema.Draft4Validator.VALIDATORS[
        "patternProperties"
    ]

    def _set_defaults(validator, properties, instance, schema):
        """Adds support for the 'default' attribute in 'properties'.

        ``jsonschema`` does not handle this out of the box -- it only
        validates. This allows us to set default values for configs
        where certain fields are `None` b/c they're deleted or
        commented out.
        """
        for property, subschema in six.iteritems(properties):
            if "default" in subschema:
                instance.setdefault(
                    property, copy.deepcopy(subschema["default"]))
        for err in _validate_properties(
                validator, properties, instance, schema):
            yield err

    def _set_pp_defaults(validator, properties, instance, schema):
        """Adds support for the 'default' attribute in 'patternProperties'.

        ``jsonschema`` does not handle this out of the box -- it only
        validates. This allows us to set default values for configs
        where certain fields are `None` b/c they're deleted or
        commented out.
        """
        for property, subschema in six.iteritems(properties):
            if "default" in subschema:
                if isinstance(instance, dict):
                    for key, val in six.iteritems(instance):
                        if re.match(property, key) and val is None:
                            instance[key] = copy.deepcopy(subschema["default"])

        for err in _validate_pattern_properties(
                validator, properties, instance, schema):
            yield err

    def _validate_spec(validator, is_spec, instance, schema):
        """Check if the attributes on instance are valid specs."""
@@ -59,6 +101,8 @@ def _deprecated_properties(validator, deprecated, instance, schema):
    return jsonschema.validators.extend(
        jsonschema.Draft4Validator, {
            "validate_spec": _validate_spec,
            "properties": _set_defaults,
            "patternProperties": _set_pp_defaults,
            "deprecatedProperties": _deprecated_properties
        }
    )
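What the extended validator buys: validating an instance can also fill in schema defaults. A standalone sketch (schema and data invented; the stock Draft4Validator used here only validates, while Spack's hooks above would also set the default):

import jsonschema

Validator = jsonschema.validators.extend(
    jsonschema.Draft4Validator, {})  # stand-in; Spack installs the hooks shown above

schema = {
    "type": "object",
    "properties": {"verbose": {"type": "boolean", "default": False}},
}
data = {}
Validator(schema).validate(data)
# With the "properties" hook installed, data would now be {"verbose": False};
# without it, data is left untouched.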

File diff suppressed because it is too large
@@ -1,5 +1,5 @@
%=============================================================================
% This logic program implements Spack's concretizer
% Generate
%=============================================================================

%-----------------------------------------------------------------------------
@@ -14,59 +14,14 @@ version_declared(Package, Version) :- version_declared(Package, Version, _).
1 { version(Package, Version) : version_declared(Package, Version) } 1
  :- node(Package).

possible_version_weight(Package, Weight)
version_weight(Package, Weight)
  :- version(Package, Version), version_declared(Package, Version, Weight),
     not preferred_version_declared(Package, Version, _).

possible_version_weight(Package, Weight)
version_weight(Package, Weight)
  :- version(Package, Version), preferred_version_declared(Package, Version, Weight).

1 { version_weight(Package, Weight) : possible_version_weight(Package, Weight) } 1 :- node(Package).

% version_satisfies implies that exactly one of the satisfying versions
% is the package's version, and vice versa.
1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1
  :- version_satisfies(Package, Constraint).
version_satisfies(Package, Constraint)
  :- version(Package, Version), version_satisfies(Package, Constraint, Version).

#defined preferred_version_declared/3.
#defined version_satisfies/3.

%-----------------------------------------------------------------------------
% Spec conditions and imposed constraints
%
% Given Spack directives like these:
%   depends_on("foo@1.0+bar", when="@2.0+variant")
%   provides("mpi@2:", when="@1.9:")
%
% The conditions are `@2.0+variant` and `@1.9:`, and the imposed constraints
% are `@1.0+bar` on `foo` and `@2:` on `mpi`.
%-----------------------------------------------------------------------------
% conditions are specified with `condition_requirement` and hold when
% corresponding spec attributes hold.
condition_holds(ID) :-
  condition(ID);
  attr(Name, A1)         : condition_requirement(ID, Name, A1);
  attr(Name, A1, A2)     : condition_requirement(ID, Name, A1, A2);
  attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3).

% condition_holds(ID) implies all imposed_constraints, unless do_not_impose(ID)
% is derived. This allows imposed constraints to be canceled in special cases.
impose(ID) :- condition_holds(ID), not do_not_impose(ID).

% conditions that hold impose constraints on other specs
attr(Name, A1)         :- impose(ID), imposed_constraint(ID, Name, A1).
attr(Name, A1, A2)     :- impose(ID), imposed_constraint(ID, Name, A1, A2).
attr(Name, A1, A2, A3) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A3).

#defined condition/1.
#defined condition_requirement/3.
#defined condition_requirement/4.
#defined condition_requirement/5.
#defined imposed_constraint/3.
#defined imposed_constraint/4.
#defined imposed_constraint/5.
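The condition/impose pattern above can be exercised directly with the clingo Python API. A minimal self-contained sketch (facts invented for the example; assumes the clingo module is installed):

import clingo

# One condition (ID 1): if the "mpileaks" node exists, impose a version
# constraint on it -- mirroring the condition_holds/impose rules above.
PROGRAM = """
condition(1).
condition_requirement(1, "node", "mpileaks").
imposed_constraint(1, "version_satisfies", "mpileaks", "2.3:").
attr("node", "mpileaks").

condition_holds(ID) :-
  condition(ID);
  attr(Name, A1) : condition_requirement(ID, Name, A1).
impose(ID) :- condition_holds(ID), not do_not_impose(ID).
attr(Name, A1, A2) :- impose(ID), imposed_constraint(ID, Name, A1, A2).
#defined do_not_impose/1.
#show attr/3.
"""

ctl = clingo.Control()
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])
# Prints: attr("version_satisfies","mpileaks","2.3:")
ctl.solve(on_model=lambda m: print(m))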

%-----------------------------------------------------------------------------
% Dependency semantics
@@ -74,103 +29,37 @@ attr(Name, A1, A2, A3) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A3).
% Dependencies of any type imply that one package "depends on" another
depends_on(Package, Dependency) :- depends_on(Package, Dependency, _).

% a dependency holds if its condition holds
dependency_holds(Package, Dependency, Type) :-
  dependency_condition(ID, Package, Dependency),
  dependency_type(ID, Type),
  condition_holds(ID),
  not external(Package).

% We cut off dependencies of externals (as we don't really know them).
% Don't impose constraints on dependencies that don't exist.
do_not_impose(ID) :-
  not dependency_holds(Package, Dependency, _),
  dependency_condition(ID, Package, Dependency).

% declared dependencies are real if they're not virtual AND
% the package is not an external.
% They're only triggered if the associated dependency condition holds.
% the package is not an external
depends_on(Package, Dependency, Type)
  :- dependency_holds(Package, Dependency, Type),
     not virtual(Dependency).
  :- declared_dependency(Package, Dependency, Type),
     node(Package),
     not virtual(Dependency),
     not external(Package).

% every root must be a node
node(Package) :- root(Package).
% if you declare a dependency on a virtual AND the package is not an external,
% you depend on one of its providers
1 {
  depends_on(Package, Provider, Type)
  : provides_virtual(Provider, Virtual)
} 1
  :- declared_dependency(Package, Virtual, Type),
     virtual(Virtual),
     not external(Package),
     node(Package).

% dependencies imply new nodes
node(Dependency) :- node(Package), depends_on(Package, Dependency).

% all nodes in the graph must be reachable from some root
% this ensures a user can't say `zlib ^libiconv` (neither of which have any
% dependencies) and get a two-node unconnected graph
needed(Package) :- root(Package).
needed(Dependency) :- needed(Package), depends_on(Package, Dependency).
:- node(Package), not needed(Package).

% Avoid cycles in the DAG
% some combinations of conditional dependencies can result in cycles;
% this ensures that we solve around them
path(Parent, Child) :- depends_on(Parent, Child).
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
:- path(A, B), path(B, A).

#defined dependency_type/2.
#defined dependency_condition/3.

%-----------------------------------------------------------------------------
% Conflicts
%-----------------------------------------------------------------------------
:- node(Package),
   not external(Package),
   conflict(Package, TriggerID, ConstraintID),
   condition_holds(TriggerID),
   condition_holds(ConstraintID).

#defined conflict/3.

%-----------------------------------------------------------------------------
% Virtual dependencies
%-----------------------------------------------------------------------------

% if a package depends on a virtual, it's not external and we have a
% provider for that virtual then it depends on the provider
depends_on(Package, Provider, Type)
  :- dependency_holds(Package, Virtual, Type),
     provides_virtual(Provider, Virtual),
     not external(Package).

% dependencies on virtuals also imply that the virtual is a virtual node
virtual_node(Virtual)
  :- dependency_holds(Package, Virtual, Type),
     virtual(Virtual), not external(Package).

% if there's a virtual node, we must select one provider
1 { provides_virtual(Package, Virtual) : possible_provider(Package, Virtual) } 1
% if a virtual was required by some root spec, one provider is in the DAG
1 { node(Package) : provides_virtual(Package, Virtual) } 1
  :- virtual_node(Virtual).

% virtual roots imply virtual nodes, and that one provider is a root
virtual_node(Virtual) :- virtual_root(Virtual).
1 { root(Package) : provides_virtual(Package, Virtual) } 1
  :- virtual_root(Virtual).

% The provider provides the virtual if some provider condition holds.
provides_virtual(Provider, Virtual) :-
  provider_condition(ID, Provider, Virtual),
  condition_holds(ID),
  virtual(Virtual).

% a node that provides a virtual is a provider
provider(Package, Virtual)
  :- node(Package), provides_virtual(Package, Virtual).

% for any virtual, there can be at most one provider in the DAG
0 { node(Package) : provides_virtual(Package, Virtual) } 1 :- virtual(Virtual).
0 { provider(Package, Virtual) :
    node(Package), provides_virtual(Package, Virtual) } 1 :- virtual(Virtual).

#defined possible_provider/2.

%-----------------------------------------------------------------------------
% Virtual dependency weights
%-----------------------------------------------------------------------------
% give dependents the virtuals they want
provider_weight(Dependency, 0)
  :- virtual(Virtual), depends_on(Package, Dependency),
@@ -211,52 +100,23 @@ provider_weight(Package, 100)
     provider(Package, Virtual),
     not default_provider_preference(Virtual, Package, _).

#defined provider_condition/3.
#defined required_provider_condition/3.
#defined required_provider_condition/4.
#defined required_provider_condition/5.
% all nodes must be reachable from some root
node(Package) :- root(Package).

%-----------------------------------------------------------------------------
% Spec Attributes
%-----------------------------------------------------------------------------
% Equivalencies of the form:
%
%   name(Arg1, Arg2, ...) :- attr("name", Arg1, Arg2, ...).
%   attr("name", Arg1, Arg2, ...) :- name(Arg1, Arg2, ...).
%
% These allow us to easily define conditional dependency and conflict rules
% without enumerating all spec attributes every time.
node(Package)                          :- attr("node", Package).
version(Package, Version)              :- attr("version", Package, Version).
version_satisfies(Package, Constraint) :- attr("version_satisfies", Package, Constraint).
node_platform(Package, Platform)       :- attr("node_platform", Package, Platform).
node_os(Package, OS)                   :- attr("node_os", Package, OS).
node_target(Package, Target)           :- attr("node_target", Package, Target).
node_target_satisfies(Package, Target) :- attr("node_target_satisfies", Package, Target).
variant_value(Package, Variant, Value) :- attr("variant_value", Package, Variant, Value).
variant_set(Package, Variant, Value)   :- attr("variant_set", Package, Variant, Value).
node_flag(Package, FlagType, Flag)     :- attr("node_flag", Package, FlagType, Flag).
node_compiler(Package, Compiler)       :- attr("node_compiler", Package, Compiler).
node_compiler_version(Package, Compiler, Version)
  :- attr("node_compiler_version", Package, Compiler, Version).
node_compiler_version_satisfies(Package, Compiler, Version)
  :- attr("node_compiler_version_satisfies", Package, Compiler, Version).
1 { root(Package) : provides_virtual(Package, Virtual) } 1
  :- virtual_root(Virtual).

attr("node", Package)                          :- node(Package).
attr("version", Package, Version)              :- version(Package, Version).
attr("version_satisfies", Package, Constraint) :- version_satisfies(Package, Constraint).
attr("node_platform", Package, Platform)       :- node_platform(Package, Platform).
attr("node_os", Package, OS)                   :- node_os(Package, OS).
attr("node_target", Package, Target)           :- node_target(Package, Target).
attr("node_target_satisfies", Package, Target) :- node_target_satisfies(Package, Target).
attr("variant_value", Package, Variant, Value) :- variant_value(Package, Variant, Value).
attr("variant_set", Package, Variant, Value)   :- variant_set(Package, Variant, Value).
attr("node_flag", Package, FlagType, Flag)     :- node_flag(Package, FlagType, Flag).
attr("node_compiler", Package, Compiler)       :- node_compiler(Package, Compiler).
attr("node_compiler_version", Package, Compiler, Version)
  :- node_compiler_version(Package, Compiler, Version).
attr("node_compiler_version_satisfies", Package, Compiler, Version)
  :- node_compiler_version_satisfies(Package, Compiler, Version).
needed(Package) :- root(Package).
needed(Dependency) :- needed(Package), depends_on(Package, Dependency).
:- node(Package), not needed(Package).

% real dependencies imply new nodes.
node(Dependency) :- node(Package), depends_on(Package, Dependency).

% Avoid cycles in the DAG
path(Parent, Child) :- depends_on(Parent, Child).
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
:- path(A, B), path(B, A).

% do not warn if generated program contains none of these.
#defined depends_on/3.
@@ -271,8 +131,6 @@ attr("node_compiler_version_satisfies", Package, Compiler, Version)
#defined external_only/1.
#defined pkg_provider_preference/4.
#defined default_provider_preference/3.
#defined version_satisfies/2.
#defined node_compiler_version_satisfies/3.
#defined root/1.

%-----------------------------------------------------------------------------
@@ -291,33 +149,15 @@ external(Package) :- external_only(Package), node(Package).
% a package is a real_node if it is not external
real_node(Package) :- node(Package), not external(Package).

% a package is external if we are using an external spec for it
external(Package) :- external_spec_selected(Package, _).
% if an external version is selected, the package is external and
% we are using the corresponding spec
external(Package) :-
  version(Package, Version), version_weight(Package, Weight),
  external_version_declared(Package, Version, Weight, ID).

% we can't use the weight for an external version if we don't use the
% corresponding external spec.
:- version(Package, Version),
   version_weight(Package, Weight),
   external_version_declared(Package, Version, Weight, ID),
   not external(Package).

% determine if an external spec has been selected
external_spec_selected(Package, LocalIndex) :-
  external_conditions_hold(Package, LocalIndex),
  node(Package).

external_conditions_hold(Package, LocalIndex) :-
  possible_external(ID, Package, LocalIndex), condition_holds(ID).

% it cannot happen that a spec is external, but none of the external specs
% conditions hold.
:- external(Package), not external_conditions_hold(Package, _).

#defined possible_external/3.
#defined external_spec_index/3.
#defined external_spec_condition/3.
#defined external_spec_condition/4.
#defined external_spec_condition/5.
external_spec(Package, ID) :-
  version(Package, Version), version_weight(Package, Weight),
  external_version_declared(Package, Version, Weight, ID).

%-----------------------------------------------------------------------------
% Variant semantics
@@ -343,18 +183,6 @@ external_conditions_hold(Package, LocalIndex) :-
% if a variant is set to anything, it is considered 'set'.
variant_set(Package, Variant) :- variant_set(Package, Variant, _).

% A variant cannot have a value that is not also a possible value
:- variant_value(Package, Variant, Value), not variant_possible_value(Package, Variant, Value).

% Some multi-valued variants accept multiple values from disjoint sets.
% Ensure that we respect that constraint and we don't pick values from more
% than one set at once
:- variant_value(Package, Variant, Value1),
   variant_value(Package, Variant, Value2),
   variant_value_from_disjoint_sets(Package, Variant, Value1, Set1),
   variant_value_from_disjoint_sets(Package, Variant, Value2, Set2),
   Set1 != Set2.

% variant_set is an explicitly set variant value. If it's not 'set',
% we revert to the default value. If it is set, we force the set value
variant_value(Package, Variant, Value)
@@ -362,41 +190,17 @@ variant_value(Package, Variant, Value)
     variant(Package, Variant),
     variant_set(Package, Variant, Value).

% The rules below allow us to prefer default values for variants
% whenever possible. If a variant is set in a spec, or if it is
% specified in an external, we score it as if it was a default value.
% prefer default values.
variant_not_default(Package, Variant, Value, 1)
  :- variant_value(Package, Variant, Value),
     not variant_default_value(Package, Variant, Value),
     not variant_set(Package, Variant, Value),
     not external_with_variant_set(Package, Variant, Value),
     node(Package).

% We are using the default value for a variant
variant_not_default(Package, Variant, Value, 0)
  :- variant_value(Package, Variant, Value),
     variant_default_value(Package, Variant, Value),
     node(Package).

% The variant is set in the spec
variant_not_default(Package, Variant, Value, 0)
  :- variant_value(Package, Variant, Value),
     variant_set(Package, Variant, Value),
     node(Package).

% The variant is set in an external spec
external_with_variant_set(Package, Variant, Value)
  :- variant_value(Package, Variant, Value),
     condition_requirement(ID, "variant_value", Package, Variant, Value),
     possible_external(ID, Package, _),
     external(Package),
     node(Package).

variant_not_default(Package, Variant, Value, 0)
  :- variant_value(Package, Variant, Value),
     external_with_variant_set(Package, Variant, Value),
     node(Package).

% The default value for a variant in a package is what is written
% in the package.py file, unless some preference is set in packages.yaml
variant_default_value(Package, Variant, Value)
@@ -411,16 +215,6 @@ variant_default_value(Package, Variant, Value)
:- 2 {variant_value(Package, Variant, Value): variant_possible_value(Package, Variant, Value)},
   variant_value(Package, Variant, "none").

% patches and dev_path are special variants -- they don't have to be
% declared in the package, so we just allow them to spring into existence
% when assigned a value.
auto_variant("dev_path").
auto_variant("patches").
variant(Package, Variant)
  :- variant_set(Package, Variant, _), auto_variant(Variant).
variant_single_value(Package, "dev_path")
  :- variant_set(Package, "dev_path", _).

% suppress warnings about this atom being unset. It's only set if some
% spec or some package sets it, and without this, clingo will give
% warnings like 'info: atom does not occur in any rule head'.
@@ -431,14 +225,13 @@ variant_single_value(Package, "dev_path")
#defined variant_possible_value/3.
#defined variant_default_value_from_packages_yaml/3.
#defined variant_default_value_from_package_py/3.
#defined variant_value_from_disjoint_sets/4.

%-----------------------------------------------------------------------------
% Platform semantics
%-----------------------------------------------------------------------------

% one platform per node
:- M = #count { Platform : node_platform(Package, Platform) }, M != 1, node(Package).
1 { node_platform(Package, Platform) : node_platform(Packagee, Platform) } 1
  :- node(Package).

% if no platform is set, fall back to the default
node_platform(Package, Platform)
@@ -472,8 +265,6 @@ node_os_inherit(Dependency, OS)
     not node_os_set(Dependency).
node_os_inherit(Package) :- node_os_inherit(Package, _).

node_os(Package, OS) :- node_os_inherit(Package, OS).

% fall back to default if not set or inherited
node_os(Package, OS)
  :- node(Package),
@@ -488,13 +279,6 @@ node_os(Package, OS)
% one target per node -- optimization will pick the "best" one
1 { node_target(Package, Target) : target(Target) } 1 :- node(Package).

% node_target_satisfies semantics
1 { node_target(Package, Target) : node_target_satisfies(Package, Constraint, Target) } 1
  :- node_target_satisfies(Package, Constraint).
node_target_satisfies(Package, Constraint)
  :- node_target(Package, Target), node_target_satisfies(Package, Constraint, Target).
#defined node_target_satisfies/3.

% The target weight is either the default target weight
% or a more specific per-package weight if set
target_weight(Target, Package, Weight)
@@ -530,21 +314,16 @@ node_target_weight(Package, Weight)
     target_weight(Target, Package, Weight).

% compatibility rules for targets among nodes
node_target_match_pref(Package, Target) :- node_target_set(Package, Target).
node_target_match_pref(Dependency, Target)
  :- depends_on(Package, Dependency),
     node_target_match_pref(Package, Target),
     not node_target_set(Dependency, _).

node_target_match_pref(Dependency, Target)
  :- depends_on(Package, Dependency),
     node_target_set(Package, Target),
     not node_target_match_pref(Package, Target),
  :- depends_on(Package, Dependency), node_target_match_pref(Package, Target),
     not node_target_set(Dependency, _).

node_target_match_pref(Dependency, Target)
  :- depends_on(Package, Dependency),
     root(Package), node_target(Package, Target),
     not node_target_match_pref(Package, _).
     not node_target_match_pref(Package, _),
     not node_target_set(Dependency, _).

node_target_match(Package, 1)
  :- node_target(Package, Target), node_target_match_pref(Package, Target).
@@ -559,36 +338,17 @@ derive_target_from_parent(Parent, Package)
%-----------------------------------------------------------------------------
% Compiler semantics
%-----------------------------------------------------------------------------
compiler(Compiler) :- compiler_version(Compiler, _).

% There must be only one compiler set per node. The compiler
% is chosen among available versions.
% one compiler per node
1 { node_compiler(Package, Compiler) : compiler(Compiler) } 1 :- node(Package).
1 { node_compiler_version(Package, Compiler, Version)
    : compiler_version(Compiler, Version) } 1 :- node(Package).

% Sometimes we just need to know the compiler and not the version
node_compiler(Package, Compiler) :- node_compiler_version(Package, Compiler, _).

% We can't have a compiler be enforced and select the version from another compiler
:- node_compiler(Package, Compiler1),
   node_compiler_version(Package, Compiler2, _),
   Compiler1 != Compiler2.

% define node_compiler_version_satisfies/3 from node_compiler_version_satisfies/4
% version_satisfies implies that exactly one of the satisfying versions
% is the package's version, and vice versa.
1 { node_compiler_version(Package, Compiler, Version)
    : node_compiler_version_satisfies(Package, Compiler, Constraint, Version) } 1
  :- node_compiler_version_satisfies(Package, Compiler, Constraint).
node_compiler_version_satisfies(Package, Compiler, Constraint)
  :- node_compiler_version(Package, Compiler, Version),
     node_compiler_version_satisfies(Package, Compiler, Constraint, Version).

#defined node_compiler_version_satisfies/4.
1 { compiler_weight(Package, Weight) : compiler_weight(Package, Weight) } 1
  :- node(Package).

% If the compiler version was set from the command line,
% respect it verbatim
node_compiler_version(Package, Compiler, Version) :- node_compiler_version_set(Package, Compiler, Version).
node_compiler_version(Package, Compiler, Version) :- node_compiler_version_hard(Package, Compiler, Version).

% Cannot select a compiler if it is not supported on the OS
% Compilers that are explicitly marked as allowed
@@ -597,36 +357,51 @@ node_compiler_version(Package, Compiler, Version) :- node_compiler_version_set(P
   not compiler_supports_os(Compiler, Version, OS),
   not allow_compiler(Compiler, Version).

% If a package and one of its dependencies don't have the
% same compiler there's a mismatch.
compiler_mismatch(Package, Dependency)
  :- depends_on(Package, Dependency),
     node_compiler_version(Package, Compiler1, _),
     node_compiler_version(Dependency, Compiler2, _),
     Compiler1 != Compiler2.
% If the compiler is what was prescribed from command line etc.
% or is the same as a root node, there is a version match

compiler_mismatch(Package, Dependency)
  :- depends_on(Package, Dependency),
     node_compiler_version(Package, Compiler, Version1),
     node_compiler_version(Dependency, Compiler, Version2),
     Version1 != Version2.
% Compiler prescribed in the root spec
node_compiler_version_match_pref(Package, Compiler, V)
  :- node_compiler_hard(Package, Compiler),
     node_compiler_version(Package, Compiler, V),
     not external(Package).

#defined node_compiler_set/2.
#defined node_compiler_version_set/3.
% Compiler inherited from a parent node
node_compiler_version_match_pref(Dependency, Compiler, V)
  :- depends_on(Package, Dependency),
     node_compiler_version_match_pref(Package, Compiler, V),
     node_compiler_version(Dependency, Compiler, V),
     not node_compiler_hard(Dependency, Compiler).

% Compiler inherited from the root package
node_compiler_version_match_pref(Dependency, Compiler, V)
  :- depends_on(Package, Dependency),
     node_compiler_version(Package, Compiler, V), root(Package),
     node_compiler_version(Dependency, Compiler, V),
     not node_compiler_hard(Dependency, Compiler).

compiler_version_match(Package, 1)
  :- node_compiler_version(Package, Compiler, V),
     node_compiler_version_match_pref(Package, Compiler, V).

#defined node_compiler_hard/2.
#defined node_compiler_version_hard/3.
#defined compiler_supports_os/3.
#defined compiler_version_match/2.
#defined allow_compiler/2.

% compilers weighted by preference according to packages.yaml
compiler_weight(Package, Weight)
  :- node_compiler_version(Package, Compiler, V),
  :- node_compiler(Package, Compiler),
     node_compiler_version(Package, Compiler, V),
     node_compiler_preference(Package, Compiler, V, Weight).
compiler_weight(Package, Weight)
  :- node_compiler_version(Package, Compiler, V),
  :- node_compiler(Package, Compiler),
     node_compiler_version(Package, Compiler, V),
     not node_compiler_preference(Package, Compiler, V, _),
     default_compiler_preference(Compiler, V, Weight).
compiler_weight(Package, 100)
  :- node_compiler_version(Package, Compiler, Version),
  :- node_compiler(Package, Compiler),
     node_compiler_version(Package, Compiler, Version),
     not node_compiler_preference(Package, Compiler, Version, _),
     not default_compiler_preference(Compiler, Version, _).

@@ -660,6 +435,7 @@ node_flag_source(Dependency, Q)
node_flag(Package, FlagType, Flag)
  :- not node_flag_set(Package),
     compiler_version_flag(Compiler, Version, FlagType, Flag),
     node_compiler(Package, Compiler),
     node_compiler_version(Package, Compiler, Version),
     flag_type(FlagType),
     compiler(Compiler),
@@ -668,6 +444,7 @@ node_flag(Package, FlagType, Flag)
node_flag_compiler_default(Package)
  :- not node_flag_set(Package),
     compiler_version_flag(Compiler, Version, FlagType, Flag),
     node_compiler(Package, Compiler),
     node_compiler_version(Package, Compiler, Version),
     flag_type(FlagType),
     compiler(Compiler),
@@ -724,49 +501,57 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency).
% need to maximize their number below to ensure they're all set
#maximize {
    1@13,Package,Variant,Value
    : variant_not_default(Package, Variant, Value, Weight),
      not variant_single_value(Package, Variant),
      root(Package)
    : variant_not_default(Package, Variant, Value, Weight), root(Package)
}.
#minimize{
    Weight@13,Provider
    : provider_weight(Provider, Weight), root(Provider)
}.

% Try to use default variants or variants that have been set
#minimize {
    Weight@11,Package,Variant,Value
    : variant_not_default(Package, Variant, Value, Weight), not root(Package)
}.
% Minimize the weights of the providers, i.e. use as much as
% possible the most preferred providers
% Next, we want to minimize the weights of the providers
% i.e. use as much as possible the most preferred providers
#minimize{
    Weight@9,Provider
    Weight@11,Provider
    : provider_weight(Provider, Weight), not root(Provider)
}.

% For external packages it's more important than for others
% to match the compiler with their parent node
#maximize{
    Weight@10,Package
    : compiler_version_match(Package, Weight), external(Package)
}.

% Then try to use as much as possible:
% 1. Default variants
% 2. Latest versions
% of all the other nodes in the DAG
#minimize {
    Weight@9,Package,Variant,Value
    : variant_not_default(Package, Variant, Value, Weight), not root(Package)
}.
% If the value is a multivalued variant there could be multiple
% values set as default. Since a default value has a weight of 0 we
% need to maximize their number below to ensure they're all set
#maximize {
    1@8,Package,Variant,Value
    : variant_not_default(Package, Variant, Value, Weight),
      not variant_single_value(Package, Variant),
      not root(Package)
    : variant_not_default(Package, Variant, Value, Weight), not root(Package)
}.

% Try to minimize the number of compiler mismatches in the DAG.
#minimize{ 0@7 : #true }.
#minimize{ 1@7,Package,Dependency : compiler_mismatch(Package, Dependency) }.

% Choose more recent versions for nodes
#minimize{
    Weight@6,Package : version_weight(Package, Weight)
    Weight@8,Package : version_weight(Package, Weight)
}.

% Try to maximize the number of compiler matches in the DAG,
% while minimizing the number of nodes. This is done because
% a maximization on the number of matches for compilers is highly
% correlated to a preference to have as many nodes as possible
#minimize{ 1@7,Package : node(Package) }.
#maximize{ Weight@7,Package : compiler_version_match(Package, Weight) }.

% Try to use preferred compilers
#minimize{ Weight@5,Package : compiler_weight(Package, Weight) }.
#minimize{ Weight@6,Package : compiler_weight(Package, Weight) }.

% Maximize the number of matches for targets in the DAG, try
% to select the preferred target.
#maximize{ Weight@4,Package : node_target_match(Package, Weight) }.
#minimize{ Weight@3,Package : node_target_weight(Package, Weight) }.
#maximize{ Weight@5,Package : node_target_match(Package, Weight) }.
#minimize{ Weight@4,Package : node_target_weight(Package, Weight) }.
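The @N terms are clingo optimization priorities: higher levels are settled first, which is how, for example, provider weights can dominate version choices. A toy illustration via the clingo Python API (facts invented):

import clingo

PROGRAM = """
1 { pick(a); pick(b) } 1.
cost(a, 1, 10).   % cost(Choice, LowPriorityCost, HighPriorityCost)
cost(b, 5, 1).
#minimize { C@2,X : pick(X), cost(X, _, C) }.  % priority 2 is optimized first
#minimize { C@1,X : pick(X), cost(X, C, _) }.
"""

ctl = clingo.Control()
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])
# The level-2 objective wins: pick(b) (cost 1 beats a's 10), even though b is
# worse at level 1. Improving models are printed; the last one is optimal.
ctl.solve(on_model=lambda m: print(m))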

@@ -24,4 +24,4 @@
#show compiler_weight/2.
#show node_target_match/2.
#show node_target_weight/2.
#show external_spec_selected/2.
#show external_spec/2.

@@ -116,13 +116,6 @@
import spack.variant as vt
import spack.version as vn


if sys.version_info >= (3, 3):
    from collections.abc import Mapping  # novm
else:
    from collections import Mapping


__all__ = [
    'Spec',
    'parse',
@@ -366,11 +359,13 @@ def satisfies(self, other, strict=False):
            return False

        # Check target
        return self.target_satisfies(other, strict=strict)
        return self._satisfies_target(other.target, strict=strict)

    def target_satisfies(self, other, strict):
        need_to_check = bool(other.target) if strict or self.concrete \
            else bool(other.target and self.target)
    def _satisfies_target(self, other_target, strict):
        self_target = self.target

        need_to_check = bool(other_target) if strict or self.concrete \
            else bool(other_target and self_target)

        # If there's no need to check we are fine
        if not need_to_check:
@@ -380,68 +375,24 @@ def target_satisfies(self, other, strict):
        if self.target is None:
            return False

        return bool(self.target_intersection(other))
        for target_range in str(other_target).split(','):
            t_min, sep, t_max = target_range.partition(':')

    def target_constrain(self, other):
        if not other.target_satisfies(self, strict=False):
            raise UnsatisfiableArchitectureSpecError(self, other)
            # Checking against a single specific target
            if not sep and self_target == t_min:
                return True

        if self.target_concrete:
            return False
        elif other.target_concrete:
            self.target = other.target
            return True
            if not sep and self_target != t_min:
                return False

        # Compute the intersection of every combination of ranges in the lists
        results = self.target_intersection(other)
        # Do we need to dedupe here?
        self.target = ','.join(results)
            # Check against a range
            min_ok = self_target.microarchitecture >= t_min if t_min else True
            max_ok = self_target.microarchitecture <= t_max if t_max else True

    def target_intersection(self, other):
        results = []
            if min_ok and max_ok:
                return True

        if not self.target or not other.target:
            return results

        for s_target_range in str(self.target).split(','):
            s_min, s_sep, s_max = s_target_range.partition(':')
            for o_target_range in str(other.target).split(','):
                o_min, o_sep, o_max = o_target_range.partition(':')

                if not s_sep:
                    # s_target_range is a concrete target
                    # get a microarchitecture reference for at least one side
                    # of each comparison so we can use archspec comparators
                    s_comp = spack.architecture.Target(s_min).microarchitecture
                    if not o_sep:
                        if s_min == o_min:
                            results.append(s_min)
                    elif (not o_min or s_comp >= o_min) and (
                            not o_max or s_comp <= o_max):
                        results.append(s_min)
                elif not o_sep:
                    # "cast" to microarchitecture
                    o_comp = spack.architecture.Target(o_min).microarchitecture
                    if (not s_min or o_comp >= s_min) and (
                            not s_max or o_comp <= s_max):
                        results.append(o_min)
                else:
                    # Take intersection of two ranges
                    # Lots of comparisons needed
                    _s_min = spack.architecture.Target(s_min).microarchitecture
                    _s_max = spack.architecture.Target(s_max).microarchitecture
                    _o_min = spack.architecture.Target(o_min).microarchitecture
                    _o_max = spack.architecture.Target(o_max).microarchitecture

                    n_min = s_min if _s_min >= _o_min else o_min
                    n_max = s_max if _s_max <= _o_max else o_max
                    _n_min = spack.architecture.Target(n_min).microarchitecture
                    _n_max = spack.architecture.Target(n_max).microarchitecture
                    if _n_min == _n_max:
                        results.append(n_min)
                    elif not n_min or not n_max or _n_min < _n_max:
                        results.append('%s:%s' % (n_min, n_max))
        return results
        return False
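The range notation parsed above mirrors spec syntax such as target=x86_64:haswell. A small sketch of the parsing convention only (target names illustrative):

target_ranges = "nehalem:haswell,skylake"  # comma-separated ranges
for target_range in target_ranges.split(','):
    t_min, sep, t_max = target_range.partition(':')
    if not sep:
        print('concrete target:', t_min)  # e.g. skylake
    else:
        # An empty bound means the range is open on that side.
        print('range from', t_min or 'any', 'to', t_max or 'any')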

    def constrain(self, other):
        """Projects all architecture fields that are specified in the given
@@ -458,18 +409,16 @@ def constrain(self, other):
        """
        other = self._autospec(other)

        if not other.satisfies(self):
            raise UnsatisfiableArchitectureSpecError(other, self)
        if not self.satisfies(other):
            raise UnsatisfiableArchitectureSpecError(self, other)

        constrained = False
        for attr in ('platform', 'os'):
        for attr in ('platform', 'os', 'target'):
            svalue, ovalue = getattr(self, attr), getattr(other, attr)
            if svalue is None and ovalue is not None:
                setattr(self, attr, ovalue)
                constrained = True

        self.target_constrain(other)

        return constrained

    def copy(self):
@@ -482,13 +431,7 @@ def copy(self):
    def concrete(self):
        """True if the spec is concrete, False otherwise"""
        # return all(v for k, v in six.iteritems(self.to_cmp_dict()))
        return (self.platform and self.os and self.target and
                self.target_concrete)

    @property
    def target_concrete(self):
        """True if the target is not a range or list."""
        return ':' not in str(self.target) and ',' not in str(self.target)
        return self.platform and self.os and self.target

    def to_dict(self):
        d = syaml.syaml_dict([
@@ -2127,7 +2070,7 @@ def validate_detection(self):
        # which likely means the spec was created with Spec.from_detection
        msg = ('cannot validate "{0}" since it was not created '
               'using Spec.from_detection'.format(self))
        assert isinstance(self.extra_attributes, Mapping), msg
        assert isinstance(self.extra_attributes, collections.Mapping), msg

        # Validate the spec calling a package specific method
        validate_fn = getattr(
@@ -2499,9 +2442,6 @@ def _new_concretize(self, tests=False):
            raise spack.error.SpecError(
                "Spec has no name; cannot concretize an anonymous spec")

        if self._concrete:
            return

        result = spack.solver.asp.solve([self], tests=tests)
        if not result.satisfiable:
            result.print_cores()
|
||||
self._dup(concretized)
|
||||
self._mark_concrete()
|
||||
|
||||
#: choose your concretizer here.
|
||||
def concretize(self, tests=False):
|
||||
"""Concretize the current spec.
|
||||
|
||||
Args:
|
||||
tests (bool or list): if False disregard 'test' dependencies,
|
||||
if a list of names activate them for the packages in the list,
|
||||
if True activate 'test' dependencies for all packages.
|
||||
"""
|
||||
if spack.config.get('config:concretizer') == "clingo":
|
||||
self._new_concretize(tests)
|
||||
else:
|
||||
@@ -2548,19 +2482,12 @@ def _mark_concrete(self, value=True):
|
||||
s._normal = value
|
||||
s._concrete = value
|
||||
|
||||
def concretized(self, tests=False):
|
||||
"""This is a non-destructive version of concretize().
|
||||
|
||||
First clones, then returns a concrete version of this package
|
||||
without modifying this package.
|
||||
|
||||
Args:
|
||||
tests (bool or list): if False disregard 'test' dependencies,
|
||||
if a list of names activate them for the packages in the list,
|
||||
if True activate 'test' dependencies for all packages.
|
||||
"""
|
||||
def concretized(self):
|
||||
"""This is a non-destructive version of concretize(). First clones,
|
||||
then returns a concrete version of this package without modifying
|
||||
this package. """
|
||||
clone = self.copy(caches=False)
|
||||
clone.concretize(tests=tests)
|
||||
clone.concretize()
|
||||
return clone
|

    def flat_dependencies(self, **kwargs):
@@ -2950,40 +2877,6 @@ def ensure_valid_variants(spec):
        if not_existing:
            raise vt.UnknownVariantError(spec, not_existing)

    def update_variant_validate(self, variant_name, values):
        """If it is not already there, adds the variant named
        `variant_name` to the spec based on the definition
        contained in the package metadata. Validates the variant and
        values before returning.

        Used to add values to a variant without being sensitive to the
        variant being single or multi-valued. If the variant already
        exists on the spec it is assumed to be multi-valued and the
        values are appended.

        Args:
            variant_name: the name of the variant to add or append to
            values: the value or values (as a tuple) to add/append
                to the variant
        """
        if not isinstance(values, tuple):
            values = (values,)

        pkg_variant = self.package_class.variants[variant_name]

        for value in values:
            if self.variants.get(variant_name):
                msg = ("Cannot append a value to a single-valued "
                       "variant with an already set value")
                assert pkg_variant.multi, msg
                self.variants[variant_name].append(value)
            else:
                variant = pkg_variant.make_variant(value)
                self.variants[variant_name] = variant

        pkg_variant.validate_or_raise(
            self.variants[variant_name], self.package)
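A hedged usage sketch, given some Spec object spec ('patches' is one of the auto variants allowed to spring into existence in the logic program above; the value is invented):

# Append a patch checksum to the multi-valued 'patches' variant; the
# method validates the resulting value against the package definition.
spec.update_variant_validate('patches', ('abcd1234',))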

    def constrain(self, other, deps=True):
        """Merge the constraints of other with self.

@@ -3148,23 +3041,25 @@ def satisfies(self, other, deps=True, strict=False, strict_deps=False):
        if other.concrete:
            return self.concrete and self.dag_hash() == other.dag_hash()

        # If the names are different, we need to consider virtuals
        if self.name != other.name and self.name and other.name:
            # A concrete provider can satisfy a virtual dependency.
            if not self.virtual and other.virtual:
                try:
                    pkg = spack.repo.get(self.fullname)
                except spack.repo.UnknownEntityError:
                    # If we can't get package info on this spec, don't treat
                    # it as a provider of this vdep.
                    return False
        # A concrete provider can satisfy a virtual dependency.
        if not self.virtual and other.virtual:
            try:
                pkg = spack.repo.get(self.fullname)
            except spack.repo.UnknownEntityError:
                # If we can't get package info on this spec, don't treat
                # it as a provider of this vdep.
                return False

                if pkg.provides(other.name):
                    for provided, when_specs in pkg.provided.items():
                        if any(self.satisfies(when, deps=False, strict=strict)
                               for when in when_specs):
                            if provided.satisfies(other):
                                return True
            if pkg.provides(other.name):
                for provided, when_specs in pkg.provided.items():
                    if any(self.satisfies(when_spec, deps=False, strict=strict)
                           for when_spec in when_specs):
                        if provided.satisfies(other):
                            return True
            return False

        # Otherwise, first thing we care about is whether the name matches
        if self.name != other.name and self.name and other.name:
            return False

        # namespaces either match, or other doesn't require one.
@@ -3572,11 +3467,8 @@ def ne_dag(self, other, deptypes=True):

    def _cmp_node(self):
        """Comparison key for just *this node* and not its deps."""
        # Name or namespace None will lead to invalid comparisons for abstract
        # specs. Replace them with the empty string, which is not a valid spec
        # name nor namespace so it will not create spurious equalities.
        return (self.name or '',
                self.namespace or '',
        return (self.name,
                self.namespace,
                tuple(self.versions),
                self.variants,
                self.architecture,
|
||||
break
|
||||
|
||||
elif self.accept(HASH):
|
||||
# Get spec by hash and confirm it matches any constraints we
|
||||
# already read in
|
||||
# Get spec by hash and confirm it matches what we already have
|
||||
hash_spec = self.spec_by_hash()
|
||||
if hash_spec.satisfies(spec):
|
||||
spec._dup(hash_spec)
|
||||
|
||||
@@ -23,7 +23,6 @@
|
||||
configuration.
|
||||
|
||||
"""
|
||||
import contextlib
|
||||
import os
|
||||
import re
|
||||
import six
|
||||
@@ -173,16 +172,6 @@ def _store():
|
||||
config_dict = spack.config.get('config')
|
||||
root, unpadded_root, projections = parse_install_tree(config_dict)
|
||||
hash_length = spack.config.get('config:install_hash_length')
|
||||
|
||||
# Check that the user is not trying to install software into the store
|
||||
# reserved by Spack to bootstrap its own dependencies, since this would
|
||||
# lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
|
||||
# user installed software)
|
||||
if spack.paths.user_bootstrap_store == root:
|
||||
msg = ('please change the install tree root "{0}" in your '
|
||||
'configuration [path reserved for Spack internal use]')
|
||||
raise ValueError(msg.format(root))
|
||||
|
||||
return Store(root=root,
|
||||
unpadded_root=unpadded_root,
|
||||
projections=projections,
|
||||
@@ -216,19 +205,6 @@ def _store_layout():
layout = llnl.util.lang.LazyReference(_store_layout)


def reinitialize():
    """Restore globals to the same state they would have at start-up"""
    global store
    global root, unpadded_root, db, layout

    store = llnl.util.lang.Singleton(_store)

    root = llnl.util.lang.LazyReference(_store_root)
    unpadded_root = llnl.util.lang.LazyReference(_store_unpadded_root)
    db = llnl.util.lang.LazyReference(_store_db)
    layout = llnl.util.lang.LazyReference(_store_layout)

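A minimal sketch of how reinitialize() is meant to be used from tests (the config key follows the install_tree schema exercised elsewhere in this diff; the path is hypothetical):

    import spack.config
    import spack.store

    # Reset the spack.store globals to their start-up state, then point
    # the install tree elsewhere; the store is re-created lazily on access.
    spack.store.reinitialize()
    spack.config.set('config:install_tree:root', '/tmp/my-store')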
def retrieve_upstream_dbs():
    other_spack_instances = spack.config.get('upstreams', {})

@@ -251,29 +227,3 @@ def _construct_upstream_dbs_from_install_roots(
        accumulated_upstream_dbs.insert(0, next_db)

    return accumulated_upstream_dbs

@contextlib.contextmanager
def use_store(store_or_path):
    """Use the store passed as argument within the context manager.

    Args:
        store_or_path: either a Store object or a path to where the
            store resides

    Returns:
        Store object associated with the context manager's store
    """
    global store

    # Normalize input arguments
    temporary_store = store_or_path
    if not isinstance(store_or_path, Store):
        temporary_store = Store(store_or_path)

    # Swap the store with the one just constructed and return it
    original_store, store = store, temporary_store
    yield temporary_store

    # Restore the original store
    store = original_store

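A short usage sketch for the use_store() context manager defined above (the path is illustrative):

    import spack.store

    # Temporarily direct installs to a scratch store; the global
    # spack.store.store is restored when the block exits.
    with spack.store.use_store('/tmp/scratch-store') as store:
        print(store.root)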
@@ -6,14 +6,12 @@
""" Test checks if the architecture class is created correctly and also that
    the functions are looking for the correct architecture name
"""
import itertools
import os
import platform as py_platform

import pytest

import spack.architecture
import spack.concretize
from spack.spec import Spec
from spack.platforms.cray import Cray
from spack.platforms.linux import Linux
@@ -117,26 +115,20 @@ def test_user_defaults(config):
    assert default_target == default_spec.architecture.target


def test_user_input_combination(config):
    valid_keywords = ["fe", "be", "frontend", "backend"]

    possible_targets = ([x for x in spack.architecture.platform().targets]
                        + valid_keywords)

    possible_os = ([x for x in spack.architecture.platform().operating_sys]
                   + valid_keywords)

    for target, operating_system in itertools.product(
            possible_targets, possible_os
    ):
        platform = spack.architecture.platform()
        spec_str = "libelf os={0} target={1}".format(operating_system, target)
        spec = Spec(spec_str)
        spec.concretize()
        assert spec.architecture.os == str(
            platform.operating_system(operating_system)
        )
        assert spec.architecture.target == platform.target(target)
@pytest.mark.parametrize('operating_system', [
    x for x in spack.architecture.platform().operating_sys
] + ["fe", "be", "frontend", "backend"])
@pytest.mark.parametrize('target', [
    x for x in spack.architecture.platform().targets
] + ["fe", "be", "frontend", "backend"])
def test_user_input_combination(config, operating_system, target):
    platform = spack.architecture.platform()
    spec = Spec("libelf os=%s target=%s" % (operating_system, target))
    spec.concretize()
    assert spec.architecture.os == str(
        platform.operating_system(operating_system)
    )
    assert spec.architecture.target == platform.target(target)
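The rewrite above replaces a manual itertools.product() loop with two stacked pytest.mark.parametrize decorators; pytest generates the cross product itself, so each (os, target) pair becomes its own reported test case. A minimal self-contained sketch of the same pattern (parameter values are illustrative):

    import pytest

    @pytest.mark.parametrize('os_name', ['debian6', 'redhat6'])
    @pytest.mark.parametrize('target', ['x86_64', 'core2'])
    def test_cross_product(os_name, target):
        # Runs 4 times, once per combination of os_name and target,
        # and each failure is reported individually.
        assert os_name and target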
def test_operating_system_conversion_to_dict():
@@ -231,29 +223,3 @@ def test_satisfy_strict_constraint_when_not_concrete(
    architecture = spack.spec.ArchSpec(architecture_tuple)
    constraint = spack.spec.ArchSpec(constraint_tuple)
    assert not architecture.satisfies(constraint, strict=True)


@pytest.mark.parametrize('root_target_range,dep_target_range,result', [
    (('x86_64:nocona', 'x86_64:core2', 'nocona')),  # pref not in intersection
    (('x86_64:core2', 'x86_64:nocona', 'nocona')),
    (('x86_64:haswell', 'x86_64:mic_knl', 'core2')),  # pref in intersection
    (('ivybridge', 'nocona:skylake', 'ivybridge')),  # one side concrete
    (('haswell:icelake', 'broadwell', 'broadwell')),
    # multiple ranges in lists with multiple overlaps
    (('x86_64:nocona,haswell:broadwell', 'nocona:haswell,skylake:',
      'nocona')),
    # lists with concrete targets, lists compared to ranges
    (('x86_64,haswell', 'core2:broadwell', 'haswell'))
])
@pytest.mark.usefixtures('mock_packages', 'config')
def test_concretize_target_ranges(
        root_target_range, dep_target_range, result
):
    # use foobar=bar to make the problem simpler for the old concretizer
    # the new concretizer should not need that help
    spec = Spec('a %%gcc@10 foobar=bar target=%s ^b target=%s' %
                (root_target_range, dep_target_range))
    with spack.concretize.disable_compiler_existence_check():
        spec.concretize()

    assert str(spec).count('arch=test-debian6-%s' % result) == 2

@@ -19,7 +19,6 @@
import spack.cmd.install as install
import spack.cmd.uninstall as uninstall
import spack.cmd.mirror as mirror
import spack.hooks.sbang as sbang
from spack.main import SpackCommand
import spack.mirror
import spack.util.gpg
@@ -81,15 +80,6 @@ def mirror_directory_rel(session_mirror_rel):
    yield(session_mirror_rel)


@pytest.fixture(scope='function')
def function_mirror(tmpdir):
    mirror_dir = str(tmpdir.join('mirror'))
    mirror_cmd('add', '--scope', 'site', 'test-mirror-func',
               'file://%s' % mirror_dir)
    yield mirror_dir
    mirror_cmd('rm', '--scope=site', 'test-mirror-func')


@pytest.fixture(scope='session')
def config_directory(tmpdir_factory):
    tmpdir = tmpdir_factory.mktemp('test_configs')
@@ -681,78 +671,3 @@ def mock_list_url(url, recursive=False):
    err = capfd.readouterr()[1]
    expect = 'Encountered problem listing packages at {0}'.format(test_url)
    assert expect in err


@pytest.mark.usefixtures('mock_fetch')
def test_update_sbang(tmpdir, install_mockery, function_mirror):
    """
    Test the creation and installation of buildcaches with default rpaths
    into the non-default directory layout scheme, triggering an update of the
    sbang.
    """

    # Save the original store and layout before we touch ANYTHING.
    real_store = spack.store.store
    real_layout = spack.store.layout

    # Concretize a package with some old-fashioned sbang lines.
    sspec = Spec('old-sbang')
    sspec.concretize()

    # Need a fake mirror with *function* scope.
    mirror_dir = function_mirror

    # Assumes all commands will concretize sspec the same way.
    install_cmd('--no-cache', sspec.name)

    # Create a buildcache with the installed spec.
    buildcache_cmd('create', '-u', '-a', '-d', mirror_dir,
                   '/%s' % sspec.dag_hash())

    # Need to force an update of the buildcache index
    buildcache_cmd('update-index', '-d', 'file://%s' % mirror_dir)

    # Uninstall the original package.
    uninstall_cmd('-y', '/%s' % sspec.dag_hash())

    try:
        # New install tree locations...
        # Too fine-grained to be done in a fixture
        spack.store.store = spack.store.Store(str(tmpdir.join('newtree')))
        spack.store.layout = YamlDirectoryLayout(str(tmpdir.join('newtree')),
                                                 path_scheme=ndef_install_path_scheme)  # noqa: E501

        # Install package from buildcache
        buildcache_cmd('install', '-a', '-u', '-f', sspec.name)

        # Continue blowing away caches
        bindist.clear_spec_cache()
        spack.stage.purge()

        # test that the sbang was updated by the move
        sbang_style_1_expected = '''{0}
#!/usr/bin/env python

{1}
'''.format(sbang.sbang_shebang_line(), sspec.prefix.bin)
        sbang_style_2_expected = '''{0}
#!/usr/bin/env python

{1}
'''.format(sbang.sbang_shebang_line(), sspec.prefix.bin)

        installed_script_style_1_path = \
            sspec.prefix.bin.join('sbang-style-1.sh')
        assert sbang_style_1_expected == \
            open(str(installed_script_style_1_path)).read()

        installed_script_style_2_path = \
            sspec.prefix.bin.join('sbang-style-2.sh')
        assert sbang_style_2_expected == \
            open(str(installed_script_style_2_path)).read()

        uninstall_cmd('-y', '/%s' % sspec.dag_hash())

    finally:
        spack.store.store = real_store
        spack.store.layout = real_layout

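For context, sbang works around shebang length limits by prepending a short launcher line; the expected strings the test builds above encode exactly that two-line header (launcher line, then the original interpreter line). A hedged sketch that prints the launcher line the expectations are built from:

    import spack.hooks.sbang as sbang

    # The launcher line that sbang prepends to relocated scripts; the
    # original shebang becomes the script's second line.
    print(sbang.sbang_shebang_line())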
@@ -1,26 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest

import spack.bootstrap
import spack.store


@pytest.mark.regression('22294')
def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
    # Prepare a custom store path. This should be in a writeable location
    # since Spack needs to initialize the DB.
    user_path = str(tmpdir.join('store'))
    # Reassign global variables in spack.store to the value
    # they would have at Spack startup.
    spack.store.reinitialize()
    # Set the custom user path
    spack.config.set('config:install_tree:root', user_path)

    # Test that within the context manager we use the bootstrap store
    # and that outside we restore the correct location
    with spack.bootstrap.ensure_bootstrap_configuration():
        assert str(spack.store.root) == spack.paths.user_bootstrap_store
    assert str(spack.store.root) == user_path
@@ -11,7 +11,6 @@
import spack.build_environment
import spack.config
import spack.spec
import spack.util.spack_yaml as syaml
from spack.paths import build_env_path
from spack.build_environment import dso_suffix, _static_to_shared_library
from spack.util.executable import Executable
@@ -300,45 +299,6 @@ def normpaths(paths):
    delattr(dep_pkg, 'libs')


def test_external_prefixes_last(mutable_config, mock_packages, working_env,
                                monkeypatch):
    # Sanity check: under normal circumstances paths associated with
    # dt-diamond-left would appear first. We'll mark it as external in
    # the test to check if the associated paths are placed last.
    assert 'dt-diamond-left' < 'dt-diamond-right'

    cfg_data = syaml.load_config("""\
dt-diamond-left:
  externals:
  - spec: dt-diamond-left@1.0
    prefix: /fake/path1
  buildable: false
""")
    spack.config.set("packages", cfg_data)
    top = spack.spec.Spec('dt-diamond').concretized()

    def _trust_me_its_a_dir(path):
        return True
    monkeypatch.setattr(
        os.path, 'isdir', _trust_me_its_a_dir
    )

    env_mods = EnvironmentModifications()
    spack.build_environment.set_build_environment_variables(
        top.package, env_mods, False)

    env_mods.apply_modifications()
    link_dir_var = os.environ['SPACK_LINK_DIRS']
    link_dirs = link_dir_var.split(':')
    external_lib_paths = set(['/fake/path1/lib', '/fake/path1/lib64'])
    # The external lib paths should be the last two entries of the list and
    # should not appear anywhere before the last two entries
    assert (set(os.path.normpath(x) for x in link_dirs[-2:]) ==
            external_lib_paths)
    assert not (set(os.path.normpath(x) for x in link_dirs[:-2]) &
                external_lib_paths)


def test_parallel_false_is_not_propagating(config, mock_packages):
    class AttributeHolder(object):
        pass

@@ -73,7 +73,7 @@ def test_specs_staging(config):
    b = mock_repo.add_package('b', [d, e], [default, default])
    mock_repo.add_package('a', [b, c], [default, default])

    with repo.use_repositories(mock_repo):
    with repo.swap(mock_repo):
        spec_a = Spec('a')
        spec_a.concretize()

@@ -12,41 +12,56 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev


@pytest.fixture()
def job_parser():
    # --jobs needs to write to a command_line config scope, so this is the only
    # scope we create.
def parser():
    p = argparse.ArgumentParser()
    arguments.add_common_arguments(p, ['jobs'])
    scopes = [spack.config.InternalConfigScope('command_line', {'config': {}})]

    with spack.config.use_configuration(*scopes):
        yield p
    yield p
    # Cleanup the command line scope if it was set during tests
    spack.config.config.clear_caches()
    if 'command_line' in spack.config.config.scopes:
        spack.config.config.scopes['command_line'].clear()


@pytest.mark.parametrize("ncores", [1, 2, 4, 8, 16, 32])
def test_setting_jobs_flag(job_parser, ncores, monkeypatch):
    monkeypatch.setattr(multiprocessing, 'cpu_count', lambda: ncores)
    namespace = job_parser.parse_args(['-j', '24'])
    expected = min(24, ncores)
@pytest.fixture(params=[1, 2, 4, 8, 16, 32])
def ncores(monkeypatch, request):
    """Mocks having a machine with n cores for the purpose of
    computing config:build_jobs.
    """
    def _cpu_count():
        return request.param

    # Patch multiprocessing.cpu_count() to return the value we need
    monkeypatch.setattr(multiprocessing, 'cpu_count', _cpu_count)
    # Patch the configuration parts that have been cached already
    monkeypatch.setitem(spack.config.config_defaults['config'],
                        'build_jobs', min(16, request.param))
    monkeypatch.setitem(
        spack.config.config.scopes, '_builtin',
        spack.config.InternalConfigScope(
            '_builtin', spack.config.config_defaults
        ))
    return request.param


@pytest.mark.parametrize('cli_args,requested', [
    (['-j', '24'], 24),
    # Here we report the default if we have enough cores, as the cap
    # on the available number of cores will be taken care of in the test
    ([], 16)
])
def test_setting_parallel_jobs(parser, cli_args, ncores, requested):
    expected = min(requested, ncores)
    namespace = parser.parse_args(cli_args)
    assert namespace.jobs == expected
    assert spack.config.get('config:build_jobs') == expected


@pytest.mark.parametrize("ncores", [1, 2, 4, 8, 16, 32])
def test_omitted_job_flag(job_parser, ncores, monkeypatch):
    monkeypatch.setattr(multiprocessing, 'cpu_count', lambda: ncores)
    namespace = job_parser.parse_args([])
    assert namespace.jobs == min(ncores, 16)
    assert spack.config.get('config:build_jobs') is None


def test_negative_integers_not_allowed_for_parallel_jobs(job_parser):
def test_negative_integers_not_allowed_for_parallel_jobs(parser):
    with pytest.raises(ValueError) as exc_info:
        job_parser.parse_args(['-j', '-2'])
        parser.parse_args(['-j', '-2'])

    assert 'expected a positive integer' in str(exc_info.value)

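Note the design choice above: the machine's core count is modeled as a parametrized fixture (ncores), so each test taking the fixture runs once per core count, and this combines multiplicatively with any parametrize decorator on the test itself. A minimal sketch of that interaction (names and values are illustrative):

    import pytest

    @pytest.fixture(params=[1, 2, 4])
    def cores(request):
        return request.param

    @pytest.mark.parametrize('requested', [2, 8])
    def test_effective_jobs(cores, requested):
        # 3 fixture params x 2 test params = 6 generated test cases
        assert min(requested, cores) <= cores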
@@ -66,40 +81,3 @@ def test_parse_spec_flags_with_spaces(

    assert all(x not in s.variants for x in unexpected_variants)
    assert all(x in s.variants for x in expected_variants)


@pytest.mark.usefixtures('config')
def test_match_spec_env(mock_packages, mutable_mock_env_path):
    """
    Concretize a spec with non-default options in an environment. Make
    sure that when we ask for a matching spec when the environment is
    active that we get the instance concretized in the environment.
    """
    # Initial sanity check: we are planning on choosing a non-default
    # value, so make sure that is in fact not the default.
    check_defaults = spack.cmd.parse_specs(['a'], concretize=True)[0]
    assert not check_defaults.satisfies('foobar=baz')

    e = ev.create('test')
    e.add('a foobar=baz')
    e.concretize()
    with e:
        env_spec = spack.cmd.matching_spec_from_env(
            spack.cmd.parse_specs(['a'])[0])
        assert env_spec.satisfies('foobar=baz')
        assert env_spec.concrete


@pytest.mark.usefixtures('config')
def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
    e = ev.create('test')
    e.add('a foobar=baz')
    e.add('a foobar=fee')
    e.concretize()
    with e:
        with pytest.raises(
                spack.environment.SpackEnvironmentError) as exc_info:

            spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(['a'])[0])

    assert 'matches multiple specs' in exc_info.value.message

@@ -11,6 +11,7 @@
import os.path

import spack.architecture as architecture
import spack.config
from spack.main import SpackCommand, get_version
from spack.util.executable import which

@@ -53,3 +54,4 @@ def test_report():
    assert get_version() in out
    assert platform.python_version() in out
    assert str(arch) in out
    assert spack.config.get('config:concretizer') in out

@@ -23,8 +23,7 @@ def test_immediate_dependents(mock_packages):
        'libdwarf',
        'patch-a-dependency',
        'patch-several-dependencies',
        'quantum-espresso',
        'conditionally-patch-dependency'
        'quantum-espresso'
    ])


@@ -39,8 +38,7 @@ def test_transitive_dependents(mock_packages):
        'multivalue-variant',
        'singlevalue-variant-dependent',
        'patch-a-dependency', 'patch-several-dependencies',
        'quantum-espresso',
        'conditionally-patch-dependency'
        'quantum-espresso'
    ])


@@ -139,19 +139,6 @@ def test_concretize():
    assert any(x.name == 'mpileaks' for x in env_specs)


def test_env_uninstalled_specs(install_mockery, mock_fetch):
    e = ev.create('test')
    e.add('cmake-client')
    e.concretize()
    assert any(s.name == 'cmake-client' for s in e.uninstalled_specs())
    e.install_all()
    assert not any(s.name == 'cmake-client' for s in e.uninstalled_specs())
    e.add('mpileaks')
    e.concretize()
    assert not any(s.name == 'cmake-client' for s in e.uninstalled_specs())
    assert any(s.name == 'mpileaks' for s in e.uninstalled_specs())


def test_env_install_all(install_mockery, mock_fetch):
    e = ev.create('test')
    e.add('cmake-client')
@@ -343,7 +330,7 @@ def test_env_status_broken_view(
    # switch to a new repo that doesn't include the installed package
    # test that Spack detects the missing package and warns the user
    new_repo = MockPackageMultiRepo()
    with spack.repo.use_repositories(new_repo):
    with spack.repo.swap(new_repo):
        output = env('status')
        assert 'In environment test' in output
        assert 'Environment test includes out of date' in output
@@ -364,7 +351,7 @@ def test_env_activate_broken_view(
    # switch to a new repo that doesn't include the installed package
    # test that Spack detects the missing package and fails gracefully
    new_repo = MockPackageMultiRepo()
    with spack.repo.use_repositories(new_repo):
    with spack.repo.swap(new_repo):
        with pytest.raises(SpackCommandError):
            env('activate', '--sh', 'test')

@@ -942,7 +929,7 @@ def test_read_old_lock_and_write_new(tmpdir):
    y = mock_repo.add_package('y', [], [])
    mock_repo.add_package('x', [y], [build_only])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        x = Spec('x')
        x.concretize()

@@ -973,7 +960,7 @@ def test_read_old_lock_creates_backup(tmpdir):
    mock_repo = MockPackageMultiRepo()
    y = mock_repo.add_package('y', [], [])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        y = Spec('y')
        y.concretize()

@@ -1010,7 +997,7 @@ def noop(*args):
        pass
    setattr(mock_repo, 'dump_provenance', noop)

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        x_spec = Spec('x')
        x_concretized = x_spec.concretized()

@@ -1051,7 +1038,7 @@ def noop(*args):
        pass
    setattr(mock_repo, 'dump_provenance', noop)

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        y_spec = Spec('y ^z@3')
        y_concretized = y_spec.concretized()

@@ -2175,40 +2162,6 @@ def test_env_write_only_non_default():
    assert yaml == ev.default_manifest_yaml


@pytest.mark.regression('20526')
def test_env_write_only_non_default_nested(tmpdir):
    # setup an environment file
    # the environment includes configuration because nested configs proved the
    # most difficult to avoid writing.
    filename = 'spack.yaml'
    filepath = str(tmpdir.join(filename))
    contents = """\
env:
  specs:
  - matrix:
    - [mpileaks]
  packages:
    mpileaks:
      compiler: [gcc]
  view: true
"""

    # create environment with some structure
    with open(filepath, 'w') as f:
        f.write(contents)
    env('create', 'test', filepath)

    # concretize
    with ev.read('test') as e:
        concretize()
        e.write()

    with open(e.manifest_path, 'r') as f:
        manifest = f.read()

    assert manifest == contents


@pytest.fixture
def packages_yaml_v015(tmpdir):
    """Return the path to an existing manifest in the v0.15.x format
@@ -2369,18 +2322,3 @@ def _write_helper_raise(self, x, y):
    e.clear()
    e.write()
    assert os.path.exists(str(spack_lock))


@pytest.mark.regression('23440')
def test_custom_version_concretize_together(tmpdir):
    # Custom versions should be permitted in specs when
    # concretizing together
    e = ev.create('custom_version')
    e.concretization = 'together'

    # Concretize a first time using 'mpich' as the MPI provider
    e.add('hdf5@myversion')
    e.add('mpich')
    e.concretize()

    assert any('hdf5@myversion' in spec for _, spec in e.concretized_specs())

@@ -27,7 +27,7 @@ def python_database(mock_packages, mutable_database):


@pytest.mark.db
def test_extensions(mock_packages, python_database, config, capsys):
def test_extensions(mock_packages, python_database, capsys):
    ext2 = Spec("py-extension2").concretized()

    def check_output(ni, na):

@@ -1,48 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import pytest

import spack.environment as ev

from spack.main import SpackCommand, SpackCommandError


# everything here uses the mock_env_path
pytestmark = pytest.mark.usefixtures(
    "mutable_mock_env_path", "config", "mutable_mock_repo"
)


@pytest.mark.disable_clean_stage_check
def test_fetch_in_env(
        tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
):
    SpackCommand("env")("create", "test")
    with ev.read("test"):
        SpackCommand("add")("python")
        with pytest.raises(SpackCommandError):
            SpackCommand("fetch")()
        SpackCommand("concretize")()
        SpackCommand("fetch")()


@pytest.mark.disable_clean_stage_check
def test_fetch_single_spec(
        tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
):
    SpackCommand("fetch")("mpileaks")


@pytest.mark.disable_clean_stage_check
def test_fetch_multiple_specs(
        tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
):
    SpackCommand("fetch")("mpileaks", "gcc@10.2.0", "python")


def test_fetch_no_argument():
    with pytest.raises(SpackCommandError):
        SpackCommand("fetch")()
@@ -52,26 +52,18 @@ def test_location_build_dir(mock_spec):
    assert location('--build-dir', spec.name).strip() == pkg.stage.source_path


@pytest.mark.regression('22738')
def test_location_source_dir(mock_spec):
    """Tests spack location --source-dir."""
    spec, pkg = mock_spec
    assert location('--source-dir', spec.name).strip() == pkg.stage.source_path
    assert location(spec.name).strip() == pkg.stage.source_path


def test_location_source_dir_missing():
    """Tests spack location --source-dir with a missing source directory."""
def test_location_build_dir_missing():
    """Tests spack location --build-dir with a missing build directory."""
    spec = 'mpileaks'
    prefix = "==> Error: "
    expected = "%sSource directory does not exist yet. Run this to create it:"\
    expected = "%sBuild directory does not exist yet. Run this to create it:"\
               "%s  spack stage %s" % (prefix, os.linesep, spec)
    out = location('--source-dir', spec, fail_on_error=False).strip()
    out = location('--build-dir', spec, fail_on_error=False).strip()
    assert out == expected


@pytest.mark.parametrize('options', [([]),
                                     (['--source-dir', 'mpileaks']),
                                     (['--build-dir', 'mpileaks']),
                                     (['--env', 'missing-env']),
                                     (['spec1', 'spec2'])])
def test_location_cmd_error(options):

@@ -8,10 +8,9 @@

import pytest

import spack.config
import spack.main
import spack.modules
import spack.store
from spack.test.conftest import use_store, use_configuration, use_repo

module = spack.main.SpackCommand('module')

@@ -19,13 +18,12 @@
#: make sure module files are generated for all the tests here
@pytest.fixture(scope='module', autouse=True)
def ensure_module_files_are_there(
        mock_repo_path, mock_store, mock_configuration_scopes
):
        mock_repo_path, mock_store, mock_configuration):
    """Generate module files for module tests."""
    module = spack.main.SpackCommand('module')
    with spack.store.use_store(mock_store):
        with spack.config.use_configuration(*mock_configuration_scopes):
            with spack.repo.use_repositories(mock_repo_path):
    with use_store(mock_store):
        with use_configuration(mock_configuration):
            with use_repo(mock_repo_path):
                module('tcl', 'refresh', '-y')


@@ -82,7 +82,7 @@ def mock_pkg_git_repo(tmpdir_factory):
        git('-c', 'commit.gpgsign=false', 'commit',
            '-m', 'change pkg-b, remove pkg-c, add pkg-d')

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        yield mock_repo_packages


@@ -129,8 +129,7 @@ def test_pkg_add(mock_pkg_git_repo):
    finally:
        shutil.rmtree('pkg-e')
        # Removing a package mid-run disrupts Spack's caching
        if spack.repo.path.repos[0]._fast_package_checker:
            spack.repo.path.repos[0]._fast_package_checker.invalidate()
        spack.repo.path.repos[0]._fast_package_checker.invalidate()

    with pytest.raises(spack.main.SpackCommandError):
        pkg('add', 'does-not-exist')

@@ -12,7 +12,7 @@
concretize = SpackCommand('concretize')


def test_undevelop(tmpdir, config, mock_packages, mutable_mock_env_path):
def test_undevelop(tmpdir, mock_packages, mutable_mock_env_path):
    # setup environment
    envdir = tmpdir.mkdir('env')
    with envdir.as_cwd():
@@ -39,9 +39,7 @@ def test_undevelop(tmpdir, config, mock_packages, mutable_mock_env_path):
    assert not after.satisfies('dev_path=*')


def test_undevelop_nonexistent(
        tmpdir, config, mock_packages, mutable_mock_env_path
):
def test_undevelop_nonexistent(tmpdir, mock_packages, mutable_mock_env_path):
    # setup environment
    envdir = tmpdir.mkdir('env')
    with envdir.as_cwd():

@@ -471,14 +471,16 @@ def test_fj_flags():
    supported_flag_test("cxx98_flag", "-std=c++98", "fj@4.0.0")
    supported_flag_test("cxx11_flag", "-std=c++11", "fj@4.0.0")
    supported_flag_test("cxx14_flag", "-std=c++14", "fj@4.0.0")
    supported_flag_test("cxx17_flag", "-std=c++17", "fj@4.0.0")
    supported_flag_test("c99_flag", "-std=c99", "fj@4.0.0")
    supported_flag_test("c11_flag", "-std=c11", "fj@4.0.0")
    supported_flag_test("cc_pic_flag", "-KPIC", "fj@4.0.0")
    supported_flag_test("cxx_pic_flag", "-KPIC", "fj@4.0.0")
    supported_flag_test("f77_pic_flag", "-KPIC", "fj@4.0.0")
    supported_flag_test("fc_pic_flag", "-KPIC", "fj@4.0.0")
    supported_flag_test("opt_flags", ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'],
    supported_flag_test("opt_flags", ['-O0', '-O1', '-O2', '-O3', '-Ofast'],
                        'fj@4.0.0')
    supported_flag_test("debug_flags", "-g", "fj@4.0.0")


def test_gcc_flags():

@@ -96,7 +96,11 @@ def test_apple_clang_version_detection(
    ('clang version 8.0.0-3 (tags/RELEASE_800/final)\n'
     'Target: aarch64-unknown-linux-gnu\n'
     'Thread model: posix\n'
     'InstalledDir: /usr/bin\n', '8.0.0')
     'InstalledDir: /usr/bin\n', '8.0.0'),
    ('clang version 11.0.0\n'
     'Target: aarch64-unknown-linux-gnu\n'
     'Thread model: posix\n'
     'InstalledDir: /usr/bin\n', '11.0.0')
])
def test_clang_version_detection(version_str, expected_version):
    version = spack.compilers.clang.Clang.extract_version_from_output(
@@ -152,18 +156,28 @@ def test_intel_version_detection(version_str, expected_version):


@pytest.mark.parametrize('version_str,expected_version', [
    (  # ICX/ICPX
        'Intel(R) oneAPI DPC++ Compiler 2021.1 (2020.10.0.1113)\n'
    (  # ICX
        'Intel(R) oneAPI DPC++ Compiler Pro 2021.1 (2020.8.0.0827)\n'
        'Target: x86_64-unknown-linux-gnu\n'
        'Thread model: posix\n'
        'InstalledDir: /made/up/path',
        '2021.1'
        'InstalledDir: /soft/restricted/CNDA/sdk/\n'
        '2020.9.15.1/oneapi/compiler/2021.1-beta09/linux/bin',
        '2020.8.0.0827'
    ),
    (  # IFX
        'ifx (IFORT) 2021.1 Beta 20201113\n'
        'Copyright (C) 1985-2020 Intel Corporation. All rights reserved.',
        '2021.1'
    (  # ICPX
        'Intel(R) oneAPI DPC++ Compiler Pro 2021.1 (2020.8.0.0827)\n'
        'Target: x86_64-unknown-linux-gnu\n'
        'Thread model: posix\n'
        'InstalledDir: /soft/restricted/CNDA/sdk/\n'
        '2020.9.15.1/oneapi/compiler/2021.1-beta09/linux/bin',
        '2020.8.0.0827'
    )
    # Detection will fail for ifx because it can't parse it from this.
    # (  # IFX
    #     'ifx (IFORT) 2021.1 Beta 20200827\n'
    #     'Copyright (C) 1985-2020 Intel Corporation. All rights reserved.',
    #     '2020.8.0.0827'
    # )
])
def test_oneapi_version_detection(version_str, expected_version):
    version = spack.compilers.oneapi.Oneapi.extract_version_from_output(

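A hedged usage sketch of the classmethod these tests exercise, matching the clang 11 case above:

    import spack.compilers.clang

    output = ('clang version 11.0.0\n'
              'Target: aarch64-unknown-linux-gnu\n'
              'Thread model: posix\n'
              'InstalledDir: /usr/bin\n')
    # The compiler class parses the version out of `clang --version` text.
    version = spack.compilers.clang.Clang.extract_version_from_output(output)
    assert version == '11.0.0'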
@@ -5,7 +5,6 @@
import sys

import pytest
import jinja2

import archspec.cpu

@@ -115,64 +114,6 @@ def current_host(request, monkeypatch):
    spack.architecture.get_platform.cache.clear()


@pytest.fixture()
def repo_with_changing_recipe(tmpdir_factory, mutable_mock_repo):
    repo_namespace = 'changing'
    repo_dir = tmpdir_factory.mktemp(repo_namespace)

    repo_dir.join('repo.yaml').write("""
repo:
  namespace: changing
""", ensure=True)

    packages_dir = repo_dir.ensure('packages', dir=True)
    root_pkg_str = """
class Root(Package):
    homepage = "http://www.example.com"
    url = "http://www.example.com/root-1.0.tar.gz"

    version(1.0, sha256='abcde')
    depends_on('changing')
"""
    packages_dir.join('root', 'package.py').write(
        root_pkg_str, ensure=True
    )

    changing_template = """
class Changing(Package):
    homepage = "http://www.example.com"
    url = "http://www.example.com/changing-1.0.tar.gz"

    version(1.0, sha256='abcde')
{% if not delete_variant %}
    variant('fee', default=True, description='nope')
{% endif %}
    variant('foo', default=True, description='nope')
{% if add_variant %}
    variant('fum', default=True, description='nope')
{% endif %}
"""
    repo = spack.repo.Repo(str(repo_dir))
    mutable_mock_repo.put_first(repo)

    class _ChangingPackage(object):
        def change(self, context):
            # To ensure we get the changed package we need to
            # invalidate the cache
            repo._modules = {}

            t = jinja2.Template(changing_template)
            changing_pkg_str = t.render(**context)
            packages_dir.join('changing', 'package.py').write(
                changing_pkg_str, ensure=True
            )

    _changing_pkg = _ChangingPackage()
    _changing_pkg.change({'delete_variant': False, 'add_variant': False})

    return _changing_pkg


# This must use the mutable_config fixture because the test
# adjusting_default_target_based_on_compiler uses the current_host fixture,
# which changes the config.
@@ -304,7 +245,7 @@ def test_architecture_deep_inheritance(self, mock_targets):
        barpkg = mock_repo.add_package('barpkg', [bazpkg], [default_dep])
        mock_repo.add_package('foopkg', [barpkg], [default_dep])

        with spack.repo.use_repositories(mock_repo):
        with spack.repo.swap(mock_repo):
            spec = Spec('foopkg %gcc@4.5.0 os=CNL target=nocona' +
                        ' ^barpkg os=SuSE11 ^bazpkg os=be')
            spec.concretize()
@@ -323,10 +264,6 @@ def concretize_multi_provider(self):
        s.concretize()
        assert s['mpi'].version == ver('1.10.3')

    def test_concretize_dependent_with_singlevalued_variant_type(self):
        s = Spec('singlevalue-variant-dependent-type')
        s.concretize()

    @pytest.mark.parametrize("spec,version", [
        ('dealii', 'develop'),
        ('xsdk', '0.4.0'),
@@ -564,11 +501,6 @@ def test_conflicts_in_spec(self, conflict_spec):
        with pytest.raises(spack.error.SpackError):
            s.concretize()

    def test_conflict_in_all_directives_true(self):
        s = Spec('when-directives-true')
        with pytest.raises(spack.error.SpackError):
            s.concretize()

    @pytest.mark.parametrize('spec_str', [
        'conflict@10.0%clang+foo'
    ])
@@ -688,14 +620,13 @@ def test_noversion_pkg(self, spec):
        with pytest.raises(spack.error.SpackError):
            Spec(spec).concretized()

    # Include targets to prevent regression on 20537
    @pytest.mark.parametrize('spec, best_achievable', [
        ('mpileaks%gcc@4.4.7 target=x86_64:', 'core2'),
        ('mpileaks%gcc@4.8 target=x86_64:', 'haswell'),
        ('mpileaks%gcc@5.3.0 target=x86_64:', 'broadwell'),
        ('mpileaks%apple-clang@5.1.0 target=x86_64:', 'x86_64')
        ('mpileaks%gcc@4.4.7', 'core2'),
        ('mpileaks%gcc@4.8', 'haswell'),
        ('mpileaks%gcc@5.3.0', 'broadwell'),
        ('mpileaks%apple-clang@5.1.0', 'x86_64')
    ])
    @pytest.mark.regression('13361', '20537')
    @pytest.mark.regression('13361')
    def test_adjusting_default_target_based_on_compiler(
            self, spec, best_achievable, current_host, mock_targets
    ):
@@ -964,221 +895,3 @@ def test_conditional_provides_or_depends_on(self):
        assert 'v1-provider' in s
        assert s['v1'].name == 'v1-provider'
        assert s['v2'].name == 'conditional-provider'

    @pytest.mark.regression('20079')
    @pytest.mark.parametrize('spec_str,tests_arg,with_dep,without_dep', [
        # Check that True is treated correctly and attaches test deps
        # to all nodes in the DAG
        ('a', True, ['a'], []),
        ('a foobar=bar', True, ['a', 'b'], []),
        # Check that a list of names activates the dependency only for
        # packages in that list
        ('a foobar=bar', ['a'], ['a'], ['b']),
        ('a foobar=bar', ['b'], ['b'], ['a']),
        # Check that False disregards test dependencies
        ('a foobar=bar', False, [], ['a', 'b']),
    ])
    def test_activating_test_dependencies(
            self, spec_str, tests_arg, with_dep, without_dep
    ):
        s = Spec(spec_str).concretized(tests=tests_arg)

        for pkg_name in with_dep:
            msg = "Cannot find test dependency in package '{0}'"
            node = s[pkg_name]
            assert node.dependencies(deptype='test'), msg.format(pkg_name)

        for pkg_name in without_dep:
            msg = "Test dependency in package '{0}' is unexpected"
            node = s[pkg_name]
            assert not node.dependencies(deptype='test'), msg.format(pkg_name)

    @pytest.mark.regression('20019')
    def test_compiler_match_is_preferred_to_newer_version(self):
        if spack.config.get('config:concretizer') == 'original':
            pytest.xfail('Known failure of the original concretizer')

        # This spec depends on openblas. Openblas has a conflict
        # that doesn't allow newer versions with gcc@4.4.0. Check
        # that an old version of openblas is selected, rather than
        # a different compiler for just that node.
        spec_str = 'simple-inheritance+openblas %gcc@4.4.0 os=redhat6'
        s = Spec(spec_str).concretized()

        assert 'openblas@0.2.13' in s
        assert s['openblas'].satisfies('%gcc@4.4.0')

    @pytest.mark.regression('19981')
    def test_target_ranges_in_conflicts(self):
        with pytest.raises(spack.error.SpackError):
            Spec('impossible-concretization').concretized()

    @pytest.mark.regression('20040')
    def test_variant_not_default(self):
        s = Spec('ecp-viz-sdk').concretized()

        # Check default variant value for the package
        assert '+dep' in s['conditional-constrained-dependencies']

        # Check that non-default variant values are forced on the dependency
        d = s['dep-with-variants']
        assert '+foo+bar+baz' in d

    @pytest.mark.regression('20055')
    def test_custom_compiler_version(self):
        if spack.config.get('config:concretizer') == 'original':
            pytest.xfail('Known failure of the original concretizer')

        s = Spec('a %gcc@foo os=redhat6').concretized()
        assert '%gcc@foo' in s

    def test_all_patches_applied(self):
        uuidpatch = 'a60a42b73e03f207433c5579de207c6ed61d58e4d12dd3b5142eb525728d89ea'
        localpatch = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
        spec = spack.spec.Spec('conditionally-patch-dependency+jasper')
        spec.concretize()
        assert ((uuidpatch, localpatch) ==
                spec['libelf'].variants['patches'].value)

    def test_dont_select_version_that_brings_more_variants_in(self):
        s = Spec('dep-with-variants-if-develop-root').concretized()
        assert s['dep-with-variants-if-develop'].satisfies('@1.0')

    @pytest.mark.regression('20244,20736')
    @pytest.mark.parametrize('spec_str,is_external,expected', [
        # These are all externals, and 0_8 is a version not in package.py
        ('externaltool@1.0', True, '@1.0'),
        ('externaltool@0.9', True, '@0.9'),
        ('externaltool@0_8', True, '@0_8'),
        # This external package is buildable, has a custom version
        # in packages.yaml that is greater than the ones in package.py
        # and specifies a variant
        ('external-buildable-with-variant +baz', True, '@1.1.special +baz'),
        ('external-buildable-with-variant ~baz', False, '@1.0 ~baz'),
        ('external-buildable-with-variant@1.0: ~baz', False, '@1.0 ~baz'),
        # This uses an external version that meets the condition for
        # having an additional dependency, but the dependency shouldn't
        # appear in the answer set
        ('external-buildable-with-variant@0.9 +baz', True, '@0.9'),
        # This package has an external version declared that would be
        # the least preferred if Spack had to build it
        ('old-external', True, '@1.0.0'),
    ])
    def test_external_package_versions(self, spec_str, is_external, expected):
        s = Spec(spec_str).concretized()
        assert s.external == is_external
        assert s.satisfies(expected)

    @pytest.mark.regression('20292')
    @pytest.mark.parametrize('context', [
        {'add_variant': True, 'delete_variant': False},
        {'add_variant': False, 'delete_variant': True},
        {'add_variant': True, 'delete_variant': True}
    ])
    @pytest.mark.xfail()
    def test_reuse_installed_packages(
            self, context, mutable_database, repo_with_changing_recipe
    ):
        # Install a spec
        root = Spec('root').concretized()
        dependency = root['changing'].copy()
        root.package.do_install(fake=True, explicit=True)

        # Modify package.py
        repo_with_changing_recipe.change(context)

        # Try to concretize with the spec installed previously
        new_root = Spec('root ^/{0}'.format(
            dependency.dag_hash())
        ).concretized()

        assert root.dag_hash() == new_root.dag_hash()

    @pytest.mark.regression('20784')
    def test_concretization_of_test_dependencies(self):
        # With clingo we emit dependency_conditions regardless of the type
        # of the dependency. We need to ensure that there's at least one
        # dependency type declared to infer that the dependency holds.
        s = Spec('test-dep-with-imposed-conditions').concretized()
        assert 'c' not in s

    @pytest.mark.parametrize('spec_str', [
        'wrong-variant-in-conflicts',
        'wrong-variant-in-depends-on'
    ])
    def test_error_message_for_inconsistent_variants(self, spec_str):
        if spack.config.get('config:concretizer') == 'original':
            pytest.xfail('Known failure of the original concretizer')

        s = Spec(spec_str)
        with pytest.raises(RuntimeError, match='not found in package'):
            s.concretize()

    @pytest.mark.regression('22533')
    @pytest.mark.parametrize('spec_str,variant_name,expected_values', [
        # Test the default value 'auto'
        ('mvapich2', 'file_systems', ('auto',)),
        # Test setting a single value from the disjoint set
        ('mvapich2 file_systems=lustre', 'file_systems', ('lustre',)),
        # Test setting multiple values from the disjoint set
        ('mvapich2 file_systems=lustre,gpfs', 'file_systems',
         ('lustre', 'gpfs')),
    ])
    def test_mv_variants_disjoint_sets_from_spec(
            self, spec_str, variant_name, expected_values
    ):
        s = Spec(spec_str).concretized()
        assert set(expected_values) == set(s.variants[variant_name].value)

    @pytest.mark.regression('22533')
    def test_mv_variants_disjoint_sets_from_packages_yaml(self):
        external_mvapich2 = {
            'mvapich2': {
                'buildable': False,
                'externals': [{
                    'spec': 'mvapich2@2.3.1 file_systems=nfs,ufs',
                    'prefix': '/usr'
                }]
            }
        }
        spack.config.set('packages', external_mvapich2)

        s = Spec('mvapich2').concretized()
        assert set(s.variants['file_systems'].value) == set(['ufs', 'nfs'])

    @pytest.mark.regression('22596')
    def test_external_with_non_default_variant_as_dependency(self):
        # This package depends on another that is registered as an external
        # with 'buildable: true' and a variant with a non-default value set
        s = Spec('trigger-external-non-default-variant').concretized()

        assert '~foo' in s['external-non-default-variant']
        assert '~bar' in s['external-non-default-variant']
        assert s['external-non-default-variant'].external

    @pytest.mark.regression('22871')
    @pytest.mark.parametrize('spec_str,expected_os', [
        ('mpileaks', 'os=debian6'),
        # To trigger the bug in 22871 we need to have the same compiler
        # spec available on both operating systems
        ('mpileaks%gcc@4.5.0 platform=test os=debian6', 'os=debian6'),
        ('mpileaks%gcc@4.5.0 platform=test os=redhat6', 'os=redhat6')
    ])
    def test_os_selection_when_multiple_choices_are_possible(
            self, spec_str, expected_os
    ):
        s = Spec(spec_str).concretized()

        for node in s.traverse():
            assert node.satisfies(expected_os)

    @pytest.mark.regression('22718')
    @pytest.mark.parametrize('spec_str,expected_compiler', [
        ('mpileaks', '%gcc@4.5.0'),
        ('mpileaks ^mpich%clang@3.3', '%clang@3.3')
    ])
    def test_compiler_is_unique(self, spec_str, expected_compiler):
        s = Spec(spec_str).concretized()

        for node in s.traverse():
            assert node.satisfies(expected_compiler)

@@ -82,9 +82,7 @@ class TestConcretizePreferences(object):
         {'debug': True, 'opt': True, 'shared': False, 'static': False}),
        # Check a multivalued variant with multiple values set
        ('multivalue-variant', ['foo=bar,baz', 'fee=bar'],
         {'foo': ('bar', 'baz'), 'fee': 'bar'}),
        ('singlevalue-variant', ['fum=why'],
         {'fum': 'why'})
         {'foo': ('bar', 'baz'), 'fee': 'bar'})
    ])
    def test_preferred_variants(
            self, package_name, variant_value, expected_results
@@ -373,13 +371,3 @@ def test_config_perms_fail_write_gt_read(self, configure_permissions):
        spec = Spec('callpath')
        with pytest.raises(ConfigError):
            spack.package_prefs.get_package_permissions(spec)

    @pytest.mark.regression('20040')
    def test_variant_not_flipped_to_pull_externals(self):
        """Test that a package doesn't prefer pulling in an
        external to using the default value of a variant.
        """
        s = Spec('vdefault-or-external-root').concretized()

        assert '~external' in s['vdefault-or-external']
        assert 'externaltool' not in s

@@ -72,25 +72,6 @@ def _write(config, data, scope):
    return _write


@pytest.fixture()
def env_yaml(tmpdir):
    """Return a sample env.yaml for test purposes"""
    env_yaml = str(tmpdir.join("env.yaml"))
    with open(env_yaml, 'w') as f:
        f.write("""\
env:
    config:
        verify_ssl: False
        dirty: False
    packages:
        libelf:
            compiler: [ 'gcc@4.5.3' ]
    repos:
        - /x/y/z
""")
    return env_yaml


def check_compiler_config(comps, *compiler_names):
    """Check that named compilers in comps match Spack's config."""
    config = spack.config.get('compilers')
@@ -816,10 +797,23 @@ def test_immutable_scope(tmpdir):
    scope._write_section('config')


def test_single_file_scope(config, env_yaml):
def test_single_file_scope(tmpdir, config):
    env_yaml = str(tmpdir.join("env.yaml"))
    with open(env_yaml, 'w') as f:
        f.write("""\
env:
    config:
        verify_ssl: False
        dirty: False
    packages:
        libelf:
            compiler: [ 'gcc@4.5.3' ]
    repos:
        - /x/y/z
""")

    scope = spack.config.SingleFileScope(
        'env', env_yaml, spack.schema.env.schema, ['env']
    )
        'env', env_yaml, spack.schema.env.schema, ['env'])

    with spack.config.override(scope):
        # from the single-file config
@@ -1051,41 +1045,3 @@ def test_bad_path_double_override(config):
                     match='Meaningless second override'):
        with spack.config.override('bad::double:override::directive', ''):
            pass


@pytest.mark.regression('22547')
def test_single_file_scope_cache_clearing(env_yaml):
    scope = spack.config.SingleFileScope(
        'env', env_yaml, spack.schema.env.schema, ['env']
    )
    # Check that we can retrieve data from the single file scope
    before = scope.get_section('config')
    assert before
    # Clear the cache of the Single file scope
    scope.clear()
    # Check that the section can be retrieved again and it's
    # the same as before
    after = scope.get_section('config')
    assert after
    assert before == after


@pytest.mark.regression('22611')
def test_internal_config_scope_cache_clearing():
    """
    An InternalConfigScope object is constructed from data that is already
    in memory, therefore it doesn't have any cache to clear. Here we ensure
    that calling the clear method is consistent with that.
    """
    data = {
        'config': {
            'build_jobs': 10
        }
    }
    internal_scope = spack.config.InternalConfigScope('internal', data)
    # Ensure that the initial object is properly set
    assert internal_scope.sections['config'] == data
    # Call the clear method
    internal_scope.clear()
    # Check that this didn't affect the scope object
    assert internal_scope.sections['config'] == data

@@ -1,9 +1,10 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import collections
import contextlib
import errno
import inspect
import itertools
@@ -34,7 +35,6 @@
import spack.platforms.test
import spack.repo
import spack.stage
import spack.store
import spack.util.executable
import spack.util.gpg
import spack.subprocess_context
@@ -315,21 +315,27 @@ def _skip_if_missing_executables(request):
        pytest.skip(msg.format(', '.join(missing_execs)))


@pytest.fixture(scope='session')
# FIXME: The lines below should better be added to a fixture with
# FIXME: session-scope. Anyhow doing it is not easy, as it seems
# FIXME: there's some weird interaction with compilers during concretization.
spack.architecture.real_platform = spack.architecture.platform


def test_platform():
    return spack.platforms.test.Test()


@pytest.fixture(autouse=True, scope='session')
def _use_test_platform(test_platform):
    # This is the only context manager used at session scope (see note
    # below for more insight) since we want to use the test platform as
    # a default during tests.
    with spack.architecture.use_platform(test_platform):
        yield
spack.architecture.platform = test_platform


# FIXME: Since we change the architecture above, we have to (re)initialize
# FIXME: the config singleton. If it gets initialized too early with the
# FIXME: actual architecture, tests will fail.
spack.config.config = spack.config._config()


#
# Note on context managers used by fixtures
# Context managers used by fixtures
#
# Because these context managers modify global state, they should really
# ONLY be used persistently (i.e., around yield statements) in
@@ -350,13 +356,45 @@ def _use_test_platform(test_platform):
# *USE*, or things can get really confusing.
#

@contextlib.contextmanager
def use_configuration(config):
    """Context manager to swap out the global Spack configuration."""
    saved = spack.config.replace_config(config)

    # Avoid using real spack configuration that has been cached by other
    # tests, and avoid polluting the cache with spack test configuration
    # (including modified configuration)
    saved_compiler_cache = spack.compilers._cache_config_file
    spack.compilers._cache_config_file = []

    yield

    spack.config.replace_config(saved)
    spack.compilers._cache_config_file = saved_compiler_cache


@contextlib.contextmanager
def use_store(store):
    """Context manager to swap out the global Spack store."""
    saved = spack.store.store
    spack.store.store = store
    yield
    spack.store.store = saved


@contextlib.contextmanager
def use_repo(repo):
    """Context manager to swap out the global Spack repo path."""
    with spack.repo.swap(repo):
        yield

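These three helpers are designed to nest, mirroring how the module-command fixture earlier in this diff uses them. A minimal sketch (the values are assumed to come from the mock_store, mock_configuration and mock_repo_path fixtures defined below):

    from spack.test.conftest import use_store, use_configuration, use_repo

    with use_store(mock_store):
        with use_configuration(mock_configuration):
            with use_repo(mock_repo_path):
                # All three globals are swapped here and restored,
                # in reverse order, when the blocks exit.
                pass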
#
|
||||
# Test-specific fixtures
|
||||
#
|
||||
@pytest.fixture(scope='session')
|
||||
def mock_repo_path():
|
||||
yield spack.repo.Repo(spack.paths.mock_packages_path)
|
||||
yield spack.repo.RepoPath(spack.paths.mock_packages_path)
|
||||
|
||||
|
||||
def _pkg_install_fn(pkg, spec, prefix):
|
||||
@@ -373,15 +411,15 @@ def mock_pkg_install(monkeypatch):
|
||||
@pytest.fixture(scope='function')
|
||||
def mock_packages(mock_repo_path, mock_pkg_install):
|
||||
"""Use the 'builtin.mock' repository instead of 'builtin'"""
|
||||
with spack.repo.use_repositories(mock_repo_path) as mock_repo:
|
||||
yield mock_repo
|
||||
with use_repo(mock_repo_path):
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def mutable_mock_repo(mock_repo_path):
|
||||
"""Function-scoped mock packages, for tests that need to modify them."""
|
||||
mock_repo = spack.repo.Repo(spack.paths.mock_packages_path)
|
||||
with spack.repo.use_repositories(mock_repo) as mock_repo_path:
|
||||
mock_repo_path = spack.repo.RepoPath(spack.paths.mock_packages_path)
|
||||
with use_repo(mock_repo_path):
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
@@ -407,7 +445,9 @@ def default_config():
    This ensures we can test the real default configuration without having
    tests fail when the user overrides the defaults that we test against."""
    defaults_path = os.path.join(spack.paths.etc_path, 'spack', 'defaults')
    with spack.config.use_configuration(defaults_path) as defaults_config:
    defaults_scope = spack.config.ConfigScope('defaults', defaults_path)
    defaults_config = spack.config.Configuration(defaults_scope)
    with use_configuration(defaults_config):
        yield defaults_config


@@ -429,8 +469,9 @@ def load_json():
        with open(mock_uarch_json) as f:
            return json.load(f)

    targets_json = load_json()
    targets = archspec.cpu.microarchitecture._known_microarchitectures()
    targets_json = archspec.cpu.schema.LazyDictionary(load_json)
    targets = archspec.cpu.microarchitecture.LazyDictionary(
        archspec.cpu.microarchitecture._known_microarchitectures)

    yield targets_json, targets

@@ -482,7 +523,7 @@ def configuration_dir(tmpdir_factory, linux_os):


@pytest.fixture(scope='session')
def mock_configuration_scopes(configuration_dir):
def mock_configuration(configuration_dir):
    """Create a persistent Configuration object from the configuration_dir."""
    defaults = spack.config.InternalConfigScope(
        '_builtin', spack.config.config_defaults
@@ -493,14 +534,14 @@ def mock_configuration_scopes(configuration_dir):
                   for name in ['site', 'system', 'user']]
    test_scopes.append(spack.config.InternalConfigScope('command_line'))

    yield test_scopes
    yield spack.config.Configuration(*test_scopes)


@pytest.fixture(scope='function')
def config(mock_configuration_scopes):
def config(mock_configuration):
    """This fixture activates/deactivates the mock configuration."""
    with spack.config.use_configuration(*mock_configuration_scopes) as config:
        yield config
    with use_configuration(mock_configuration):
        yield mock_configuration

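
The thread running through these fixtures is the shape of `use_configuration`: the v0.16.2 lines pass configuration scopes straight in (`use_configuration(*scopes)`), while this branch first wraps the scopes in a `spack.config.Configuration` object and passes that. A hedged sketch of the object-first style, assuming `use_configuration` installs its argument as the active global configuration the way the fixtures above do (directory paths are illustrative):

```python
import spack.config

# Build the scopes explicitly, then bundle them into one Configuration.
scopes = [spack.config.ConfigScope(name, '/tmp/cfg/' + name)
          for name in ['site', 'system', 'user']]
cfg = spack.config.Configuration(*scopes)

# Scopes listed later take precedence on lookup, so 'user' wins here.
with use_configuration(cfg):
    build_jobs = spack.config.get('config:build_jobs')
```
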
@pytest.fixture(scope='function')
@@ -509,10 +550,11 @@ def mutable_config(tmpdir_factory, configuration_dir):
    mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
    configuration_dir.copy(mutable_dir)

    scopes = [spack.config.ConfigScope(name, str(mutable_dir.join(name)))
              for name in ['site', 'system', 'user']]
    cfg = spack.config.Configuration(
        *[spack.config.ConfigScope(name, str(mutable_dir.join(name)))
          for name in ['site', 'system', 'user']])

    with spack.config.use_configuration(*scopes) as cfg:
    with use_configuration(cfg):
        yield cfg


@@ -520,20 +562,23 @@ def mutable_config(tmpdir_factory, configuration_dir):
def mutable_empty_config(tmpdir_factory, configuration_dir):
    """Empty configuration that can be modified by the tests."""
    mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
    scopes = [spack.config.ConfigScope(name, str(mutable_dir.join(name)))
              for name in ['site', 'system', 'user']]

    with spack.config.use_configuration(*scopes) as cfg:
    cfg = spack.config.Configuration(
        *[spack.config.ConfigScope(name, str(mutable_dir.join(name)))
          for name in ['site', 'system', 'user']])

    with use_configuration(cfg):
        yield cfg


@pytest.fixture()
def mock_low_high_config(tmpdir):
    """Mocks two configuration scopes: 'low' and 'high'."""
    scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name)))
              for name in ['low', 'high']]
    config = spack.config.Configuration(
        *[spack.config.ConfigScope(name, str(tmpdir.join(name)))
          for name in ['low', 'high']])

    with spack.config.use_configuration(*scopes) as config:
    with use_configuration(config):
        yield config

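
A usage note on the fixture above: the 'low'/'high' names encode precedence, since the scope constructed later shadows the earlier one on lookup. A hypothetical test sketch (the configuration key is invented for illustration):

```python
def test_high_scope_shadows_low(mock_low_high_config):
    # Write the same key into both scopes...
    mock_low_high_config.set('config:build_jobs', 4, scope='low')
    mock_low_high_config.set('config:build_jobs', 16, scope='high')
    # ...and the 'high' scope should win on lookup.
    assert mock_low_high_config.get('config:build_jobs') == 16
```
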
@@ -582,7 +627,7 @@ def _store_dir_and_cache(tmpdir_factory):


@pytest.fixture(scope='session')
def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes,
def mock_store(tmpdir_factory, mock_repo_path, mock_configuration,
               _store_dir_and_cache):
    """Creates a read-only mock database with some packages installed; note
    that the ref count for dyninst here will be 3, as it's recycled
@@ -593,43 +638,13 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes,

    """
    store_path, store_cache = _store_dir_and_cache
    store = spack.store.Store(str(store_path))

    # If the cache does not exist populate the store and create it
    if not os.path.exists(str(store_cache.join('.spack-db'))):
        with spack.config.use_configuration(*mock_configuration_scopes):
            with spack.store.use_store(str(store_path)) as store:
                with spack.repo.use_repositories(mock_repo_path):
                    _populate(store.db)
        store_path.copy(store_cache, mode=True, stat=True)

    # Make the DB filesystem read-only to ensure we can't modify entries
    store_path.join('.spack-db').chmod(mode=0o555, rec=1)

    yield store

    store_path.join('.spack-db').chmod(mode=0o755, rec=1)


@pytest.fixture(scope='function')
def mutable_mock_store(
        tmpdir_factory, mock_repo_path, mock_configuration_scopes,
        _store_dir_and_cache
):
    """Creates a read-only mock database with some packages installed; note
    that the ref count for dyninst here will be 3, as it's recycled
    across each install.

    This does not actually activate the store for use by Spack -- see the
    ``database`` fixture for that.

    """
    store_path, store_cache = _store_dir_and_cache

    # If the cache does not exist populate the store and create it
    if not os.path.exists(str(store_cache.join('.spack-db'))):
        with spack.config.use_configuration(*mock_configuration_scopes):
            with spack.store.use_store(str(store_path)) as store:
                with spack.repo.use_repositories(mock_repo_path):
        with use_configuration(mock_configuration):
            with use_store(store):
                with use_repo(mock_repo_path):
                    _populate(store.db)
        store_path.copy(store_cache, mode=True, stat=True)

@@ -1205,7 +1220,8 @@ def mock_test_repo(tmpdir_factory):
        namespace: mock_test_repo
    """)

    with spack.repo.use_repositories(str(repodir)) as repo:
    repo = spack.repo.RepoPath(str(repodir))
    with spack.repo.swap(repo):
        yield repo, repodir

    shutil.rmtree(str(repodir))

@@ -8,7 +8,6 @@ compilers:
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.5.0
    operating_system: {0.name}{0.version}
@@ -18,17 +17,6 @@ compilers:
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.5.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.3
    operating_system: CNL
@@ -47,7 +35,6 @@ compilers:
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.3
    operating_system: yosemite
@@ -57,7 +44,6 @@ compilers:
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    paths:
      cc: /path/to/gcc
@@ -76,7 +62,6 @@ compilers:
    operating_system: SuSE11
    spec: gcc@4.5.0
    modules: 'None'
    target: x86_64
- compiler:
    paths:
      cc: /path/to/gcc
@@ -86,7 +71,6 @@ compilers:
    operating_system: yosemite
    spec: gcc@4.5.0
    modules: 'None'
    target: x86_64
- compiler:
    paths:
      cc: /path/to/gcc
@@ -96,7 +80,6 @@ compilers:
    operating_system: elcapitan
    spec: gcc@4.5.0
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.3
    operating_system: elcapitan
@@ -106,7 +89,6 @@ compilers:
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.7.2
    operating_system: redhat6
@@ -120,16 +102,6 @@ compilers:
      cxxflags: -O0 -g
      fflags: -O0 -g
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.4.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc440
      cxx: /path/to/g++440
      f77: /path/to/gfortran440
      fc: /path/to/gfortran440
    modules: 'None'
- compiler:
    spec: clang@3.5
    operating_system: redhat6
@@ -142,7 +114,6 @@ compilers:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@8.0.0
    operating_system: redhat7
@@ -155,7 +126,6 @@ compilers:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: x86_64
- compiler:
    spec: apple-clang@9.1.0
    operating_system: elcapitan
@@ -165,7 +135,6 @@ compilers:
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@foo
    operating_system: redhat6
@@ -175,7 +144,6 @@ compilers:
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.4.0-special
    operating_system: redhat6
@@ -185,4 +153,3 @@ compilers:
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    modules: 'None'
    target: x86_64

@@ -9,8 +9,6 @@ packages:
      prefix: /path/to/external_tool
    - spec: externaltool@0.9%gcc@4.5.0
      prefix: /usr
    - spec: externaltool@0_8%gcc@4.5.0
      prefix: /usr
  externalvirtual:
    buildable: False
    externals:
@@ -29,20 +27,3 @@ packages:
    externals:
    - spec: requires-virtual@2.0
      prefix: /usr
  'external-buildable-with-variant':
    buildable: True
    externals:
    - spec: external-buildable-with-variant@1.1.special +baz
      prefix: /usr
    - spec: external-buildable-with-variant@0.9 +baz
      prefix: /usr
  'old-external':
    buildable: True
    externals:
    - spec: old-external@1.0.0
      prefix: /usr
  'external-non-default-variant':
    buildable: True
    externals:
    - spec: external-non-default-variant@3.8.7~foo~bar
      prefix: /usr

@@ -81,7 +81,7 @@ def test_installed_upstream(upstream_and_downstream_db):
    y = mock_repo.add_package('y', [z], [default])
    mock_repo.add_package('w', [x, y], [default, default])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        spec = spack.spec.Spec('w')
        spec.concretize()

@@ -122,7 +122,7 @@ def test_removed_upstream_dep(upstream_and_downstream_db):
    z = mock_repo.add_package('z', [], [])
    mock_repo.add_package('y', [z], [default])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        spec = spack.spec.Spec('y')
        spec.concretize()

@@ -155,7 +155,7 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db):
    mock_repo = MockPackageMultiRepo()
    mock_repo.add_package('x', [], [])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        spec = spack.spec.Spec('x')
        spec.concretize()

@@ -197,7 +197,7 @@ def test_cannot_write_upstream(tmpdir_factory, test_store, gen_mock_layout):
    upstream_dbs = spack.store._construct_upstream_dbs_from_install_roots(
        [roots[1]], _test=True)

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        spec = spack.spec.Spec('x')
        spec.concretize()

@@ -216,7 +216,7 @@ def test_recursive_upstream_dbs(tmpdir_factory, test_store, gen_mock_layout):
    y = mock_repo.add_package('y', [z], [default])
    mock_repo.add_package('x', [y], [default])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        spec = spack.spec.Spec('x')
        spec.concretize()
        db_c = spack.database.Database(roots[2])
@@ -694,7 +694,7 @@ def test_115_reindex_with_packages_not_in_repo(mutable_database):
    # Don't add any package definitions to this repository; the idea is that
    # packages should not have to be defined in the repository once they
    # are installed
    with spack.repo.use_repositories(MockPackageMultiRepo()):
    with spack.repo.swap(MockPackageMultiRepo()):
        spack.store.store.reindex()
        _check_db_sanity(mutable_database)

@@ -194,7 +194,7 @@ def test_handle_unknown_package(layout_and_dir, config, mock_packages):
        layout.create_install_directory(spec)
        installed_specs[spec] = layout.path_for_spec(spec)

    with spack.repo.use_repositories(mock_db):
    with spack.repo.swap(mock_db):
        # Now check that even without the package files, we know
        # enough to read a spec from the spec file.
        for spec, path in installed_specs.items():

@@ -450,8 +450,7 @@ def test_packages_needed_to_bootstrap_compiler_none(install_mockery):
    spec.concretize()
    assert spec.concrete

    packages = inst._packages_needed_to_bootstrap_compiler(
        spec.compiler, spec.architecture, [spec.package])
    packages = inst._packages_needed_to_bootstrap_compiler(spec.package)
    assert not packages


@@ -469,19 +468,18 @@ def _conc_spec(compiler):
    monkeypatch.setattr(spack.compilers, 'pkg_spec_for_compiler', _conc_spec)
    monkeypatch.setattr(spack.spec.Spec, 'concretize', _noop)

    packages = inst._packages_needed_to_bootstrap_compiler(
        spec.compiler, spec.architecture, [spec.package])
    packages = inst._packages_needed_to_bootstrap_compiler(spec.package)
    assert packages


def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_packages):
def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_repo_path):
    """Test happy path for dump_packages with dependencies."""

    spec_name = 'simple-inheritance'
    spec = spack.spec.Spec(spec_name).concretized()
    inst.dump_packages(spec, str(tmpdir))

    repo = mock_packages.repos[0]
    repo = mock_repo_path.repos[0]
    dest_pkg = repo.filename_for_package_name(spec_name)
    assert os.path.isfile(dest_pkg)
@@ -628,7 +626,7 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch):
def test_add_bootstrap_compilers(install_mockery, monkeypatch):
    from collections import defaultdict

    def _pkgs(compiler, architecture, pkgs):
    def _pkgs(pkg):
        spec = spack.spec.Spec('mpi').concretized()
        return [(spec.package, True)]

@@ -638,8 +636,7 @@ def _pkgs(compiler, architecture, pkgs):
    all_deps = defaultdict(set)

    monkeypatch.setattr(inst, '_packages_needed_to_bootstrap_compiler', _pkgs)
    installer._add_bootstrap_compilers(
        'fake', 'fake', [request.pkg], request, all_deps)
    installer._add_bootstrap_compilers(request.pkg, request, all_deps)

    ids = list(installer.build_tasks)
    assert len(ids) == 1

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This test does sanity checks on Spack's builtin package database."""
import os.path
import re
@@ -13,7 +14,6 @@
import spack.paths
import spack.repo
import spack.util.executable as executable
import spack.variant
# A few functions from this module are used to
# do sanity checks only on packages modified by a PR
import spack.cmd.flake8 as flake8
@@ -73,7 +73,7 @@ def test_repo_getpkg_names_and_classes():
def test_get_all_mock_packages():
    """Get the mock packages once each too."""
    db = spack.repo.RepoPath(spack.paths.mock_packages_path)
    with spack.repo.use_repositories(db):
    with spack.repo.swap(db):
        check_repo()


@@ -257,15 +257,3 @@ def test_variant_defaults_are_parsable_from_cli():
        if not default_is_parsable:
            failing.append((pkg.name, variant_name))
    assert not failing


def test_variant_defaults_listed_explicitly_in_values():
    failing = []
    for pkg in spack.repo.path.all_packages():
        for variant_name, variant in pkg.variants.items():
            vspec = variant.make_default()
            try:
                variant.validate_or_raise(vspec, pkg=pkg)
            except spack.variant.InvalidVariantValueError:
                failing.append((pkg.name, variant.name))
    assert not failing

@@ -196,8 +196,10 @@ def test_relocate_text(tmpdir):
        script.close()
        filenames = [filename]
        new_dir = '/opt/rh/devtoolset/'
        # A single-entry dict does not need to be ordered
        relocate_text(filenames, {old_dir: new_dir})
        relocate_text(filenames, old_dir, new_dir,
                      old_dir, new_dir,
                      old_dir, new_dir,
                      {old_dir: new_dir})
        with open(filename, "r") as script:
            for line in script:
                assert(new_dir in line)
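
The hunk above is the clearest picture of the `relocate_text` API split: the v0.16.2 call takes one mapping from old to new prefixes, while this branch threads individual `old, new` pairs (layout root, install prefix, spack root) ahead of the final mapping. A minimal sketch of the dict-style call, imported as the test above does, with illustrative paths:

```python
from spack.relocate import relocate_text

# Every occurrence of a key in the listed files is rewritten to its value.
prefix_to_prefix = {'/old/opt/spack/prefix': '/new/opt/spack/prefix'}
relocate_text(['/new/opt/spack/prefix/bin/launcher'], prefix_to_prefix)
```
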
@@ -12,6 +12,7 @@
import pytest
import spack.architecture
import spack.concretize
import spack.hooks.sbang as sbang
import spack.paths
import spack.relocate
import spack.spec
@@ -280,7 +281,7 @@ def test_replace_prefix_bin(hello_world):
    executable = hello_world(rpaths=['/usr/lib', '/usr/lib64'])

    # Relocate the RPATHs
    spack.relocate._replace_prefix_bin(str(executable), {b'/usr': b'/foo'})
    spack.relocate._replace_prefix_bin(str(executable), '/usr', '/foo')

    # Some compilers add rpaths, so ensure the changes are included in the final result
    assert '/foo/lib:/foo/lib64' in rpaths_for(executable)
@@ -381,12 +382,11 @@ def test_relocate_text_bin(hello_world, copy_binary, tmpdir):
    assert not text_in_bin(str(new_binary.dirpath()), new_binary)

    # Check that this call succeeds
    orig_path_bytes = str(orig_binary.dirpath()).encode('utf-8')
    new_path_bytes = str(new_binary.dirpath()).encode('utf-8')

    spack.relocate.relocate_text_bin(
        [str(new_binary)],
        {orig_path_bytes: new_path_bytes}
        str(orig_binary.dirpath()), str(new_binary.dirpath()),
        spack.paths.spack_root, spack.paths.spack_root,
        {str(orig_binary.dirpath()): str(new_binary.dirpath())}
    )

    # Check original directory is not there anymore and it was
@@ -395,13 +395,55 @@ def test_relocate_text_bin(hello_world, copy_binary, tmpdir):
    assert text_in_bin(str(new_binary.dirpath()), new_binary)


def test_relocate_text_bin_raise_if_new_prefix_is_longer(tmpdir):
    short_prefix = b'/short'
    long_prefix = b'/much/longer'
    fpath = str(tmpdir.join('fakebin'))
    with open(fpath, 'w') as f:
        f.write('/short')
def test_relocate_text_bin_raise_if_new_prefix_is_longer():
    short_prefix = '/short'
    long_prefix = '/much/longer'
    with pytest.raises(spack.relocate.BinaryTextReplaceError):
        spack.relocate.relocate_text_bin(
            [fpath], {short_prefix: long_prefix}
            ['item'], short_prefix, long_prefix, None, None, None
        )

@pytest.mark.parametrize("sbang_line", [
|
||||
"#!/bin/bash /path/to/orig/spack/bin/sbang",
|
||||
"#!/bin/sh /orig/layout/root/bin/sbang"
|
||||
])
|
||||
def test_relocate_text_old_sbang(tmpdir, sbang_line):
|
||||
"""Ensure that old and new sbang styles are relocated."""
|
||||
|
||||
old_install_prefix = "/orig/layout/root/orig/install/prefix"
|
||||
new_install_prefix = os.path.join(
|
||||
spack.store.layout.root, "new", "install", "prefix"
|
||||
)
|
||||
|
||||
# input file with an sbang line
|
||||
original = """\
|
||||
{0}
|
||||
#!/usr/bin/env python
|
||||
|
||||
/orig/layout/root/orig/install/prefix
|
||||
""".format(sbang_line)
|
||||
|
||||
# expected relocation
|
||||
expected = """\
|
||||
{0}
|
||||
#!/usr/bin/env python
|
||||
|
||||
{1}
|
||||
""".format(sbang.sbang_shebang_line(), new_install_prefix)
|
||||
|
||||
path = tmpdir.ensure("path", "to", "file")
|
||||
with path.open("w") as f:
|
||||
f.write(original)
|
||||
|
||||
spack.relocate.relocate_text(
|
||||
[str(path)],
|
||||
"/orig/layout/root", spack.store.layout.root,
|
||||
old_install_prefix, new_install_prefix,
|
||||
"/path/to/orig/spack", spack.paths.spack_root,
|
||||
{
|
||||
old_install_prefix: new_install_prefix
|
||||
}
|
||||
)
|
||||
|
||||
assert expected == open(str(path)).read()
|
||||
|
||||
@@ -75,7 +75,7 @@ def test_test_deptype():
    y = mock_repo.add_package('y', [z], [test_only])
    w = mock_repo.add_package('w', [x, y], [test_only, default])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        spec = Spec('w')
        spec.concretize(tests=(w.name,))

@@ -114,7 +114,7 @@ def test_installed_deps():
    b = mock_repo.add_package('b', [d, e], [default, default])
    mock_repo.add_package('a', [b, c], [default, default])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        c_spec = Spec('c')
        c_spec.concretize()
        assert c_spec['d'].version == spack.version.Version('2')
@@ -143,7 +143,7 @@ def test_specify_preinstalled_dep():
    b = mock_repo.add_package('b', [c], [default])
    mock_repo.add_package('a', [b], [default])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        b_spec = Spec('b')
        b_spec.concretize()
        for spec in b_spec.traverse():
@@ -186,7 +186,7 @@ def test_conditional_dep_with_user_constraints(spec_str, expr_str, expected):
    }
    mock_repo.add_package('x', [y], [default], conditions=x_on_y_conditions)

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        spec = Spec(spec_str)
        spec.concretize()

@@ -2,7 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools

import os
import pytest
import shlex
@@ -806,26 +806,3 @@ def test_kv_with_spaces(self):
    ])
    def test_target_tokenization(self, expected_tokens, spec_string):
        self.check_lex(expected_tokens, spec_string)

    @pytest.mark.regression('20310')
    def test_compare_abstract_specs(self):
        """Spec comparisons must be valid for abstract specs.

        Check that the spec cmp_key appropriately handles comparing specs for
        which some attributes are None in exactly one of two specs"""
        # Add fields in order they appear in `Spec._cmp_node`
        constraints = [
            None,
            'foo',
            'foo.foo',
            'foo.foo@foo',
            'foo.foo@foo+foo',
            'foo.foo@foo+foo arch=foo-foo-foo',
            'foo.foo@foo+foo arch=foo-foo-foo %foo',
            'foo.foo@foo+foo arch=foo-foo-foo %foo cflags=foo',
        ]
        specs = [Spec(s) for s in constraints]

        for a, b in itertools.product(specs, repeat=2):
            # Check that we can compare without raising an error
            assert a <= b or b < a

@@ -11,7 +11,8 @@
import ast
import inspect
import os
import sys

from collections import Iterable, Mapping

import pytest

@@ -28,12 +29,6 @@
from spack.util.mock_package import MockPackageMultiRepo


if sys.version_info >= (3, 3):
    from collections.abc import Iterable, Mapping  # novm
else:
    from collections import Iterable, Mapping


def check_yaml_round_trip(spec):
    yaml_text = spec.to_yaml()
    spec_from_yaml = Spec.from_yaml(yaml_text)
@@ -315,7 +310,7 @@ def test_save_dependency_spec_yamls_subset(tmpdir, config):
    b = mock_repo.add_package('b', [d, e], [default, default])
    mock_repo.add_package('a', [b, c], [default, default])

    with repo.use_repositories(mock_repo):
    with repo.swap(mock_repo):
        spec_a = Spec('a')
        spec_a.concretize()
        b_spec = spec_a['b']

@@ -54,7 +54,7 @@ def builtin_and_mock_packages():
    repo_dirs = [spack.paths.packages_path, spack.paths.mock_packages_path]
    path = RepoPath(*repo_dirs)

    with spack.repo.use_repositories(path):
    with spack.repo.swap(path):
        yield


@@ -15,7 +15,7 @@ def test_mock_package_possible_dependencies():
    b = mock_repo.add_package('b', [d])
    a = mock_repo.add_package('a', [b, c])

    with spack.repo.use_repositories(mock_repo):
    with spack.repo.swap(mock_repo):
        assert set(a.possible_dependencies()) == set(['a', 'b', 'c', 'd', 'e'])
        assert set(b.possible_dependencies()) == set(['b', 'd', 'e'])
        assert set(c.possible_dependencies()) == set(['c', 'd', 'e'])

@@ -528,18 +528,13 @@ def reversed(self):

        return rev

    def apply_modifications(self, env=None):
    def apply_modifications(self):
        """Applies the modifications and clears the list."""
        # Use os.environ if not specified
        # Do not copy, we want to modify it in place
        if env is None:
            env = os.environ

        modifications = self.group_by_name()
        # Apply modifications one variable at a time
        for name, actions in sorted(modifications.items()):
            for x in actions:
                x.execute(env)
                x.execute(os.environ)

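
The `env=None` parameter on the v0.16.2 side is what lets callers apply a recorded set of modifications to any dict rather than only to `os.environ`; the `Executable` environment handling further down relies on exactly that. A hedged usage sketch (the variable values are invented):

```python
from spack.util.environment import EnvironmentModifications

mods = EnvironmentModifications()
mods.set('CC', '/usr/bin/gcc')
mods.prepend_path('PATH', '/opt/tools/bin')

env = {}                       # a scratch dict instead of os.environ
mods.apply_modifications(env)  # v0.16.2 signature: env defaults to os.environ
assert env['CC'] == '/usr/bin/gcc'
```
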
    def shell_modifications(self, shell='sh'):
        """Return shell code to apply the modifications and clear the list."""
@@ -942,10 +937,7 @@ def _source_single_file(file_and_args, environment):
        source_file, suppress_output,
        concatenate_on_success, dump_environment,
    ])
    output = shell(
        source_file_arguments, output=str, env=environment,
        ignore_quotes=True
    )
    output = shell(source_file_arguments, output=str, env=environment)
    environment = json.loads(output)

    # If we're in python2, convert to str objects instead of unicode

@@ -22,8 +22,6 @@ class Executable(object):
    def __init__(self, name):
        self.exe = shlex.split(str(name))
        self.default_env = {}
        from spack.util.environment import EnvironmentModifications  # no cycle
        self.default_envmod = EnvironmentModifications()
        self.returncode = None

        if not self.exe:
@@ -42,10 +40,6 @@ def add_default_env(self, key, value):
        """
        self.default_env[key] = value

    def add_default_envmod(self, envmod):
        """Set an EnvironmentModifications to use when the command is run."""
        self.default_envmod.extend(envmod)

    @property
    def command(self):
        """The command-line string.
@@ -82,18 +76,15 @@ def __call__(self, *args, **kwargs):
        Keyword Arguments:
            _dump_env (dict): Dict to be set to the environment actually
                used (envisaged for testing purposes only)
            env (dict or EnvironmentModifications): The environment with which
                to run the executable
            extra_env (dict or EnvironmentModifications): Extra items to add to
                the environment (neither requires nor precludes env)
            env (dict): The environment to run the executable with
            extra_env (dict): Extra items to add to the environment
                (neither requires nor precludes env)
            fail_on_error (bool): Raise an exception if the subprocess returns
                an error. Default is True. The return code is available as
                ``exe.returncode``
            ignore_errors (int or list): A list of error codes to ignore.
                If these codes are returned, this process will not raise
                an exception even if ``fail_on_error`` is set to ``True``
            ignore_quotes (bool): If False, warn users that quotes are not
                needed as Spack does not use a shell. Defaults to False.
            input: Where to read stdin from
            output: Where to send stdout
            error: Where to send stderr
@@ -116,33 +107,19 @@ def __call__(self, *args, **kwargs):
        """
        # Environment
        env_arg = kwargs.get('env', None)

        # Setup default environment
        env = os.environ.copy() if env_arg is None else {}
        self.default_envmod.apply_modifications(env)
        env.update(self.default_env)

        from spack.util.environment import EnvironmentModifications  # no cycle
        # Apply env argument
        if isinstance(env_arg, EnvironmentModifications):
            env_arg.apply_modifications(env)
        elif env_arg:
            env.update(env_arg)

        # Apply extra env
        extra_env = kwargs.get('extra_env', {})
        if isinstance(extra_env, EnvironmentModifications):
            extra_env.apply_modifications(env)
        if env_arg is None:
            env = os.environ.copy()
            env.update(self.default_env)
        else:
            env.update(extra_env)

            env = self.default_env.copy()
            env.update(env_arg)
        env.update(kwargs.get('extra_env', {}))
        if '_dump_env' in kwargs:
            kwargs['_dump_env'].clear()
            kwargs['_dump_env'].update(env)

        fail_on_error = kwargs.pop('fail_on_error', True)
        ignore_errors = kwargs.pop('ignore_errors', ())
        ignore_quotes = kwargs.pop('ignore_quotes', False)

        # If they just want to ignore one error code, make it a tuple.
        if isinstance(ignore_errors, int):
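
Net effect of the hunk above: on v0.16.2, `env` and `extra_env` may each be a plain dict or an `EnvironmentModifications`, with defaults layered in first; on this branch only dicts are merged. A hedged sketch of the dict form, assuming a POSIX `printenv` binary at the usual path:

```python
from spack.util.executable import Executable

printenv = Executable('/usr/bin/printenv')

# Passing env replaces the inherited environment for the child process;
# extra_env layers additional entries on top of it.
out = printenv('GREETING', output=str,
               env={'GREETING': 'hello'}, extra_env={'MODE': 'test'})
assert out.strip() == 'hello'
```
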
@@ -167,20 +144,15 @@ def streamify(arg, mode):
        estream, close_estream = streamify(error, 'w')
        istream, close_istream = streamify(input, 'r')

        if not ignore_quotes:
            quoted_args = [
                arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)
            ]
            if quoted_args:
                tty.warn(
                    "Quotes in command arguments can confuse scripts like"
                    " configure.",
                    "These arguments may cause problems when executed:",
                    str("\n".join([" " + arg for arg in quoted_args])),
                    "Quotes aren't needed because spack doesn't use a shell. "
                    "Consider removing them.",
                    "If multiple levels of quotation are required, use "
                    "`ignore_quotes=True`.")
        quoted_args = [arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)]
        if quoted_args:
            tty.warn(
                "Quotes in command arguments can confuse scripts like"
                " configure.",
                "The following arguments may cause problems when executed:",
                str("\n".join([" " + arg for arg in quoted_args])),
                "Quotes aren't needed because spack doesn't use a shell.",
                "Consider removing them")

        cmd = self.exe + list(args)

@@ -8,7 +8,6 @@
import ordereddict_backport

import spack.util.naming
import spack.provider_index
from spack.dependency import Dependency
from spack.spec import Spec
from spack.version import Version
@@ -81,8 +80,6 @@ class MockPackageMultiRepo(object):

    def __init__(self):
        self.spec_to_pkg = {}
        self.namespace = ''
        self.full_namespace = 'spack.pkg.mock'

    def get(self, spec):
        if not isinstance(spec, spack.spec.Spec):
@@ -105,9 +102,6 @@ def repo_for_pkg(self, name):
        Repo = collections.namedtuple('Repo', ['namespace'])
        return Repo('mockrepo')

    def __contains__(self, item):
        return item in self.spec_to_pkg

    def add_package(self, name, dependencies=None, dependency_types=None,
                    conditions=None):
        """Factory method for creating mock packages.
@@ -174,7 +168,3 @@ class MockPackage(MockPackageBase):
        self.spec_to_pkg["mockrepo." + name] = mock_package

        return mock_package

    @property
    def provider_index(self):
        return spack.provider_index.ProviderIndex()

@@ -4,14 +4,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import inspect
import collections
import functools
import sys


if sys.version_info >= (3, 3):
    from collections.abc import MutableSequence  # novm
else:
    from collections import MutableSequence


class Delegate(object):
@@ -58,7 +52,7 @@ def composite(interface=None, method_list=None, container=list):
    # exception if it doesn't. The patched class returned by the decorator will
    # inherit from the container class to expose the interface needed to manage
    # objects composition
    if not issubclass(container, MutableSequence):
    if not issubclass(container, collections.MutableSequence):
        raise TypeError("Container must fulfill the MutableSequence contract")

    # Check if at least one of the 'interface' or the 'method_list' arguments

@@ -1,4 +1,4 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -13,7 +13,7 @@

"""
import ctypes
import sys
import collections

from ordereddict_backport import OrderedDict
from six import string_types, StringIO
@@ -25,13 +25,6 @@

import spack.error


if sys.version_info >= (3, 3):
    from collections.abc import Mapping  # novm
else:
    from collections import Mapping


# Only export load and dump
__all__ = ['load', 'dump', 'SpackYAMLError']

@@ -350,7 +343,7 @@ def sorted_dict(dict_like):
    """
    result = syaml_dict(sorted(dict_like.items()))
    for key, value in result.items():
        if isinstance(value, Mapping):
        if isinstance(value, collections.Mapping):
            result[key] = sorted_dict(value)
    return result

@@ -41,7 +41,6 @@ class HTMLParseError(Exception):
import spack.util.crypto
import spack.util.s3 as s3_util
import spack.util.url as url_util
import llnl.util.lang

from spack.util.compression import ALLOWED_ARCHIVE_TYPES

@@ -425,6 +424,12 @@ def _spider(url, collect_nested):

        return pages, links, subcalls

    # TODO: Needed until we drop support for Python 2.X
    def star(func):
        def _wrapper(args):
            return func(*args)
        return _wrapper

    if isinstance(root_urls, six.string_types):
        root_urls = [root_urls]
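
The `star` helper (and its `llnl.util.lang.star` counterpart on the v0.16.2 side) compensates for `map`-style functions passing exactly one argument per item: it adapts an n-argument function to accept a packed tuple. A self-contained illustration with a stand-in for `_spider`:

```python
def star(func):
    # Wrap func so a single tuple argument is unpacked into its parameters.
    def _wrapper(args):
        return func(*args)
    return _wrapper


def visit(url, collect_nested):     # stand-in for _spider's signature
    return url, collect_nested

spider_args = [('https://a.example', True), ('https://b.example', False)]
results = list(map(star(visit), spider_args))
# [('https://a.example', True), ('https://b.example', False)]
```
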
@@ -445,7 +450,7 @@ def _spider(url, collect_nested):
            tty.debug("SPIDER: [depth={0}, max_depth={1}, urls={2}]".format(
                current_depth, depth, len(spider_args))
            )
            results = tp.map(llnl.util.lang.star(_spider), spider_args)
            results = tp.map(star(_spider), spider_args)
            spider_args = []
            collect = current_depth < depth
            for sub_pages, sub_links, sub_spider_args in results:

Some files were not shown because too many files have changed in this diff.