Compare commits
50 Commits
f1fe03cd59
80f0c78f00
818664d55a
76c5a02125
30dd61264a
13fed376f2
fb27c7ad0c
4a7581eda3
0d173bb32b
8a7bfe97c3
f1f94ad31a
2655e21bd0
5546b22c70
2496c7b514
43cea1b354
cbd55332e3
c8a10e4910
a5213dabb1
6e714808fa
9f99e8ad64
18880a668b
e7494b627b
b11dd0478a
31a07f9bc4
61e619bb27
4d148a430e
9717e0244f
a823cffc40
f1c7402c64
16f7a02654
6c1b348d91
68ef6fce92
0b5c5b8068
add339cbfe
316c292685
bd9929f9dc
2a5f46d8d3
095ace9028
f30fc6cd33
4e5e1e8f35
0678d5df90
2a4d2f905c
14e179398f
8d13193434
94bb37c107
a59fcd60f5
566becbbfe
8c05387ebc
393a105c06
a3714b3292
20  .github/workflows/linux_unit_tests.yaml
@@ -15,6 +15,7 @@ jobs:
     strategy:
       matrix:
         python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
+        concretizer: ['original', 'clingo']

     steps:
     - uses: actions/checkout@v2
@@ -50,16 +51,23 @@ jobs:
         mkdir -p ${KCOV_ROOT}/build
         cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
         make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
+    - name: Bootstrap clingo from sources
+      if: ${{ matrix.concretizer == 'clingo' }}
+      run: |
+        . share/spack/setup-env.sh
+        spack external find --not-buildable cmake bison
+        spack -v solve zlib
     - name: Run unit tests
       env:
         COVERAGE: true
+        SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
       run: |
         share/spack/qa/run-unit-tests
         coverage combine
         coverage xml
     - uses: codecov/codecov-action@v1
       with:
-        flags: unittests,linux
+        flags: unittests,linux,${{ matrix.concretizer }}
   shell:
     runs-on: ubuntu-latest
     steps:
@@ -103,6 +111,7 @@ jobs:
     - uses: codecov/codecov-action@v1
       with:
         flags: shelltests,linux
+
   centos6:
     # Test for Python2.6 run on Centos 6
     runs-on: ubuntu-latest
@@ -117,15 +126,16 @@ jobs:
         git fetch origin ${{ github.ref }}:test-branch
         git checkout test-branch
         share/spack/qa/run-unit-tests
-  clingo:
-    # Test for the clingo based solver
+  clingo-cffi:
+    # Test for the clingo based solver (using clingo-cffi)
     runs-on: ubuntu-latest
-    container: spack/github-actions:clingo
+    container: spack/github-actions:clingo-cffi
     steps:
     - name: Run unit tests
       run: |
         whoami && echo PWD=$PWD && echo HOME=$HOME && echo SPACK_TEST_SOLVER=$SPACK_TEST_SOLVER
         which clingo && clingo --version
+        python3 -c "import clingo; print(hasattr(clingo.Symbol, '_rep'), clingo.__version__)"
         git clone https://github.com/spack/spack.git && cd spack
         git fetch origin ${{ github.ref }}:test-branch
         git checkout test-branch
27  CHANGELOG.md
@@ -1,3 +1,30 @@
+# v0.16.2 (2021-05-22)
+
+* Major performance improvement for `spack load` and other commands. (#23661)
+* `spack fetch` is now environment-aware. (#19166)
+* Numerous fixes for the new, `clingo`-based concretizer. (#23016, #23307,
+  #23090, #22896, #22534, #20644, #20537, #21148)
+* Support for automatically bootstrapping `clingo` from source. (#20652, #20657,
+  #21364, #21446, #21913, #22354, #22444, #22460, #22489, #22610, #22631)
+* Python 3.10 support: `collections.abc` (#20441)
+* Fix import issues by using `__import__` instead of the Spack package importer.
+  (#23288, #23290)
+* Bugfixes and `--source-dir` argument for `spack location`. (#22755, #22348,
+  #22321)
+* Better support for externals in shared prefixes. (#22653)
+* `spack build-env` now prefers specs defined in the active environment.
+  (#21642)
+* Remove erroneous warnings about quotes in `from_sourcing_files`. (#22767)
+* Fix clearing cache of `InternalConfigScope`. (#22609)
+* Bugfix for the "already activated" error raised when a package is active. (#22587)
+* Make `SingleFileScope` able to repopulate the cache after clearing it.
+  (#22559)
+* Channelflow: Fix the package. (#22483)
+* More descriptive error message for bugs in `package.py` (#21811)
+* Use package-supplied `autogen.sh`. (#20319)
+* Respect `-k/verify-ssl-false` in `_existing_url` method. (#21864)
+
+
 # v0.16.1 (2021-02-22)

 This minor release includes a new feature and associated fixes:
@@ -103,6 +103,53 @@ environment*, especially for ``PATH``. Only software that comes with
 the system, or that you know you wish to use with Spack, should be
 included. This procedure will avoid many strange build errors.

+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Optional: Bootstrapping clingo
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack supports using clingo as an external solver to compute which software
+needs to be installed. If you have a default compiler supporting C++14, Spack
+can automatically bootstrap this tool from sources the first time it is
+needed:
+
+.. code-block:: console
+
+   $ spack solve zlib
+   [+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
+   [+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
+   [+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
+   ==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
+   [ ... ]
+   ==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0]
+   zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell
+
+If you want to speed up bootstrapping, you may try to search for ``cmake`` and ``bison``
+on your system:
+
+.. code-block:: console
+
+   $ spack external find cmake bison
+   ==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml
+   bison@3.0.4  cmake@3.19.4
+
+All the tools Spack needs for its own functioning are installed in a separate store, which lives
+under the ``${HOME}/.spack`` directory. The software installed there can be queried with:
+
+.. code-block:: console
+
+   $ spack find --bootstrap
+   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
+   ==> 3 installed packages
+   -- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
+   clingo-bootstrap@spack  python@3.6.9  re2c@1.2.1
+
+In case it's needed, the bootstrap store can also be cleaned with:
+
+.. code-block:: console
+
+   $ spack clean -b
+   ==> Removing software in "/home/spack/.spack/bootstrap/store"
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 Optional: Alternate Prefix
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
7  lib/spack/external/_pytest/assertion/util.py (vendored)
@@ -5,9 +5,12 @@
 import _pytest._code
 import py
 try:
-    from collections import Sequence
+    from collections.abc import Sequence
 except ImportError:
-    Sequence = list
+    try:
+        from collections import Sequence
+    except ImportError:
+        Sequence = list


 u = py.builtin._totext
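All of the vendored-module edits in this compare view apply the same pattern: the container ABCs (`Sequence`, `Mapping`, `MutableMapping`, ...) moved to `collections.abc` in Python 3.3, and the legacy aliases in `collections` itself were removed in Python 3.10, so the new location is tried first. A minimal standalone sketch of the pattern (the imported name is illustrative):

```python
import sys

# Python >= 3.3 exposes the container ABCs in collections.abc; the
# aliases kept in collections itself were dropped in Python 3.10.
if sys.version_info >= (3, 3):
    from collections.abc import Sequence
else:
    from collections import Sequence  # Python 2 fallback

print(isinstance([], Sequence))  # True on every supported version
```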
7  lib/spack/external/_pytest/main.py (vendored)
@@ -10,9 +10,12 @@
 import _pytest._code
 import py
 try:
-    from collections import MutableMapping as MappingMixin
+    from collections.abc import MutableMapping as MappingMixin
 except ImportError:
-    from UserDict import DictMixin as MappingMixin
+    try:
+        from collections import MutableMapping as MappingMixin
+    except ImportError:
+        from UserDict import DictMixin as MappingMixin

 from _pytest.config import directory_arg, UsageError, hookimpl
 from _pytest.outcomes import exit
5  lib/spack/external/_pytest/python_api.py (vendored)
@@ -398,7 +398,10 @@ def approx(expected, rel=None, abs=None, nan_ok=False):
     __ https://docs.python.org/3/reference/datamodel.html#object.__ge__
     """

-    from collections import Mapping, Sequence
+    if sys.version_info >= (3, 3):
+        from collections.abc import Mapping, Sequence
+    else:
+        from collections import Mapping, Sequence
     from _pytest.compat import STRING_TYPES as String

     # Delegate the comparison to a class that knows how to deal with the type
8  lib/spack/external/jinja2/runtime.py (vendored)
@@ -315,10 +315,14 @@ def __repr__(self):

 # register the context as mapping if possible
 try:
-    from collections import Mapping
+    from collections.abc import Mapping
     Mapping.register(Context)
 except ImportError:
-    pass
+    try:
+        from collections import Mapping
+        Mapping.register(Context)
+    except ImportError:
+        pass


 class BlockReference(object):
12  lib/spack/external/jinja2/sandbox.py (vendored)
@@ -14,7 +14,7 @@
 """
 import types
 import operator
-from collections import Mapping
+import sys
 from jinja2.environment import Environment
 from jinja2.exceptions import SecurityError
 from jinja2._compat import string_types, PY2
@@ -23,6 +23,11 @@
 from markupsafe import EscapeFormatter
 from string import Formatter

+if sys.version_info >= (3, 3):
+    from collections.abc import Mapping
+else:
+    from collections import Mapping
+

 #: maximum number of items a range may produce
 MAX_RANGE = 100000
@@ -79,7 +84,10 @@
     pass

 #: register Python 2.6 abstract base classes
-from collections import MutableSet, MutableMapping, MutableSequence
+if sys.version_info >= (3, 3):
+    from collections.abc import MutableSet, MutableMapping, MutableSequence
+else:
+    from collections import MutableSet, MutableMapping, MutableSequence
 _mutable_set_types += (MutableSet,)
 _mutable_mapping_types += (MutableMapping,)
 _mutable_sequence_types += (MutableSequence,)
7  lib/spack/external/jinja2/tests.py (vendored)
@@ -10,11 +10,16 @@
 """
 import operator
 import re
-from collections import Mapping
+import sys
 from jinja2.runtime import Undefined
 from jinja2._compat import text_type, string_types, integer_types
 import decimal

+if sys.version_info >= (3, 3):
+    from collections.abc import Mapping
+else:
+    from collections import Mapping
+
 number_re = re.compile(r'^-?\d+(\.\d+)?$')
 regex_type = type(number_re)
8  lib/spack/external/jinja2/utils.py (vendored)
@@ -482,10 +482,14 @@ def __reversed__(self):

 # register the LRU cache as mutable mapping if possible
 try:
-    from collections import MutableMapping
+    from collections.abc import MutableMapping
     MutableMapping.register(LRUCache)
 except ImportError:
-    pass
+    try:
+        from collections import MutableMapping
+        MutableMapping.register(LRUCache)
+    except ImportError:
+        pass


 def select_autoescape(enabled_extensions=('html', 'htm', 'xml'),
7  lib/spack/external/markupsafe/__init__.py (vendored)
@@ -10,10 +10,15 @@
 """
 import re
 import string
-from collections import Mapping
+import sys
 from markupsafe._compat import text_type, string_types, int_types, \
      unichr, iteritems, PY2

+if sys.version_info >= (3, 3):
+    from collections.abc import Mapping
+else:
+    from collections import Mapping
+
 __version__ = "1.0"

 __all__ = ['Markup', 'soft_unicode', 'escape', 'escape_silent']
7  lib/spack/external/ruamel/yaml/comments.py (vendored)
@@ -9,7 +9,12 @@
 a separate base
 """

-from collections import MutableSet
+import sys
+
+if sys.version_info >= (3, 3):
+    from collections.abc import MutableSet
+else:
+    from collections import MutableSet

 __all__ = ["CommentedSeq", "CommentedMap", "CommentedOrderedMap",
            "CommentedSet", 'comment_attrib', 'merge_attrib']
7  lib/spack/external/ruamel/yaml/compat.py (vendored)
@@ -12,9 +12,12 @@
     from ruamel.ordereddict import ordereddict
 except:
     try:
-        from collections import OrderedDict
+        from collections.abc import OrderedDict
     except ImportError:
-        from ordereddict import OrderedDict
+        try:
+            from collections import OrderedDict
+        except ImportError:
+            from ordereddict import OrderedDict
     # to get the right name import ... as ordereddict doesn't do that

     class ordereddict(OrderedDict):
19  lib/spack/external/ruamel/yaml/constructor.py (vendored)
@@ -3,7 +3,6 @@
 from __future__ import absolute_import
 from __future__ import print_function

-import collections
 import datetime
 import base64
 import binascii
@@ -26,6 +25,12 @@
 from ruamel.yaml.scalarstring import *  # NOQA


+if sys.version_info >= (3, 3):
+    from collections.abc import Hashable
+else:
+    from collections import Hashable
+
+
 __all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
            'ConstructorError', 'RoundTripConstructor']

@@ -163,7 +168,7 @@ def construct_mapping(self, node, deep=False):
             # keys can be list -> deep
             key = self.construct_object(key_node, deep=True)
             # lists are not hashable, but tuples are
-            if not isinstance(key, collections.Hashable):
+            if not isinstance(key, Hashable):
                 if isinstance(key, list):
                     key = tuple(key)
             if PY2:
@@ -175,7 +180,7 @@ def construct_mapping(self, node, deep=False):
                         "found unacceptable key (%s)" %
                         exc, key_node.start_mark)
             else:
-                if not isinstance(key, collections.Hashable):
+                if not isinstance(key, Hashable):
                     raise ConstructorError(
                         "while constructing a mapping", node.start_mark,
                         "found unhashable key", key_node.start_mark)
@@ -959,7 +964,7 @@ def construct_mapping(self, node, maptyp, deep=False):
             # keys can be list -> deep
             key = self.construct_object(key_node, deep=True)
             # lists are not hashable, but tuples are
-            if not isinstance(key, collections.Hashable):
+            if not isinstance(key, Hashable):
                 if isinstance(key, list):
                     key = tuple(key)
             if PY2:
@@ -971,7 +976,7 @@ def construct_mapping(self, node, maptyp, deep=False):
                         "found unacceptable key (%s)" %
                         exc, key_node.start_mark)
             else:
-                if not isinstance(key, collections.Hashable):
+                if not isinstance(key, Hashable):
                     raise ConstructorError(
                         "while constructing a mapping", node.start_mark,
                         "found unhashable key", key_node.start_mark)
@@ -1003,7 +1008,7 @@ def construct_setting(self, node, typ, deep=False):
             # keys can be list -> deep
             key = self.construct_object(key_node, deep=True)
             # lists are not hashable, but tuples are
-            if not isinstance(key, collections.Hashable):
+            if not isinstance(key, Hashable):
                 if isinstance(key, list):
                     key = tuple(key)
             if PY2:
@@ -1015,7 +1020,7 @@ def construct_setting(self, node, typ, deep=False):
                         "found unacceptable key (%s)" %
                         exc, key_node.start_mark)
             else:
-                if not isinstance(key, collections.Hashable):
+                if not isinstance(key, Hashable):
                     raise ConstructorError(
                         "while constructing a mapping", node.start_mark,
                         "found unhashable key", key_node.start_mark)
@@ -23,6 +23,12 @@
 from llnl.util.lang import dedupe, memoized
 from spack.util.executable import Executable


+if sys.version_info >= (3, 3):
+    from collections.abc import Sequence  # novm
+else:
+    from collections import Sequence
+
+
 __all__ = [
     'FileFilter',
     'FileList',
@@ -1105,7 +1111,7 @@ def find(root, files, recursive=True):

     Parameters:
         root (str): The root directory to start searching from
-        files (str or collections.Sequence): Library name(s) to search for
+        files (str or Sequence): Library name(s) to search for
         recurse (bool, optional): if False search only root folder,
             if True descends top-down from the root. Defaults to True.

@@ -1168,7 +1174,7 @@ def _find_non_recursive(root, search_files):
 # Utilities for libraries and headers


-class FileList(collections.Sequence):
+class FileList(Sequence):
     """Sequence of absolute paths to files.

     Provides a few convenience methods to manipulate file paths.

@@ -1411,7 +1417,7 @@ def find_headers(headers, root, recursive=False):
     """
     if isinstance(headers, six.string_types):
         headers = [headers]
-    elif not isinstance(headers, collections.Sequence):
+    elif not isinstance(headers, Sequence):
         message = '{0} expects a string or sequence of strings as the '
         message += 'first argument [got {1} instead]'
         message = message.format(find_headers.__name__, type(headers))
@@ -1566,7 +1572,7 @@ def find_system_libraries(libraries, shared=True):
     """
     if isinstance(libraries, six.string_types):
         libraries = [libraries]
-    elif not isinstance(libraries, collections.Sequence):
+    elif not isinstance(libraries, Sequence):
         message = '{0} expects a string or sequence of strings as the '
         message += 'first argument [got {1} instead]'
         message = message.format(find_system_libraries.__name__,
@@ -1620,7 +1626,7 @@ def find_libraries(libraries, root, shared=True, recursive=False):
     """
     if isinstance(libraries, six.string_types):
         libraries = [libraries]
-    elif not isinstance(libraries, collections.Sequence):
+    elif not isinstance(libraries, Sequence):
         message = '{0} expects a string or sequence of strings as the '
         message += 'first argument [got {1} instead]'
         message = message.format(find_libraries.__name__, type(libraries))
@@ -9,13 +9,18 @@
 import os
 import re
 import functools
-import collections
 import inspect
 from datetime import datetime, timedelta
 from six import string_types
+import sys
+
+if sys.version_info >= (3, 3):
+    from collections.abc import Hashable, MutableMapping  # novm
+else:
+    from collections import Hashable, MutableMapping
+

 # Ignore emacs backups when listing modules
 ignore_modules = [r'^\.#', '~$']

@@ -189,7 +194,7 @@ def memoized(func):

     @functools.wraps(func)
     def _memoized_function(*args):
-        if not isinstance(args, collections.Hashable):
+        if not isinstance(args, Hashable):
             # Not hashable, so just call the function.
             return func(*args)

@@ -264,7 +269,7 @@ def setter(name, value):


 @key_ordering
-class HashableMap(collections.MutableMapping):
+class HashableMap(MutableMapping):
     """This is a hashable, comparable dictionary. Hash is performed on
     a tuple of the values in the dictionary."""
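The `memoized` hunk above keeps the same guard while switching to the `collections.abc` import: argument objects that are not hashable skip the cache and call the function directly. A quick standalone illustration of what that guard sees:

```python
from collections.abc import Hashable

# Tuples implement __hash__, so they can serve as cache keys.
print(isinstance((1, 2), Hashable))   # True
# Lists and sets set __hash__ to None, so the isinstance check fails
# and memoized falls back to a plain function call.
print(isinstance([1, 2], Hashable))   # False
print(isinstance({1, 2}, Hashable))   # False
```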
@@ -5,7 +5,7 @@


 #: major, minor, patch version for Spack, in a tuple
-spack_version_info = (0, 16, 1)
+spack_version_info = (0, 16, 2)

 #: String containing Spack version joined with .'s
 spack_version = '.'.join(str(v) for v in spack_version_info)
@@ -56,6 +56,7 @@
 attributes front_os and back_os. The operating system as described earlier,
 will be responsible for compiler detection.
 """
+import contextlib
 import functools
 import inspect
 import warnings
@@ -67,6 +68,8 @@
 from llnl.util.lang import memoized, list_modules, key_ordering

+import spack.compiler
+import spack.compilers
 import spack.config
 import spack.paths
 import spack.error as serr
 import spack.util.executable
@@ -491,7 +494,7 @@ def arch_for_spec(arch_spec):


 @memoized
-def all_platforms():
+def _all_platforms():
     classes = []
     mod_path = spack.paths.platform_path
     parent_module = "spack.platforms"
@@ -512,7 +515,7 @@ def all_platforms():


 @memoized
-def platform():
+def _platform():
     """Detects the platform for this machine.

     Gather a list of all available subclasses of platforms.
@@ -521,7 +524,7 @@ def platform():
     a file path (/opt/cray...)
     """
     # Try to create a Platform object using the config file FIRST
-    platform_list = all_platforms()
+    platform_list = _all_platforms()
     platform_list.sort(key=lambda a: a.priority)

     for platform_cls in platform_list:
@@ -529,6 +532,19 @@ def platform():
             return platform_cls()


+#: The "real" platform of the host running Spack. This should not be changed
+#: by any method and is here as a convenient way to refer to the host platform.
+real_platform = _platform
+
+#: The current platform used by Spack. May be swapped by the use_platform
+#: context manager.
+platform = _platform
+
+#: The list of all platform classes. May be swapped by the use_platform
+#: context manager.
+all_platforms = _all_platforms
+
+
 @memoized
 def default_arch():
     """Default ``Arch`` object for this machine.
@@ -563,3 +579,39 @@ def compatible_sys_types():
         arch = Arch(platform(), 'default_os', target)
         compatible_archs.append(str(arch))
     return compatible_archs
+
+
+class _PickleableCallable(object):
+    """Class used to pickle a callable that may substitute either
+    _platform or _all_platforms. Lambda or nested functions are
+    not pickleable.
+    """
+    def __init__(self, return_value):
+        self.return_value = return_value
+
+    def __call__(self):
+        return self.return_value
+
+
+@contextlib.contextmanager
+def use_platform(new_platform):
+    global platform, all_platforms
+
+    msg = '"{0}" must be an instance of Platform'
+    assert isinstance(new_platform, Platform), msg.format(new_platform)
+
+    original_platform_fn, original_all_platforms_fn = platform, all_platforms
+    platform = _PickleableCallable(new_platform)
+    all_platforms = _PickleableCallable([type(new_platform)])
+
+    # Clear configuration and compiler caches
+    spack.config.config.clear_caches()
+    spack.compilers._cache_config_files = []
+
+    yield new_platform
+
+    platform, all_platforms = original_platform_fn, original_all_platforms_fn
+
+    # Clear configuration and compiler caches
+    spack.config.config.clear_caches()
+    spack.compilers._cache_config_files = []
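The new `use_platform` context manager swaps the module-level `platform` and `all_platforms` callables for pickleable stand-ins and restores them on exit. A hedged usage sketch (passing the host's own platform back in, so no extra fixtures are needed; any `Platform` instance works):

```python
import spack.architecture

# real_platform() always refers to the host, even inside a swap.
host = spack.architecture.real_platform()

with spack.architecture.use_platform(host):
    # Inside the block, platform() returns exactly the instance we passed
    # and all_platforms() returns just its class.
    assert spack.architecture.platform() is host
    assert spack.architecture.all_platforms() == [type(host)]
# On exit the original memoized functions are restored and the
# configuration/compiler caches are cleared again.
```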
252  lib/spack/spack/bootstrap.py (new file)
@@ -0,0 +1,252 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import os
import sys
try:
    import sysconfig  # novm
except ImportError:
    # Not supported on Python 2.6
    pass

import archspec.cpu

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.architecture
import spack.config
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.user_environment as uenv
import spack.util.executable
from spack.util.environment import EnvironmentModifications


def spec_for_current_python():
    """For bootstrapping purposes we are just interested in the Python
    minor version (all patches are ABI compatible with the same minor)
    and on whether ucs4 support has been enabled for Python 2.7

    See:
      https://www.python.org/dev/peps/pep-0513/
      https://stackoverflow.com/a/35801395/771663
    """
    version_str = '.'.join(str(x) for x in sys.version_info[:2])
    variant_str = ''
    if sys.version_info[0] == 2 and sys.version_info[1] == 7:
        unicode_size = sysconfig.get_config_var('Py_UNICODE_SIZE')
        variant_str = '+ucs4' if unicode_size == 4 else '~ucs4'

    spec_fmt = 'python@{0} {1}'
    return spec_fmt.format(version_str, variant_str)


@contextlib.contextmanager
def spack_python_interpreter():
    """Override the current configuration to set the interpreter under
    which Spack is currently running as the only Python external spec
    available.
    """
    python_prefix = os.path.dirname(os.path.dirname(sys.executable))
    external_python = spec_for_current_python()

    entry = {
        'buildable': False,
        'externals': [
            {'prefix': python_prefix, 'spec': str(external_python)}
        ]
    }

    with spack.config.override('packages:python::', entry):
        yield


def make_module_available(module, spec=None, install=False):
    """Ensure module is importable"""
    # If we already can import it, that's great
    try:
        __import__(module)
        return
    except ImportError:
        pass

    # If it's already installed, use it
    # Search by spec
    spec = spack.spec.Spec(spec or module)

    # We have to run as part of this python
    # We can constrain by a shortened version in place of a version range
    # because this spec is only used for querying or as a placeholder to be
    # replaced by an external that already has a concrete version. This syntax
    # is not sufficient when concretizing without an external, as it will
    # concretize to python@X.Y instead of python@X.Y.Z
    python_requirement = '^' + spec_for_current_python()
    spec.constrain(python_requirement)
    installed_specs = spack.store.db.query(spec, installed=True)

    for ispec in installed_specs:
        # TODO: make sure run-environment is appropriate
        module_path = os.path.join(ispec.prefix,
                                   ispec['python'].package.site_packages_dir)
        module_path_64 = module_path.replace('/lib/', '/lib64/')
        try:
            sys.path.append(module_path)
            sys.path.append(module_path_64)
            __import__(module)
            return
        except ImportError:
            tty.warn("Spec %s did not provide module %s" % (ispec, module))
            sys.path = sys.path[:-2]

    def _raise_error(module_name, module_spec):
        error_msg = 'cannot import module "{0}"'.format(module_name)
        if module_spec:
            error_msg += ' from spec "{0}"'.format(module_spec)
        raise ImportError(error_msg)

    if not install:
        _raise_error(module, spec)

    with spack_python_interpreter():
        # We will install for ourselves, using this python if needed
        # Concretize the spec
        spec.concretize()
    spec.package.do_install()

    module_path = os.path.join(spec.prefix,
                               spec['python'].package.site_packages_dir)
    module_path_64 = module_path.replace('/lib/', '/lib64/')
    try:
        sys.path.append(module_path)
        sys.path.append(module_path_64)
        __import__(module)
        return
    except ImportError:
        sys.path = sys.path[:-2]
        _raise_error(module, spec)


def get_executable(exe, spec=None, install=False):
    """Find an executable named exe, either in PATH or in Spack

    Args:
        exe (str): needed executable name
        spec (Spec or str): spec to search for exe in (default exe)
        install (bool): install spec if not available

    When ``install`` is True, Spack will use the python used to run Spack as an
    external. The ``install`` option should only be used with packages that
    install quickly (when using external python) or are guaranteed by Spack
    organization to be in a binary mirror (clingo)."""
    # Search the system first
    runner = spack.util.executable.which(exe)
    if runner:
        return runner

    # Check whether it's already installed
    spec = spack.spec.Spec(spec or exe)
    installed_specs = spack.store.db.query(spec, installed=True)
    for ispec in installed_specs:
        # filter out directories of the same name as the executable
        exe_path = [exe_p for exe_p in fs.find(ispec.prefix, exe)
                    if fs.is_exe(exe_p)]
        if exe_path:
            ret = spack.util.executable.Executable(exe_path[0])
            envmod = EnvironmentModifications()
            for dep in ispec.traverse(root=True, order='post'):
                envmod.extend(uenv.environment_modifications_for_spec(dep))
            ret.add_default_envmod(envmod)
            return ret
        else:
            tty.warn('Exe %s not found in prefix %s' % (exe, ispec.prefix))

    def _raise_error(executable, exe_spec):
        error_msg = 'cannot find the executable "{0}"'.format(executable)
        if exe_spec:
            error_msg += ' from spec "{0}"'.format(exe_spec)
        raise RuntimeError(error_msg)

    # If we're not allowed to install this for ourselves, we can't find it
    if not install:
        _raise_error(exe, spec)

    with spack_python_interpreter():
        # We will install for ourselves, using this python if needed
        # Concretize the spec
        spec.concretize()

    spec.package.do_install()
    # filter out directories of the same name as the executable
    exe_path = [exe_p for exe_p in fs.find(spec.prefix, exe)
                if fs.is_exe(exe_p)]
    if exe_path:
        ret = spack.util.executable.Executable(exe_path[0])
        envmod = EnvironmentModifications()
        for dep in spec.traverse(root=True, order='post'):
            envmod.extend(uenv.environment_modifications_for_spec(dep))
        ret.add_default_envmod(envmod)
        return ret

    _raise_error(exe, spec)


def _bootstrap_config_scopes():
    tty.debug('[BOOTSTRAP CONFIG SCOPE] name=_builtin')
    config_scopes = [
        spack.config.InternalConfigScope(
            '_builtin', spack.config.config_defaults
        )
    ]
    for name, path in spack.config.configuration_paths:
        platform = spack.architecture.platform().name
        platform_scope = spack.config.ConfigScope(
            '/'.join([name, platform]), os.path.join(path, platform)
        )
        generic_scope = spack.config.ConfigScope(name, path)
        config_scopes.extend([generic_scope, platform_scope])
        msg = '[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}'
        tty.debug(msg.format(generic_scope.name, generic_scope.path))
        tty.debug(msg.format(platform_scope.name, platform_scope.path))
    return config_scopes


@contextlib.contextmanager
def ensure_bootstrap_configuration():
    with spack.architecture.use_platform(spack.architecture.real_platform()):
        with spack.repo.use_repositories(spack.paths.packages_path):
            with spack.store.use_store(spack.paths.user_bootstrap_store):
                # Default configuration scopes excluding command line
                # and builtin but accounting for platform specific scopes
                config_scopes = _bootstrap_config_scopes()
                with spack.config.use_configuration(*config_scopes):
                    with spack_python_interpreter():
                        yield


def clingo_root_spec():
    # Construct the root spec that will be used to bootstrap clingo
    spec_str = 'clingo-bootstrap@spack+python'

    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but MacOS.
    if str(spack.architecture.platform()) == 'darwin':
        spec_str += ' %apple-clang'
    else:
        spec_str += ' %gcc'

    # Add hint to use frontend operating system on Cray
    if str(spack.architecture.platform()) == 'cray':
        spec_str += ' os=fe'

    # Add the generic target
    generic_target = archspec.cpu.host().family
    spec_str += ' target={0}'.format(str(generic_target))

    tty.debug('[BOOTSTRAP ROOT SPEC] clingo: {0}'.format(spec_str))

    return spack.spec.Spec(spec_str)
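Taken together, the new module gives Spack a self-contained way to satisfy its own dependencies. A hedged sketch of how the pieces compose (every name appears in the file above, but this particular call site is an assumption, not code from the PR):

```python
import spack.bootstrap

# Run queries and installs against the isolated bootstrap store under
# ~/.spack, with the Python running Spack registered as an external.
with spack.bootstrap.ensure_bootstrap_configuration():
    # Make the clingo bindings importable, building them if necessary.
    spack.bootstrap.make_module_available(
        'clingo', spec=spack.bootstrap.clingo_root_spec(), install=True)

import clingo  # importable once the call above returns
```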
@@ -302,6 +302,19 @@ def set_compiler_environment_variables(pkg, env):
     return env


+def _place_externals_last(spec_container):
+    """
+    For a (possibly unordered) container of specs, return an ordered list
+    where all external specs are at the end of the list. External packages
+    may be installed in merged prefixes with other packages, and so
+    they should be deprioritized for any search order (i.e. in PATH, or
+    for a set of -L entries in a compiler invocation).
+    """
+    first = list(x for x in spec_container if not x.external)
+    second = list(x for x in spec_container if x.external)
+    return first + second
+
+
 def set_build_environment_variables(pkg, env, dirty):
     """Ensure a clean install environment when we build packages.

@@ -319,6 +332,29 @@ def set_build_environment_variables(pkg, env, dirty):
     link_deps = set(pkg.spec.traverse(root=False, deptype=('link')))
     build_link_deps = build_deps | link_deps
     rpath_deps = get_rpath_deps(pkg)
+    # This includes all build dependencies and any other dependencies that
+    # should be added to PATH (e.g. supporting executables run by build
+    # dependencies)
+    build_and_supporting_deps = set()
+    for build_dep in build_deps:
+        build_and_supporting_deps.update(build_dep.traverse(deptype='run'))
+
+    # Establish an arbitrary but fixed ordering of specs so that resulting
+    # environment variable values are stable
+    def _order(specs):
+        return sorted(specs, key=lambda x: x.name)
+
+    # External packages may be installed in a prefix which contains many other
+    # package installs. To avoid having those installations override
+    # Spack-installed packages, they are placed at the end of search paths.
+    # System prefixes are removed entirely later on since they are already
+    # searched.
+    build_deps = _place_externals_last(_order(build_deps))
+    link_deps = _place_externals_last(_order(link_deps))
+    build_link_deps = _place_externals_last(_order(build_link_deps))
+    rpath_deps = _place_externals_last(_order(rpath_deps))
+    build_and_supporting_deps = _place_externals_last(
+        _order(build_and_supporting_deps))

     link_dirs = []
     include_dirs = []
@@ -365,21 +401,10 @@ def set_build_environment_variables(pkg, env, dirty):
     env.set(SPACK_INCLUDE_DIRS, ':'.join(include_dirs))
     env.set(SPACK_RPATH_DIRS, ':'.join(rpath_dirs))

-    build_prefixes = [dep.prefix for dep in build_deps]
-    build_link_prefixes = [dep.prefix for dep in build_link_deps]
-
-    # add run-time dependencies of direct build-time dependencies:
-    for build_dep in build_deps:
-        for run_dep in build_dep.traverse(deptype='run'):
-            build_prefixes.append(run_dep.prefix)
-
     # Filter out system paths: ['/', '/usr', '/usr/local']
     # These paths can be introduced into the build when an external package
     # is added as a dependency. The problem with these paths is that they often
     # contain hundreds of other packages installed in the same directory.
     # If these paths come first, they can overshadow Spack installations.
-    build_prefixes = filter_system_paths(build_prefixes)
-    build_link_prefixes = filter_system_paths(build_link_prefixes)
+    build_and_supporting_prefixes = filter_system_paths(
+        x.prefix for x in build_and_supporting_deps)
+    build_link_prefixes = filter_system_paths(
+        x.prefix for x in build_link_deps)

     # Add dependencies to CMAKE_PREFIX_PATH
     env.set_path('CMAKE_PREFIX_PATH', build_link_prefixes)
@@ -394,7 +419,10 @@ def set_build_environment_variables(pkg, env, dirty):
     env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)

     # Add bin directories from dependencies to the PATH for the build.
-    for prefix in build_prefixes:
+    # These directories are added to the beginning of the search path, and in
+    # the order given by 'build_and_supporting_prefixes' (the iteration order
+    # is reversed because each entry is prepended)
+    for prefix in reversed(build_and_supporting_prefixes):
         for dirname in ['bin', 'bin64']:
             bin_dir = os.path.join(prefix, dirname)
             if os.path.isdir(bin_dir):
@@ -439,7 +467,7 @@ def set_build_environment_variables(pkg, env, dirty):
         env.set(SPACK_CCACHE_BINARY, ccache)

     # Add any pkgconfig directories to PKG_CONFIG_PATH
-    for prefix in build_link_prefixes:
+    for prefix in reversed(build_link_prefixes):
         for directory in ('lib', 'lib64', 'share'):
             pcdir = os.path.join(prefix, directory, 'pkgconfig')
             if os.path.isdir(pcdir):
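The effect of the `_place_externals_last` helper added above is easy to see with stand-in objects (a self-contained illustration; `FakeSpec` is not a Spack type):

```python
from collections import namedtuple

FakeSpec = namedtuple('FakeSpec', ['name', 'external'])

def _place_externals_last(spec_container):
    # Same logic as the helper above: non-externals first, externals last.
    first = [x for x in spec_container if not x.external]
    second = [x for x in spec_container if x.external]
    return first + second

deps = [FakeSpec('cmake', True), FakeSpec('zlib', False), FakeSpec('openssl', True)]
# Sorting by name first gives the stable base ordering used in the hunk.
ordered = _place_externals_last(sorted(deps, key=lambda x: x.name))
print([d.name for d in ordered])  # ['zlib', 'cmake', 'openssl']
```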
@@ -233,7 +233,28 @@ def install_args(self, spec, prefix):
         if ('py-setuptools' == spec.name or          # this is setuptools, or
                 'py-setuptools' in spec._dependencies and  # it's an immediate dep
                 'build' in spec._dependencies['py-setuptools'].deptypes):
-            args += ['--single-version-externally-managed', '--root=/']
+            args += ['--single-version-externally-managed']
+
+        # Get all relative paths since we set the root to `prefix`
+        # We query the python with which these will be used for the lib and inc
+        # directories. This ensures we use `lib`/`lib64` as expected by python.
+        python = spec['python'].package.command
+        command_start = 'print(distutils.sysconfig.'
+        commands = ';'.join([
+            'import distutils.sysconfig',
+            command_start + 'get_python_lib(plat_specific=False, prefix=""))',
+            command_start + 'get_python_lib(plat_specific=True, prefix=""))',
+            command_start + 'get_python_inc(plat_specific=True, prefix=""))'])
+        pure_site_packages_dir, plat_site_packages_dir, inc_dir = python(
+            '-c', commands, output=str, error=str).strip().split('\n')
+
+        args += ['--root=%s' % prefix,
+                 '--install-purelib=%s' % pure_site_packages_dir,
+                 '--install-platlib=%s' % plat_site_packages_dir,
+                 '--install-scripts=bin',
+                 '--install-data=""',
+                 '--install-headers=%s' % inc_dir
+                 ]

         return args
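The three `distutils.sysconfig` queries above are what produce the relative install paths. Run directly against any interpreter they look like this (outputs are illustrative; they vary with the Python version and `lib`/`lib64` layout):

```python
import distutils.sysconfig as sc

# prefix="" yields paths relative to the installation prefix.
print(sc.get_python_lib(plat_specific=False, prefix=""))  # e.g. lib/python3.6/site-packages
print(sc.get_python_lib(plat_specific=True, prefix=""))   # e.g. lib64/python3.6/site-packages
print(sc.get_python_inc(plat_specific=True, prefix=""))   # e.g. include/python3.6m
```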
@@ -181,6 +181,19 @@ def parse_specs(args, **kwargs):
         raise spack.error.SpackError(msg)


+def matching_spec_from_env(spec):
+    """
+    Returns a concrete spec, matching what is available in the environment.
+    If no matching spec is found in the environment (or if no environment is
+    active), this will return the given spec but concretized.
+    """
+    env = spack.environment.get_env({}, cmd_name)
+    if env:
+        return env.matching_spec(spec) or spec.concretized()
+    else:
+        return spec.concretized()
+
+
 def elide_list(line_list, max_num=10):
     """Takes a long list and limits it to a smaller number of elements,
     replacing intervening elements with '...'. For example::
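`matching_spec_from_env` is the piece that makes `spack build-env` and `spack location` environment-aware further down in this compare view. A hedged sketch of the behavior it provides (the spec name is illustrative):

```python
import spack.cmd
import spack.spec

spec = spack.spec.Spec('zlib')
# With an active environment containing a concretized zlib, the
# environment's concrete spec is returned; otherwise the abstract
# spec is concretized from scratch.
concrete = spack.cmd.matching_spec_from_env(spec)
print(concrete.concrete)  # True either way
```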
@@ -10,11 +10,12 @@
 import llnl.util.tty as tty

 import spack.caches
-import spack.config
 import spack.cmd.test
 import spack.cmd.common.arguments as arguments
+import spack.main
 import spack.repo
 import spack.stage
+import spack.config
 from spack.paths import lib_path, var_path

@@ -26,7 +27,7 @@
 class AllClean(argparse.Action):
     """Activates flags -s -d -f -m and -p simultaneously"""
     def __call__(self, parser, namespace, values, option_string=None):
-        parser.parse_args(['-sdfmp'], namespace=namespace)
+        parser.parse_args(['-sdfmpb'], namespace=namespace)


 def setup_parser(subparser):
@@ -46,7 +47,10 @@ def setup_parser(subparser):
         '-p', '--python-cache', action='store_true',
         help="remove .pyc, .pyo files and __pycache__ folders")
     subparser.add_argument(
-        '-a', '--all', action=AllClean, help="equivalent to -sdfmp", nargs=0
+        '-b', '--bootstrap', action='store_true',
+        help="remove software needed to bootstrap Spack")
+    subparser.add_argument(
+        '-a', '--all', action=AllClean, help="equivalent to -sdfmpb", nargs=0
     )
     arguments.add_common_arguments(subparser, ['specs'])

@@ -54,7 +58,7 @@ def setup_parser(subparser):
 def clean(parser, args):
     # If nothing was set, activate the default
     if not any([args.specs, args.stage, args.downloads, args.failures,
-                args.misc_cache, args.python_cache]):
+                args.misc_cache, args.python_cache, args.bootstrap]):
         args.stage = True

     # Then do the cleaning falling through the cases
@@ -96,3 +100,10 @@ def clean(parser, args):
             dname = os.path.join(root, d)
             tty.debug('Removing {0}'.format(dname))
             shutil.rmtree(dname)
+
+    if args.bootstrap:
+        msg = 'Removing software in "{0}"'
+        tty.msg(msg.format(spack.paths.user_bootstrap_store))
+        with spack.store.use_store(spack.paths.user_bootstrap_store):
+            uninstall = spack.main.SpackCommand('uninstall')
+            uninstall('-a', '-y')
@@ -53,11 +53,13 @@ def emulate_env_utility(cmd_name, context, args):
     spec = args.spec[0]
     cmd = args.spec[1:]

-    specs = spack.cmd.parse_specs(spec, concretize=True)
+    specs = spack.cmd.parse_specs(spec, concretize=False)
     if len(specs) > 1:
         tty.die("spack %s only takes one spec." % cmd_name)
     spec = specs[0]

+    spec = spack.cmd.matching_spec_from_env(spec)
+
     build_environment.setup_package(spec.package, args.dirty, context)

     if args.dump:
@@ -8,6 +8,7 @@
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.config
+import spack.environment as ev
 import spack.repo

 description = "fetch archives for packages"
@@ -18,22 +19,51 @@
 def setup_parser(subparser):
     arguments.add_common_arguments(subparser, ['no_checksum'])
     subparser.add_argument(
-        '-m', '--missing', action='store_true',
-        help="fetch only missing (not yet installed) dependencies")
+        "-m",
+        "--missing",
+        action="store_true",
+        help="fetch only missing (not yet installed) dependencies",
+    )
     subparser.add_argument(
-        '-D', '--dependencies', action='store_true',
-        help="also fetch all dependencies")
-    arguments.add_common_arguments(subparser, ['specs'])
+        "-D",
+        "--dependencies",
+        action="store_true",
+        help="also fetch all dependencies",
+    )
+    arguments.add_common_arguments(subparser, ["specs"])
+    subparser.epilog = (
+        "With an active environment, the specs "
+        "parameter can be omitted. In this case all (uninstalled"
+        ", in case of --missing) specs from the environment are fetched"
+    )


 def fetch(parser, args):
-    if not args.specs:
-        tty.die("fetch requires at least one package argument")
+    if args.specs:
+        specs = spack.cmd.parse_specs(args.specs, concretize=True)
+    else:
+        # No specs were given explicitly, check if we are in an
+        # environment. If yes, check the missing argument, if yes
+        # fetch all uninstalled specs from it otherwise fetch all.
+        # If we are also not in an environment, complain to the
+        # user that we don't know what to do.
+        env = ev.get_env(args, "fetch")
+        if env:
+            if args.missing:
+                specs = env.uninstalled_specs()
+            else:
+                specs = env.all_specs()
+            if specs == []:
+                tty.die(
+                    "No uninstalled specs in environment. Did you "
+                    "run `spack concretize` yet?"
+                )
+        else:
+            tty.die("fetch requires at least one spec argument")

     if args.no_checksum:
-        spack.config.set('config:checksum', False, scope='command_line')
+        spack.config.set("config:checksum", False, scope="command_line")

-    specs = spack.cmd.parse_specs(args.specs, concretize=True)
     for spec in specs:
         if args.missing or args.dependencies:
             for s in spec.traverse():
@@ -109,6 +109,10 @@ def setup_parser(subparser):
     subparser.add_argument(
         '--end-date', help='latest date of installation [YYYY-MM-DD]'
     )
+    subparser.add_argument(
+        '-b', '--bootstrap', action='store_true',
+        help='show software in the internal bootstrap store'
+    )

     arguments.add_common_arguments(subparser, ['constraint'])

@@ -201,7 +205,14 @@ def display_env(env, args, decorator):

 def find(parser, args):
     q_args = query_arguments(args)
-    results = args.specs(**q_args)
+    # Query the current store or the internal bootstrap store if required
+    if args.bootstrap:
+        msg = 'Showing internal bootstrap store at "{0}"'
+        tty.msg(msg.format(spack.paths.user_bootstrap_store))
+        with spack.store.use_store(spack.paths.user_bootstrap_store):
+            results = args.specs(**q_args)
+    else:
+        results = args.specs(**q_args)

     decorator = lambda s, f: f
     added = set()
@@ -47,9 +47,13 @@ def setup_parser(subparser):
     directories.add_argument(
         '-S', '--stages', action='store_true',
         help="top level stage directory")
+    directories.add_argument(
+        '--source-dir', action='store_true',
+        help="source directory for a spec "
+             "(requires it to be staged first)")
     directories.add_argument(
         '-b', '--build-dir', action='store_true',
-        help="checked out or expanded source directory for a spec "
+        help="build directory for a spec "
              "(requires it to be staged first)")
     directories.add_argument(
         '-e', '--env', action='store',
@@ -61,53 +65,77 @@ def setup_parser(subparser):
 def location(parser, args):
     if args.module_dir:
         print(spack.paths.module_path)
+        return

-    elif args.spack_root:
+    if args.spack_root:
         print(spack.paths.prefix)
+        return

-    elif args.env:
+    if args.env:
         path = spack.environment.root(args.env)
         if not os.path.isdir(path):
             tty.die("no such environment: '%s'" % args.env)
         print(path)
+        return

-    elif args.packages:
+    if args.packages:
         print(spack.repo.path.first_repo().root)
+        return

-    elif args.stages:
+    if args.stages:
         print(spack.stage.get_stage_root())
+        return

-    else:
-        specs = spack.cmd.parse_specs(args.spec)
-        if not specs:
-            tty.die("You must supply a spec.")
-        if len(specs) != 1:
-            tty.die("Too many specs. Supply only one.")
+    specs = spack.cmd.parse_specs(args.spec)

-        if args.install_dir:
-            # install_dir command matches against installed specs.
-            env = ev.get_env(args, 'location')
-            spec = spack.cmd.disambiguate_spec(specs[0], env)
-            print(spec.prefix)
+    if not specs:
+        tty.die("You must supply a spec.")

-        else:
-            spec = specs[0]
+    if len(specs) != 1:
+        tty.die("Too many specs. Supply only one.")

-            if args.package_dir:
-                # This one just needs the spec name.
-                print(spack.repo.path.dirname_for_package_name(spec.name))
+    # install_dir command matches against installed specs.
+    if args.install_dir:
+        env = ev.get_env(args, 'location')
+        spec = spack.cmd.disambiguate_spec(specs[0], env)
+        print(spec.prefix)
+        return

-            else:
-                # These versions need concretized specs.
-                spec.concretize()
-                pkg = spack.repo.get(spec)
+    spec = specs[0]

-                if args.stage_dir:
-                    print(pkg.stage.path)
+    # Package dir just needs the spec name
+    if args.package_dir:
+        print(spack.repo.path.dirname_for_package_name(spec.name))
+        return

-                else:  # args.build_dir is the default.
-                    if not pkg.stage.expanded:
-                        tty.die("Build directory does not exist yet. "
-                                "Run this to create it:",
-                                "spack stage " + " ".join(args.spec))
-                    print(pkg.stage.source_path)
+    # Either concretize or filter from already concretized environment
+    spec = spack.cmd.matching_spec_from_env(spec)
+    pkg = spec.package
+
+    if args.stage_dir:
+        print(pkg.stage.path)
+        return
+
+    if args.build_dir:
+        # Out of source builds have build_directory defined
+        if hasattr(pkg, 'build_directory'):
+            # build_directory can be either absolute or relative to the
+            # stage path in either case os.path.join makes it absolute
+            print(os.path.normpath(os.path.join(
+                pkg.stage.path,
+                pkg.build_directory
+            )))
+            return
+
+        # Otherwise assume in-source builds
+        print(pkg.stage.source_path)
+        return
+
+    # source dir remains, which requires the spec to be staged
+    if not pkg.stage.expanded:
+        tty.die("Source directory does not exist yet. "
+                "Run this to create it:",
+                "spack stage " + " ".join(args.spec))
+
+    # Default to source dir.
+    print(pkg.stage.source_path)
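The `--build-dir` branch leans on a property of `os.path.join` worth spelling out: an absolute second argument discards the first, which is why `build_directory` may be given either relative to the stage or as an absolute path. A standalone check (the paths are illustrative):

```python
import os.path

stage = '/tmp/spack-stage-zlib'

# Relative build_directory: resolved under the stage path.
print(os.path.normpath(os.path.join(stage, 'spack-build')))
# -> /tmp/spack-stage-zlib/spack-build

# Absolute build_directory: os.path.join keeps the absolute path as-is.
print(os.path.normpath(os.path.join(stage, '/opt/builds/zlib')))
# -> /opt/builds/zlib
```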
@@ -25,8 +25,8 @@

 # tutorial configuration parameters
-tutorial_branch = "releases/v0.15"
-tutorial_mirror = "s3://spack-tutorial-container/mirror/"
+tutorial_branch = "releases/v0.16"
+tutorial_mirror = "s3://spack-binaries-prs/tutorial/ecp21/mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

 # configs to remove
@@ -22,11 +22,10 @@
 import spack.spec
 import spack.config
 import spack.architecture
-import spack.util.imp as simp

 from spack.util.environment import get_path
 from spack.util.naming import mod_to_class

-_imported_compilers_module = 'spack.compilers'
 _path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
 _flags_instance_vars = ['cflags', 'cppflags', 'cxxflags', 'fflags']
 _other_instance_vars = ['modules', 'operating_system', 'environment',
@@ -470,17 +469,17 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
 @llnl.util.lang.memoized
 def class_for_compiler_name(compiler_name):
     """Given a compiler module name, get the corresponding Compiler class."""
-    assert(supported(compiler_name))
+    assert supported(compiler_name)

     # Hack to be able to call the compiler `apple-clang` while still
     # using a valid python name for the module
-    module_name = compiler_name
+    submodule_name = compiler_name
     if compiler_name == 'apple-clang':
-        module_name = compiler_name.replace('-', '_')
+        submodule_name = compiler_name.replace('-', '_')

-    file_path = os.path.join(spack.paths.compilers_path, module_name + ".py")
-    compiler_mod = simp.load_source(_imported_compilers_module, file_path)
-    cls = getattr(compiler_mod, mod_to_class(compiler_name))
+    module_name = '.'.join(['spack', 'compilers', submodule_name])
+    module_obj = __import__(module_name, fromlist=[None])
+    cls = getattr(module_obj, mod_to_class(compiler_name))

     # make a note of the name in the module so we can get to it easily.
     cls.name = compiler_name
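The replacement of `simp.load_source` with `__import__` depends on the `fromlist` argument: without it, `__import__('a.b.c')` returns the top-level package `a`, while a non-empty `fromlist` makes it return the leaf module itself, which is what `getattr` needs here. A standalone demonstration using only the standard library:

```python
# Without fromlist, __import__ returns the top-level package.
pkg = __import__('os.path')
print(pkg.__name__)   # 'os'

# With a non-empty fromlist, it returns the submodule itself.
mod = __import__('os.path', fromlist=[None])
print(mod.__name__)   # 'posixpath' (or 'ntpath' on Windows)
```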
@@ -578,10 +578,14 @@ def adjust_target(self, spec):
         True if spec was modified, False otherwise
         """
         # To minimize the impact on performance this function will attempt
-        # to adjust the target only at the very first call. It will just
-        # return False on subsequent calls. The way this is achieved is by
-        # initializing a generator and making this function return the next
-        # answer.
+        # to adjust the target only at the very first call once necessary
+        # information is set. It will just return False on subsequent calls.
+        # The way this is achieved is by initializing a generator and making
+        # this function return the next answer.
+        if not (spec.architecture and spec.architecture.concrete):
+            # Not ready, but keep going because we have work to do later
+            return True
+
         def _make_only_one_call(spec):
             yield self._adjust_target(spec)
             while True:
@@ -619,9 +623,10 @@ def _adjust_target(self, spec):
         if PackagePrefs.has_preferred_targets(spec.name):
             default_target = self.target_from_package_preferences(spec)

-        if current_target != default_target or \
-                (self.abstract_spec.architecture is not None and
-                 self.abstract_spec.architecture.target is not None):
+        if current_target != default_target or (
+                self.abstract_spec and
+                self.abstract_spec.architecture and
+                self.abstract_spec.architecture.concrete):
             return False

         try:
@@ -709,7 +714,7 @@ def _compiler_concretization_failure(compiler_spec, arch):
     raise UnavailableCompilerVersionError(compiler_spec, arch)


-def concretize_specs_together(*abstract_specs):
+def concretize_specs_together(*abstract_specs, **kwargs):
     """Given a number of specs as input, tries to concretize them together.

     Args:
@@ -719,6 +724,24 @@ def concretize_specs_together(*abstract_specs):
     Returns:
         List of concretized specs
     """
+    if spack.config.get('config:concretizer') == 'original':
+        return _concretize_specs_together_original(*abstract_specs, **kwargs)
+    return _concretize_specs_together_new(*abstract_specs, **kwargs)
+
+
+def _concretize_specs_together_new(*abstract_specs, **kwargs):
+    import spack.solver.asp
+    result = spack.solver.asp.solve(abstract_specs)
+
+    if not result.satisfiable:
+        result.print_cores()
+        tty.die("Unsatisfiable spec.")
+
+    opt, i, answer = min(result.answers)
+    return [answer[s.name].copy() for s in abstract_specs]
+
+
+def _concretize_specs_together_original(*abstract_specs, **kwargs):
     def make_concretization_repository(abstract_specs):
         """Returns the path to a temporary repository created to contain
         a fake package that depends on all of the abstract specs.
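With this change `concretize_specs_together` becomes a thin dispatcher on the `config:concretizer` setting. A hedged usage sketch (the spec names are illustrative):

```python
import spack.concretize
import spack.config
import spack.spec

# Which implementation runs is decided by configuration, e.g. setting
# config:concretizer to 'clingo' selects the ASP-based solver.
print(spack.config.get('config:concretizer'))

abstract = [spack.spec.Spec('zlib'), spack.spec.Spec('libiconv')]
concrete = spack.concretize.concretize_specs_together(*abstract)
print(all(s.concrete for s in concrete))  # True
```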
@@ -29,6 +29,7 @@

"""
import collections
import contextlib
import copy
import functools
import os
@@ -48,6 +49,7 @@

import spack.paths
import spack.architecture
import spack.compilers
import spack.schema
import spack.schema.compilers
import spack.schema.mirrors
@@ -238,11 +240,18 @@ def get_section(self, section):
        #     }
        #   }
        # }

        # This bit ensures we have read the file and have
        # the raw data in memory
        if self._raw_data is None:
            self._raw_data = read_config_file(self.path, self.schema)
            if self._raw_data is None:
                return None

        # Here we know we have the raw data and ensure we
        # populate the sections dictionary, which may be
        # cleared by the clear() method
        if not self.sections:
            section_data = self._raw_data
            for key in self.yaml_path:
                if section_data is None:
@@ -251,6 +260,7 @@ def get_section(self, section):

            for section_key, data in section_data.items():
                self.sections[section_key] = {section_key: data}

        return self.sections.get(section, None)

    def _write_section(self, section):
@@ -351,6 +361,10 @@ def _write_section(self, section):
    def __repr__(self):
        return '<InternalConfigScope: %s>' % self.name

    def clear(self):
        # no cache to clear here.
        pass

    @staticmethod
    def _process_dict_keyname_overrides(data):
        """Turn a trailing `:' in a key name into an override attribute."""
@@ -803,22 +817,6 @@ def _config():
config = llnl.util.lang.Singleton(_config)


def replace_config(configuration):
    """Replace the current global configuration with the instance passed as
    argument.

    Args:
        configuration (Configuration): the new configuration to be used.

    Returns:
        The old configuration that has been removed
    """
    global config
    config.clear_caches(), configuration.clear_caches()
    old_config, config = config, configuration
    return old_config


def get(path, default=None, scope=None):
    """Module-level wrapper for ``Configuration.get()``."""
    return config.get(path, default, scope)
@@ -1133,6 +1131,55 @@ def ensure_latest_format_fn(section):
    return update_fn


@contextlib.contextmanager
def use_configuration(*scopes_or_paths):
    """Use the configuration scopes passed as arguments within the
    context manager.

    Args:
        *scopes_or_paths: scope objects or paths to be used

    Returns:
        Configuration object associated with the scopes passed as arguments
    """
    global config

    # Normalize input and construct a Configuration object
    configuration = _config_from(scopes_or_paths)
    config.clear_caches(), configuration.clear_caches()

    # Save and clear the current compiler cache
    saved_compiler_cache = spack.compilers._cache_config_file
    spack.compilers._cache_config_file = []

    saved_config, config = config, configuration

    yield configuration

    # Restore previous config files
    spack.compilers._cache_config_file = saved_compiler_cache
    config = saved_config


@llnl.util.lang.memoized
def _config_from(scopes_or_paths):
    scopes = []
    for scope_or_path in scopes_or_paths:
        # If we have a config scope we are already done
        if isinstance(scope_or_path, ConfigScope):
            scopes.append(scope_or_path)
            continue

        # Otherwise we need to construct it
        path = os.path.normpath(scope_or_path)
        assert os.path.isdir(path), '"{0}" must be a directory'.format(path)
        name = os.path.basename(path)
        scopes.append(ConfigScope(name, path))

    configuration = Configuration(*scopes)
    return configuration
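A short usage sketch for the context manager above (the path is a placeholder; `_config_from` requires it to be an existing directory):

    import spack.config

    with spack.config.use_configuration('/path/to/config/dir') as cfg:
        # Reads come from the temporary Configuration; the previous
        # global config and the compiler cache are restored on exit.
        concretizer = cfg.get('config:concretizer')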

class ConfigError(SpackError):
    """Superclass for all Spack config related errors."""

@@ -28,10 +28,11 @@ class OpenMpi(Package):

"""

import collections
import functools
import os.path
import re
import sys

from six import string_types

import llnl.util.lang
@@ -47,6 +48,13 @@ class OpenMpi(Package):
from spack.resource import Resource
from spack.version import Version, VersionChecksumError


if sys.version_info >= (3, 3):
    from collections.abc import Sequence  # novm
else:
    from collections import Sequence


__all__ = []

#: These are variant names used by Spack internally; packages can't use them
@@ -202,7 +210,7 @@ class Foo(Package):

        if isinstance(dicts, string_types):
            dicts = (dicts, )
        if not isinstance(dicts, collections.Sequence):
        if not isinstance(dicts, Sequence):
            message = "dicts arg must be list, tuple, or string. Found {0}"
            raise TypeError(message.format(type(dicts)))
        # Add the dictionary names if not already there
@@ -243,7 +251,7 @@ def remove_directives(arg):

            # ...so if it is not a sequence make it so
            values = result
            if not isinstance(values, collections.Sequence):
            if not isinstance(values, Sequence):
                values = (values, )

        DirectiveMeta._directives_to_be_executed.extend(values)

@@ -1387,6 +1387,21 @@ def _install_log_links(self, spec):
                os.remove(build_log_link)
            os.symlink(spec.package.build_log_path, build_log_link)

    def uninstalled_specs(self):
        """Return a list of all uninstalled (and non-dev) specs."""
        # Do the installed check across all specs within a single
        # DB read transaction to reduce time spent in lock acquisition.
        uninstalled_specs = []
        with spack.store.db.read_transaction():
            for concretized_hash in self.concretized_order:
                spec = self.specs_by_hash[concretized_hash]
                if not spec.package.installed or (
                        spec.satisfies('dev_path=*') or
                        spec.satisfies('^dev_path=*')
                ):
                    uninstalled_specs.append(spec)
        return uninstalled_specs

    def install_all(self, args=None, **install_args):
        """Install all concretized specs in an environment.

@@ -1397,22 +1412,13 @@ def install_all(self, args=None, **install_args):
            args (Namespace): argparse namespace with command arguments
            install_args (dict): keyword install arguments
        """
        tty.debug('Assessing installation status of environment packages')
        # If "spack install" is invoked repeatedly for a large environment
        # where all specs are already installed, the operation can take
        # a large amount of time due to repeatedly acquiring and releasing
        # locks, this does an initial check across all specs within a single
        # DB read transaction to reduce time spent in this case.
        tty.debug('Assessing installation status of environment packages')
        specs_to_install = []
        with spack.store.db.read_transaction():
            for concretized_hash in self.concretized_order:
                spec = self.specs_by_hash[concretized_hash]
                if not spec.package.installed or (
                        spec.satisfies('dev_path=*') or
                        spec.satisfies('^dev_path=*')
                ):
                    # If it's a dev build it could need to be reinstalled
                    specs_to_install.append(spec)
        specs_to_install = self.uninstalled_specs()

        if not specs_to_install:
            tty.msg('All of the packages are already installed')
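A hedged usage sketch of the refactoring above: `install_all` now delegates the installed-state scan to `uninstalled_specs`, which callers can also use directly (the environment path is a placeholder):

    import spack.environment

    env = spack.environment.Environment('/path/to/env')
    missing = env.uninstalled_specs()   # one DB read transaction
    if missing:
        env.install_all()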
@@ -1499,6 +1505,67 @@ def concretized_specs(self):
        for s, h in zip(self.concretized_user_specs, self.concretized_order):
            yield (s, self.specs_by_hash[h])

    def matching_spec(self, spec):
        """
        Given a spec (likely not concretized), find a matching concretized
        spec in the environment.

        The matching spec does not have to be installed in the environment,
        but must be concrete (specs added with `spack add` without an
        intervening `spack concretize` will not be matched).

        If there is a single root spec that matches the provided spec or a
        single dependency spec that matches the provided spec, then the
        concretized instance of that spec will be returned.

        If multiple root specs match the provided spec, or no root specs match
        and multiple dependency specs match, then this raises an error
        and reports all matching specs.
        """
        # Root specs will be keyed by concrete spec, value abstract
        # Dependency-only specs will have value None
        matches = {}

        for user_spec, concretized_user_spec in self.concretized_specs():
            if concretized_user_spec.satisfies(spec):
                matches[concretized_user_spec] = user_spec
            for dep_spec in concretized_user_spec.traverse(root=False):
                if dep_spec.satisfies(spec):
                    # Don't overwrite the abstract spec if present
                    # If not present already, set to None
                    matches[dep_spec] = matches.get(dep_spec, None)

        if not matches:
            return None
        elif len(matches) == 1:
            return list(matches.keys())[0]

        root_matches = dict((concrete, abstract)
                            for concrete, abstract in matches.items()
                            if abstract)

        if len(root_matches) == 1:
            return root_matches[0][1]

        # More than one spec matched, and either multiple roots matched or
        # none of the matches were roots
        # If multiple root specs match, it is assumed that the abstract
        # spec will most-succinctly summarize the difference between them
        # (and the user can enter one of these to disambiguate)
        match_strings = []
        fmt_str = '{hash:7}  ' + spack.spec.default_format
        for concrete, abstract in matches.items():
            if abstract:
                s = 'Root spec %s\n  %s' % (abstract, concrete.format(fmt_str))
            else:
                s = 'Dependency spec\n  %s' % concrete.format(fmt_str)
            match_strings.append(s)
        matches_str = '\n'.join(match_strings)

        msg = ("{0} matches multiple specs in the environment {1}: \n"
               "{2}".format(str(spec), self.name, matches_str))
        raise SpackEnvironmentError(msg)

    def removed_specs(self):
        """Tuples of (user spec, concrete spec) for all specs that will be
        removed on next concretize."""
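Usage sketch for `matching_spec` above, assuming `env` is an Environment as in the previous example; the spec name is an arbitrary placeholder:

    import spack.spec

    match = env.matching_spec(spack.spec.Spec('hdf5'))
    if match is None:
        print('nothing concretized in this environment matches hdf5')
    # An ambiguous match raises SpackEnvironmentError listing candidates.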
@@ -324,6 +324,8 @@ def _existing_url(self, url):
        # Telling curl to fetch the first byte (-r 0-0) is supposed to be
        # portable.
        curl_args = ['--stderr', '-', '-s', '-f', '-r', '0-0', url]
        if not spack.config.get('config:verify_ssl'):
            curl_args.append('-k')
        _ = curl(*curl_args, fail_on_error=False, output=os.devnull)
        return curl.returncode == 0

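A standalone sketch of the same existence check outside Spack's curl wrapper (assumption: plain subprocess instead of spack.util.executable):

    import subprocess

    def url_exists(url, verify_ssl=True):
        # Fetch only the first byte; with -f, curl exits non-zero when
        # the server reports an error for the URL.
        args = ['curl', '--stderr', '-', '-s', '-f', '-r', '0-0', url]
        if not verify_ssl:
            args.append('-k')
        return subprocess.call(args, stdout=subprocess.DEVNULL) == 0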
@@ -2,8 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This package contains modules with hooks for various stages in the
Spack install process. You can add modules here and they'll be
executed by package at various times during the package lifecycle.

@@ -21,46 +21,55 @@
systems (e.g. modules, lmod, etc.) or to add other custom
features.
"""
import os.path

import llnl.util.lang
import spack.paths
import spack.util.imp as simp
from llnl.util.lang import memoized, list_modules


@memoized
def all_hook_modules():
    modules = []
    for name in list_modules(spack.paths.hooks_path):
        mod_name = __name__ + '.' + name
        path = os.path.join(spack.paths.hooks_path, name) + ".py"
        mod = simp.load_source(mod_name, path)

        if name == 'write_install_manifest':
            last_mod = mod
        else:
            modules.append(mod)

    # put `write_install_manifest` as the last hook to run
    modules.append(last_mod)
    return modules


class HookRunner(object):
class _HookRunner(object):
    #: Stores all hooks on first call, shared among
    #: all HookRunner objects
    _hooks = None

    def __init__(self, hook_name):
        self.hook_name = hook_name

    @classmethod
    def _populate_hooks(cls):
        # Lazily populate the list of hooks
        cls._hooks = []
        relative_names = list(llnl.util.lang.list_modules(
            spack.paths.hooks_path
        ))

        # We want this hook to be the last registered
        relative_names.sort(key=lambda x: x == 'write_install_manifest')
        assert relative_names[-1] == 'write_install_manifest'

        for name in relative_names:
            module_name = __name__ + '.' + name
            # When importing a module from a package, __import__('A.B', ...)
            # returns package A when 'fromlist' is empty. If fromlist is not
            # empty it returns the submodule B instead
            # See: https://stackoverflow.com/a/2725668/771663
            module_obj = __import__(module_name, fromlist=[None])
            cls._hooks.append((module_name, module_obj))

    @property
    def hooks(self):
        if not self._hooks:
            self._populate_hooks()
        return self._hooks

    def __call__(self, *args, **kwargs):
        for module in all_hook_modules():
        for _, module in self.hooks:
            if hasattr(module, self.hook_name):
                hook = getattr(module, self.hook_name)
                if hasattr(hook, '__call__'):
                    hook(*args, **kwargs)


pre_install = HookRunner('pre_install')
post_install = HookRunner('post_install')
pre_install = _HookRunner('pre_install')
post_install = _HookRunner('post_install')

pre_uninstall = HookRunner('pre_uninstall')
post_uninstall = HookRunner('post_uninstall')
pre_uninstall = _HookRunner('pre_uninstall')
post_uninstall = _HookRunner('post_uninstall')
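Two quick notes on the rewrite above: the `fromlist` trick it relies on, shown standalone, and the unchanged call style of the module-level runners (a sketch; `spec` is assumed to be a concrete Spec):

    import spack.hooks

    # __import__ returns the top-level package unless fromlist is non-empty:
    top = __import__('spack.hooks')                   # the 'spack' package
    sub = __import__('spack.hooks', fromlist=[None])  # the 'spack.hooks' module

    def run_post_install(spec):
        # Dispatches to every hooks/*.py module defining post_install,
        # with write_install_manifest guaranteed to run last.
        spack.hooks.post_install(spec)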
@@ -2284,8 +2284,13 @@ def do_activate(self, view=None, with_dependencies=True, verbose=True):

        extensions_layout = view.extensions_layout

        extensions_layout.check_extension_conflict(
            self.extendee_spec, self.spec)
        try:
            extensions_layout.check_extension_conflict(
                self.extendee_spec, self.spec)
        except spack.directory_layout.ExtensionAlreadyInstalledError as e:
            # already installed, let caller know
            tty.msg(e.message)
            return

        # Activate any package dependencies that are also extensions.
        if with_dependencies:

@@ -50,7 +50,8 @@

#: User configuration location
user_config_path = os.path.expanduser('~/.spack')

user_bootstrap_path = os.path.join(user_config_path, 'bootstrap')
user_bootstrap_store = os.path.join(user_bootstrap_path, 'store')

opt_path = os.path.join(prefix, "opt")
etc_path = os.path.join(prefix, "etc")

@@ -914,8 +914,12 @@ def _read_config(self):
    @autospec
    def get(self, spec):
        """Returns the package associated with the supplied spec."""
        if not self.exists(spec.name):
            raise UnknownPackageError(spec.name)
        # NOTE: we only check whether the package is None here, not whether it
        # actually exists, because we have to load it anyway, and that ends up
        # checking for existence. We avoid constructing FastPackageChecker,
        # which will stat all packages.
        if spec.name is None:
            raise UnknownPackageError(None, self)

        if spec.namespace and spec.namespace != self.namespace:
            raise UnknownPackageError(spec.name, self.namespace)
@@ -1060,7 +1064,16 @@ def all_package_classes(self):

    def exists(self, pkg_name):
        """Whether a package with the supplied name exists."""
        return pkg_name in self._pkg_checker
        if pkg_name is None:
            return False

        # if the FastPackageChecker is already constructed, use it
        if self._fast_package_checker:
            return pkg_name in self._pkg_checker

        # if not, check for the package.py file
        path = self.filename_for_package_name(pkg_name)
        return os.path.exists(path)

    def last_mtime(self):
        """Time a package file in this repo was last updated."""
@@ -1255,23 +1268,6 @@ def set_path(repo):
    return append


@contextlib.contextmanager
def swap(repo_path):
    """Temporarily use another RepoPath."""
    global path

    # swap out _path for repo_path
    saved = path
    remove_from_meta = set_path(repo_path)

    yield

    # restore _path and sys.meta_path
    if remove_from_meta:
        sys.meta_path.remove(repo_path)
    path = saved


@contextlib.contextmanager
def additional_repository(repository):
    """Adds temporarily a repository to the default one.
@@ -1284,6 +1280,34 @@ def additional_repository(repository):
    path.remove(repository)


@contextlib.contextmanager
def use_repositories(*paths_and_repos):
    """Use the repositories passed as arguments within the context manager.

    Args:
        *paths_and_repos: paths to the repositories to be used, or
            already constructed Repo objects

    Returns:
        Corresponding RepoPath object
    """
    global path

    # Construct a temporary RepoPath object from
    temporary_repositories = RepoPath(*paths_and_repos)

    # Swap the current repository out
    saved = path
    remove_from_meta = set_path(temporary_repositories)

    yield temporary_repositories

    # Restore _path and sys.meta_path
    if remove_from_meta:
        sys.meta_path.remove(temporary_repositories)
    path = saved
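Usage sketch for `use_repositories` above (the path is a placeholder, and the `exists` call assumes the RepoPath API mirrors the Repo method shown earlier in this diff):

    import spack.repo

    with spack.repo.use_repositories('/path/to/custom/repo') as repo_path:
        assert repo_path.exists('zlib')  # resolved against the custom repo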

class RepoError(spack.error.SpackError):
    """Superclass for repository-related errors."""

@@ -1316,7 +1340,7 @@ def __init__(self, name, repo=None):
        long_msg = None
        if name:
            if repo:
                msg = "Package '{0}' not found in repository '{1}'"
                msg = "Package '{0}' not found in repository '{1.root}'"
                msg = msg.format(name, repo)
            else:
                msg = "Package '{0}' not found.".format(name)

@@ -1,4 +1,4 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -12,14 +12,18 @@
import sys
import time
import types
import warnings
from six import string_types

import archspec.cpu

try:
    import clingo
    # There may be a better way to detect this
    clingo_cffi = hasattr(clingo.Symbol, '_rep')
except ImportError:
    clingo = None
    clingo_cffi = False

import llnl.util.lang
import llnl.util.tty as tty
@@ -36,10 +40,17 @@
import spack.package
import spack.package_prefs
import spack.repo
import spack.bootstrap
import spack.variant
import spack.version


if sys.version_info >= (3, 3):
    from collections.abc import Sequence  # novm
else:
    from collections import Sequence


class Timer(object):
    """Simple timer for timing phases of a solve"""
    def __init__(self):
@@ -64,7 +75,7 @@ def write(self, out=sys.stdout):
def issequence(obj):
    if isinstance(obj, string_types):
        return False
    return isinstance(obj, (collections.Sequence, types.GeneratorType))
    return isinstance(obj, (Sequence, types.GeneratorType))


def listify(args):
@@ -113,11 +124,11 @@ def __call__(self, *args):
    def symbol(self, positive=True):
        def argify(arg):
            if isinstance(arg, bool):
                return str(arg)
                return clingo.String(str(arg))
            elif isinstance(arg, int):
                return arg
                return clingo.Number(arg)
            else:
                return str(arg)
                return clingo.String(str(arg))
        return clingo.Function(
            self.name, [argify(arg) for arg in self.args], positive=positive)

@@ -204,19 +215,6 @@ def print_cores(self):
            *sorted(str(symbol) for symbol in core))


def _normalize(body):
    """Accept an AspAnd object or a single Symbol and return a list of
    symbols.
    """
    if isinstance(body, clingo.Symbol):
        args = [body]
    elif hasattr(body, 'symbol'):
        args = [body.symbol()]
    else:
        raise TypeError("Invalid typee: ", type(body))
    return args


def _normalize_packages_yaml(packages_yaml):
    normalized_yaml = copy.copy(packages_yaml)
    for pkg_name in packages_yaml:
@@ -251,7 +249,17 @@ def __init__(self, cores=True, asp=None):
            asp (file-like): optional stream to write a text-based ASP program
            for debugging or verification.
        """
        assert clingo, "PyclingoDriver requires clingo with Python support"
        global clingo
        if not clingo:
            # TODO: Find a way to vendor the concrete spec
            # in a cross-platform way
            with spack.bootstrap.ensure_bootstrap_configuration():
                clingo_spec = spack.bootstrap.clingo_root_spec()
                clingo_spec._old_concretize()
                spack.bootstrap.make_module_available(
                    'clingo', spec=clingo_spec, install=True
                )
                import clingo
        self.out = asp or llnl.util.lang.Devnull()
        self.cores = cores

@@ -274,19 +282,14 @@ def newline(self):

    def fact(self, head):
        """ASP fact (a rule without a body)."""
        symbols = _normalize(head)
        self.out.write("%s.\n" % ','.join(str(a) for a in symbols))
        symbol = head.symbol() if hasattr(head, 'symbol') else head

        atoms = {}
        for s in symbols:
            atoms[s] = self.backend.add_atom(s)
        self.out.write("%s.\n" % str(symbol))

        self.backend.add_rule(
            [atoms[s] for s in symbols], [], choice=self.cores
        )
        atom = self.backend.add_atom(symbol)
        self.backend.add_rule([atom], [], choice=self.cores)
        if self.cores:
            for s in symbols:
                self.assumptions.append(atoms[s])
            self.assumptions.append(atom)

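A standalone sketch of the simplified backend interaction in `fact()` above, using the clingo Python API directly; the example atom is arbitrary:

    import clingo

    ctl = clingo.Control()
    sym = clingo.Function('node', [clingo.String('zlib')])
    with ctl.backend() as backend:
        atom = backend.add_atom(sym)
        # A choice rule with an empty body, as used when cores=True, so
        # the atom can participate in unsatisfiable-core extraction.
        backend.add_rule([atom], [], choice=True)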
    def solve(
            self, solver_setup, specs, dump=None, nmodels=0,
@@ -330,18 +333,26 @@ def solve(
        def on_model(model):
            models.append((model.cost, model.symbols(shown=True, terms=True)))

        solve_result = self.control.solve(
            assumptions=self.assumptions,
            on_model=on_model,
            on_core=cores.append
        )
        solve_kwargs = {"assumptions": self.assumptions,
                        "on_model": on_model,
                        "on_core": cores.append}
        if clingo_cffi:
            solve_kwargs["on_unsat"] = cores.append
        solve_result = self.control.solve(**solve_kwargs)
        timer.phase("solve")

        # once done, construct the solve result
        result.satisfiable = solve_result.satisfiable

        def stringify(x):
            return x.string or str(x)
            if clingo_cffi:
                # Clingo w/ CFFI will throw an exception on failure
                try:
                    return x.string
                except RuntimeError:
                    return str(x)
            else:
                return x.string or str(x)

        if result.satisfiable:
            builder = SpecBuilder(specs)
@@ -383,6 +394,8 @@ class SpackSolverSetup(object):
    def __init__(self):
        self.gen = None  # set by setup()
        self.possible_versions = {}
        self.versions_in_package_py = {}
        self.versions_from_externals = {}
        self.possible_virtuals = None
        self.possible_compilers = []
        self.variant_values_from_specs = set()
@@ -436,9 +449,18 @@ def pkg_version_rules(self, pkg):
            # c) Numeric or string comparison
            v)

        most_to_least_preferred = sorted(
            self.possible_versions[pkg.name], key=keyfn, reverse=True
        )
        # Compute which versions appear only in packages.yaml
        from_externals = self.versions_from_externals[pkg.name]
        from_package_py = self.versions_in_package_py[pkg.name]
        only_from_externals = from_externals - from_package_py

        # These versions don't need a default weight, as they are
        # already weighted in a more favorable way when accounting
        # for externals. Assigning them a default weight would be
        # equivalent to state that they are also declared in
        # the package.py file
        considered = self.possible_versions[pkg.name] - only_from_externals
        most_to_least_preferred = sorted(considered, key=keyfn, reverse=True)

        for i, v in enumerate(most_to_least_preferred):
            self.gen.fact(fn.version_declared(pkg.name, v, i))
@@ -470,28 +492,14 @@ def target_ranges(self, spec, single_target_fn):

    def conflict_rules(self, pkg):
        for trigger, constraints in pkg.conflicts.items():
            trigger_id = self.condition(
                spack.spec.Spec(trigger), name=pkg.name
            )
            self.gen.fact(fn.conflict_trigger(trigger_id))

            for constraint, _ in constraints:
                constraint_body = spack.spec.Spec(pkg.name)
                constraint_body.constrain(constraint)
                constraint_body.constrain(trigger)

                clauses = []
                for s in constraint_body.traverse():
                    clauses += self.spec_clauses(s, body=True)

                # TODO: find a better way to generate clauses for integrity
                # TODO: constraints, instead of generating them for the body
                # TODO: of a rule and filter unwanted functions.
                to_be_filtered = ['node_compiler_hard']
                clauses = [x for x in clauses if x.name not in to_be_filtered]

                # Emit facts based on clauses
                condition_id = next(self._condition_id_counter)
                self.gen.fact(fn.conflict(condition_id, pkg.name))
                for clause in clauses:
                    self.gen.fact(fn.conflict_condition(
                        condition_id, clause.name, *clause.args
                    ))
                constraint_id = self.condition(constraint, name=pkg.name)
                self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id))
                self.gen.newline()

    def available_compilers(self):
@@ -596,11 +604,16 @@ def pkg_rules(self, pkg, tests):
                values = []
            elif isinstance(values, spack.variant.DisjointSetsOfValues):
                union = set()
                for s in values.sets:
                # Encode the disjoint sets in the logic program
                for sid, s in enumerate(values.sets):
                    for value in s:
                        self.gen.fact(fn.variant_value_from_disjoint_sets(
                            pkg.name, name, value, sid
                        ))
                    union.update(s)
                values = union

            # make sure that every variant has at least one posisble value
            # make sure that every variant has at least one possible value
            if not values:
                values = [variant.default]

@@ -629,46 +642,44 @@ def pkg_rules(self, pkg, tests):
            )
        )

    def _condition_facts(
            self, pkg_name, cond_spec, dep_spec,
            cond_fn, require_fn, impose_fn
    ):
    def condition(self, required_spec, imposed_spec=None, name=None):
        """Generate facts for a dependency or virtual provider condition.

        Arguments:
            pkg_name (str): name of the package that triggers the
                condition (e.g., the dependent or the provider)
            cond_spec (Spec): the dependency spec representing the
                condition that needs to be True (can be anonymous)
            dep_spec (Spec): the spec of the dependency or provider
                to be depended on/provided if the condition holds.
            cond_fn (AspFunction): function to use to declare the condition;
                will be called with the cond id, pkg_name, and dep_spec.name
            require_fn (AspFunction): function to use to declare the conditions
                required of the dependent/provider to trigger
            impose_fn (AspFunction): function to use for constraints imposed
                on the dependency/virtual
            required_spec (Spec): the spec that triggers this condition
            imposed_spec (optional, Spec): the spec with constraints that
                are imposed when this condition is triggered
            name (optional, str): name for `required_spec` (required if
                required_spec is anonymous, ignored if not)

        Returns:
            (int): id of the condition created by this function
        """
        named_cond = required_spec.copy()
        named_cond.name = named_cond.name or name
        assert named_cond.name, "must provide name for anonymous condtions!"

        condition_id = next(self._condition_id_counter)
        named_cond = cond_spec.copy()
        named_cond.name = named_cond.name or pkg_name
        self.gen.fact(fn.condition(condition_id))

        self.gen.fact(cond_fn(condition_id, pkg_name, dep_spec.name))
        # requirements trigger the condition
        requirements = self.checked_spec_clauses(
            named_cond, body=True, required_from=name)
        for pred in requirements:
            self.gen.fact(
                fn.condition_requirement(condition_id, pred.name, *pred.args)
            )

        # conditions that trigger the condition
        conditions = self.spec_clauses(named_cond, body=True)
        for pred in conditions:
            self.gen.fact(require_fn(condition_id, pred.name, *pred.args))

        imposed_constraints = self.spec_clauses(dep_spec)
        for pred in imposed_constraints:
            # imposed "node"-like conditions are no-ops
            if pred.name in ("node", "virtual_node"):
                continue
            self.gen.fact(impose_fn(condition_id, pred.name, *pred.args))
        if imposed_spec:
            imposed_constraints = self.checked_spec_clauses(
                imposed_spec, body=False, required_from=name)
            for pred in imposed_constraints:
                # imposed "node"-like conditions are no-ops
                if pred.name in ("node", "virtual_node"):
                    continue
                self.gen.fact(
                    fn.imposed_constraint(condition_id, pred.name, *pred.args)
                )

        return condition_id

||||
@@ -678,36 +689,36 @@ def package_provider_rules(self, pkg):
|
||||
|
||||
for provided, whens in pkg.provided.items():
|
||||
for when in whens:
|
||||
self._condition_facts(
|
||||
pkg.name, when, provided,
|
||||
fn.provider_condition,
|
||||
fn.required_provider_condition,
|
||||
fn.imposed_dependency_condition
|
||||
)
|
||||
|
||||
condition_id = self.condition(when, provided, pkg.name)
|
||||
self.gen.fact(fn.provider_condition(
|
||||
condition_id, when.name, provided.name
|
||||
))
|
||||
self.gen.newline()
|
||||
|
||||
def package_dependencies_rules(self, pkg, tests):
|
||||
"""Translate 'depends_on' directives into ASP logic."""
|
||||
for _, conditions in sorted(pkg.dependencies.items()):
|
||||
for cond, dep in sorted(conditions.items()):
|
||||
condition_id = self._condition_facts(
|
||||
pkg.name, cond, dep.spec,
|
||||
fn.dependency_condition,
|
||||
fn.required_dependency_condition,
|
||||
fn.imposed_dependency_condition
|
||||
)
|
||||
deptypes = dep.type.copy()
|
||||
# Skip test dependencies if they're not requested
|
||||
if not tests:
|
||||
deptypes.discard("test")
|
||||
|
||||
for t in sorted(dep.type):
|
||||
# Skip test dependencies if they're not requested at all
|
||||
if t == 'test' and not tests:
|
||||
continue
|
||||
# ... or if they are requested only for certain packages
|
||||
if not isinstance(tests, bool) and pkg.name not in tests:
|
||||
deptypes.discard("test")
|
||||
|
||||
# ... or if they are requested only for certain packages
|
||||
if t == 'test' and (not isinstance(tests, bool)
|
||||
and pkg.name not in tests):
|
||||
continue
|
||||
# if there are no dependency types to be considered
|
||||
# anymore, don't generate the dependency
|
||||
if not deptypes:
|
||||
continue
|
||||
|
||||
condition_id = self.condition(cond, dep.spec, pkg.name)
|
||||
self.gen.fact(fn.dependency_condition(
|
||||
condition_id, pkg.name, dep.spec.name
|
||||
))
|
||||
|
||||
for t in sorted(deptypes):
|
||||
# there is a declared dependency of type t
|
||||
self.gen.fact(fn.dependency_type(condition_id, t))
|
||||
|
||||
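The test-dependency filtering above is easy to get wrong, so here is the decision table as a tiny standalone function (names hypothetical); it mirrors the new logic: keep 'test' only when tests were requested globally or for this package by name:

    def effective_deptypes(deptypes, pkg_name, tests):
        kept = set(deptypes)
        if tests is not True and (not tests or pkg_name not in tests):
            kept.discard('test')
        return sorted(kept)

    assert effective_deptypes({'build', 'test'}, 'zlib', False) == ['build']
    assert effective_deptypes({'build', 'test'}, 'zlib', ['zlib']) == ['build', 'test']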
@@ -777,24 +788,14 @@ def external_packages(self):
                    pkg_name, str(version), weight, id
                ))

            # Declare external conditions with a local index into packages.yaml
            for local_idx, spec in enumerate(external_specs):
                condition_id = next(self._condition_id_counter)

                # Declare the global ID associated with this external spec
                self.gen.fact(fn.external_spec(condition_id, pkg_name))

                # Local index into packages.yaml
                condition_id = self.condition(spec)
                self.gen.fact(
                    fn.external_spec_index(condition_id, pkg_name, local_idx))

                # Add conditions to be satisfied for this external
                    fn.possible_external(condition_id, pkg_name, local_idx)
                )
                self.versions_from_externals[spec.name].add(spec.version)
                self.possible_versions[spec.name].add(spec.version)
                clauses = self.spec_clauses(spec, body=True)
                for clause in clauses:
                    self.gen.fact(
                        fn.external_spec_condition(
                            condition_id, clause.name, *clause.args)
                    )
                self.gen.newline()

    def preferred_variants(self, pkg_name):
@@ -869,6 +870,20 @@ def flag_defaults(self):
        self.gen.fact(fn.compiler_version_flag(
            compiler.name, compiler.version, name, flag))

    def checked_spec_clauses(self, *args, **kwargs):
        """Wrap a call to spec clauses into a try/except block that raises
        a comprehensible error message in case of failure.
        """
        requestor = kwargs.pop('required_from', None)
        try:
            clauses = self.spec_clauses(*args, **kwargs)
        except RuntimeError as exc:
            msg = str(exc)
            if requestor:
                msg += ' [required from package "{0}"]'.format(requestor)
            raise RuntimeError(msg)
        return clauses

    def spec_clauses(self, spec, body=False, transitive=True):
        """Return a list of clauses for a spec mandates are true.

@@ -937,9 +952,14 @@ class Body(object):

            # validate variant value
            reserved_names = spack.directives.reserved_names
            if (not spec.virtual and vname not in reserved_names):
                variant_def = spec.package.variants[vname]
                variant_def.validate_or_raise(variant, spec.package)
            if not spec.virtual and vname not in reserved_names:
                try:
                    variant_def = spec.package.variants[vname]
                except KeyError:
                    msg = 'variant "{0}" not found in package "{1}"'
                    raise RuntimeError(msg.format(vname, spec.name))
                else:
                    variant_def.validate_or_raise(variant, spec.package)

            clauses.append(f.variant_value(spec.name, vname, value))

@@ -982,11 +1002,14 @@ class Body(object):

    def build_version_dict(self, possible_pkgs, specs):
        """Declare any versions in specs not declared in packages."""
        self.possible_versions = collections.defaultdict(lambda: set())
        self.possible_versions = collections.defaultdict(set)
        self.versions_in_package_py = collections.defaultdict(set)
        self.versions_from_externals = collections.defaultdict(set)

        for pkg_name in possible_pkgs:
            pkg = spack.repo.get(pkg_name)
            for v in pkg.versions:
                self.versions_in_package_py[pkg_name].add(v)
                self.possible_versions[pkg_name].add(v)

        for spec in specs:
@@ -1003,7 +1026,9 @@ def _supported_targets(self, compiler_name, compiler_version, targets):

        for target in targets:
            try:
                target.optimization_flags(compiler_name, compiler_version)
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore")
                    target.optimization_flags(compiler_name, compiler_version)
                supported.append(target)
            except archspec.cpu.UnsupportedMicroarchitecture:
                continue
@@ -1091,7 +1116,6 @@ def target_defaults(self, specs):
            if not spec.architecture or not spec.architecture.target:
                continue

            print("TTYPE:", type(platform.target(spec.target.name)))
            target = archspec.cpu.TARGETS.get(spec.target.name)
            if not target:
                self.target_ranges(spec, None)
@@ -1439,7 +1463,7 @@ def node_flag_source(self, pkg, source):
    def no_flags(self, pkg, flag_type):
        self._specs[pkg].compiler_flags[flag_type] = []

    def external_spec_selected(self, condition_id, pkg, idx):
    def external_spec_selected(self, pkg, idx):
        """This means that the external spec and index idx
        has been selected for this package.
        """

@@ -14,13 +14,15 @@ version_declared(Package, Version) :- version_declared(Package, Version, _).
1 { version(Package, Version) : version_declared(Package, Version) } 1
 :- node(Package).

version_weight(Package, Weight)
possible_version_weight(Package, Weight)
 :- version(Package, Version), version_declared(Package, Version, Weight),
    not preferred_version_declared(Package, Version, _).

version_weight(Package, Weight)
possible_version_weight(Package, Weight)
 :- version(Package, Version), preferred_version_declared(Package, Version, Weight).

1 { version_weight(Package, Weight) : possible_version_weight(Package, Weight) } 1 :- node(Package).

% version_satisfies implies that exactly one of the satisfying versions
% is the package's version, and vice versa.
1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1
@@ -31,18 +33,66 @@ version_satisfies(Package, Constraint)
#defined preferred_version_declared/3.
#defined version_satisfies/3.

%-----------------------------------------------------------------------------
% Spec conditions and imposed constraints
%
% Given Spack directives like these:
%   depends_on("foo@1.0+bar", when="@2.0+variant")
%   provides("mpi@2:", when="@1.9:")
%
% The conditions are `@2.0+variant` and `@1.9:`, and the imposed constraints
% are `@1.0+bar` on `foo` and `@2:` on `mpi`.
%-----------------------------------------------------------------------------
% conditions are specified with `condition_requirement` and hold when
% corresponding spec attributes hold.
condition_holds(ID) :-
  condition(ID);
  attr(Name, A1) : condition_requirement(ID, Name, A1);
  attr(Name, A1, A2) : condition_requirement(ID, Name, A1, A2);
  attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3).

% condition_holds(ID) implies all imposed_constraints, unless do_not_impose(ID)
% is derived. This allows imposed constraints to be canceled in special cases.
impose(ID) :- condition_holds(ID), not do_not_impose(ID).

% conditions that hold impose constraints on other specs
attr(Name, A1) :- impose(ID), imposed_constraint(ID, Name, A1).
attr(Name, A1, A2) :- impose(ID), imposed_constraint(ID, Name, A1, A2).
attr(Name, A1, A2, A3) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A3).

#defined condition/1.
#defined condition_requirement/3.
#defined condition_requirement/4.
#defined condition_requirement/5.
#defined imposed_constraint/3.
#defined imposed_constraint/4.
#defined imposed_constraint/5.

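To make the encoding concrete, here is a hedged sketch (attribute spellings are illustrative, not verbatim solver output) of the ground facts the Python setup would emit for `depends_on("foo@1.0+bar", when="@2.0+variant")` in a package `baz`:

    facts = [
        'condition(42).',
        'condition_requirement(42, "node", "baz").',
        'condition_requirement(42, "version_satisfies", "baz", "2.0").',
        'condition_requirement(42, "variant_value", "baz", "variant", "True").',
        'imposed_constraint(42, "version_satisfies", "foo", "1.0").',
        'imposed_constraint(42, "variant_value", "foo", "bar", "True").',
        'dependency_condition(42, "baz", "foo").',
    ]
    print('\n'.join(facts))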
%-----------------------------------------------------------------------------
% Dependency semantics
%-----------------------------------------------------------------------------
% Dependencies of any type imply that one package "depends on" another
depends_on(Package, Dependency) :- depends_on(Package, Dependency, _).

% a dependency holds if its condition holds
dependency_holds(Package, Dependency, Type) :-
  dependency_condition(ID, Package, Dependency),
  dependency_type(ID, Type),
  condition_holds(ID),
  not external(Package).

% We cut off dependencies of externals (as we don't really know them).
% Don't impose constraints on dependencies that don't exist.
do_not_impose(ID) :-
  not dependency_holds(Package, Dependency, _),
  dependency_condition(ID, Package, Dependency).

% declared dependencies are real if they're not virtual AND
% the package is not an external
% the package is not an external.
% They're only triggered if the associated dependency condition holds.
depends_on(Package, Dependency, Type)
 :- dependency_conditions(Package, Dependency, Type),
    not virtual(Dependency),
    not external(Package).
 :- dependency_holds(Package, Dependency, Type),
    not virtual(Dependency).

% every root must be a node
node(Package) :- root(Package).
@@ -64,74 +114,19 @@ path(Parent, Child) :- depends_on(Parent, Child).
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
:- path(A, B), path(B, A).

%-----------------------------------------------------------------------------
% Conditional dependencies
%
% This takes care of `when=SPEC` in `depends_on("foo@1.0+bar", when="SPEC")`.
%-----------------------------------------------------------------------------
% if any individual condition below is true, trigger the dependency.
dependency_conditions(Package, Dependency, Type) :-
  dependency_conditions_hold(ID, Package, Dependency),
  dependency_type(ID, Type).

#defined dependency_type/2.

% collect all the dependency conditions into a single conditional rule
% distinguishing between Parent and Package (Arg1) is needed to account
% for conditions like:
%
%     depends_on('patchelf@0.9', when='@1.0:1.1 ^python@:2')
%
% that include dependencies
dependency_conditions_hold(ID, Parent, Dependency) :-
  attr(Name, Arg1) : required_dependency_condition(ID, Name, Arg1);
  attr(Name, Arg1, Arg2) : required_dependency_condition(ID, Name, Arg1, Arg2);
  attr(Name, Arg1, Arg2, Arg3) : required_dependency_condition(ID, Name, Arg1, Arg2, Arg3);
  dependency_condition(ID, Parent, Dependency);
  % There must be at least a dependency type declared,
  % otherwise the dependency doesn't hold
  dependency_type(ID, _);
  node(Parent);
  not external(Parent).

#defined dependency_condition/3.
#defined required_dependency_condition/3.
#defined required_dependency_condition/4.
#defined required_dependency_condition/5.

%-----------------------------------------------------------------------------
% Imposed constraints on dependencies
%
% This handles the `@1.0+bar` in `depends_on("foo@1.0+bar", when="SPEC")`, or
% the `mpi@2:` in `provides("mpi@2:", when="@1.9:")`.
%-----------------------------------------------------------------------------
% NOTE: `attr(Name, Arg1)` is omitted here b/c the only single-arg attribute is
% NOTE: `node()`, which is handled above under "Dependency Semantics"

attr(Name, Arg1, Arg2) :-
  dependency_conditions_hold(ID, Package, Dependency),
  imposed_dependency_condition(ID, Name, Arg1, Arg2).

attr(Name, Arg1, Arg2, Arg3) :-
  dependency_conditions_hold(ID, Package, Dependency),
  imposed_dependency_condition(ID, Name, Arg1, Arg2, Arg3).

#defined imposed_dependency_condition/4.
#defined imposed_dependency_condition/5.

%-----------------------------------------------------------------------------
% Conflicts
%-----------------------------------------------------------------------------
:- not external(Package) : conflict_condition(ID, "node", Package);
   attr(Name, Arg1) : conflict_condition(ID, Name, Arg1);
   attr(Name, Arg1, Arg2) : conflict_condition(ID, Name, Arg1, Arg2);
   attr(Name, Arg1, Arg2, Arg3) : conflict_condition(ID, Name, Arg1, Arg2, Arg3);
   conflict(ID, Package).
:- node(Package),
   not external(Package),
   conflict(Package, TriggerID, ConstraintID),
   condition_holds(TriggerID),
   condition_holds(ConstraintID).

#defined conflict/2.
#defined conflict_condition/3.
#defined conflict_condition/4.
#defined conflict_condition/5.
#defined conflict/3.

%-----------------------------------------------------------------------------
% Virtual dependencies
@@ -140,10 +135,15 @@ attr(Name, Arg1, Arg2, Arg3) :-
% if a package depends on a virtual, it's not external and we have a
% provider for that virtual then it depends on the provider
depends_on(Package, Provider, Type)
 :- dependency_conditions(Package, Virtual, Type),
 :- dependency_holds(Package, Virtual, Type),
    provides_virtual(Provider, Virtual),
    not external(Package).

% dependencies on virtuals also imply that the virtual is a virtual node
virtual_node(Virtual)
 :- dependency_holds(Package, Virtual, Type),
    virtual(Virtual), not external(Package).

% if there's a virtual node, we must select one provider
1 { provides_virtual(Package, Virtual) : possible_provider(Package, Virtual) } 1
 :- virtual_node(Virtual).
@@ -153,32 +153,21 @@ virtual_node(Virtual) :- virtual_root(Virtual).
1 { root(Package) : provides_virtual(Package, Virtual) } 1
 :- virtual_root(Virtual).

% all virtual providers come from provider conditions like this
dependency_conditions_hold(ID, Provider, Virtual) :-
  attr(Name, Arg1) : required_provider_condition(ID, Name, Arg1);
  attr(Name, Arg1, Arg2) : required_provider_condition(ID, Name, Arg1, Arg2);
  attr(Name, Arg1, Arg2, Arg3) : required_provider_condition(ID, Name, Arg1, Arg2, Arg3);
  virtual(Virtual);
  provider_condition(ID, Provider, Virtual).

% The provider provides the virtual if some provider condition holds.
provides_virtual(Provider, Virtual) :-
   provider_condition(ID, Provider, Virtual),
   dependency_conditions_hold(ID, Provider, Virtual),
   condition_holds(ID),
   virtual(Virtual).

% a node that provides a virtual is a provider
provider(Package, Virtual)
 :- node(Package), provides_virtual(Package, Virtual).

% dependencies on virtuals also imply that the virtual is a virtual node
virtual_node(Virtual)
 :- dependency_conditions(Package, Virtual, Type),
    virtual(Virtual), not external(Package).

% for any virtual, there can be at most one provider in the DAG
0 { node(Package) : provides_virtual(Package, Virtual) } 1 :- virtual(Virtual).

#defined possible_provider/2.

%-----------------------------------------------------------------------------
% Virtual dependency weights
%-----------------------------------------------------------------------------
@@ -302,32 +291,29 @@ external(Package) :- external_only(Package), node(Package).
% a package is a real_node if it is not external
real_node(Package) :- node(Package), not external(Package).

% if an external version is selected, the package is external and
% we are using the corresponding spec
external(Package) :-
    version(Package, Version), version_weight(Package, Weight),
    external_version_declared(Package, Version, Weight, ID).
% a package is external if we are using an external spec for it
external(Package) :- external_spec_selected(Package, _).

% we can't use the weight for an external version if we don't use the
% corresponding external spec.
:- version(Package, Version),
   version_weight(Package, Weight),
   external_version_declared(Package, Version, Weight, ID),
   not external(Package).

% determine if an external spec has been selected
external_spec_selected(ID, Package, LocalIndex) :-
    version(Package, Version), version_weight(Package, Weight),
    external_spec_index(ID, Package, LocalIndex),
    external_version_declared(Package, Version, Weight, LocalIndex),
    external_spec_conditions_hold(ID, Package).
external_spec_selected(Package, LocalIndex) :-
    external_conditions_hold(Package, LocalIndex),
    node(Package).

% determine if all the conditions on an external spec hold. If they do
% the spec can be selected.
external_spec_conditions_hold(ID, Package) :-
    attr(Name, Arg1) : external_spec_condition(ID, Name, Arg1);
    attr(Name, Arg1, Arg2) : external_spec_condition(ID, Name, Arg1, Arg2);
    attr(Name, Arg1, Arg2, Arg3) : external_spec_condition(ID, Name, Arg1, Arg2, Arg3);
    external_spec(ID, Package);
    node(Package).
external_conditions_hold(Package, LocalIndex) :-
    possible_external(ID, Package, LocalIndex), condition_holds(ID).

% it cannot happen that a spec is external, but none of the external specs
% conditions hold.
:- external(Package), not external_spec_conditions_hold(_, Package).
:- external(Package), not external_conditions_hold(Package, _).

#defined possible_external/3.
#defined external_spec_index/3.
#defined external_spec_condition/3.
#defined external_spec_condition/4.
@@ -360,6 +346,15 @@ variant_set(Package, Variant) :- variant_set(Package, Variant, _).
% A variant cannot have a value that is not also a possible value
:- variant_value(Package, Variant, Value), not variant_possible_value(Package, Variant, Value).

% Some multi valued variants accept multiple values from disjoint sets.
% Ensure that we respect that constraint and we don't pick values from more
% than one set at once
:- variant_value(Package, Variant, Value1),
   variant_value(Package, Variant, Value2),
   variant_value_from_disjoint_sets(Package, Variant, Value1, Set1),
   variant_value_from_disjoint_sets(Package, Variant, Value2, Set2),
   Set1 != Set2.

% variant_set is an explicitly set variant value. If it's not 'set',
% we revert to the default value. If it is set, we force the set value
variant_value(Package, Variant, Value)
@@ -367,23 +362,40 @@ variant_value(Package, Variant, Value)
    variant(Package, Variant),
    variant_set(Package, Variant, Value).

% prefer default values.
% The rules below allow us to prefer default values for variants
% whenever possible. If a variant is set in a spec, or if it is
% specified in an external, we score it as if it was a default value.
variant_not_default(Package, Variant, Value, 1)
 :- variant_value(Package, Variant, Value),
    not variant_default_value(Package, Variant, Value),
    not variant_set(Package, Variant, Value),
    not external_with_variant_set(Package, Variant, Value),
    node(Package).

% We are using the default value for a variant
variant_not_default(Package, Variant, Value, 0)
 :- variant_value(Package, Variant, Value),
    variant_default_value(Package, Variant, Value),
    node(Package).

% The variant is set in the spec
variant_not_default(Package, Variant, Value, 0)
 :- variant_value(Package, Variant, Value),
    variant_set(Package, Variant, Value),
    node(Package).

% The variant is set in an external spec
external_with_variant_set(Package, Variant, Value)
 :- variant_value(Package, Variant, Value),
    condition_requirement(ID, "variant_value", Package, Variant, Value),
    possible_external(ID, Package, _),
    external(Package),
    node(Package).

variant_not_default(Package, Variant, Value, 0)
 :- variant_value(Package, Variant, Value),
    external_with_variant_set(Package, Variant, Value),
    node(Package).

% The default value for a variant in a package is what is written
% in the package.py file, unless some preference is set in packages.yaml
@@ -419,6 +431,7 @@ variant_single_value(Package, "dev_path")
|
||||
#defined variant_possible_value/3.
#defined variant_default_value_from_packages_yaml/3.
#defined variant_default_value_from_package_py/3.
#defined variant_value_from_disjoint_sets/4.

%-----------------------------------------------------------------------------
% Platform semantics
@@ -459,6 +472,8 @@ node_os_inherit(Dependency, OS)
    not node_os_set(Dependency).
node_os_inherit(Package) :- node_os_inherit(Package, _).

node_os(Package, OS) :- node_os_inherit(Package, OS).

% fall back to default if not set or inherited
node_os(Package, OS)
  :- node(Package),
@@ -582,36 +597,24 @@ node_compiler_version(Package, Compiler, Version) :- node_compiler_version_set(P
    not compiler_supports_os(Compiler, Version, OS),
    not allow_compiler(Compiler, Version).

% If the compiler is what was prescribed from command line etc.
% or is the same as a root node, there is a version match
% If a package and one of its dependencies don't have the
% same compiler there's a mismatch.
compiler_mismatch(Package, Dependency)
  :- depends_on(Package, Dependency),
     node_compiler_version(Package, Compiler1, _),
     node_compiler_version(Dependency, Compiler2, _),
     Compiler1 != Compiler2.

% Compiler prescribed in the root spec
node_compiler_version_match_pref(Package, Compiler, V)
  :- node_compiler_set(Package, Compiler),
     node_compiler_version(Package, Compiler, V),
     not external(Package).

% Compiler inherited from a parent node
node_compiler_version_match_pref(Dependency, Compiler, V)
  :- depends_on(Package, Dependency),
     node_compiler_version_match_pref(Package, Compiler, V),
     node_compiler_version(Dependency, Compiler, V),
     not node_compiler_set(Dependency, Compiler).

% Compiler inherited from the root package
node_compiler_version_match_pref(Dependency, Compiler, V)
  :- depends_on(Package, Dependency),
     node_compiler_version(Package, Compiler, V), root(Package),
     node_compiler_version(Dependency, Compiler, V),
     not node_compiler_set(Dependency, Compiler).

compiler_version_match(Package, 1)
  :- node_compiler_version(Package, Compiler, V),
     node_compiler_version_match_pref(Package, Compiler, V).
compiler_mismatch(Package, Dependency)
  :- depends_on(Package, Dependency),
     node_compiler_version(Package, Compiler, Version1),
     node_compiler_version(Dependency, Compiler, Version2),
     Version1 != Version2.

#defined node_compiler_set/2.
#defined node_compiler_version_set/3.
#defined compiler_supports_os/3.
#defined compiler_version_match/2.
#defined allow_compiler/2.

% compilers weighted by preference according to packages.yaml
@@ -751,12 +754,9 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency).
    not root(Package)
}.

% Try to maximize the number of compiler matches in the DAG,
% while minimizing the number of nodes. This is done because
% a maximization on the number of matches for compilers is highly
% correlated to a preference to have as many nodes as possible
#minimize{ 1@7,Package : node(Package) }.
#maximize{ Weight@7,Package : compiler_version_match(Package, Weight) }.
% Try to minimize the number of compiler mismatches in the DAG.
#minimize{ 0@7 : #true }.
#minimize{ 1@7,Package,Dependency : compiler_mismatch(Package, Dependency) }.

% Choose more recent versions for nodes
#minimize{
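Note: the concretize.lp hunks above drop the per-node compiler version matching rules in favor of a single compiler_mismatch penalty between dependents and dependencies. A rough Python sketch of the intended end result (assuming a working clingo-based concretizer; illustrative, not part of the diff):

import spack.spec

# after concretization, every node in the DAG should share the root's compiler
s = spack.spec.Spec('mpileaks').concretized()
assert all(node.compiler == s.compiler for node in s.traverse())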
@@ -24,4 +24,4 @@
#show compiler_weight/2.
#show node_target_match/2.
#show node_target_weight/2.
#show external_spec_selected/3.
#show external_spec_selected/2.
@@ -116,6 +116,13 @@
import spack.variant as vt
import spack.version as vn


if sys.version_info >= (3, 3):
    from collections.abc import Mapping  # novm
else:
    from collections import Mapping


__all__ = [
    'Spec',
    'parse',
@@ -2120,7 +2127,7 @@ def validate_detection(self):
        # which likely means the spec was created with Spec.from_detection
        msg = ('cannot validate "{0}" since it was not created '
               'using Spec.from_detection'.format(self))
        assert isinstance(self.extra_attributes, collections.Mapping), msg
        assert isinstance(self.extra_attributes, Mapping), msg

        # Validate the spec calling a package specific method
        validate_fn = getattr(
@@ -3141,25 +3148,23 @@ def satisfies(self, other, deps=True, strict=False, strict_deps=False):
        if other.concrete:
            return self.concrete and self.dag_hash() == other.dag_hash()

        # A concrete provider can satisfy a virtual dependency.
        if not self.virtual and other.virtual:
            try:
                pkg = spack.repo.get(self.fullname)
            except spack.repo.UnknownEntityError:
                # If we can't get package info on this spec, don't treat
                # it as a provider of this vdep.
                return False

            if pkg.provides(other.name):
                for provided, when_specs in pkg.provided.items():
                    if any(self.satisfies(when_spec, deps=False, strict=strict)
                           for when_spec in when_specs):
                        if provided.satisfies(other):
                            return True
            return False

        # Otherwise, first thing we care about is whether the name matches
        # If the names are different, we need to consider virtuals
        if self.name != other.name and self.name and other.name:
            # A concrete provider can satisfy a virtual dependency.
            if not self.virtual and other.virtual:
                try:
                    pkg = spack.repo.get(self.fullname)
                except spack.repo.UnknownEntityError:
                    # If we can't get package info on this spec, don't treat
                    # it as a provider of this vdep.
                    return False

                if pkg.provides(other.name):
                    for provided, when_specs in pkg.provided.items():
                        if any(self.satisfies(when, deps=False, strict=strict)
                               for when in when_specs):
                            if provided.satisfies(other):
                                return True
                return False

        # namespaces either match, or other doesn't require one.
@@ -4506,7 +4511,8 @@ def spec(self, name):
                break

            elif self.accept(HASH):
                # Get spec by hash and confirm it matches what we already have
                # Get spec by hash and confirm it matches any constraints we
                # already read in
                hash_spec = self.spec_by_hash()
                if hash_spec.satisfies(spec):
                    spec._dup(hash_spec)
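Note: the satisfies() hunk above hoists the virtual-provider check so it runs before the name comparison, instead of only when names differ. A minimal illustration (package names assumed from Spack's builtin repo; not part of the diff):

import spack.spec

mpich = spack.spec.Spec('mpich').concretized()
# a concrete provider now satisfies the virtual it provides up front
assert mpich.satisfies('mpi')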
@@ -23,6 +23,7 @@
configuration.

"""
import contextlib
import os
import re
import six
@@ -172,6 +173,16 @@ def _store():
    config_dict = spack.config.get('config')
    root, unpadded_root, projections = parse_install_tree(config_dict)
    hash_length = spack.config.get('config:install_hash_length')

    # Check that the user is not trying to install software into the store
    # reserved by Spack to bootstrap its own dependencies, since this would
    # lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
    # user installed software)
    if spack.paths.user_bootstrap_store == root:
        msg = ('please change the install tree root "{0}" in your '
               'configuration [path reserved for Spack internal use]')
        raise ValueError(msg.format(root))

    return Store(root=root,
                 unpadded_root=unpadded_root,
                 projections=projections,
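Note: the _store() hunk above rejects install trees that collide with Spack's reserved bootstrap store. A hypothetical misconfiguration that now fails fast (illustrative sketch, not part of the diff):

import spack.config
import spack.paths
import spack.store

spack.config.set('config:install_tree:root', spack.paths.user_bootstrap_store)
spack.store.reinitialize()
# any access to the store now raises
# ValueError: please change the install tree root ...
spack.store.store.db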
@@ -205,6 +216,19 @@ def _store_layout():
layout = llnl.util.lang.LazyReference(_store_layout)


def reinitialize():
    """Restore globals to the same state they would have at start-up"""
    global store
    global root, unpadded_root, db, layout

    store = llnl.util.lang.Singleton(_store)

    root = llnl.util.lang.LazyReference(_store_root)
    unpadded_root = llnl.util.lang.LazyReference(_store_unpadded_root)
    db = llnl.util.lang.LazyReference(_store_db)
    layout = llnl.util.lang.LazyReference(_store_layout)


def retrieve_upstream_dbs():
    other_spack_instances = spack.config.get('upstreams', {})

@@ -227,3 +251,29 @@ def _construct_upstream_dbs_from_install_roots(
        accumulated_upstream_dbs.insert(0, next_db)

    return accumulated_upstream_dbs


@contextlib.contextmanager
def use_store(store_or_path):
    """Use the store passed as argument within the context manager.

    Args:
        store_or_path: either a Store object or a path to where the
            store resides

    Returns:
        Store object associated with the context manager's store
    """
    global store

    # Normalize input arguments
    temporary_store = store_or_path
    if not isinstance(store_or_path, Store):
        temporary_store = Store(store_or_path)

    # Swap the store with the one just constructed and return it
    original_store, store = store, temporary_store
    yield temporary_store

    # Restore the original store
    store = original_store
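Note: a usage sketch for the new spack.store.use_store context manager defined above (the path is hypothetical):

import spack.store

with spack.store.use_store('/tmp/alternate-store') as store:
    # the global spack.store.store now points at the temporary store
    assert spack.store.store is store
# on exit the original global store is restored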
@@ -6,6 +6,7 @@
""" Test checks if the architecture class is created correctly and also that
    the functions are looking for the correct architecture name
"""
import itertools
import os
import platform as py_platform

@@ -116,20 +117,26 @@ def test_user_defaults(config):
    assert default_target == default_spec.architecture.target


@pytest.mark.parametrize('operating_system', [
    x for x in spack.architecture.platform().operating_sys
] + ["fe", "be", "frontend", "backend"])
@pytest.mark.parametrize('target', [
    x for x in spack.architecture.platform().targets
] + ["fe", "be", "frontend", "backend"])
def test_user_input_combination(config, operating_system, target):
    platform = spack.architecture.platform()
    spec = Spec("libelf os=%s target=%s" % (operating_system, target))
    spec.concretize()
    assert spec.architecture.os == str(
        platform.operating_system(operating_system)
    )
    assert spec.architecture.target == platform.target(target)
def test_user_input_combination(config):
    valid_keywords = ["fe", "be", "frontend", "backend"]

    possible_targets = ([x for x in spack.architecture.platform().targets]
                        + valid_keywords)

    possible_os = ([x for x in spack.architecture.platform().operating_sys]
                   + valid_keywords)

    for target, operating_system in itertools.product(
            possible_targets, possible_os
    ):
        platform = spack.architecture.platform()
        spec_str = "libelf os={0} target={1}".format(operating_system, target)
        spec = Spec(spec_str)
        spec.concretize()
        assert spec.architecture.os == str(
            platform.operating_system(operating_system)
        )
        assert spec.architecture.target == platform.target(target)


def test_operating_system_conversion_to_dict():
26
lib/spack/spack/test/bootstrap.py
Normal file
@@ -0,0 +1,26 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest

import spack.bootstrap
import spack.store


@pytest.mark.regression('22294')
def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
    # Prepare a custom store path. This should be in a writeable location
    # since Spack needs to initialize the DB.
    user_path = str(tmpdir.join('store'))
    # Reassign global variables in spack.store to the value
    # they would have at Spack startup.
    spack.store.reinitialize()
    # Set the custom user path
    spack.config.set('config:install_tree:root', user_path)

    # Test that within the context manager we use the bootstrap store
    # and that outside we restore the correct location
    with spack.bootstrap.ensure_bootstrap_configuration():
        assert str(spack.store.root) == spack.paths.user_bootstrap_store
    assert str(spack.store.root) == user_path
@@ -11,6 +11,7 @@
import spack.build_environment
import spack.config
import spack.spec
import spack.util.spack_yaml as syaml
from spack.paths import build_env_path
from spack.build_environment import dso_suffix, _static_to_shared_library
from spack.util.executable import Executable
@@ -299,6 +300,45 @@ def normpaths(paths):
    delattr(dep_pkg, 'libs')


def test_external_prefixes_last(mutable_config, mock_packages, working_env,
                                monkeypatch):
    # Sanity check: under normal circumstances paths associated with
    # dt-diamond-left would appear first. We'll mark it as external in
    # the test to check if the associated paths are placed last.
    assert 'dt-diamond-left' < 'dt-diamond-right'

    cfg_data = syaml.load_config("""\
dt-diamond-left:
  externals:
  - spec: dt-diamond-left@1.0
    prefix: /fake/path1
  buildable: false
""")
    spack.config.set("packages", cfg_data)
    top = spack.spec.Spec('dt-diamond').concretized()

    def _trust_me_its_a_dir(path):
        return True
    monkeypatch.setattr(
        os.path, 'isdir', _trust_me_its_a_dir
    )

    env_mods = EnvironmentModifications()
    spack.build_environment.set_build_environment_variables(
        top.package, env_mods, False)

    env_mods.apply_modifications()
    link_dir_var = os.environ['SPACK_LINK_DIRS']
    link_dirs = link_dir_var.split(':')
    external_lib_paths = set(['/fake/path1/lib', '/fake/path1/lib64'])
    # The external lib paths should be the last two entries of the list and
    # should not appear anywhere before the last two entries
    assert (set(os.path.normpath(x) for x in link_dirs[-2:]) ==
            external_lib_paths)
    assert not (set(os.path.normpath(x) for x in link_dirs[:-2]) &
                external_lib_paths)


def test_parallel_false_is_not_propagating(config, mock_packages):
    class AttributeHolder(object):
        pass
@@ -73,7 +73,7 @@ def test_specs_staging(config):
    b = mock_repo.add_package('b', [d, e], [default, default])
    mock_repo.add_package('a', [b, c], [default, default])

    with repo.swap(mock_repo):
    with repo.use_repositories(mock_repo):
        spec_a = Spec('a')
        spec_a.concretize()

@@ -12,56 +12,41 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev


@pytest.fixture()
def parser():
def job_parser():
    # --jobs needs to write to a command_line config scope, so this is the only
    # scope we create.
    p = argparse.ArgumentParser()
    arguments.add_common_arguments(p, ['jobs'])
    yield p
    # Cleanup the command line scope if it was set during tests
    spack.config.config.clear_caches()
    if 'command_line' in spack.config.config.scopes:
        spack.config.config.scopes['command_line'].clear()
    scopes = [spack.config.InternalConfigScope('command_line', {'config': {}})]

    with spack.config.use_configuration(*scopes):
        yield p


@pytest.fixture(params=[1, 2, 4, 8, 16, 32])
def ncores(monkeypatch, request):
    """Mocks having a machine with n cores for the purpose of
    computing config:build_jobs.
    """
    def _cpu_count():
        return request.param

    # Patch multiprocessing.cpu_count() to return the value we need
    monkeypatch.setattr(multiprocessing, 'cpu_count', _cpu_count)
    # Patch the configuration parts that have been cached already
    monkeypatch.setitem(spack.config.config_defaults['config'],
                        'build_jobs', min(16, request.param))
    monkeypatch.setitem(
        spack.config.config.scopes, '_builtin',
        spack.config.InternalConfigScope(
            '_builtin', spack.config.config_defaults
        ))
    return request.param


@pytest.mark.parametrize('cli_args,requested', [
    (['-j', '24'], 24),
    # Here we report the default if we have enough cores, as the cap
    # on the available number of cores will be taken care of in the test
    ([], 16)
])
def test_setting_parallel_jobs(parser, cli_args, ncores, requested):
    expected = min(requested, ncores)
    namespace = parser.parse_args(cli_args)
@pytest.mark.parametrize("ncores", [1, 2, 4, 8, 16, 32])
def test_setting_jobs_flag(job_parser, ncores, monkeypatch):
    monkeypatch.setattr(multiprocessing, 'cpu_count', lambda: ncores)
    namespace = job_parser.parse_args(['-j', '24'])
    expected = min(24, ncores)
    assert namespace.jobs == expected
    assert spack.config.get('config:build_jobs') == expected


def test_negative_integers_not_allowed_for_parallel_jobs(parser):
@pytest.mark.parametrize("ncores", [1, 2, 4, 8, 16, 32])
def test_omitted_job_flag(job_parser, ncores, monkeypatch):
    monkeypatch.setattr(multiprocessing, 'cpu_count', lambda: ncores)
    namespace = job_parser.parse_args([])
    assert namespace.jobs == min(ncores, 16)
    assert spack.config.get('config:build_jobs') is None


def test_negative_integers_not_allowed_for_parallel_jobs(job_parser):
    with pytest.raises(ValueError) as exc_info:
        parser.parse_args(['-j', '-2'])
        job_parser.parse_args(['-j', '-2'])

    assert 'expected a positive integer' in str(exc_info.value)
@@ -81,3 +66,40 @@ def test_parse_spec_flags_with_spaces(

    assert all(x not in s.variants for x in unexpected_variants)
    assert all(x in s.variants for x in expected_variants)


@pytest.mark.usefixtures('config')
def test_match_spec_env(mock_packages, mutable_mock_env_path):
    """
    Concretize a spec with non-default options in an environment. Make
    sure that when we ask for a matching spec when the environment is
    active that we get the instance concretized in the environment.
    """
    # Initial sanity check: we are planning on choosing a non-default
    # value, so make sure that is in fact not the default.
    check_defaults = spack.cmd.parse_specs(['a'], concretize=True)[0]
    assert not check_defaults.satisfies('foobar=baz')

    e = ev.create('test')
    e.add('a foobar=baz')
    e.concretize()
    with e:
        env_spec = spack.cmd.matching_spec_from_env(
            spack.cmd.parse_specs(['a'])[0])
        assert env_spec.satisfies('foobar=baz')
        assert env_spec.concrete


@pytest.mark.usefixtures('config')
def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
    e = ev.create('test')
    e.add('a foobar=baz')
    e.add('a foobar=fee')
    e.concretize()
    with e:
        with pytest.raises(
                spack.environment.SpackEnvironmentError) as exc_info:

            spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(['a'])[0])

    assert 'matches multiple specs' in exc_info.value.message
@@ -139,6 +139,19 @@ def test_concretize():
    assert any(x.name == 'mpileaks' for x in env_specs)


def test_env_uninstalled_specs(install_mockery, mock_fetch):
    e = ev.create('test')
    e.add('cmake-client')
    e.concretize()
    assert any(s.name == 'cmake-client' for s in e.uninstalled_specs())
    e.install_all()
    assert not any(s.name == 'cmake-client' for s in e.uninstalled_specs())
    e.add('mpileaks')
    e.concretize()
    assert not any(s.name == 'cmake-client' for s in e.uninstalled_specs())
    assert any(s.name == 'mpileaks' for s in e.uninstalled_specs())


def test_env_install_all(install_mockery, mock_fetch):
    e = ev.create('test')
    e.add('cmake-client')
@@ -330,7 +343,7 @@ def test_env_status_broken_view(
    # switch to a new repo that doesn't include the installed package
    # test that Spack detects the missing package and warns the user
    new_repo = MockPackageMultiRepo()
    with spack.repo.swap(new_repo):
    with spack.repo.use_repositories(new_repo):
        output = env('status')
        assert 'In environment test' in output
        assert 'Environment test includes out of date' in output
@@ -351,7 +364,7 @@ def test_env_activate_broken_view(
    # switch to a new repo that doesn't include the installed package
    # test that Spack detects the missing package and fails gracefully
    new_repo = MockPackageMultiRepo()
    with spack.repo.swap(new_repo):
    with spack.repo.use_repositories(new_repo):
        with pytest.raises(SpackCommandError):
            env('activate', '--sh', 'test')

@@ -929,7 +942,7 @@ def test_read_old_lock_and_write_new(tmpdir):
    y = mock_repo.add_package('y', [], [])
    mock_repo.add_package('x', [y], [build_only])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        x = Spec('x')
        x.concretize()

@@ -960,7 +973,7 @@ def test_read_old_lock_creates_backup(tmpdir):
    mock_repo = MockPackageMultiRepo()
    y = mock_repo.add_package('y', [], [])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        y = Spec('y')
        y.concretize()

@@ -997,7 +1010,7 @@ def noop(*args):
        pass
    setattr(mock_repo, 'dump_provenance', noop)

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        x_spec = Spec('x')
        x_concretized = x_spec.concretized()

@@ -1038,7 +1051,7 @@ def noop(*args):
        pass
    setattr(mock_repo, 'dump_provenance', noop)

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        y_spec = Spec('y ^z@3')
        y_concretized = y_spec.concretized()

@@ -2356,3 +2369,18 @@ def _write_helper_raise(self, x, y):
        e.clear()
        e.write()
    assert os.path.exists(str(spack_lock))


@pytest.mark.regression('23440')
def test_custom_version_concretize_together(tmpdir):
    # Custom versions should be permitted in specs when
    # concretizing together
    e = ev.create('custom_version')
    e.concretization = 'together'

    # Concretize a first time using 'mpich' as the MPI provider
    e.add('hdf5@myversion')
    e.add('mpich')
    e.concretize()

    assert any('hdf5@myversion' in spec for _, spec in e.concretized_specs())
@@ -27,7 +27,7 @@ def python_database(mock_packages, mutable_database):


@pytest.mark.db
def test_extensions(mock_packages, python_database, capsys):
def test_extensions(mock_packages, python_database, config, capsys):
    ext2 = Spec("py-extension2").concretized()

    def check_output(ni, na):
48
lib/spack/spack/test/cmd/fetch.py
Normal file
@@ -0,0 +1,48 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import pytest

import spack.environment as ev

from spack.main import SpackCommand, SpackCommandError


# everything here uses the mock_env_path
pytestmark = pytest.mark.usefixtures(
    "mutable_mock_env_path", "config", "mutable_mock_repo"
)


@pytest.mark.disable_clean_stage_check
def test_fetch_in_env(
        tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
):
    SpackCommand("env")("create", "test")
    with ev.read("test"):
        SpackCommand("add")("python")
        with pytest.raises(SpackCommandError):
            SpackCommand("fetch")()
        SpackCommand("concretize")()
        SpackCommand("fetch")()


@pytest.mark.disable_clean_stage_check
def test_fetch_single_spec(
        tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
):
    SpackCommand("fetch")("mpileaks")


@pytest.mark.disable_clean_stage_check
def test_fetch_multiple_specs(
        tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
):
    SpackCommand("fetch")("mpileaks", "gcc@10.2.0", "python")


def test_fetch_no_argument():
    with pytest.raises(SpackCommandError):
        SpackCommand("fetch")()
@@ -52,18 +52,26 @@ def test_location_build_dir(mock_spec):
    assert location('--build-dir', spec.name).strip() == pkg.stage.source_path


def test_location_build_dir_missing():
    """Tests spack location --build-dir with a missing build directory."""
@pytest.mark.regression('22738')
def test_location_source_dir(mock_spec):
    """Tests spack location --source-dir."""
    spec, pkg = mock_spec
    assert location('--source-dir', spec.name).strip() == pkg.stage.source_path
    assert location(spec.name).strip() == pkg.stage.source_path


def test_location_source_dir_missing():
    """Tests spack location --source-dir with a missing source directory."""
    spec = 'mpileaks'
    prefix = "==> Error: "
    expected = "%sBuild directory does not exist yet. Run this to create it:"\
    expected = "%sSource directory does not exist yet. Run this to create it:"\
        "%s  spack stage %s" % (prefix, os.linesep, spec)
    out = location('--build-dir', spec, fail_on_error=False).strip()
    out = location('--source-dir', spec, fail_on_error=False).strip()
    assert out == expected


@pytest.mark.parametrize('options', [([]),
                                     (['--build-dir', 'mpileaks']),
                                     (['--source-dir', 'mpileaks']),
                                     (['--env', 'missing-env']),
                                     (['spec1', 'spec2'])])
def test_location_cmd_error(options):
@@ -8,9 +8,10 @@

import pytest

import spack.config
import spack.main
import spack.modules
from spack.test.conftest import use_store, use_configuration, use_repo
import spack.store

module = spack.main.SpackCommand('module')

@@ -18,12 +19,13 @@
#: make sure module files are generated for all the tests here
@pytest.fixture(scope='module', autouse=True)
def ensure_module_files_are_there(
        mock_repo_path, mock_store, mock_configuration):
        mock_repo_path, mock_store, mock_configuration_scopes
):
    """Generate module files for module tests."""
    module = spack.main.SpackCommand('module')
    with use_store(mock_store):
        with use_configuration(mock_configuration):
            with use_repo(mock_repo_path):
    with spack.store.use_store(mock_store):
        with spack.config.use_configuration(*mock_configuration_scopes):
            with spack.repo.use_repositories(mock_repo_path):
                module('tcl', 'refresh', '-y')


@@ -82,7 +82,7 @@ def mock_pkg_git_repo(tmpdir_factory):
    git('-c', 'commit.gpgsign=false', 'commit',
        '-m', 'change pkg-b, remove pkg-c, add pkg-d')

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        yield mock_repo_packages


@@ -129,7 +129,8 @@ def test_pkg_add(mock_pkg_git_repo):
    finally:
        shutil.rmtree('pkg-e')
        # Removing a package mid-run disrupts Spack's caching
        spack.repo.path.repos[0]._fast_package_checker.invalidate()
        if spack.repo.path.repos[0]._fast_package_checker:
            spack.repo.path.repos[0]._fast_package_checker.invalidate()

    with pytest.raises(spack.main.SpackCommandError):
        pkg('add', 'does-not-exist')
@@ -12,7 +12,7 @@
concretize = SpackCommand('concretize')


def test_undevelop(tmpdir, mock_packages, mutable_mock_env_path):
def test_undevelop(tmpdir, config, mock_packages, mutable_mock_env_path):
    # setup environment
    envdir = tmpdir.mkdir('env')
    with envdir.as_cwd():
@@ -39,7 +39,9 @@ def test_undevelop(tmpdir, mock_packages, mutable_mock_env_path):
    assert not after.satisfies('dev_path=*')


def test_undevelop_nonexistent(tmpdir, mock_packages, mutable_mock_env_path):
def test_undevelop_nonexistent(
        tmpdir, config, mock_packages, mutable_mock_env_path
):
    # setup environment
    envdir = tmpdir.mkdir('env')
    with envdir.as_cwd():
@@ -304,7 +304,7 @@ def test_architecture_deep_inheritance(self, mock_targets):
        barpkg = mock_repo.add_package('barpkg', [bazpkg], [default_dep])
        mock_repo.add_package('foopkg', [barpkg], [default_dep])

        with spack.repo.swap(mock_repo):
        with spack.repo.use_repositories(mock_repo):
            spec = Spec('foopkg %gcc@4.5.0 os=CNL target=nocona' +
                        ' ^barpkg os=SuSE11 ^bazpkg os=be')
            spec.concretize()
@@ -688,13 +688,14 @@ def test_noversion_pkg(self, spec):
        with pytest.raises(spack.error.SpackError):
            Spec(spec).concretized()

    # Include targets to prevent regression on 20537
    @pytest.mark.parametrize('spec, best_achievable', [
        ('mpileaks%gcc@4.4.7', 'core2'),
        ('mpileaks%gcc@4.8', 'haswell'),
        ('mpileaks%gcc@5.3.0', 'broadwell'),
        ('mpileaks%apple-clang@5.1.0', 'x86_64')
        ('mpileaks%gcc@4.4.7 target=x86_64:', 'core2'),
        ('mpileaks%gcc@4.8 target=x86_64:', 'haswell'),
        ('mpileaks%gcc@5.3.0 target=x86_64:', 'broadwell'),
        ('mpileaks%apple-clang@5.1.0 target=x86_64:', 'x86_64')
    ])
    @pytest.mark.regression('13361')
    @pytest.mark.regression('13361', '20537')
    def test_adjusting_default_target_based_on_compiler(
            self, spec, best_achievable, current_host, mock_targets
    ):
@@ -1059,6 +1060,9 @@ def test_dont_select_version_that_brings_more_variants_in(self):
        # having an additional dependency, but the dependency shouldn't
        # appear in the answer set
        ('external-buildable-with-variant@0.9 +baz', True, '@0.9'),
        # This package has an external version declared that would be
        # the least preferred if Spack had to build it
        ('old-external', True, '@1.0.0'),
    ])
    def test_external_package_versions(self, spec_str, is_external, expected):
        s = Spec(spec_str).concretized()
@@ -1097,3 +1101,84 @@ def test_concretization_of_test_dependencies(self):
        # dependency type declared to infer that the dependency holds.
        s = Spec('test-dep-with-imposed-conditions').concretized()
        assert 'c' not in s

    @pytest.mark.parametrize('spec_str', [
        'wrong-variant-in-conflicts',
        'wrong-variant-in-depends-on'
    ])
    def test_error_message_for_inconsistent_variants(self, spec_str):
        if spack.config.get('config:concretizer') == 'original':
            pytest.xfail('Known failure of the original concretizer')

        s = Spec(spec_str)
        with pytest.raises(RuntimeError, match='not found in package'):
            s.concretize()

    @pytest.mark.regression('22533')
    @pytest.mark.parametrize('spec_str,variant_name,expected_values', [
        # Test the default value 'auto'
        ('mvapich2', 'file_systems', ('auto',)),
        # Test setting a single value from the disjoint set
        ('mvapich2 file_systems=lustre', 'file_systems', ('lustre',)),
        # Test setting multiple values from the disjoint set
        ('mvapich2 file_systems=lustre,gpfs', 'file_systems',
         ('lustre', 'gpfs')),
    ])
    def test_mv_variants_disjoint_sets_from_spec(
            self, spec_str, variant_name, expected_values
    ):
        s = Spec(spec_str).concretized()
        assert set(expected_values) == set(s.variants[variant_name].value)

    @pytest.mark.regression('22533')
    def test_mv_variants_disjoint_sets_from_packages_yaml(self):
        external_mvapich2 = {
            'mvapich2': {
                'buildable': False,
                'externals': [{
                    'spec': 'mvapich2@2.3.1 file_systems=nfs,ufs',
                    'prefix': '/usr'
                }]
            }
        }
        spack.config.set('packages', external_mvapich2)

        s = Spec('mvapich2').concretized()
        assert set(s.variants['file_systems'].value) == set(['ufs', 'nfs'])

    @pytest.mark.regression('22596')
    def test_external_with_non_default_variant_as_dependency(self):
        # This package depends on another that is registered as an external
        # with 'buildable: true' and a variant with a non-default value set
        s = Spec('trigger-external-non-default-variant').concretized()

        assert '~foo' in s['external-non-default-variant']
        assert '~bar' in s['external-non-default-variant']
        assert s['external-non-default-variant'].external

    @pytest.mark.regression('22871')
    @pytest.mark.parametrize('spec_str,expected_os', [
        ('mpileaks', 'os=debian6'),
        # To trigger the bug in 22871 we need to have the same compiler
        # spec available on both operating systems
        ('mpileaks%gcc@4.5.0 platform=test os=debian6', 'os=debian6'),
        ('mpileaks%gcc@4.5.0 platform=test os=redhat6', 'os=redhat6')
    ])
    def test_os_selection_when_multiple_choices_are_possible(
            self, spec_str, expected_os
    ):
        s = Spec(spec_str).concretized()

        for node in s.traverse():
            assert node.satisfies(expected_os)

    @pytest.mark.regression('22718')
    @pytest.mark.parametrize('spec_str,expected_compiler', [
        ('mpileaks', '%gcc@4.5.0'),
        ('mpileaks ^mpich%clang@3.3', '%clang@3.3')
    ])
    def test_compiler_is_unique(self, spec_str, expected_compiler):
        s = Spec(spec_str).concretized()

        for node in s.traverse():
            assert node.satisfies(expected_compiler)
@@ -72,6 +72,25 @@ def _write(config, data, scope):
    return _write


@pytest.fixture()
def env_yaml(tmpdir):
    """Return a sample env.yaml for test purposes"""
    env_yaml = str(tmpdir.join("env.yaml"))
    with open(env_yaml, 'w') as f:
        f.write("""\
env:
    config:
        verify_ssl: False
        dirty: False
    packages:
        libelf:
            compiler: [ 'gcc@4.5.3' ]
    repos:
        - /x/y/z
""")
    return env_yaml


def check_compiler_config(comps, *compiler_names):
    """Check that named compilers in comps match Spack's config."""
    config = spack.config.get('compilers')
@@ -797,23 +816,10 @@ def test_immutable_scope(tmpdir):
        scope._write_section('config')


def test_single_file_scope(tmpdir, config):
    env_yaml = str(tmpdir.join("env.yaml"))
    with open(env_yaml, 'w') as f:
        f.write("""\
env:
    config:
        verify_ssl: False
        dirty: False
    packages:
        libelf:
            compiler: [ 'gcc@4.5.3' ]
    repos:
        - /x/y/z
""")

def test_single_file_scope(config, env_yaml):
    scope = spack.config.SingleFileScope(
        'env', env_yaml, spack.schema.env.schema, ['env'])
        'env', env_yaml, spack.schema.env.schema, ['env']
    )

    with spack.config.override(scope):
        # from the single-file config
@@ -1045,3 +1051,41 @@ def test_bad_path_double_override(config):
                       match='Meaningless second override'):
        with spack.config.override('bad::double:override::directive', ''):
            pass


@pytest.mark.regression('22547')
def test_single_file_scope_cache_clearing(env_yaml):
    scope = spack.config.SingleFileScope(
        'env', env_yaml, spack.schema.env.schema, ['env']
    )
    # Check that we can retrieve data from the single file scope
    before = scope.get_section('config')
    assert before
    # Clear the cache of the Single file scope
    scope.clear()
    # Check that the section can be retrieved again and it's
    # the same as before
    after = scope.get_section('config')
    assert after
    assert before == after


@pytest.mark.regression('22611')
def test_internal_config_scope_cache_clearing():
    """
    An InternalConfigScope object is constructed from data that is already
    in memory, therefore it doesn't have any cache to clear. Here we ensure
    that calling the clear method is consistent with that.
    """
    data = {
        'config': {
            'build_jobs': 10
        }
    }
    internal_scope = spack.config.InternalConfigScope('internal', data)
    # Ensure that the initial object is properly set
    assert internal_scope.sections['config'] == data
    # Call the clear method
    internal_scope.clear()
    # Check that this didn't affect the scope object
    assert internal_scope.sections['config'] == data
@@ -1,10 +1,9 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import collections
import contextlib
import errno
import inspect
import itertools
@@ -35,6 +34,7 @@
import spack.platforms.test
import spack.repo
import spack.stage
import spack.store
import spack.util.executable
import spack.util.gpg
import spack.subprocess_context
@@ -315,27 +315,21 @@ def _skip_if_missing_executables(request):
        pytest.skip(msg.format(', '.join(missing_execs)))


# FIXME: The lines below should better be added to a fixture with
# FIXME: session-scope. Anyhow doing it is not easy, as it seems
# FIXME: there's some weird interaction with compilers during concretization.
spack.architecture.real_platform = spack.architecture.platform


@pytest.fixture(scope='session')
def test_platform():
    return spack.platforms.test.Test()


spack.architecture.platform = test_platform


# FIXME: Since we change the architecture above, we have to (re)initialize
# FIXME: the config singleton. If it gets initialized too early with the
# FIXME: actual architecture, tests will fail.
spack.config.config = spack.config._config()

@pytest.fixture(autouse=True, scope='session')
def _use_test_platform(test_platform):
    # This is the only context manager used at session scope (see note
    # below for more insight) since we want to use the test platform as
    # a default during tests.
    with spack.architecture.use_platform(test_platform):
        yield

#
# Context managers used by fixtures
# Note on context managers used by fixtures
#
# Because these context managers modify global state, they should really
# ONLY be used persistently (i.e., around yield statements) in
@@ -356,45 +350,13 @@ def test_platform():
# *USE*, or things can get really confusing.
#

@contextlib.contextmanager
def use_configuration(config):
    """Context manager to swap out the global Spack configuration."""
    saved = spack.config.replace_config(config)

    # Avoid using real spack configuration that has been cached by other
    # tests, and avoid polluting the cache with spack test configuration
    # (including modified configuration)
    saved_compiler_cache = spack.compilers._cache_config_file
    spack.compilers._cache_config_file = []

    yield

    spack.config.replace_config(saved)
    spack.compilers._cache_config_file = saved_compiler_cache


@contextlib.contextmanager
def use_store(store):
    """Context manager to swap out the global Spack store."""
    saved = spack.store.store
    spack.store.store = store
    yield
    spack.store.store = saved


@contextlib.contextmanager
def use_repo(repo):
    """Context manager to swap out the global Spack repo path."""
    with spack.repo.swap(repo):
        yield


#
# Test-specific fixtures
#
@pytest.fixture(scope='session')
def mock_repo_path():
    yield spack.repo.RepoPath(spack.paths.mock_packages_path)
    yield spack.repo.Repo(spack.paths.mock_packages_path)


def _pkg_install_fn(pkg, spec, prefix):
@@ -411,15 +373,15 @@ def mock_pkg_install(monkeypatch):
@pytest.fixture(scope='function')
def mock_packages(mock_repo_path, mock_pkg_install):
    """Use the 'builtin.mock' repository instead of 'builtin'"""
    with use_repo(mock_repo_path):
        yield mock_repo_path
    with spack.repo.use_repositories(mock_repo_path) as mock_repo:
        yield mock_repo


@pytest.fixture(scope='function')
def mutable_mock_repo(mock_repo_path):
    """Function-scoped mock packages, for tests that need to modify them."""
    mock_repo_path = spack.repo.RepoPath(spack.paths.mock_packages_path)
    with use_repo(mock_repo_path):
    mock_repo = spack.repo.Repo(spack.paths.mock_packages_path)
    with spack.repo.use_repositories(mock_repo) as mock_repo_path:
        yield mock_repo_path


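Note: the fixtures above migrate from the test-local use_repo() helper to the new spack.repo.use_repositories() API, which accepts Repo objects or plain paths. A minimal usage sketch (illustrative only):

import spack.paths
import spack.repo
import spack.spec

mock_repo = spack.repo.Repo(spack.paths.mock_packages_path)
with spack.repo.use_repositories(mock_repo):
    spack.spec.Spec('mpileaks').concretize()
# the previous global repo path is restored on exit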
@@ -445,9 +407,7 @@ def default_config():
    This ensures we can test the real default configuration without having
    tests fail when the user overrides the defaults that we test against."""
    defaults_path = os.path.join(spack.paths.etc_path, 'spack', 'defaults')
    defaults_scope = spack.config.ConfigScope('defaults', defaults_path)
    defaults_config = spack.config.Configuration(defaults_scope)
    with use_configuration(defaults_config):
    with spack.config.use_configuration(defaults_path) as defaults_config:
        yield defaults_config


@@ -469,9 +429,8 @@ def load_json():
        with open(mock_uarch_json) as f:
            return json.load(f)

    targets_json = archspec.cpu.schema.LazyDictionary(load_json)
    targets = archspec.cpu.microarchitecture.LazyDictionary(
        archspec.cpu.microarchitecture._known_microarchitectures)
    targets_json = load_json()
    targets = archspec.cpu.microarchitecture._known_microarchitectures()

    yield targets_json, targets

@@ -523,7 +482,7 @@ def configuration_dir(tmpdir_factory, linux_os):


@pytest.fixture(scope='session')
def mock_configuration(configuration_dir):
def mock_configuration_scopes(configuration_dir):
    """Create a persistent Configuration object from the configuration_dir."""
    defaults = spack.config.InternalConfigScope(
        '_builtin', spack.config.config_defaults
@@ -534,14 +493,14 @@ def mock_configuration(configuration_dir):
                   for name in ['site', 'system', 'user']]
    test_scopes.append(spack.config.InternalConfigScope('command_line'))

    yield spack.config.Configuration(*test_scopes)
    yield test_scopes


@pytest.fixture(scope='function')
def config(mock_configuration):
def config(mock_configuration_scopes):
    """This fixture activates/deactivates the mock configuration."""
    with use_configuration(mock_configuration):
        yield mock_configuration
    with spack.config.use_configuration(*mock_configuration_scopes) as config:
        yield config


@pytest.fixture(scope='function')
@@ -550,11 +509,10 @@ def mutable_config(tmpdir_factory, configuration_dir):
    mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
    configuration_dir.copy(mutable_dir)

    cfg = spack.config.Configuration(
        *[spack.config.ConfigScope(name, str(mutable_dir.join(name)))
          for name in ['site', 'system', 'user']])
    scopes = [spack.config.ConfigScope(name, str(mutable_dir.join(name)))
              for name in ['site', 'system', 'user']]

    with use_configuration(cfg):
    with spack.config.use_configuration(*scopes) as cfg:
        yield cfg


@@ -562,23 +520,20 @@ def mutable_config(tmpdir_factory, configuration_dir):
def mutable_empty_config(tmpdir_factory, configuration_dir):
    """Empty configuration that can be modified by the tests."""
    mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
    scopes = [spack.config.ConfigScope(name, str(mutable_dir.join(name)))
              for name in ['site', 'system', 'user']]

    cfg = spack.config.Configuration(
        *[spack.config.ConfigScope(name, str(mutable_dir.join(name)))
          for name in ['site', 'system', 'user']])

    with use_configuration(cfg):
    with spack.config.use_configuration(*scopes) as cfg:
        yield cfg


@pytest.fixture()
def mock_low_high_config(tmpdir):
    """Mocks two configuration scopes: 'low' and 'high'."""
    config = spack.config.Configuration(
        *[spack.config.ConfigScope(name, str(tmpdir.join(name)))
          for name in ['low', 'high']])
    scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name)))
              for name in ['low', 'high']]

    with use_configuration(config):
    with spack.config.use_configuration(*scopes) as config:
        yield config


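Note: configuration fixtures now hand scopes to spack.config.use_configuration() instead of pre-built Configuration objects. A small sketch of the pattern (illustrative only):

import spack.config

scopes = [spack.config.InternalConfigScope('command_line', {'config': {}})]
with spack.config.use_configuration(*scopes) as config:
    spack.config.set('config:build_jobs', 4, scope='command_line')
    assert spack.config.get('config:build_jobs') == 4
# the previous global configuration is restored on exit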
@@ -627,7 +582,7 @@ def _store_dir_and_cache(tmpdir_factory):


@pytest.fixture(scope='session')
def mock_store(tmpdir_factory, mock_repo_path, mock_configuration,
def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes,
               _store_dir_and_cache):
    """Creates a read-only mock database with some packages installed note
    that the ref count for dyninst here will be 3, as it's recycled
@@ -638,13 +593,12 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration,

    """
    store_path, store_cache = _store_dir_and_cache
    store = spack.store.Store(str(store_path))

    # If the cache does not exist populate the store and create it
    if not os.path.exists(str(store_cache.join('.spack-db'))):
        with use_configuration(mock_configuration):
            with use_store(store):
                with use_repo(mock_repo_path):
        with spack.config.use_configuration(*mock_configuration_scopes):
            with spack.store.use_store(str(store_path)) as store:
                with spack.repo.use_repositories(mock_repo_path):
                    _populate(store.db)
        store_path.copy(store_cache, mode=True, stat=True)

@@ -657,8 +611,10 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration,


@pytest.fixture(scope='function')
def mutable_mock_store(tmpdir_factory, mock_repo_path, mock_configuration,
                       _store_dir_and_cache):
def mutable_mock_store(
        tmpdir_factory, mock_repo_path, mock_configuration_scopes,
        _store_dir_and_cache
):
    """Creates a read-only mock database with some packages installed note
    that the ref count for dyninst here will be 3, as it's recycled
    across each install.
@@ -668,13 +624,12 @@ def mutable_mock_store(tmpdir_factory, mock_repo_path, mock_configuration,

    """
    store_path, store_cache = _store_dir_and_cache
    store = spack.store.Store(str(store_path))

    # If the cache does not exist populate the store and create it
    if not os.path.exists(str(store_cache.join('.spack-db'))):
        with use_configuration(mock_configuration):
            with use_store(store):
                with use_repo(mock_repo_path):
        with spack.config.use_configuration(*mock_configuration_scopes):
            with spack.store.use_store(str(store_path)) as store:
                with spack.repo.use_repositories(mock_repo_path):
                    _populate(store.db)
        store_path.copy(store_cache, mode=True, stat=True)

@@ -1250,8 +1205,7 @@ def mock_test_repo(tmpdir_factory):
          namespace: mock_test_repo
        """)

    repo = spack.repo.RepoPath(str(repodir))
    with spack.repo.swap(repo):
    with spack.repo.use_repositories(str(repodir)) as repo:
        yield repo, repodir

    shutil.rmtree(str(repodir))
@@ -19,6 +19,16 @@ compilers:
    fc: None
  modules: 'None'
  target: x86_64
- compiler:
  spec: gcc@4.5.0
  operating_system: redhat6
  paths:
    cc: /path/to/gcc
    cxx: /path/to/g++
    f77: None
    fc: None
  modules: 'None'
  target: x86_64
- compiler:
  spec: clang@3.3
  operating_system: CNL
@@ -35,4 +35,14 @@ packages:
    - spec: external-buildable-with-variant@1.1.special +baz
      prefix: /usr
    - spec: external-buildable-with-variant@0.9 +baz
      prefix: /usr
  'old-external':
    buildable: True
    externals:
    - spec: old-external@1.0.0
      prefix: /usr
  'external-non-default-variant':
    buildable: True
    externals:
    - spec: external-non-default-variant@3.8.7~foo~bar
      prefix: /usr
@@ -81,7 +81,7 @@ def test_installed_upstream(upstream_and_downstream_db):
    y = mock_repo.add_package('y', [z], [default])
    mock_repo.add_package('w', [x, y], [default, default])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        spec = spack.spec.Spec('w')
        spec.concretize()

@@ -122,7 +122,7 @@ def test_removed_upstream_dep(upstream_and_downstream_db):
    z = mock_repo.add_package('z', [], [])
    mock_repo.add_package('y', [z], [default])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        spec = spack.spec.Spec('y')
        spec.concretize()

@@ -155,7 +155,7 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db):
    mock_repo = MockPackageMultiRepo()
    mock_repo.add_package('x', [], [])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        spec = spack.spec.Spec('x')
        spec.concretize()

@@ -197,7 +197,7 @@ def test_cannot_write_upstream(tmpdir_factory, test_store, gen_mock_layout):
    upstream_dbs = spack.store._construct_upstream_dbs_from_install_roots(
        [roots[1]], _test=True)

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        spec = spack.spec.Spec('x')
        spec.concretize()

@@ -216,7 +216,7 @@ def test_recursive_upstream_dbs(tmpdir_factory, test_store, gen_mock_layout):
    y = mock_repo.add_package('y', [z], [default])
    mock_repo.add_package('x', [y], [default])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        spec = spack.spec.Spec('x')
        spec.concretize()
        db_c = spack.database.Database(roots[2])
@@ -694,7 +694,7 @@ def test_115_reindex_with_packages_not_in_repo(mutable_database):
    # Don't add any package definitions to this repository; the idea is that
    # packages should not have to be defined in the repository once they
    # are installed
    with spack.repo.swap(MockPackageMultiRepo()):
    with spack.repo.use_repositories(MockPackageMultiRepo()):
        spack.store.store.reindex()
        _check_db_sanity(mutable_database)

@@ -194,7 +194,7 @@ def test_handle_unknown_package(layout_and_dir, config, mock_packages):
        layout.create_install_directory(spec)
        installed_specs[spec] = layout.path_for_spec(spec)

    with spack.repo.swap(mock_db):
    with spack.repo.use_repositories(mock_db):
        # Now check that even without the package files, we know
        # enough to read a spec from the spec file.
        for spec, path in installed_specs.items():
@@ -474,14 +474,14 @@ def _conc_spec(compiler):
    assert packages


def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_repo_path):
def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_packages):
    """Test happy path for dump_packages with dependencies."""

    spec_name = 'simple-inheritance'
    spec = spack.spec.Spec(spec_name).concretized()
    inst.dump_packages(spec, str(tmpdir))

    repo = mock_repo_path.repos[0]
    repo = mock_packages.repos[0]
    dest_pkg = repo.filename_for_package_name(spec_name)
    assert os.path.isfile(dest_pkg)

@@ -73,7 +73,7 @@ def test_repo_getpkg_names_and_classes():
def test_get_all_mock_packages():
    """Get the mock packages once each too."""
    db = spack.repo.RepoPath(spack.paths.mock_packages_path)
    with spack.repo.swap(db):
    with spack.repo.use_repositories(db):
        check_repo()


@@ -75,7 +75,7 @@ def test_test_deptype():
    y = mock_repo.add_package('y', [z], [test_only])
    w = mock_repo.add_package('w', [x, y], [test_only, default])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        spec = Spec('w')
        spec.concretize(tests=(w.name,))

@@ -114,7 +114,7 @@ def test_installed_deps():
    b = mock_repo.add_package('b', [d, e], [default, default])
    mock_repo.add_package('a', [b, c], [default, default])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        c_spec = Spec('c')
        c_spec.concretize()
        assert c_spec['d'].version == spack.version.Version('2')
@@ -143,7 +143,7 @@ def test_specify_preinstalled_dep():
    b = mock_repo.add_package('b', [c], [default])
    mock_repo.add_package('a', [b], [default])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        b_spec = Spec('b')
        b_spec.concretize()
        for spec in b_spec.traverse():
@@ -186,7 +186,7 @@ def test_conditional_dep_with_user_constraints(spec_str, expr_str, expected):
    }
    mock_repo.add_package('x', [y], [default], conditions=x_on_y_conditions)

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        spec = Spec(spec_str)
        spec.concretize()

@@ -11,8 +11,7 @@
import ast
import inspect
import os

from collections import Iterable, Mapping
import sys

import pytest

@@ -29,6 +28,12 @@
from spack.util.mock_package import MockPackageMultiRepo


if sys.version_info >= (3, 3):
    from collections.abc import Iterable, Mapping  # novm
else:
    from collections import Iterable, Mapping


def check_yaml_round_trip(spec):
    yaml_text = spec.to_yaml()
    spec_from_yaml = Spec.from_yaml(yaml_text)
@@ -310,7 +315,7 @@ def test_save_dependency_spec_yamls_subset(tmpdir, config):
    b = mock_repo.add_package('b', [d, e], [default, default])
    mock_repo.add_package('a', [b, c], [default, default])

    with repo.swap(mock_repo):
    with repo.use_repositories(mock_repo):
        spec_a = Spec('a')
        spec_a.concretize()
        b_spec = spec_a['b']
@@ -54,7 +54,7 @@ def builtin_and_mock_packages():
    repo_dirs = [spack.paths.packages_path, spack.paths.mock_packages_path]
    path = RepoPath(*repo_dirs)

    with spack.repo.swap(path):
    with spack.repo.use_repositories(path):
        yield


@@ -15,7 +15,7 @@ def test_mock_package_possible_dependencies():
    b = mock_repo.add_package('b', [d])
    a = mock_repo.add_package('a', [b, c])

    with spack.repo.swap(mock_repo):
    with spack.repo.use_repositories(mock_repo):
        assert set(a.possible_dependencies()) == set(['a', 'b', 'c', 'd', 'e'])
        assert set(b.possible_dependencies()) == set(['b', 'd', 'e'])
        assert set(c.possible_dependencies()) == set(['c', 'd', 'e'])
@@ -528,13 +528,18 @@ def reversed(self):

        return rev

    def apply_modifications(self):
    def apply_modifications(self, env=None):
        """Applies the modifications and clears the list."""
        # Use os.environ if not specified
        # Do not copy, we want to modify it in place
        if env is None:
            env = os.environ

        modifications = self.group_by_name()
        # Apply modifications one variable at a time
        for name, actions in sorted(modifications.items()):
            for x in actions:
                x.execute(os.environ)
                x.execute(env)

    def shell_modifications(self, shell='sh'):
        """Return shell code to apply the modifications and clears the list."""
@@ -937,7 +942,10 @@ def _source_single_file(file_and_args, environment):
        source_file, suppress_output,
        concatenate_on_success, dump_environment,
    ])
    output = shell(source_file_arguments, output=str, env=environment)
    output = shell(
        source_file_arguments, output=str, env=environment,
        ignore_quotes=True
    )
    environment = json.loads(output)

    # If we're in python2, convert to str objects instead of unicode
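Note: apply_modifications() above now accepts an explicit dict to mutate instead of always touching os.environ. A small sketch (illustrative only):

from spack.util.environment import EnvironmentModifications

env = {}
mods = EnvironmentModifications()
mods.set('FOO', 'bar')
mods.apply_modifications(env)  # mutates the passed dict in place
assert env['FOO'] == 'bar'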
@@ -22,6 +22,8 @@ class Executable(object):
|
||||
def __init__(self, name):
|
||||
self.exe = shlex.split(str(name))
|
||||
self.default_env = {}
|
||||
from spack.util.environment import EnvironmentModifications # no cycle
|
||||
self.default_envmod = EnvironmentModifications()
|
||||
self.returncode = None
|
||||
|
||||
if not self.exe:
|
||||
@@ -40,6 +42,10 @@ def add_default_env(self, key, value):
|
||||
"""
|
||||
self.default_env[key] = value
|
||||
|
||||
def add_default_envmod(self, envmod):
|
||||
"""Set an EnvironmentModifications to use when the command is run."""
|
||||
self.default_envmod.extend(envmod)
|
||||
|
||||
@property
|
||||
def command(self):
|
||||
"""The command-line string.
|
||||
@@ -76,15 +82,18 @@ def __call__(self, *args, **kwargs):
         Keyword Arguments:
             _dump_env (dict): Dict to be set to the environment actually
                 used (envisaged for testing purposes only)
-            env (dict): The environment to run the executable with
-            extra_env (dict): Extra items to add to the environment
-                (neither requires nor precludes env)
+            env (dict or EnvironmentModifications): The environment with which
+                to run the executable
+            extra_env (dict or EnvironmentModifications): Extra items to add to
+                the environment (neither requires nor precludes env)
             fail_on_error (bool): Raise an exception if the subprocess returns
                 an error. Default is True. The return code is available as
                 ``exe.returncode``
             ignore_errors (int or list): A list of error codes to ignore.
                 If these codes are returned, this process will not raise
                 an exception even if ``fail_on_error`` is set to ``True``
+            ignore_quotes (bool): If False, warn users that quotes are not
+                needed as Spack does not use a shell. Defaults to False.
             input: Where to read stdin from
             output: Where to send stdout
             error: Where to send stderr
@@ -107,19 +116,33 @@ def __call__(self, *args, **kwargs):
         """
         # Environment
         env_arg = kwargs.get('env', None)
-        if env_arg is None:
-            env = os.environ.copy()
-            env.update(self.default_env)
-        else:
-            env = self.default_env.copy()
+
+        # Setup default environment
+        env = os.environ.copy() if env_arg is None else {}
+        self.default_envmod.apply_modifications(env)
+        env.update(self.default_env)

-        env.update(kwargs.get('extra_env', {}))
+        from spack.util.environment import EnvironmentModifications  # no cycle
+        # Apply env argument
+        if isinstance(env_arg, EnvironmentModifications):
+            env_arg.apply_modifications(env)
+        elif env_arg:
+            env.update(env_arg)
+
+        # Apply extra env
+        extra_env = kwargs.get('extra_env', {})
+        if isinstance(extra_env, EnvironmentModifications):
+            extra_env.apply_modifications(env)
+        else:
+            env.update(extra_env)

         if '_dump_env' in kwargs:
             kwargs['_dump_env'].clear()
             kwargs['_dump_env'].update(env)

         fail_on_error = kwargs.pop('fail_on_error', True)
         ignore_errors = kwargs.pop('ignore_errors', ())
+        ignore_quotes = kwargs.pop('ignore_quotes', False)

         # If they just want to ignore one error code, make it a tuple.
         if isinstance(ignore_errors, int):
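The rewrite fixes the precedence order: the defaults (`default_envmod`, then `default_env`) are layered on top of either `os.environ` or an empty dict, the `env` argument replaces rather than merges with the process environment, and `extra_env` is applied last. A sketch using the `_dump_env` testing hook documented above (paths illustrative):

```python
from spack.util.executable import Executable

echo = Executable('/bin/echo')
echo.add_default_env('A', 'default')

captured = {}
echo('hello',
     env={'A': 'from_env', 'PATH': '/usr/bin'},  # replaces os.environ
     extra_env={'B': 'extra'},                   # layered on top, last
     _dump_env=captured, output=str)

assert captured['A'] == 'from_env'  # env argument overrides the defaults
assert captured['B'] == 'extra'
```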
@@ -144,15 +167,20 @@ def streamify(arg, mode):
         estream, close_estream = streamify(error, 'w')
         istream, close_istream = streamify(input, 'r')

-        quoted_args = [arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)]
-        if quoted_args:
-            tty.warn(
-                "Quotes in command arguments can confuse scripts like"
-                " configure.",
-                "The following arguments may cause problems when executed:",
-                str("\n".join(["    " + arg for arg in quoted_args])),
-                "Quotes aren't needed because spack doesn't use a shell.",
-                "Consider removing them")
+        if not ignore_quotes:
+            quoted_args = [
+                arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)
+            ]
+            if quoted_args:
+                tty.warn(
+                    "Quotes in command arguments can confuse scripts like"
+                    " configure.",
+                    "These arguments may cause problems when executed:",
+                    str("\n".join(["    " + arg for arg in quoted_args])),
+                    "Quotes aren't needed because spack doesn't use a shell. "
+                    "Consider removing them.",
+                    "If multiple levels of quotation are required, use "
+                    "`ignore_quotes=True`.")

         cmd = self.exe + list(args)
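With the warning now gated, callers that genuinely need literal quotes (for instance when a wrapped tool re-parses its own arguments) can opt out. A minimal sketch:

```python
from spack.util.executable import Executable

printf = Executable('/usr/bin/printf')

# The literal quotes are intentional here; ignore_quotes=True suppresses
# the "Quotes in command arguments" warning that would otherwise fire.
out = printf('%s\n', '"quoted value"', output=str, ignore_quotes=True)
```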
@@ -8,6 +8,7 @@
 import ordereddict_backport

 import spack.util.naming
+import spack.provider_index
 from spack.dependency import Dependency
 from spack.spec import Spec
 from spack.version import Version
@@ -80,6 +81,8 @@ class MockPackageMultiRepo(object):

     def __init__(self):
         self.spec_to_pkg = {}
+        self.namespace = ''
+        self.full_namespace = 'spack.pkg.mock'

     def get(self, spec):
         if not isinstance(spec, spack.spec.Spec):
@@ -171,3 +174,7 @@ class MockPackage(MockPackageBase):
         self.spec_to_pkg["mockrepo." + name] = mock_package

         return mock_package
+
+    @property
+    def provider_index(self):
+        return spack.provider_index.ProviderIndex()
@@ -4,8 +4,14 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import inspect
-import collections
 import functools
+import sys
+
+
+if sys.version_info >= (3, 3):
+    from collections.abc import MutableSequence  # novm
+else:
+    from collections import MutableSequence


 class Delegate(object):
@@ -52,7 +58,7 @@ def composite(interface=None, method_list=None, container=list):
     # exception if it doesn't. The patched class returned by the decorator will
     # inherit from the container class to expose the interface needed to manage
     # objects composition
-    if not issubclass(container, collections.MutableSequence):
+    if not issubclass(container, MutableSequence):
         raise TypeError("Container must fulfill the MutableSequence contract")

     # Check if at least one of the 'interface' or the 'method_list' arguments
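Both hunks exist because the container ABCs moved to `collections.abc` in Python 3.3; importing them from `collections` warns on 3.3+ and fails outright on 3.10+, while Spack still supports Python 2.6/2.7. The pattern in isolation:

```python
import sys

# collections.MutableSequence is collections.abc.MutableSequence on
# Python 3.3+; the old alias was removed entirely in Python 3.10.
if sys.version_info >= (3, 3):
    from collections.abc import MutableSequence
else:  # Python 2
    from collections import MutableSequence

assert issubclass(list, MutableSequence)
```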
@@ -1,4 +1,4 @@
-# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -13,7 +13,7 @@

 """
 import ctypes
-import collections
+import sys

 from ordereddict_backport import OrderedDict
 from six import string_types, StringIO
@@ -25,6 +25,13 @@

 import spack.error

+
+if sys.version_info >= (3, 3):
+    from collections.abc import Mapping  # novm
+else:
+    from collections import Mapping
+
+
 # Only export load and dump
 __all__ = ['load', 'dump', 'SpackYAMLError']

@@ -343,7 +350,7 @@ def sorted_dict(dict_like):
     """
     result = syaml_dict(sorted(dict_like.items()))
     for key, value in result.items():
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, Mapping):
            result[key] = sorted_dict(value)
     return result

@@ -1,38 +1,29 @@
 -----BEGIN PGP PUBLIC KEY BLOCK-----

-mQENBF1IgqcBCADqSIBM0TT4+6Acv6SUpQ2l1Ql+UVRtJ74VGFOw+8I8aBWcBryB
-wNsS/Drxn9M9rX8il2aGtAmwc1dhTh0JvdZO7KqG8Q4vvWOytdLnGSE61LV4147q
-S/dJiYH2DCvhMKpOByIsEiuoTrUHzd1EQBnEPSwAQV8oWPrc1++f3iYmRemsOBCT
-BldAu7Y5RwjI3qQ6GazoCF5rd1uyiMYrpT4amEKFE91VRe+IG8XfEaSTapOc/hO3
-Sw4fzPelA2qD12I+JMj56vM0fQy3TXD5qngIb+leb2jGI+0bTz8RGS0xSMYVvftA
-upzQPaQIfzijVBt3tFSayx/NXKR0p+EuCqGBABEBAAG0MFNwYWNrIEJ1aWxkIFBp
-cGVsaW5lIChEZW1vIEtleSkgPGtleUBzcGFjay5kZW1vPokBTgQTAQgAOBYhBDHI
-4nh6FErErdiO0pX4aBGV4jnYBQJdSIKnAhsvBQsJCAcCBhUKCQgLAgQWAgMBAh4B
-AheAAAoJEJX4aBGV4jnYpf0IAJDYEjpm0h1pNswTvmnEhgNVbojCGRfAts7F5uf8
-IFXGafKQsekMWZh0Ig0YXVn72jsOuNK/+keErMfXM3DFNTq0Ki7mcFedR9r5EfLf
-4YW2n6mphsfMgsg8NwKVLFYWyhQQ4OzhdydPxkGVhEebHwfHNQ3aIcqbFmzkhxnX
-CIYh2Flf3T306tKX4lXbhsXKG1L/bLtDiFRaMCBp66HGZ8u9Dbyy/W8aDwyx4duD
-MG+y2OrhOf+zEu3ZPFyc/jsjmfnUtIfQVyRajh/8vh+i9fkvFlLaOQittNElt3z1
-8+ybGjE9qWY/mvR2ZqnP8SVkGvxSpBVfVXiFFdepvuPAcLu5AQ0EXUiCpwEIAJ2s
-npNBAVocDUSdOF/Z/eCRvy3epuYm5f1Ge1ao9K2qWYno2FatnsYxK4qqB5yGRkfj
-sEzAGP8JtJvqDSuB5Xk7CIjRNOwoSB3hqvmxWh2h+HsITUhMl11FZ0Cllz+etXcK
-APz2ZHSKnA3R8uf4JzIr1cHLS+gDBoj8NgBCZhcyva2b5UC///FLm1+/Lpvekd0U
-n7B524hbXhFUG+UMfHO/U1c4TvCMt7RGMoWUtRzfO6XB1VQCwWJBVcVGl8Yy59Zk
-3K76VbFWQWOq6fRBE0xHBAga7pOgCc9qrb+FGl1IHUT8aV8CzkxckHlNb3PlntmE
-lXZLPcGFWaPtGtuIJVsAEQEAAYkCbAQYAQgAIBYhBDHI4nh6FErErdiO0pX4aBGV
-4jnYBQJdSIKnAhsuAUAJEJX4aBGV4jnYwHQgBBkBCAAdFiEEneR3pKqi9Rnivv07
-CYCNVr37XP0FAl1IgqcACgkQCYCNVr37XP13RQf/Ttxidgo9upF8jxrWnT5YhM6D
-ozzGWzqE+/KDBX+o4f33o6uzozjESRXQUKdclC9ftDJQ84lFTMs3Z+/12ZDqCV2k
-2qf0VfXg4e5xMq4tt6hojXUeYSfeGZXNU9LzjURCcMD+amIKjVztFg4kl3KHW3Pi
-/aPTr4xWWgy2tZ1FDEuA5J6AZiKKJSVeoSPOGANouPqm4fNj273XFXQepIhQ5wve
-4No0abxfXcLt5Yp3y06rNCBC9QdC++19N5+ajn2z9Qd2ZwztPb0mNuqHAok4vrlE
-1c4WBWk93Nfy9fKImalGENpPDz0td2H9pNC9IafOWltGSWSINRrU1GeaNXS/uAOT
-CADjcDN+emLbDTTReW4FLoQ0mPJ0tACgszGW50PtncTMPSj4uxSktQPWWk41oD9q
-gpXm1Vgto4GvPWYs/ewR6Kyd8K0YkBxbRFyYOmycu3/zzYJnry+EHdvtQspwUDPg
-QlI/avDrncERzICsbd86Jz0CMY4kzpg5v9dt/N6WnHlSk/S+vv4pPUDSz26Q4Ehh
-iDvDavLGyzKSlVzWQ4bzzlQxXbDL6TZyVAQ4DBI4sI+WGtLbfD51EI5G9BfmDsbw
-XJ0Dt2yEwRfDUx/lYbAMvhUnWEu2DSpYdJb8GG0GKTGqU4YpvO1JgTCsLSLIAHfT
-tQMw04Gs+kORRNbggsdTD4sR
-=N5Wp
+mQINBFoGPeUBEACtR/y+HqInBMMmhoCIoaltG3PnOpRtuLIaiUWRgVCJ3ZbkSNDK
+n2SWgnzQOuh4TeHgr9YEOiPdN8DYNI0Cp+IY2v73cxdMEHPHtVAGJn6fzmEnmFal
+vTSUAgekYfrtJKTtURJ6l+Z1jaX1jqpKY+FTQvdyp08oJiEeDhL57Tx5s0QChvMK
+A+2Hdkiz/FeSVDakQaEil4CBB7oC+MQsUphQUUEVGaLd91iHMsPF78J2SXMJ2E/i
+xz+jcSLnyacaRyQxUBGMY0e4AFNq53GE6aC0bqso5q8JnZH9PP5IiH6XqeRiWCYb
+GqvhW322pDrC83aHHz7dzCafBvcdh0/TlN7L5Qr+zRGVU5X/EdEeSruO0OylzXJp
+RxPc1WW5uY0YMayPXtiESLs9ElczF34aXX5mLokvgcuroIJQY+oUoU4p4r+23gJw
+HsQCCr9vUt1j7SOHhiYhFlMHpTvpaXyPAq6kah/iGl9okA4V6QPI5upPrwLdVWMZ
+LQEYfGAuCfwZub0Mz2qZnvjiew0dCDImMLV8GOdJDN1Ui74OxesHHakdyqbOlSt7
+Z4XatINbnEM5VE1VBUlseOAHu/YTmsqnBu4w7//Ys5I4mFmKvVWgURRjtArE+Z05
+Voa0pD2wDb+5R7DkDqMLECJuxjD+X33vNHMOrA9qcef1WZ3CLtQHqs+abQARAQAB
+tDdzYy10dXRvcmlhbCAoR1BHIGNyZWF0ZWQgZm9yIFNwYWNrKSA8YmVja2VyMzNA
+bGxubC5nb3Y+iQI3BBMBCAAhBQJaBj3lAhsDBQsJCAcCBhUICQoLAgQWAgMBAh4B
+AheAAAoJEJz6SkU7fGmyAr0P/159TwSgTJx0op7B+qhQ1UGac1yktFRyBaQmzkrl
+sVH7HxB2Rsq0ty6uV0PA6JlRRb4efEKMsNLTIx6azNYgUgWY+gf/XXdde2romaHB
+ZK1uFIp9A108HlqCp+Phwtbdp3NmYiTcpH9ZqpWUmz9dWw37zJkqbqi5xLmbqToM
++hQ49WsjGTE4ngLwwUm8Bxz+nFcyJHjZ1u4KLxTqMNoNjpWbDhM5mDuCYvf7Arwa
+IE7Kk3jaYbrsCOw7Qz0oGAp41kKY3WedFfFhqfUNUR+p0o9N79tfkeQjxWK+Ds0W
+H24d8HUT8n8lHlXo/0DfR9h9VHpnDo9VfXJyVIqr0uzU9FGHzJ/S8WddzgvxnOBl
+Ia4fZTOk+KeQsKJ+Q/X6577F6/S0bQ48D6S09/6HAac6tyrnB3RRPETvGNPOHnA0
++P8WuhKjojqBO0N8VsTweCCjWDYlF7MEGsHvWiyYYq9gdGY3crWPv8NjMyj+wYhK
+NgF7IHIIgZszRyw2hRlwOdC8w+06AhpDivhE4n6wQlO7ScydQpZm44JJtqGO+cjo
+6akvjsFe5hV1GmeCNp46GYDe2zQ1O2/50prYcNaRfJ9PsfaNmyTGg4NVqBbPNkwR
+uXGiAS2qgeqfS3wORXexHCwYFB2Lcu0qDv7MJosM0cI1saY/eff3+Uw/AkqV51QY
+0ajT
+=Fc8M
 -----END PGP PUBLIC KEY BLOCK-----

@@ -440,7 +440,7 @@ _spack_buildcache_update_index() {
 _spack_cd() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages -b --build-dir -e --env"
+        SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages --source-dir -b --build-dir -e --env"
     else
         _all_packages
     fi
@@ -475,7 +475,7 @@ _spack_ci_rebuild() {
 _spack_clean() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help -s --stage -d --downloads -f --failures -m --misc-cache -p --python-cache -a --all"
+        SPACK_COMPREPLY="-h --help -s --stage -d --downloads -f --failures -m --misc-cache -p --python-cache -b --bootstrap -a --all"
     else
         _all_packages
     fi
@@ -891,7 +891,7 @@ _spack_fetch() {
 _spack_find() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help --format --json -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tags -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --deprecated --only-deprecated -N --namespace --start-date --end-date"
+        SPACK_COMPREPLY="-h --help --format --json -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tags -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --deprecated --only-deprecated -N --namespace --start-date --end-date -b --bootstrap"
     else
         _installed_packages
     fi
@@ -1064,7 +1064,7 @@ _spack_load() {
 _spack_location() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages -b --build-dir -e --env"
+        SPACK_COMPREPLY="-h --help -m --module-dir -r --spack-root -i --install-dir -p --package-dir -P --packages -s --stage-dir -S --stages --source-dir -b --build-dir -e --env"
     else
         _all_packages
     fi

@@ -0,0 +1,13 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+class ExternalNonDefaultVariant(Package):
+    """An external that is registered with a non-default value"""
+    homepage = "http://www.python.org"
+    url = "http://www.python.org/ftp/python/3.8.7/Python-3.8.7.tgz"
+
+    version('3.8.7', 'be78e48cdfc1a7ad90efff146dce6cfe')
+
+    variant('foo', default=True, description='just a variant')
+    variant('bar', default=True, description='just a variant')
15
var/spack/repos/builtin.mock/packages/mvapich2/package.py
Normal file
@@ -0,0 +1,15 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+class Mvapich2(Package):
+    homepage = "http://www.homepage.org"
+    url = "http://www.someurl"
+
+    version('1.5', '9c5d5d4fe1e17dd12153f40bc5b6dbc0')
+
+    variant(
+        'file_systems',
+        description='List of the ROMIO file systems to activate',
+        values=auto_or_any_combination_of('lustre', 'gpfs', 'nfs', 'ufs'),
+    )
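This mock package exercises `auto_or_any_combination_of`, which declares a multi-valued variant defaulting to `auto`. A hedged sketch of how a real package might consume such a variant (the configure flag is hypothetical):

```python
def configure_args(self):
    args = []
    # For multi-valued variants, .value is a tuple of activated values,
    # e.g. ('lustre', 'nfs'), or ('auto',) when the default is kept.
    file_systems = self.spec.variants['file_systems'].value
    if 'auto' not in file_systems:
        args.append('--with-file-systems={0}'.format(','.join(file_systems)))
    return args
```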
@@ -0,0 +1,17 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+class OldExternal(Package):
+    """A package that has an old version declared in packages.yaml"""
+
+    homepage = "https://www.example.com"
+    url = "https://www.example.com/old-external.tar.gz"
+
+    version('1.2.0', '0123456789abcdef0123456789abcdef')
+    version('1.1.4', '0123456789abcdef0123456789abcdef')
+    version('1.1.3', '0123456789abcdef0123456789abcdef')
+    version('1.1.2', '0123456789abcdef0123456789abcdef')
+    version('1.1.1', '0123456789abcdef0123456789abcdef')
+    version('1.1.0', '0123456789abcdef0123456789abcdef')
+    version('1.0.0', '0123456789abcdef0123456789abcdef')
@@ -0,0 +1,12 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+class TriggerExternalNonDefaultVariant(Package):
+    """This package depends on an external with a non-default variant"""
+    homepage = "http://www.example.com"
+    url = "http://www.someurl.tar.gz"
+
+    version('1.0', 'foobarbaz')
+
+    depends_on('external-non-default-variant')
@@ -0,0 +1,13 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+class WrongVariantInConflicts(Package):
+    """This package has a wrong variant spelled in a conflict."""
+
+    homepage = "http://www.example.com"
+    url = "http://www.example.com/b-1.0.tar.gz"
+
+    version('1.0', '0123456789abcdef0123456789abcdef')
+
+    conflicts('+foo', when='@1.0')
@@ -0,0 +1,13 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+class WrongVariantInDependsOn(Package):
+    """This package has a wrong variant spelled in a depends_on."""
+
+    homepage = "http://www.example.com"
+    url = "http://www.example.com/b-1.0.tar.gz"
+
+    version('1.0', '0123456789abcdef0123456789abcdef')
+
+    depends_on('b+doesnotexist')
@@ -69,7 +69,7 @@ def cmake_args(self):
         }

         args.append('-DWITH_NETCDF:STRING={0}'.format(
-            netcdf_str[spec.variants['netcdf-c'].value]
+            netcdf_str[spec.variants['netcdf'].value]
         ))

         # Set an MPI compiler for parallel builds

70
var/spack/repos/builtin/packages/clingo-bootstrap/package.py
Normal file
@@ -0,0 +1,70 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from spack.pkg.builtin.clingo import Clingo
+
+import spack.compilers
+
+
+class ClingoBootstrap(Clingo):
+    """Clingo with some options used for bootstrapping"""
+
+    maintainers = ['alalazo']
+
+    variant('build_type', default='Release', values=('Release',),
+            description='CMake build type')
+
+    # CMake at version 3.16.0 or higher can force the Python interpreter,
+    # which is crucial to build against external Python in environments
+    # where more than one interpreter is in the same prefix
+    depends_on('cmake@3.16.0:', type='build')
+
+    # On Linux we bootstrap with GCC
+    for compiler_spec in [
+            c for c in spack.compilers.supported_compilers()
+            if c != 'gcc'
+    ]:
+        conflicts('%{0}'.format(compiler_spec), when='platform=linux',
+                  msg='GCC is required to bootstrap clingo on Linux')
+        conflicts('%{0}'.format(compiler_spec), when='platform=cray',
+                  msg='GCC is required to bootstrap clingo on Cray')
+    conflicts(
+        '%gcc@:5.99.99', msg='C++14 support is required to bootstrap clingo'
+    )
+
+    # On Darwin we bootstrap with Apple Clang
+    for compiler_spec in [
+            c for c in spack.compilers.supported_compilers()
+            if c != 'apple-clang'
+    ]:
+        conflicts('%{0}'.format(compiler_spec), when='platform=darwin',
+                  msg='Apple-clang is required to bootstrap clingo on MacOS')
+
+    # Clingo needs the Python module to be usable by Spack
+    conflicts('~python', msg='Python support is required to bootstrap Spack')
+
+    @property
+    def cmake_py_shared(self):
+        return self.define('CLINGO_BUILD_PY_SHARED', 'OFF')
+
+    def cmake_args(self):
+        args = super(ClingoBootstrap, self).cmake_args()
+        args.extend([
+            # Avoid building the clingo executable
+            self.define('CLINGO_BUILD_APPS', 'OFF'),
+        ])
+        return args
+
+    def setup_build_environment(self, env):
+        if '%apple-clang platform=darwin' in self.spec:
+            opts = '-mmacosx-version-min=10.13'
+        elif '%gcc' in self.spec:
+            # This is either linux or cray
+            opts = '-static-libstdc++ -static-libgcc -Wl,--exclude-libs,ALL'
+        else:
+            msg = 'unexpected compiler for spec "{0}"'.format(self.spec)
+            raise RuntimeError(msg)
+
+        env.set('CXXFLAGS', opts)
+        env.set('LDFLAGS', opts)
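The `env` object handed to `setup_build_environment` is an `EnvironmentModifications` instance, the same class extended at the top of this changeset; roughly, Spack records these calls and later applies them to the build environment. A sketch of the equivalent bookkeeping:

```python
from spack.util.environment import EnvironmentModifications

opts = '-static-libstdc++ -static-libgcc -Wl,--exclude-libs,ALL'
env = EnvironmentModifications()
env.set('CXXFLAGS', opts)
env.set('LDFLAGS', opts)

# Thanks to the new env parameter on apply_modifications, these can be
# applied to an arbitrary dict rather than straight to os.environ.
build_env = {}
env.apply_modifications(build_env)
assert build_env['LDFLAGS'] == opts
```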
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 from spack import *
+from spack.compiler import UnsupportedCompilerFlag


 class Clingo(CMakePackage):
@@ -20,16 +21,16 @@ class Clingo(CMakePackage):
     url = "https://github.com/potassco/clingo/archive/v5.2.2.tar.gz"
     git = 'https://github.com/potassco/clingo.git'

-    maintainers = ["tgamblin"]
+    maintainers = ["tgamblin", "alalazo"]

-    version('master', branch='master', submodules=True)
-    version('spack', commit='2ab2e81bcb24f6070b7efce30a754d74ef52ee2d', submodules=True)
+    version('master', branch='master', submodules=True, preferred=True)
+    version('spack', commit='2a025667090d71b2c9dce60fe924feb6bde8f667', submodules=True)

     version('5.4.0', sha256='e2de331ee0a6d254193aab5995338a621372517adcf91568092be8ac511c18f3')
     version('5.3.0', sha256='b0d406d2809352caef7fccf69e8864d55e81ee84f4888b0744894977f703f976')
     version('5.2.2', sha256='da1ef8142e75c5a6f23c9403b90d4f40b9f862969ba71e2aaee9a257d058bfcf')

-    variant("docs", default=False, description="build documentation with Doxyegen")
+    variant("docs", default=False, description="build documentation with Doxygen")
     variant("python", default=True, description="build with python bindings")

     depends_on('doxygen', type="build", when="+docs")
@@ -50,17 +51,37 @@ def patch(self):
             'clasp/CMakeLists.txt',
             'clasp/libpotassco/CMakeLists.txt')

+    @property
+    def cmake_python_hints(self):
+        """Return standard CMake defines to ensure that the
+        current spec is the one found by CMake find_package(Python, ...)
+        """
+        python_spec = self.spec['python']
+        include_dir = python_spec.package.get_python_inc()
+        return [
+            self.define('Python_EXECUTABLE', str(python_spec.command)),
+            self.define('Python_INCLUDE_DIR', include_dir)
+        ]
+
+    @property
+    def cmake_py_shared(self):
+        return self.define('CLINGO_BUILD_PY_SHARED', 'ON')
+
     def cmake_args(self):
         try:
             self.compiler.cxx14_flag
         except UnsupportedCompilerFlag:
-            InstallError('clingo requires a C++14-compliant C++ compiler')
+            raise InstallError('clingo requires a C++14-compliant C++ compiler')

-        return [
+        args = [
             '-DCLINGO_REQUIRE_PYTHON=ON',
             '-DCLINGO_BUILD_WITH_PYTHON=ON',
-            '-DCLINGO_BUILD_PY_SHARED=ON',
             '-DPYCLINGO_USER_INSTALL=OFF',
             '-DPYCLINGO_USE_INSTALL_PREFIX=ON',
-            '-DCLINGO_BUILD_WITH_LUA=OFF'
+            '-DCLINGO_BUILD_WITH_LUA=OFF',
+            self.cmake_py_shared
         ]
+        if self.spec['cmake'].satisfies('@3.16.0:'):
+            args += self.cmake_python_hints
+
+        return args
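`self.define` is the `CMakePackage` helper for formatting CMake cache entries. On CMake 3.16 and newer, the hints pin `find_package(Python ...)` to the spec's interpreter rather than the first `python` on `PATH`. A hedged sketch of what the hints amount to, with purely illustrative paths:

```python
# Hypothetical values for an external Python under /opt/python; the
# exact -D formatting is Spack's concern, only the intent is shown.
hints = [
    '-DPython_EXECUTABLE=/opt/python/bin/python3.8',
    '-DPython_INCLUDE_DIR=/opt/python/include/python3.8',
]
# Passing these to CMake makes find_package(Python ...) resolve the
# spec's interpreter instead of whichever python is first on PATH.
```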
@@ -12,6 +12,8 @@ class Numactl(AutotoolsPackage):
     homepage = "http://oss.sgi.com/projects/libnuma/"
     url = "https://github.com/numactl/numactl/archive/v2.0.11.tar.gz"

+    force_autoreconf = True
+
     version('2.0.14', sha256='1ee27abd07ff6ba140aaf9bc6379b37825e54496e01d6f7343330cf1a4487035')
     version('2.0.12', sha256='7c3e819c2bdeb883de68bafe88776a01356f7ef565e75ba866c4b49a087c6bdf')
     version('2.0.11', sha256='3e099a59b2c527bcdbddd34e1952ca87462d2cef4c93da9b0bc03f02903f7089')
@@ -25,6 +27,10 @@ class Numactl(AutotoolsPackage):
     depends_on('libtool', type='build')
     depends_on('m4', type='build')

+    def autoreconf(self, spec, prefix):
+        bash = which('bash')
+        bash('./autogen.sh')
+
     def patch(self):
         # Remove flags not recognized by the NVIDIA compiler
         if self.spec.satisfies('%nvhpc'):