Compare commits
30 Commits
hs/test/tr
...
backports/
Commits in this compare (SHA1; avatars, author names, and dates did not survive extraction):

7af5a72e0d, a03ed8fbc5, 2bfcc69fa8, af62d91457, f32a74491e, 4f80f07b9a,
4a8ae59a9e, 6f7e881b69, 24bcaec6c3, 1c9bb36fdf, 044d1b12bb, 536468783d,
a298296237, 1db11ff06b, b621c045a7, 734ada1f10, 71712465e8, 7cc67b5c23,
1f9b0e39b9, d012683c1b, 8b3c3e9165, dd94a44b6a, 95d190e354, f124409d8a,
bee2132c04, d0ea33fa67, 4e913876b9, c25e43ce61, 85146d875b, c8167eec5d
.github/workflows/unit_tests.yaml (vendored, 18 changes)

@@ -52,7 +52,13 @@ jobs:
           # Needed for unit tests
           sudo apt-get -y install \
             coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
-            cmake bison libbison-dev kcov
+            cmake bison libbison-dev subversion
+      # On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
+      - name: Set up Homebrew
+        id: set-up-homebrew
+        uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
+      - name: Install kcov with brew
+        run: "brew install kcov"
       - name: Install Python packages
         run: |
           pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
@@ -99,7 +105,13 @@ jobs:
         run: |
           sudo apt-get -y update
           # Needed for shell tests
-          sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
+          sudo apt-get install -y coreutils csh zsh tcsh fish dash bash subversion
+      # On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
+      - name: Set up Homebrew
+        id: set-up-homebrew
+        uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
+      - name: Install kcov with brew
+        run: "brew install kcov"
       - name: Install Python packages
         run: |
           pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
@@ -134,7 +146,7 @@ jobs:
       - name: Setup repo and non-root user
         run: |
           git --version
-          git config --global --add safe.directory /__w/spack/spack
+          git config --global --add safe.directory '*'
           git fetch --unshallow
           . .github/workflows/bin/setup_git.sh
           useradd spack-test
.github/workflows/valid-style.yml (vendored, 2 changes)

@@ -74,7 +74,7 @@ jobs:
       - name: Setup repo and non-root user
        run: |
          git --version
-         git config --global --add safe.directory /__w/spack/spack
+         git config --global --add safe.directory '*'
         git fetch --unshallow
         . .github/workflows/bin/setup_git.sh
         useradd spack-test
CHANGELOG.md (30 changes)

@@ -1,3 +1,33 @@
+# v0.23.1 (2025-02-19)
+
+## Bugfixes
+- Fix a correctness issue of `ArchSpec.intersects` (#48741)
+- Make `extra_attributes` order independent in Spec hashing (#48615, #48854)
+- Fix issue where system proxy settings were not respected in OCI build caches (#48783)
+- Fix an issue where the `--test` concretizer flag was not forwarded correctly (#48417)
+- Fix an issue where `codesign` and `install_name_tool` would not preserve hardlinks on
+  Darwin (#47808)
+- Fix an issue on Darwin where codesign would run on unmodified binaries (#48568)
+- Patch configure scripts generated with libtool < 2.5.4, to avoid redundant flags when
+  creating shared libraries on Darwin (#48671)
+- Fix issue related to mirror URL paths on Windows (#47898)
+- Ensure proper UTF-8 encoding/decoding in logging (#48005)
+- Fix issues related to `filter_file` (#48038, #48108)
+- Fix issue related to creating bootstrap source mirrors (#48235)
+- Fix issue where command line config arguments were not always top level (#48255)
+- Fix an incorrect typehint of `concretized()` (#48504)
+- Improve mention of next Spack version in warning (#47887)
+- Tests: fix forward compatibility with Python 3.13 (#48209)
+- Docs: encourage use of `--oci-username-variable` and `--oci-password-variable` (#48189)
+- Docs: ensure Getting Started has bootstrap list output in correct place (#48281)
+- CI: allow GitHub actions to run on forks of Spack with different project name (#48041)
+- CI: make unit tests work on Ubuntu 24.04 (#48151)
+- CI: re-enable cray pipelines (#47697)
+
+## Package updates
+- `qt-base`: fix rpath for dependents (#47424)
+- `gdk-pixbuf`: fix outdated URL (#47825)
+
 # v0.23.0 (2024-11-13)

 `v0.23.0` is a major feature release.
@@ -265,25 +265,30 @@ infrastructure, or to cache Spack built binaries in Github Actions and
 GitLab CI.

 To get started, configure an OCI mirror using ``oci://`` as the scheme,
-and optionally specify a username and password (or personal access token):
+and optionally specify variables that hold the username and password (or
+personal access token) for the registry:

 .. code-block:: console

-   $ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
+   $ spack mirror add --oci-username-variable REGISTRY_USER \
+       --oci-password-variable REGISTRY_TOKEN \
+       my_registry oci://example.com/my_image

 Spack follows the naming conventions of Docker, with Dockerhub as the default
 registry. To use Dockerhub, you can omit the registry domain:

 .. code-block:: console

-   $ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
+   $ spack mirror add ... my_registry oci://username/my_image

 From here, you can use the mirror as any other build cache:

 .. code-block:: console

+   $ export REGISTRY_USER=...
+   $ export REGISTRY_TOKEN=...
    $ spack buildcache push my_registry <specs...>  # push to the registry
-   $ spack install <specs...>  # install from the registry
+   $ spack install <specs...>  # or install from the registry

 A unique feature of buildcaches on top of OCI registries is that it's incredibly
 easy to generate a runnable container image with the binaries installed. This
@@ -38,9 +38,11 @@ just have to configure an OCI registry and run ``spack buildcache push``.

    spack -e . install

    # Configure the registry
-   spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image
+   spack -e . mirror add --oci-username-variable REGISTRY_USER \
+       --oci-password-variable REGISTRY_TOKEN \
+       container-registry oci://example.com/name/image

-   # Push the image
+   # Push the image (do set REGISTRY_USER and REGISTRY_TOKEN)
    spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry

 The resulting container image can then be run as follows:
@@ -148,20 +148,22 @@ The first time you concretize a spec, Spack will bootstrap automatically:
    --------------------------------
    zlib@1.2.13%gcc@9.4.0+optimize+pic+shared build_system=makefile arch=linux-ubuntu20.04-icelake

+The default bootstrap behavior is to use pre-built binaries. You can verify the
+active bootstrap repositories with:
+
+.. command-output:: spack bootstrap list
+
 If for security concerns you cannot bootstrap ``clingo`` from pre-built
 binaries, you have to disable fetching the binaries we generated with Github Actions.

 .. code-block:: console

-   $ spack bootstrap disable github-actions-v0.4
-   ==> "github-actions-v0.4" is now disabled and will not be used for bootstrapping
-   $ spack bootstrap disable github-actions-v0.3
-   ==> "github-actions-v0.3" is now disabled and will not be used for bootstrapping
-
-You can verify that the new settings are effective with:
-
-.. command-output:: spack bootstrap list
+   $ spack bootstrap disable github-actions-v0.6
+   ==> "github-actions-v0.6" is now disabled and will not be used for bootstrapping
+   $ spack bootstrap disable github-actions-v0.5
+   ==> "github-actions-v0.5" is now disabled and will not be used for bootstrapping
+
+You can verify that the new settings are effective with ``spack bootstrap list``.

 .. note::
@@ -24,6 +24,7 @@
     Callable,
     Deque,
     Dict,
+    Generator,
     Iterable,
     List,
     Match,
@@ -300,35 +301,32 @@ def filter_file(
     ignore_absent: bool = False,
     start_at: Optional[str] = None,
     stop_at: Optional[str] = None,
+    encoding: Optional[str] = "utf-8",
 ) -> None:
     r"""Like sed, but uses python regular expressions.

-    Filters every line of each file through regex and replaces the file
-    with a filtered version. Preserves mode of filtered files.
+    Filters every line of each file through regex and replaces the file with a filtered version.
+    Preserves mode of filtered files.

-    As with re.sub, ``repl`` can be either a string or a callable.
-    If it is a callable, it is passed the match object and should
-    return a suitable replacement string. If it is a string, it
-    can contain ``\1``, ``\2``, etc. to represent back-substitution
-    as sed would allow.
+    As with re.sub, ``repl`` can be either a string or a callable. If it is a callable, it is
+    passed the match object and should return a suitable replacement string. If it is a string, it
+    can contain ``\1``, ``\2``, etc. to represent back-substitution as sed would allow.

     Args:
-        regex (str): The regular expression to search for
-        repl (str): The string to replace matches with
-        *filenames: One or more files to search and replace
-        string (bool): Treat regex as a plain string. Default is False
-        backup (bool): Make backup file(s) suffixed with ``~``. Default is False
-        ignore_absent (bool): Ignore any files that don't exist.
-            Default is False
-        start_at (str): Marker used to start applying the replacements. If a
-            text line matches this marker filtering is started at the next line.
-            All contents before the marker and the marker itself are copied
-            verbatim. Default is to start filtering from the first line of the
-            file.
-        stop_at (str): Marker used to stop scanning the file further. If a text
-            line matches this marker filtering is stopped and the rest of the
-            file is copied verbatim. Default is to filter until the end of the
-            file.
+        regex: The regular expression to search for
+        repl: The string to replace matches with
+        *filenames: One or more files to search and replace
+        string: Treat regex as a plain string. Default is False
+        backup: Make backup file(s) suffixed with ``~``. Default is False
+        ignore_absent: Ignore any files that don't exist. Default is False
+        start_at: Marker used to start applying the replacements. If a text line matches this
+            marker filtering is started at the next line. All contents before the marker and the
+            marker itself are copied verbatim. Default is to start filtering from the first line
+            of the file.
+        stop_at: Marker used to stop scanning the file further. If a text line matches this marker
+            filtering is stopped and the rest of the file is copied verbatim. Default is to filter
+            until the end of the file.
+        encoding: The encoding to use when reading and writing the files. Default is ``utf-8``;
+            ``None`` uses the system's default encoding.
     """
     # Allow strings to use \1, \2, etc. for replacement, like sed
     if not callable(repl):
@@ -344,72 +342,56 @@ def groupid_to_group(x):

     if string:
         regex = re.escape(regex)
-    for filename in path_to_os_path(*filenames):
-        msg = 'FILTER FILE: {0} [replacing "{1}"]'
-        tty.debug(msg.format(filename, regex))
-
-        backup_filename = filename + "~"
-        tmp_filename = filename + ".spack~"
-
-        if ignore_absent and not os.path.exists(filename):
-            msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
-            tty.debug(msg.format(filename))
+    regex_compiled = re.compile(regex)
+    for path in path_to_os_path(*filenames):
+        if ignore_absent and not os.path.exists(path):
+            tty.debug(f'FILTER FILE: file "{path}" not found. Skipping to next file.')
             continue
+        else:
+            tty.debug(f'FILTER FILE: {path} [replacing "{regex}"]')

-        # Create backup file. Don't overwrite an existing backup
-        # file in case this file is being filtered multiple times.
-        if not os.path.exists(backup_filename):
-            shutil.copy(filename, backup_filename)
+        fd, temp_path = tempfile.mkstemp(
+            prefix=f"{os.path.basename(path)}.", dir=os.path.dirname(path)
+        )
+        os.close(fd)

-        # Create a temporary file to read from. We cannot use backup_filename
-        # in case filter_file is invoked multiple times on the same file.
-        shutil.copy(filename, tmp_filename)
+        shutil.copy(path, temp_path)
+        errored = False

         try:
-            # Open as a text file and filter until the end of the file is
-            # reached, or we found a marker in the line if it was specified
-            #
-            # To avoid translating line endings (\n to \r\n and vice-versa)
-            # we force os.open to ignore translations and use the line endings
-            # the file comes with
-            with open(tmp_filename, mode="r", errors="surrogateescape", newline="") as input_file:
-                with open(filename, mode="w", errors="surrogateescape", newline="") as output_file:
-                    do_filtering = start_at is None
-                    # Using iter and readline is a workaround needed not to
-                    # disable input_file.tell(), which will happen if we call
-                    # input_file.next() implicitly via the for loop
-                    for line in iter(input_file.readline, ""):
-                        if stop_at is not None:
-                            current_position = input_file.tell()
+            # Open as a text file and filter until the end of the file is reached, or we found a
+            # marker in the line if it was specified. To avoid translating line endings (\n to
+            # \r\n and vice-versa) use newline="".
+            with open(
+                temp_path, mode="r", errors="surrogateescape", newline="", encoding=encoding
+            ) as input_file, open(
+                path, mode="w", errors="surrogateescape", newline="", encoding=encoding
+            ) as output_file:
+                if start_at is None and stop_at is None:  # common case, avoids branching in loop
+                    for line in input_file:
+                        output_file.write(re.sub(regex_compiled, repl, line))
+                else:
+                    # state is -1 before start_at; 0 between; 1 after stop_at
+                    state = 0 if start_at is None else -1
+                    for line in input_file:
+                        if state == 0:
                             if stop_at == line.strip():
-                                output_file.write(line)
-                                break
-                        if do_filtering:
-                            filtered_line = re.sub(regex, repl, line)
-                            output_file.write(filtered_line)
-                        else:
-                            do_filtering = start_at == line.strip()
-                            output_file.write(line)
-                    else:
-                        current_position = None
-
-            # If we stopped filtering at some point, reopen the file in
-            # binary mode and copy verbatim the remaining part
-            if current_position and stop_at:
-                with open(tmp_filename, mode="rb") as input_binary_buffer:
-                    input_binary_buffer.seek(current_position)
-                    with open(filename, mode="ab") as output_binary_buffer:
-                        output_binary_buffer.writelines(input_binary_buffer.readlines())
+                                state = 1
+                            else:
+                                line = re.sub(regex_compiled, repl, line)
+                        elif state == -1 and start_at == line.strip():
+                            state = 0
+                        output_file.write(line)

         except BaseException:
-            # clean up the original file on failure.
-            shutil.move(backup_filename, filename)
+            # restore the original file
+            os.rename(temp_path, path)
+            errored = True
             raise

         finally:
-            os.remove(tmp_filename)
-            if not backup and os.path.exists(backup_filename):
-                os.remove(backup_filename)
+            if not errored and not backup:
+                os.unlink(temp_path)


 class FileFilter:
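For orientation, a minimal usage sketch of the rewritten ``filter_file`` (the file names and patterns are hypothetical; it behaves like ``sed``, with optional markers delimiting the filtered region):

```python
# A minimal sketch, assuming Spack's llnl.util.filesystem is importable.
from llnl.util.filesystem import filter_file

# Replace a hard-coded compiler assignment, keeping a "Makefile~" backup.
filter_file(r"^CC\s*=.*", "CC = cc", "Makefile", backup=True)

# Only rewrite lines strictly between two marker lines; everything before
# start_at and after stop_at is copied verbatim.
filter_file("foo", "bar", "config.h", start_at="#ifdef FOO", stop_at="#endif")
```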
@@ -2838,6 +2820,25 @@ def temporary_dir(
     remove_directory_contents(tmp_dir)


+@contextmanager
+def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
+    """Context manager for modifying ``file_path`` in place, preserving its inode and hardlinks,
+    for functions or external tools that do not support in-place editing. Notice that this
+    function is unsafe in that it works with paths instead of file descriptors, but this is by
+    design, since we assume the call site will create a new inode at the same path."""
+    tmp_fd, tmp_path = tempfile.mkstemp(
+        dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
+    )
+    # windows cannot replace a file with open fds, so close since the call site needs to replace.
+    os.close(tmp_fd)
+    try:
+        shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
+        yield tmp_path
+        shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
+    finally:
+        os.unlink(tmp_path)
+
+
 def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
     """Create a small summary of the given file. Does not error
     when file does not exist.
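To illustrate the intended call pattern of ``edit_in_place_through_temporary_file``, here is a hedged sketch; the ``strip`` invocation is only an illustrative stand-in for tools like ``codesign`` or ``install_name_tool`` that replace the file they edit:

```python
import subprocess

import llnl.util.filesystem as fs


def strip_in_place(path: str) -> None:
    # The external tool edits (or replaces) the temporary copy; the context
    # manager copies the bytes back into the original inode on exit, so any
    # hardlinks to `path` still see the new contents.
    with fs.edit_in_place_through_temporary_file(path) as tmp:
        subprocess.run(["strip", tmp], check=True)
```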
@@ -879,10 +879,13 @@ def _writer_daemon(
     write_fd.close()

     # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
-    #    that prevents unbuffered text I/O.
-    # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
+    #    that prevents unbuffered text I/O. [needs citation]
+    # 2. Enforce a UTF-8 interpretation of build process output with errors replaced by '?'.
+    #    The downside is that the log file will not contain the exact output of the build process.
     # 3. closefd=False because Connection has "ownership"
-    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
+    read_file = os.fdopen(
+        read_fd.fileno(), "r", 1, encoding="utf-8", errors="replace", closefd=False
+    )

     if stdin_fd:
         stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
@@ -928,11 +931,7 @@ def _writer_daemon(
         try:
             while line_count < 100:
                 # Handle output from the calling process.
-                try:
-                    line = _retry(read_file.readline)()
-                except UnicodeDecodeError:
-                    # installs like --test=root gpgme produce non-UTF8 logs
-                    line = "<line lost: output was not encoded as UTF-8>\n"
+                line = _retry(read_file.readline)()

                 if not line:
                     return
@@ -946,6 +945,13 @@ def _writer_daemon(
                     output_line = clean_line
                     if filter_fn:
                         output_line = filter_fn(clean_line)
+                    enc = sys.stdout.encoding
+                    if enc != "utf-8":
+                        # On Python 3.6 and 3.7-3.14 with non-{utf-8,C} locale stdout
+                        # may not be able to handle utf-8 output. We do an inefficient
+                        # dance of re-encoding with errors replaced, so stdout.write
+                        # does not raise.
+                        output_line = output_line.encode(enc, "replace").decode(enc)
                     sys.stdout.write(output_line)

                 # Stripped output to log file.
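The effect of ``errors="replace"`` can be seen in a standalone, self-contained sketch (not Spack code): invalid UTF-8 no longer raises ``UnicodeDecodeError`` mid-stream, it decodes to U+FFFD replacement characters instead.

```python
import io

raw = io.BytesIO(b"\xc3helloworld\n")  # 0xC3 starts a 2-byte sequence that never completes
text = io.TextIOWrapper(raw, encoding="utf-8", errors="replace", newline="")
assert text.readline() == "\ufffdhelloworld\n"  # bad byte becomes U+FFFD, no exception
```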
@@ -11,9 +11,21 @@
 import spack.util.git

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.23.0"
+__version__ = "0.23.2.dev0"
 spack_version = __version__

+#: The current Package API version implemented by this version of Spack. The Package API defines
+#: the Python interface for packages as well as the layout of package repositories. The minor
+#: version is incremented when the package API is extended in a backwards-compatible way. The
+#: major version is incremented upon breaking changes. This version is changed independently from
+#: the Spack version.
+package_api_version = (1, 0)
+
+#: The minimum Package API version that this version of Spack is compatible with. This should
+#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
+#: compatibility with vX.0.
+min_package_api_version = (1, 0)
+

 def __try_int(v):
     try:
@@ -80,4 +92,6 @@ def get_short_version() -> str:
     "get_version",
     "get_spack_commit",
     "get_short_version",
+    "package_api_version",
+    "min_package_api_version",
 ]
@@ -1366,14 +1366,8 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):

     def _compare_extra_attribute(_expected, _detected, *, _spec):
         result = []
-        # Check items are of the same type
-        if not isinstance(_detected, type(_expected)):
-            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
-            _details = [f"{_detected} was detected instead"]
-            return [error_cls(summary=_summary, details=_details)]
-
         # If they are string expected is a regex
-        if isinstance(_expected, str):
+        if isinstance(_expected, str) and isinstance(_detected, str):
             try:
                 _regex = re.compile(_expected)
             except re.error:
@@ -1389,7 +1383,7 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
             _details = [f"{_detected} does not match the regex"]
             return [error_cls(summary=_summary, details=_details)]

-        if isinstance(_expected, dict):
+        elif isinstance(_expected, dict) and isinstance(_detected, dict):
             _not_detected = set(_expected.keys()) - set(_detected.keys())
             if _not_detected:
                 _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
@@ -1404,6 +1398,10 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                 result.extend(
                     _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
                 )
+        else:
+            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
+            _details = [f"{_detected} was detected instead"]
+            return [error_cls(summary=_summary, details=_details)]

         return result
@@ -2332,7 +2332,9 @@ def is_backup_file(file):
     if not codesign:
         return
     for binary in changed_files:
-        codesign("-fs-", binary)
+        # preserve the original inode by running codesign on a copy
+        with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
+            codesign("-fs-", tmp_binary)

     # If we are installing back to the same location
     # relocate the sbang location if the spack directory changed
@@ -357,6 +357,13 @@ def _do_patch_libtool_configure(self):
         )
         # Support Libtool 2.4.2 and older:
         x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
+        # Configure scripts generated with libtool < 2.5.4 have a faulty test for the
+        # -single_module linker flag. A deprecation warning makes it think the default is
+        # -multi_module, triggering it to use problematic linker flags (such as ld -r). The
+        # linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
+        # `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
+        # 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
+        x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)

     @spack.builder.run_after("configure")
     def _do_patch_libtool(self):
@@ -167,7 +167,9 @@ def quote_kvp(string: str) -> str:


 def parse_specs(
-    args: Union[str, List[str]], concretize: bool = False, tests: bool = False
+    args: Union[str, List[str]],
+    concretize: bool = False,
+    tests: spack.concretize.TestsType = False,
 ) -> List[spack.spec.Spec]:
     """Convenience function for parsing arguments from specs. Handles common
     exceptions and dies if there are errors.
@@ -179,11 +181,13 @@ def parse_specs(
     if not concretize:
         return specs

-    to_concretize = [(s, None) for s in specs]
+    to_concretize: List[spack.concretize.SpecPairInput] = [(s, None) for s in specs]
     return _concretize_spec_pairs(to_concretize, tests=tests)


-def _concretize_spec_pairs(to_concretize, tests=False):
+def _concretize_spec_pairs(
+    to_concretize: List[spack.concretize.SpecPairInput], tests: spack.concretize.TestsType = False
+) -> List[spack.spec.Spec]:
     """Helper method that concretizes abstract specs from a list of abstract,concrete pairs.

     Any spec with a concrete spec associated with it will concretize to that spec. Any spec
@@ -194,7 +198,7 @@ def _concretize_spec_pairs(to_concretize, tests=False):
     # Special case for concretizing a single spec
     if len(to_concretize) == 1:
         abstract, concrete = to_concretize[0]
-        return [concrete or abstract.concretized()]
+        return [concrete or abstract.concretized(tests=tests)]

     # Special case if every spec is either concrete or has an abstract hash
     if all(
@@ -29,7 +29,7 @@

 # Tarball to be downloaded if binary packages are requested in a local mirror
-BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.4/bootstrap-buildcache.tar.gz"
+BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz"

 #: Subdirectory where to create the mirror
 LOCAL_MIRROR_DIR = "bootstrap_cache"
@@ -51,9 +51,9 @@
     },
 }

-CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/clingo.json"
-GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/gnupg.json"
-PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/patchelf.json"
+CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/clingo.json"
+GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/gnupg.json"
+PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/patchelf.json"

 # Metadata for a generated source mirror
 SOURCE_METADATA = {
@@ -528,6 +528,7 @@ def __call__(self, parser, namespace, values, option_string):
         # the const from the constructor or a value from the CLI.
         # Note that this is only called if the argument is actually
         # specified on the command line.
+        spack.config.CONFIG.ensure_scope_ordering()
         spack.config.set(self.config_path, self.const, scope="command_line")
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import datetime
 import os
 import re
 from collections import defaultdict
@@ -97,7 +96,7 @@ def list_files(args):
 OLD_LICENSE, SPDX_MISMATCH, GENERAL_MISMATCH = range(1, 4)

 #: Latest year that copyright applies. UPDATE THIS when bumping copyright.
-latest_year = datetime.date.today().year
+latest_year = 2024  # year of 0.23 release
 strict_date = r"Copyright 2013-%s" % latest_year

 #: regexes for valid license lines at tops of files
@@ -6,7 +6,7 @@
 import sys
 import time
 from contextlib import contextmanager
-from typing import Iterable, Optional, Sequence, Tuple, Union
+from typing import Iterable, List, Optional, Sequence, Tuple, Union

 import llnl.util.tty as tty
@@ -36,6 +36,7 @@ def enable_compiler_existence_check():
     CHECK_COMPILER_EXISTENCE = saved


+SpecPairInput = Tuple[Spec, Optional[Spec]]
 SpecPair = Tuple[Spec, Spec]
 SpecLike = Union[Spec, str]
 TestsType = Union[bool, Iterable[str]]
@@ -60,8 +61,8 @@ def concretize_specs_together(


 def concretize_together(
-    spec_list: Sequence[SpecPair], tests: TestsType = False
-) -> Sequence[SpecPair]:
+    spec_list: Sequence[SpecPairInput], tests: TestsType = False
+) -> List[SpecPair]:
     """Given a number of specs as input, tries to concretize them together.

     Args:
@@ -77,8 +78,8 @@ def concretize_together(


 def concretize_together_when_possible(
-    spec_list: Sequence[SpecPair], tests: TestsType = False
-) -> Sequence[SpecPair]:
+    spec_list: Sequence[SpecPairInput], tests: TestsType = False
+) -> List[SpecPair]:
     """Given a number of specs as input, tries to concretize them together to the extent possible.

     See documentation for ``unify: when_possible`` concretization for the precise definition of
@@ -114,8 +115,8 @@ def concretize_together_when_possible(


 def concretize_separately(
-    spec_list: Sequence[SpecPair], tests: TestsType = False
-) -> Sequence[SpecPair]:
+    spec_list: Sequence[SpecPairInput], tests: TestsType = False
+) -> List[SpecPair]:
     """Concretizes the input specs separately from each other.

     Args:
@@ -431,6 +431,19 @@ def ensure_unwrapped(self) -> "Configuration":
         """Ensure we unwrap this object from any dynamic wrapper (like Singleton)"""
         return self

+    def highest(self) -> ConfigScope:
+        """Scope with highest precedence"""
+        return next(reversed(self.scopes.values()))  # type: ignore
+
+    @_config_mutator
+    def ensure_scope_ordering(self):
+        """Ensure that scope order matches documented precedent"""
+        # FIXME: We also need to consider that custom configurations and other orderings
+        # may not be preserved correctly
+        if "command_line" in self.scopes:
+            # TODO (when dropping python 3.6): self.scopes.move_to_end
+            self.scopes["command_line"] = self.remove_scope("command_line")
+
     @_config_mutator
     def push_scope(self, scope: ConfigScope) -> None:
         """Add a higher precedence scope to the Configuration."""
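The re-assignment in ``ensure_scope_ordering`` relies on dict insertion order: removing and re-inserting a key moves it to the end, which is the highest-precedence slot here. A standalone plain-dict sketch of the mechanism (the real code goes through ``remove_scope`` so that mutation bookkeeping fires):

```python
scopes = {"defaults": 1, "command_line": 2, "env": 3}
# pop + re-insert moves "command_line" to the end of the ordered dict
scopes["command_line"] = scopes.pop("command_line")
assert list(scopes) == ["defaults", "env", "command_line"]
```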
@@ -3044,11 +3044,13 @@ def prepare_config_scope(self) -> None:
         """Add the manifest's scopes to the global configuration search path."""
         for scope in self.env_config_scopes:
             spack.config.CONFIG.push_scope(scope)
+        spack.config.CONFIG.ensure_scope_ordering()

     def deactivate_config_scope(self) -> None:
         """Remove any of the manifest's scopes from the global config path."""
         for scope in self.env_config_scopes:
             spack.config.CONFIG.remove_scope(scope.name)
+        spack.config.CONFIG.ensure_scope_ordering()

     @contextlib.contextmanager
     def use_config(self):
@@ -397,6 +397,7 @@ def create_opener():
     """Create an opener that can handle OCI authentication."""
     opener = urllib.request.OpenerDirector()
     for handler in [
+        urllib.request.ProxyHandler(),
         urllib.request.UnknownHandler(),
         urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
         spack.util.web.SpackHTTPDefaultErrorHandler(),
@@ -13,6 +13,7 @@
 import macholib.mach_o
 import macholib.MachO

+import llnl.util.filesystem as fs
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.lang import memoized
@@ -275,10 +276,10 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):

     # Deduplicate and flatten
     args = list(itertools.chain.from_iterable(llnl.util.lang.dedupe(args)))
+    install_name_tool = executable.Executable("install_name_tool")
     if args:
-        args.append(str(cur_path))
-        install_name_tool = executable.Executable("install_name_tool")
-        install_name_tool(*args)
+        with fs.edit_in_place_through_temporary_file(cur_path) as temp_path:
+            install_name_tool(*args, temp_path)


 def macholib_get_paths(cur_path):
@@ -717,8 +718,8 @@ def fixup_macos_rpath(root, filename):
         # No fixes needed
         return False

-    args.append(abspath)
-    executable.Executable("install_name_tool")(*args)
+    with fs.edit_in_place_through_temporary_file(abspath) as temp_path:
+        executable.Executable("install_name_tool")(*args, temp_path)
     return True
@@ -209,7 +209,7 @@ def _apply_to_file(self, f):
         # but it's nasty to deal with matches across boundaries, so let's stick to
         # something simple.

-        modified = True
+        modified = False

         for match in self.regex.finditer(f.read()):
             # The matching prefix (old) and its replacement (new)
@@ -34,6 +34,7 @@
 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir

+import spack
 import spack.caches
 import spack.config
 import spack.error
@@ -50,6 +51,8 @@
 #: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
 ROOT_PYTHON_NAMESPACE = "spack.pkg"

+_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
+

 def python_package_for_repo(namespace):
     """Returns the full namespace of a repository, given its relative one
@@ -946,19 +949,52 @@ def __reduce__(self):
         return RepoPath.unmarshal, self.marshal()


+def _parse_package_api_version(
+    config: Dict[str, Any],
+    min_api: Tuple[int, int] = spack.min_package_api_version,
+    max_api: Tuple[int, int] = spack.package_api_version,
+) -> Tuple[int, int]:
+    api = config.get("api")
+    if api is None:
+        package_api = (1, 0)
+    else:
+        if not isinstance(api, str):
+            raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
+        api_match = _API_REGEX.match(api)
+        if api_match is None:
+            raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
+        package_api = (int(api_match.group(1)), int(api_match.group(2)))
+
+    if min_api <= package_api <= max_api:
+        return package_api
+
+    min_str = ".".join(str(i) for i in min_api)
+    max_str = ".".join(str(i) for i in max_api)
+    curr_str = ".".join(str(i) for i in package_api)
+    raise BadRepoError(
+        f"Package API v{curr_str} is not supported by this version of Spack ("
+        f"must be between v{min_str} and v{max_str})"
+    )
+
+
 class Repo:
     """Class representing a package repository in the filesystem.

-    Each package repository must have a top-level configuration file
-    called `repo.yaml`.
+    Each package repository must have a top-level configuration file called `repo.yaml`.

-    Currently, `repo.yaml` must define:
+    It contains the following keys:

     `namespace`:
         A Python namespace where the repository's packages should live.

     `subdirectory`:
         An optional subdirectory name where packages are placed

+    `api`:
+        A string of the form vX.Y that indicates the Package API version. The default is "v1.0".
+        For the repo to be compatible with the current version of Spack, the version must be
+        greater than or equal to :py:data:`spack.min_package_api_version` and less than or equal
+        to :py:data:`spack.package_api_version`.
     """

     def __init__(
@@ -995,7 +1031,7 @@ def check(condition, msg):
             f"{os.path.join(root, repo_config_name)} must define a namespace.",
         )

-        self.namespace = config["namespace"]
+        self.namespace: str = config["namespace"]
         check(
             re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
             f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
@@ -1008,12 +1044,14 @@ def check(condition, msg):
         # Keep name components around for checking prefixes.
         self._names = self.full_namespace.split(".")

-        packages_dir = config.get("subdirectory", packages_dir_name)
+        packages_dir: str = config.get("subdirectory", packages_dir_name)
         self.packages_path = os.path.join(self.root, packages_dir)
         check(
             os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
         )

+        self.package_api = _parse_package_api_version(config)
+
         # Class attribute overrides by package name
         self.overrides = overrides or {}
@@ -1063,7 +1101,7 @@ def is_prefix(self, fullname: str) -> bool:
         parts = fullname.split(".")
         return self._names[: len(parts)] == parts

-    def _read_config(self) -> Dict[str, str]:
+    def _read_config(self) -> Dict[str, Any]:
         """Check for a YAML config file in this db's root directory."""
         try:
             with open(self.config_file) as reponame_file:
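The version check in ``_parse_package_api_version`` leans on Python's lexicographic tuple comparison. A small standalone sketch of the contract (the bounds are illustrative, mirroring the tests further down):

```python
import re

_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")

match = _API_REGEX.match("v1.2")
assert match is not None
api = (int(match.group(1)), int(match.group(2)))

# Tuple comparison is lexicographic, so (1, 0) <= (1, 2) <= (2, 3) holds:
# "v1.2" is accepted for min_api=(1, 0), max_api=(2, 3), while inputs such
# as "v2" or the float 2.0 are rejected by the regex/isinstance checks
# before the range check is ever reached.
assert (1, 0) <= api <= (2, 3)
```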
@@ -106,8 +106,8 @@
     {
         "names": ["install_missing_compilers"],
         "message": "The config:install_missing_compilers option has been deprecated in "
-        "Spack v0.23, and is currently ignored. It will be removed from config in "
-        "Spack v0.25.",
+        "Spack v0.23, and is currently ignored. It will be removed from config after "
+        "Spack v1.0.",
         "error": False,
     },
 ],
@@ -448,6 +448,9 @@ def _target_satisfies(self, other: "ArchSpec", strict: bool) -> bool:
         return bool(self._target_intersection(other))

     def _target_constrain(self, other: "ArchSpec") -> bool:
+        if self.target is None and other.target is None:
+            return False
+
         if not other._target_satisfies(self, strict=False):
             raise UnsatisfiableArchitectureSpecError(self, other)
@@ -496,21 +499,56 @@ def _target_intersection(self, other):
                 if (not s_min or o_comp >= s_min) and (not s_max or o_comp <= s_max):
                     results.append(o_min)
             else:
                 # Take intersection of two ranges
-                # Lots of comparisons needed
-                _s_min = _make_microarchitecture(s_min)
-                _s_max = _make_microarchitecture(s_max)
-                _o_min = _make_microarchitecture(o_min)
-                _o_max = _make_microarchitecture(o_max)
-
-                n_min = s_min if _s_min >= _o_min else o_min
-                n_max = s_max if _s_max <= _o_max else o_max
-                _n_min = _make_microarchitecture(n_min)
-                _n_max = _make_microarchitecture(n_max)
-                if _n_min == _n_max:
-                    results.append(n_min)
-                elif not n_min or not n_max or _n_min < _n_max:
-                    results.append("%s:%s" % (n_min, n_max))
+                # Take the "min" of the two max, if there is a partial ordering.
+                n_max = ""
+                if s_max and o_max:
+                    _s_max = _make_microarchitecture(s_max)
+                    _o_max = _make_microarchitecture(o_max)
+                    if _s_max.family != _o_max.family:
+                        continue
+                    if _s_max <= _o_max:
+                        n_max = s_max
+                    elif _o_max < _s_max:
+                        n_max = o_max
+                    else:
+                        continue
+                elif s_max:
+                    n_max = s_max
+                elif o_max:
+                    n_max = o_max
+
+                # Take the "max" of the two min.
+                n_min = ""
+                if s_min and o_min:
+                    _s_min = _make_microarchitecture(s_min)
+                    _o_min = _make_microarchitecture(o_min)
+                    if _s_min.family != _o_min.family:
+                        continue
+                    if _s_min >= _o_min:
+                        n_min = s_min
+                    elif _o_min > _s_min:
+                        n_min = o_min
+                    else:
+                        continue
+                elif s_min:
+                    n_min = s_min
+                elif o_min:
+                    n_min = o_min
+
+                if n_min and n_max:
+                    _n_min = _make_microarchitecture(n_min)
+                    _n_max = _make_microarchitecture(n_max)
+                    if _n_min.family != _n_max.family or not _n_min <= _n_max:
+                        continue
+                    if n_min == n_max:
+                        results.append(n_min)
+                    else:
+                        results.append(f"{n_min}:{n_max}")
+                elif n_min:
+                    results.append(f"{n_min}:")
+                elif n_max:
+                    results.append(f":{n_max}")
         return results

     def constrain(self, other: "ArchSpec") -> bool:
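A worked example of the new min/max logic, using the archspec library that backs these comparisons (this assumes ``archspec`` is installed; Spack vendors it). Intersecting ``x86_64:haswell`` with ``x86_64_v2:icelake`` keeps the larger of the two minimums and the smaller of the two maximums:

```python
import archspec.cpu

targets = archspec.cpu.TARGETS  # microarchitectures form a partial order

# "max" of the two minimums: x86_64 vs x86_64_v2 -> x86_64_v2
assert targets["x86_64_v2"] >= targets["x86_64"]
# "min" of the two maximums: haswell vs icelake -> haswell
assert targets["haswell"] <= targets["icelake"]
# hence x86_64:haswell intersected with x86_64_v2:icelake is x86_64_v2:haswell,
# matching the test_constrain case added in the tests further down.
```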
@@ -1500,9 +1538,8 @@ def __init__(
         self._external_path = external_path
         self.external_modules = Spec._format_module_list(external_modules)

-        # This attribute is used to store custom information for
-        # external specs. None signal that it was not set yet.
-        self.extra_attributes = None
+        # This attribute is used to store custom information for external specs.
+        self.extra_attributes: dict = {}

         # This attribute holds the original build copy of the spec if it is
         # deployed differently than it was built. None signals that the spec
@@ -2217,8 +2254,8 @@ def to_node_dict(self, hash=ht.dag_hash):
             d["external"] = syaml.syaml_dict(
                 [
                     ("path", self.external_path),
-                    ("module", self.external_modules),
-                    ("extra_attributes", self.extra_attributes),
+                    ("module", self.external_modules or None),
+                    ("extra_attributes", syaml.sorted_dict(self.extra_attributes)),
                 ]
             )
@@ -2956,7 +2993,7 @@ def _finalize_concretization(self):
         for spec in self.traverse():
             spec._cached_hash(ht.dag_hash)

-    def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "spack.spec.Spec":
+    def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "Spec":
         """This is a non-destructive version of concretize().

         First clones, then returns a concrete version of this package
@@ -3079,18 +3116,13 @@ def constrain(self, other, deps=True):
             if not self.variants[v].compatible(other.variants[v]):
                 raise vt.UnsatisfiableVariantSpecError(self.variants[v], other.variants[v])

-        # TODO: Check out the logic here
         sarch, oarch = self.architecture, other.architecture
-        if sarch is not None and oarch is not None:
-            if sarch.platform is not None and oarch.platform is not None:
-                if sarch.platform != oarch.platform:
-                    raise UnsatisfiableArchitectureSpecError(sarch, oarch)
-            if sarch.os is not None and oarch.os is not None:
-                if sarch.os != oarch.os:
-                    raise UnsatisfiableArchitectureSpecError(sarch, oarch)
-            if sarch.target is not None and oarch.target is not None:
-                if sarch.target != oarch.target:
-                    raise UnsatisfiableArchitectureSpecError(sarch, oarch)
+        if (
+            sarch is not None
+            and oarch is not None
+            and not self.architecture.intersects(other.architecture)
+        ):
+            raise UnsatisfiableArchitectureSpecError(sarch, oarch)

         changed = False
@@ -3113,18 +3145,12 @@ def constrain(self, other, deps=True):

         changed |= self.compiler_flags.constrain(other.compiler_flags)

-        old = str(self.architecture)
         sarch, oarch = self.architecture, other.architecture
-        if sarch is None or other.architecture is None:
-            self.architecture = sarch or oarch
-        else:
-            if sarch.platform is None or oarch.platform is None:
-                self.architecture.platform = sarch.platform or oarch.platform
-            if sarch.os is None or oarch.os is None:
-                sarch.os = sarch.os or oarch.os
-            if sarch.target is None or oarch.target is None:
-                sarch.target = sarch.target or oarch.target
-        changed |= str(self.architecture) != old
+        if sarch is not None and oarch is not None:
+            changed |= self.architecture.constrain(other.architecture)
+        elif oarch is not None:
+            self.architecture = oarch
+            changed = True

         if deps:
             changed |= self._constrain_dependencies(other)
@@ -4835,8 +4861,8 @@ def from_node_dict(cls, node):
             spec.external_modules = node["external"]["module"]
             if spec.external_modules is False:
                 spec.external_modules = None
-            spec.extra_attributes = node["external"].get(
-                "extra_attributes", syaml.syaml_dict()
+            spec.extra_attributes = (
+                node["external"].get("extra_attributes") or syaml.syaml_dict()
             )

         # specs read in are concrete unless marked abstract
@@ -487,7 +487,7 @@ def _generate_fetchers(self, mirror_only=False) -> Generator["fs.FetchStrategy",
         # Insert fetchers in the order that the URLs are provided.
         fetchers[:0] = (
             fs.from_url_scheme(
-                url_util.join(mirror.fetch_url, self.mirror_layout.path),
+                url_util.join(mirror.fetch_url, *self.mirror_layout.path.split(os.sep)),
                 checksum=digest,
                 expand=expand,
                 extension=extension,
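This one-line stage fix targets Windows, where ``self.mirror_layout.path`` contains backslashes that must not leak into a URL; splitting on ``os.sep`` before joining yields forward-slash URL components. A standalone sketch, with ``posixpath`` standing in for Spack's ``url_util.join`` and a hypothetical mirror URL:

```python
import posixpath

fetch_url = "https://mirror.example.com/cache"  # hypothetical mirror
layout_path = r"blobs\sha256\abc123"            # what the layout path looks like on Windows

# splitting on the platform separator ("\\" on Windows) keeps backslashes
# out of the resulting URL
components = layout_path.split("\\")
assert posixpath.join(fetch_url, *components) == (
    "https://mirror.example.com/cache/blobs/sha256/abc123"
)
```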
@@ -1532,3 +1532,30 @@ def test_config_path_dsl(path, it_should_work, expected_parsed):
     else:
         with pytest.raises(ValueError):
             spack.config.ConfigPath._validate(path)
+
+
+@pytest.mark.regression("48254")
+def test_env_activation_preserves_config_scopes(mutable_mock_env_path):
+    """Check that the "command_line" scope remains the highest priority scope, when we activate,
+    or deactivate, environments.
+    """
+    expected_cl_scope = spack.config.CONFIG.highest()
+    assert expected_cl_scope.name == "command_line"
+
+    # Creating an environment pushes a new scope
+    ev.create("test")
+    with ev.read("test"):
+        assert spack.config.CONFIG.highest() == expected_cl_scope
+
+        # No active environment pops the scope
+        with ev.no_active_environment():
+            assert spack.config.CONFIG.highest() == expected_cl_scope
+        assert spack.config.CONFIG.highest() == expected_cl_scope
+
+        # Switch the environment to another one
+        ev.create("test-2")
+        with ev.read("test-2"):
+            assert spack.config.CONFIG.highest() == expected_cl_scope
+        assert spack.config.CONFIG.highest() == expected_cl_scope
+
+    assert spack.config.CONFIG.highest() == expected_cl_scope
@@ -1249,3 +1249,14 @@ def test_find_input_types(tmp_path: pathlib.Path):

     with pytest.raises(TypeError):
         fs.find(1, "file.txt")  # type: ignore
+
+
+def test_edit_in_place_through_temporary_file(tmp_path):
+    (tmp_path / "example.txt").write_text("Hello")
+    current_ino = os.stat(tmp_path / "example.txt").st_ino
+    with fs.edit_in_place_through_temporary_file(tmp_path / "example.txt") as temporary:
+        os.unlink(temporary)
+        with open(temporary, "w") as f:
+            f.write("World")
+    assert (tmp_path / "example.txt").read_text() == "World"
+    assert os.stat(tmp_path / "example.txt").st_ino == current_ino
@@ -298,30 +298,6 @@ def inner():
             top-level raised TypeError: ok"""
     )

-    full_message = h.grouped_message(with_tracebacks=True)
-    no_line_numbers = re.sub(r"line [0-9]+,", "line xxx,", full_message)
-
-    assert (
-        no_line_numbers
-        == dedent(
-            """\
-            due to the following failures:
-                inner method raised ValueError: wow!
-                  File "{0}", line xxx, in test_grouped_exception
-                    inner()
-                  File "{0}", line xxx, in inner
-                    raise ValueError("wow!")
-
-                top-level raised TypeError: ok
-                  File "{0}", line xxx, in test_grouped_exception
-                    raise TypeError("ok")
-            """
-        ).format(__file__)
-    )
-

 def test_grouped_exception_base_type():
     h = llnl.util.lang.GroupedExceptionHandler()
@@ -57,18 +57,16 @@ def test_log_python_output_without_echo(capfd, tmpdir):
     assert capfd.readouterr()[0] == ""


-def test_log_python_output_with_invalid_utf8(capfd, tmpdir):
-    with tmpdir.as_cwd():
-        with log.log_output("foo.txt"):
-            sys.stdout.buffer.write(b"\xc3\x28\n")
+def test_log_python_output_with_invalid_utf8(capfd, tmp_path):
+    tmp_file = str(tmp_path / "foo.txt")
+    with log.log_output(tmp_file, echo=True):
+        sys.stdout.buffer.write(b"\xc3helloworld\n")

-    expected = b"<line lost: output was not encoded as UTF-8>\n"
-    with open("foo.txt", "rb") as f:
-        written = f.read()
-    assert written == expected
+    # we should be able to read this as valid utf-8
+    with open(tmp_file, "r", encoding="utf-8") as f:
+        assert f.read() == "�helloworld\n"

-    # nothing on stdout or stderr
-    assert capfd.readouterr()[0] == ""
+    assert capfd.readouterr().out == "�helloworld\n"


 def test_log_python_output_and_echo_output(capfd, tmpdir):
@@ -302,3 +302,48 @@ def test_get_repo(self, mock_test_cache):
         # foo is not there, raise
         with pytest.raises(spack.repo.UnknownNamespaceError):
             repo.get_repo("foo")
+
+
+def test_parse_package_api_version():
+    """Test that we raise an error if a repository has a version that is not supported."""
+    # valid version
+    assert spack.repo._parse_package_api_version(
+        {"api": "v1.2"}, min_api=(1, 0), max_api=(2, 3)
+    ) == (1, 2)
+    # too new and too old
+    with pytest.raises(
+        spack.repo.BadRepoError,
+        match=r"Package API v2.4 is not supported .* \(must be between v1.0 and v2.3\)",
+    ):
+        spack.repo._parse_package_api_version({"api": "v2.4"}, min_api=(1, 0), max_api=(2, 3))
+    with pytest.raises(
+        spack.repo.BadRepoError,
+        match=r"Package API v0.9 is not supported .* \(must be between v1.0 and v2.3\)",
+    ):
+        spack.repo._parse_package_api_version({"api": "v0.9"}, min_api=(1, 0), max_api=(2, 3))
+    # default to v1.0 if not specified
+    assert spack.repo._parse_package_api_version({}, min_api=(1, 0), max_api=(2, 3)) == (1, 0)
+    # if v1.0 support is dropped we should also raise
+    with pytest.raises(
+        spack.repo.BadRepoError,
+        match=r"Package API v1.0 is not supported .* \(must be between v2.0 and v2.3\)",
+    ):
+        spack.repo._parse_package_api_version({}, min_api=(2, 0), max_api=(2, 3))
+    # finally test invalid input
+    with pytest.raises(spack.repo.BadRepoError, match="Invalid Package API version"):
+        spack.repo._parse_package_api_version({"api": "v2"}, min_api=(1, 0), max_api=(3, 3))
+    with pytest.raises(spack.repo.BadRepoError, match="Invalid Package API version"):
+        spack.repo._parse_package_api_version({"api": 2.0}, min_api=(1, 0), max_api=(3, 3))
+
+
+def test_repo_package_api_version(tmp_path: pathlib.Path):
+    """Test that we can specify the API version of a repository."""
+    (tmp_path / "example" / "packages").mkdir(parents=True)
+    (tmp_path / "example" / "repo.yaml").write_text(
+        """\
+repo:
+  namespace: example
+"""
+    )
+    cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
+    assert spack.repo.Repo(str(tmp_path / "example"), cache=cache).package_api == (1, 0)
@@ -138,3 +138,19 @@ def test_round_trip_configuration(initial_content, expected_final_content, tmp_p
     expected_final_content = initial_content

     assert final_content.getvalue() == expected_final_content
+
+
+def test_sorted_dict():
+    assert syaml.sorted_dict(
+        {
+            "z": 0,
+            "y": [{"x": 0, "w": [2, 1, 0]}, 0],
+            "v": ({"u": 0, "t": 0, "s": 0}, 0, {"r": 0, "q": 0}),
+            "p": 0,
+        }
+    ) == {
+        "p": 0,
+        "v": ({"s": 0, "t": 0, "u": 0}, 0, {"q": 0, "r": 0}),
+        "y": [{"w": [2, 1, 0], "x": 0}, 0],
+        "z": 0,
+    }
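For reference, the behavior pinned by this test can be sketched in a few lines. The real ``syaml.sorted_dict`` lives in Spack's ``spack_yaml`` module and also preserves syaml node types; this is only an illustrative reimplementation of the recursion the test exercises:

```python
def sorted_dict(data):
    """Recursively sort dict keys; lists and tuples keep their element
    order, but their elements are themselves sorted recursively."""
    if isinstance(data, dict):
        return {k: sorted_dict(v) for k, v in sorted(data.items())}
    if isinstance(data, (list, tuple)):
        return type(data)(sorted_dict(v) for v in data)
    return data
```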
@@ -1842,6 +1842,16 @@ def test_abstract_contains_semantic(lhs, rhs, expected, mock_packages):
         # Different virtuals intersect if there is at least package providing both
         (Spec, "mpi", "lapack", (True, False, False)),
         (Spec, "mpi", "pkgconfig", (False, False, False)),
+        # Intersection among target ranges for different architectures
+        (Spec, "target=x86_64:", "target=ppc64le:", (False, False, False)),
+        (Spec, "target=x86_64:", "target=:power9", (False, False, False)),
+        (Spec, "target=:haswell", "target=:power9", (False, False, False)),
+        (Spec, "target=:haswell", "target=ppc64le:", (False, False, False)),
+        # Intersection among target ranges for the same architecture
+        (Spec, "target=:haswell", "target=x86_64:", (True, True, True)),
+        (Spec, "target=:haswell", "target=x86_64_v4:", (False, False, False)),
+        # Edge case of uarch that split in a diamond structure, from a common ancestor
+        (Spec, "target=:cascadelake", "target=:cannonlake", (False, False, False)),
     ],
 )
 def test_intersects_and_satisfies(factory, lhs_str, rhs_str, results):
@@ -1891,6 +1901,16 @@ def test_intersects_and_satisfies(factory, lhs_str, rhs_str, results):
         # Flags
         (Spec, "cppflags=-foo", "cppflags=-foo", False, "cppflags=-foo"),
         (Spec, "cppflags=-foo", "cflags=-foo", True, "cppflags=-foo cflags=-foo"),
+        # Target ranges
+        (Spec, "target=x86_64:", "target=x86_64:", False, "target=x86_64:"),
+        (Spec, "target=x86_64:", "target=:haswell", True, "target=x86_64:haswell"),
+        (
+            Spec,
+            "target=x86_64:haswell",
+            "target=x86_64_v2:icelake",
+            True,
+            "target=x86_64_v2:haswell",
+        ),
     ],
 )
 def test_constrain(factory, lhs_str, rhs_str, result, constrained_str):
@@ -21,6 +21,7 @@
|
||||
import pytest
|
||||
import ruamel.yaml
|
||||
|
||||
import spack.config
|
||||
import spack.hash_types as ht
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
@@ -144,86 +145,83 @@ def descend_and_check(iterable, level=0):
    assert level >= 5


def test_ordered_read_not_required_for_consistent_dag_hash(config, mock_packages):
@pytest.mark.parametrize("spec_str", ["mpileaks ^zmpi", "dttop", "dtuse"])
def test_ordered_read_not_required_for_consistent_dag_hash(
    spec_str, mutable_config: spack.config.Configuration, mock_packages
):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ["mpileaks ^zmpi", "dttop", "dtuse"]
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()
    For consistent hashes, we require that YAML and JSON serializations have their keys in a
    deterministic order. However, we don't want to require them to be serialized in order. This
    ensures that is not required."""

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()
    # Make sure that `extra_attributes` of externals is order independent for hashing.
    extra_attributes = {
        "compilers": {"c": "/some/path/bin/cc", "cxx": "/some/path/bin/c++"},
        "foo": "bar",
        "baz": "qux",
    }
    mutable_config.set(
        "packages:dtuse",
        {
            "buildable": False,
            "externals": [
                {"spec": "dtuse@=1.0", "prefix": "/usr", "extra_attributes": extra_attributes}
            ],
        },
    )

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())
    spec = spack.spec.Spec(spec_str).concretized()

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict, default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)
    if spec_str == "dtuse":
        assert spec.external and spec.extra_attributes == extra_attributes

        #
        # Do many consistency checks
        #
    spec_dict = spec.to_dict(hash=ht.dag_hash)
    spec_yaml = spec.to_yaml()
    spec_json = spec.to_json()

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json
    # Make a spec with dict keys reversed recursively
    spec_dict_rev = reverse_all_dicts(spec_dict)

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string
    # Dump to YAML and JSON
    yaml_string = syaml.dump(spec_dict, default_flow_style=False)
    yaml_string_rev = syaml.dump(spec_dict_rev, default_flow_style=False)
    json_string = sjson.dump(spec_dict)
    json_string_rev = sjson.dump(spec_dict_rev)

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
        round_trip_reversed_yaml_spec = Spec.from_yaml(reversed_yaml_string)
        round_trip_reversed_json_spec = Spec.from_yaml(reversed_json_string)
    # spec yaml is ordered like the spec dict
    assert yaml_string == spec_yaml
    assert json_string == spec_json

        # Strip spec if we stripped the yaml
        spec = spec.copy(deps=ht.dag_hash.depflag)
    # reversed string is different from the original, so it *would* generate a different hash
    assert yaml_string != yaml_string_rev
    assert json_string != json_string_rev

        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
    # build specs from the "wrongly" ordered data
    from_yaml = Spec.from_yaml(yaml_string)
    from_json = Spec.from_json(json_string)
    from_yaml_rev = Spec.from_yaml(yaml_string_rev)
    from_json_rev = Spec.from_json(json_string_rev)

        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec
        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
    # Strip spec if we stripped the yaml
    spec = spec.copy(deps=ht.dag_hash.depflag)

        # dag_hash is equal after round-trip by dag_hash
        spec.concretize()
        round_trip_yaml_spec.concretize()
        round_trip_json_spec.concretize()
        round_trip_reversed_yaml_spec.concretize()
        round_trip_reversed_json_spec.concretize()
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
    # specs and their hashes are equal to the original
    assert (
        spec.process_hash()
        == from_yaml.process_hash()
        == from_json.process_hash()
        == from_yaml_rev.process_hash()
        == from_json_rev.process_hash()
    )
    assert (
        spec.dag_hash()
        == from_yaml.dag_hash()
        == from_json.dag_hash()
        == from_yaml_rev.dag_hash()
        == from_json_rev.dag_hash()
    )
    assert spec == from_yaml == from_json == from_yaml_rev == from_json_rev


@pytest.mark.parametrize("module", [spack.spec, spack.version])
@@ -294,13 +292,10 @@ def visit_Call(self, node):
def reverse_all_dicts(data):
    """Descend into data and reverse all the dictionaries"""
    if isinstance(data, dict):
        return syaml_dict(
            reversed([(reverse_all_dicts(k), reverse_all_dicts(v)) for k, v in data.items()])
        )
        return type(data)((k, reverse_all_dicts(v)) for k, v in reversed(list(data.items())))
    elif isinstance(data, (list, tuple)):
        return type(data)(reverse_all_dicts(elt) for elt in data)
    else:
        return data
    return data


def check_specs_equal(original_spec, spec_yaml_path):
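The rewritten `reverse_all_dicts` now preserves the concrete mapping type instead of forcing `syaml_dict`, and no longer recurses into keys (they are plain scalars anyway). A sketch of its behavior on plain dicts (Python 3.7+ dicts preserve insertion order):

data = {"a": 1, "b": {"c": 2, "d": 3}}
rev = reverse_all_dicts(data)
assert list(rev) == ["b", "a"]       # top-level keys come back reversed
assert list(rev["b"]) == ["d", "c"]  # nested dicts are reversed too
assert rev == data                   # dict equality ignores key order, which is the point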
@@ -13,7 +13,7 @@
import sys

from llnl.util import tty
from llnl.util.filesystem import join_path
from llnl.util.filesystem import edit_in_place_through_temporary_file
from llnl.util.lang import memoized

from spack.util.executable import Executable, which

@@ -81,12 +81,11 @@ def fix_darwin_install_name(path):
    Parameters:
        path (str): directory in which .dylib files are located
    """
    libs = glob.glob(join_path(path, "*.dylib"))
    libs = glob.glob(os.path.join(path, "*.dylib"))
    install_name_tool = Executable("install_name_tool")
    otool = Executable("otool")
    for lib in libs:
        # fix install name first:
        install_name_tool = Executable("install_name_tool")
        install_name_tool("-id", lib, lib)
        otool = Executable("otool")
        args = ["-id", lib]
        long_deps = otool("-L", lib, output=str).split("\n")
        deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
        # fix all dependencies:

@@ -98,5 +97,8 @@ def fix_darwin_install_name(path):
            # but we don't know builddir (nor how symbolic links look
            # in builddir). We thus only compare the basenames.
            if os.path.basename(dep) == os.path.basename(loc):
                install_name_tool("-change", dep, loc, lib)
                args.extend(("-change", dep, loc))
                break

        with edit_in_place_through_temporary_file(lib) as tmp:
            install_name_tool(*args, tmp)
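Two things change here: all `-id`/`-change` edits are batched into a single `install_name_tool` invocation, and that invocation runs against a temporary copy whose contents are then written back over the original file, so the rewrite lands on the original inode and hardlinked copies stay in sync. A hedged sketch of what such a context manager has to do (the real `edit_in_place_through_temporary_file` in `llnl.util.filesystem` may differ in details):

import os
import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def edit_in_place_sketch(path):
    # Hypothetical stand-in for llnl.util.filesystem.edit_in_place_through_temporary_file.
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(os.path.abspath(path)))
    os.close(fd)
    shutil.copy2(path, tmp)
    try:
        yield tmp  # the caller's tool edits the temporary copy...
        with open(tmp, "rb") as src, open(path, "wb") as dst:
            shutil.copyfileobj(src, dst)  # ...then we overwrite in place, keeping the inode
    finally:
        os.unlink(tmp)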
@@ -447,20 +447,13 @@ def _dump_annotated(handler, data, stream=None):
    return getvalue()


def sorted_dict(dict_like):
    """Return an ordered dict with all the fields sorted recursively.

    Args:
        dict_like (dict): dictionary to be sorted

    Returns:
        dictionary sorted recursively
    """
    result = syaml_dict(sorted(dict_like.items()))
    for key, value in result.items():
        if isinstance(value, collections.abc.Mapping):
            result[key] = sorted_dict(value)
    return result
def sorted_dict(data):
    """Descend into data and sort all dictionary keys."""
    if isinstance(data, dict):
        return type(data)((k, sorted_dict(v)) for k, v in sorted(data.items()))
    elif isinstance(data, (list, tuple)):
        return type(data)(sorted_dict(v) for v in data)
    return data


def extract_comments(data):
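The old `sorted_dict` only recursed into mappings, so dictionaries nested inside lists were left unsorted; the rewrite fixes that and keeps the input's mapping type. For illustration, on plain containers:

data = {"b": [{"d": 1, "c": 2}], "a": 3}
out = sorted_dict(data)
assert list(out) == ["a", "b"]          # keys sorted at every level
assert list(out["b"][0]) == ["c", "d"]  # including dicts nested inside lists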
@@ -14,10 +14,10 @@ default:
  image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }

# CI Platform-Arch
.cray_rhel_zen4:
.cray_rhel_x86_64_v3:
  variables:
    SPACK_TARGET_PLATFORM: "cray-rhel"
    SPACK_TARGET_ARCH: "zen4"
    SPACK_TARGET_ARCH: "x86_64_v3"

.cray_sles_zen4:
  variables:

@@ -876,7 +876,7 @@ aws-pcluster-build-neoverse_v1:
    - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true

.generate-cray-rhel:
  tags: [ "cray-rhel-zen4", "public" ]
  tags: [ "cray-rhel-x86_64_v3", "public" ]
  extends: [ ".generate-cray" ]

.generate-cray-sles:

@@ -888,7 +888,7 @@ aws-pcluster-build-neoverse_v1:
# E4S - Cray RHEL
#######################################
.e4s-cray-rhel:
  extends: [ ".cray_rhel_zen4" ]
  extends: [ ".cray_rhel_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: e4s-cray-rhel

@@ -896,7 +896,6 @@ e4s-cray-rhel-generate:
  extends: [ ".generate-cray-rhel", ".e4s-cray-rhel" ]

e4s-cray-rhel-build:
  allow_failure: true # libsci_cray.so broken, misses DT_NEEDED for libdl.so
  extends: [ ".build", ".e4s-cray-rhel" ]
  trigger:
    include:

@@ -915,10 +914,10 @@ e4s-cray-rhel-build:
  variables:
    SPACK_CI_STACK_NAME: e4s-cray-sles

e4s-cray-sles-generate:
.e4s-cray-sles-generate:
  extends: [ ".generate-cray-sles", ".e4s-cray-sles" ]

e4s-cray-sles-build:
.e4s-cray-sles-build:
  allow_failure: true # libsci_cray.so broken, misses DT_NEEDED for libdl.so
  extends: [ ".build", ".e4s-cray-sles" ]
  trigger:
@@ -1,31 +1,27 @@
compilers:
- compiler:
    spec: cce@15.0.1
    spec: cce@=18.0.0
    paths:
      cc: cc
      cxx: CC
      f77: ftn
      fc: ftn
      cc: /opt/cray/pe/cce/18.0.0/bin/craycc
      cxx: /opt/cray/pe/cce/18.0.0/bin/crayCC
      f77: /opt/cray/pe/cce/18.0.0/bin/crayftn
      fc: /opt/cray/pe/cce/18.0.0/bin/crayftn
    flags: {}
    operating_system: rhel8
    target: any
    modules:
    - PrgEnv-cray/8.3.3
    - cce/15.0.1
    environment:
      set:
        MACHTYPE: x86_64
    target: x86_64
    modules: []
    environment: {}
    extra_rpaths: []
- compiler:
    spec: gcc@11.2.0
    spec: gcc@=8.5.0
    paths:
      cc: gcc
      cxx: g++
      f77: gfortran
      fc: gfortran
      cc: /usr/bin/gcc
      cxx: /usr/bin/g++
      f77: /usr/bin/gfortran
      fc: /usr/bin/gfortran
    flags: {}
    operating_system: rhel8
    target: any
    modules:
    - PrgEnv-gnu
    - gcc/11.2.0
    environment: {}
    target: x86_64
    modules: []
    environment: {}
    extra_rpaths: []
@@ -1,16 +1,15 @@
packages:
  # EXTERNALS
  cray-mpich:
    buildable: false
    externals:
    - spec: cray-mpich@8.1.25 %cce@15.0.1
      prefix: /opt/cray/pe/mpich/8.1.25/ofi/cray/10.0
    - spec: cray-mpich@8.1.30 %cce
      prefix: /opt/cray/pe/mpich/8.1.30/ofi/cray/18.0
      modules:
      - cray-mpich/8.1.25
      - cray-mpich/8.1.30
  cray-libsci:
    buildable: false
    externals:
    - spec: cray-libsci@23.02.1.1 %cce@15.0.1
      prefix: /opt/cray/pe/libsci/23.02.1.1/CRAY/9.0/x86_64/
    - spec: cray-libsci@24.07.0 %cce
      prefix: /opt/cray/pe/libsci/24.07.0/CRAY/18.0/x86_64/
      modules:
      - cray-libsci/23.02.1.1
      - cray-libsci/24.07.0
@@ -0,0 +1,4 @@
ci:
  pipeline-gen:
  - build-job:
      tags: ["cray-rhel-x86_64_v3"]
@@ -1,4 +0,0 @@
ci:
  pipeline-gen:
  - build-job:
      tags: ["cray-rhel-zen4"]
@@ -10,8 +10,7 @@ spack:

  packages:
    all:
      prefer:
      - "%cce"
      require: "%cce@18.0.0 target=x86_64_v3"
      compiler: [cce]
      providers:
        blas: [cray-libsci]
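The switch from `prefer:` to `require:` is a hardening: `prefer` only biases the concretizer and can be overridden, while `require` pins every package in this stack to `%cce@18.0.0 target=x86_64_v3`.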
@@ -19,17 +18,15 @@ spack:
        mpi: [cray-mpich]
        tbb: [intel-tbb]
        scalapack: [netlib-scalapack]
      target: [zen4]
      variants: +mpi

    ncurses:
      require: +termlib ldflags=-Wl,--undefined-version
    tbb:
      require: "intel-tbb"
    binutils:
      variants: +ld +gold +headers +libiberty ~nls
    boost:
      variants: +python +filesystem +iostreams +system
    cuda:
      version: [11.7.0]
    elfutils:
      variants: ~nls
      require: "%gcc"
@@ -39,20 +36,14 @@ spack:
      variants: +fortran +hl +shared
    libfabric:
      variants: fabrics=sockets,tcp,udp,rxm
    libunwind:
      variants: +pic +xz
    mgard:
      require:
      - "@2023-01-10:"
    mpich:
      variants: ~wrapperrpath
    ncurses:
      variants: +termlib
    paraview:
      # Don't build GUI support or GLX rendering for HPC/container deployments
      require: "@5.11 ~qt ^[virtuals=gl] osmesa"
    python:
      version: [3.8.13]
      require: "~qt ^[virtuals=gl] osmesa"
    trilinos:
      require:
      - one_of: [+amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack
@@ -63,12 +54,6 @@ spack:
      - one_of: [~ml ~muelu ~zoltan2 ~teko, +ml +muelu +zoltan2 +teko]
      - one_of: [+superlu-dist, ~superlu-dist]
      - one_of: [+shylu, ~shylu]
    xz:
      variants: +pic
    mesa:
      version: [21.3.8]
    unzip:
      require: "%gcc"

  specs:
  # CPU
@@ -76,62 +61,43 @@ spack:
  - aml
  - arborx
  - argobots
  - bolt
  - butterflypack
  - boost +python +filesystem +iostreams +system
  - cabana
  - caliper
  - chai
  - charliecloud
  - conduit
  # - cp2k +mpi # libxsmm: ftn-78 ftn: ERROR in command linel; The -f option has an invalid argument, "tree-vectorize".
  - datatransferkit
  - flecsi
  - flit
  - flux-core
  - fortrilinos
  - ginkgo
  - globalarrays
  - gmp
  - gotcha
  - h5bench
  - hdf5-vol-async
  - hdf5-vol-cache
  - hdf5-vol-cache cflags=-Wno-error=incompatible-function-pointer-types
  - hdf5-vol-log
  - heffte +fftw
  - hpx max_cpu_count=512 networking=mpi
  - hypre
  - kokkos +openmp
  - kokkos-kernels +openmp
  - lammps
  - legion
  - libnrm
  #- libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +mgard # mgard:
  - libquo
  - libunwind
  - mercury
  - metall
  - mfem
  # - mgard +serial +openmp +timing +unstructured ~cuda # mgard
  - mpark-variant
  - mpifileutils ~xattr
  - mpifileutils ~xattr cflags=-Wno-error=implicit-function-declaration
  - nccmp
  - nco
  - netlib-scalapack
  - omega-h
  - openmpi
  - netlib-scalapack cflags=-Wno-error=implicit-function-declaration
  - openpmd-api ^adios2~mgard
  - papi
  - papyrus
  - pdt
  - petsc
  - plumed
  - precice
  - pumi
  - py-h5py +mpi
  - py-h5py ~mpi
  - py-libensemble +mpi +nlopt
  - py-petsc4py
  - qthreads scheduler=distrib
  - raja
  - slate ~cuda
@@ -144,8 +110,7 @@ spack:
  - swig@4.0.2-fortran
  - sz3
  - tasmanian
  - tau +mpi +python
  - trilinos@13.0.1 +belos +ifpack2 +stokhos
  - trilinos +belos +ifpack2 +stokhos
  - turbine
  - umap
  - umpire
@@ -155,27 +120,47 @@ spack:
  # - alquimia # pflotran: petsc-3.19.4-c6pmpdtpzarytxo434zf76jqdkhdyn37/lib/petsc/conf/rules:169: material_aux.o] Error 1: fortran errors
  # - amrex # disabled temporarily pending resolution of unreproducible CI failure
  # - axom # axom: CMake Error at axom/sidre/cmake_install.cmake:154 (file): file INSTALL cannot find "/tmp/gitlab-runner-2/spack-stage/spack-stage-axom-0.8.1-jvol6riu34vuyqvrd5ft2gyhrxdqvf63/spack-build-jvol6ri/lib/fortran/axom_spio.mod": No such file or directory.
  # - bolt # ld.lld: error: CMakeFiles/bolt-omp.dir/kmp_gsupport.cpp.o: symbol GOMP_atomic_end@@GOMP_1.0 has undefined version GOMP_1.0
  # - bricks # bricks: clang-15: error: clang frontend command failed with exit code 134 (use -v to see invocation)
  # - butterflypack ^netlib-scalapack cflags=-Wno-error=implicit-function-declaration # ftn-2116 ftn: INTERNAL "driver" was terminated due to receipt of signal 01: Hangup.
  # - caliper # papi: papi_internal.c:124:3: error: use of undeclared identifier '_papi_hwi_my_thread'; did you mean '_papi_hwi_read'?
  # - charliecloud # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - cp2k +mpi # libxsmm: ftn-78 ftn: ERROR in command linel; The -f option has an invalid argument, "tree-vectorize".
  # - dealii # llvm@14.0.6: ?; intel-tbb@2020.3: clang-15: error: unknown argument: '-flifetime-dse=1'; assimp@5.2.5: clang-15: error: clang frontend command failed with exit code 134 (use -v to see invocation)
  # - dyninst # requires %gcc
  # - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp ^hdf5@1.14 # llvm@14.0.6: ?;
  # - exaworks # rust: ld.lld: error: relocation R_X86_64_32 cannot be used against local symbol; recompile with -fPIC'; defined in /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o, referenced by /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o:(__no_mmap_for_malloc)
  # - flux-core # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - fortrilinos # trilinos-14.0.0: packages/teuchos/core/src/Teuchos_BigUIntDecl.hpp:67:8: error: no type named 'uint32_t' in namespace 'std'
  # - gasnet # configure error: User requested --enable-ofi but I don't know how to build ofi programs for your system
  # - gptune # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]
  # - hpctoolkit # dyninst requires %gcc
  # - hpx max_cpu_count=512 networking=mpi # libxcrypt-4.4.35
  # - lammps # lammps-20240829.1: Reversed (or previously applied) patch detected! Assume -R? [n]
  # - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +mgard # mgard:
  # - mgard +serial +openmp +timing +unstructured ~cuda # mgard
  # - nrm # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]
  # - nvhpc # requires %gcc
  # - omega-h # trilinos-13.4.1: packages/kokkos/core/src/impl/Kokkos_MemoryPool.cpp:112:48: error: unknown type name 'uint32_t'
  # - openmpi # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - papi # papi_internal.c:124:3: error: use of undeclared identifier '_papi_hwi_my_thread'; did you mean '_papi_hwi_read'?
  # - parsec ~cuda # parsec: parsec/fortran/CMakeFiles/parsec_fortran.dir/parsecf.F90.o: ftn-2103 ftn: WARNING in command line. The -W extra option is not supported or invalid and will be ignored.
  # - phist # fortran_bindings/CMakeFiles/phist_fort.dir/phist_testing.F90.o: ftn-78 ftn: ERROR in command line. The -f option has an invalid argument, "no-math-errno".
  # - plasma # %cce conflict
  # - plumed # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - py-h5py +mpi # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - py-h5py ~mpi # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - py-jupyterhub # rust: ld.lld: error: relocation R_X86_64_32 cannot be used against local symbol; recompile with -fPIC'; defined in /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o, referenced by /opt/cray/pe/cce/15.0.1/cce/x86_64/lib/no_mmap.o:(__no_mmap_for_malloc)
  # - py-libensemble +mpi +nlopt # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - py-petsc4py # libxcrypt-4.4.35: ld.lld: error: version script assignment of 'XCRYPT_2.0' to symbol 'xcrypt_r' failed: symbol not defined
  # - quantum-espresso # quantum-espresso: CMake Error at cmake/FindSCALAPACK.cmake:503 (message): A required library with SCALAPACK API not found. Please specify library
  # - scr # scr: make[2]: *** [examples/CMakeFiles/test_ckpt_F.dir/build.make:112: examples/test_ckpt_F] Error 1: /opt/cray/pe/cce/15.0.1/binutils/x86_64/x86_64-pc-linux-gnu/bin/ld: /opt/cray/pe/mpich/8.1.25/ofi/cray/10.0/lib/libmpi_cray.so: undefined reference to `PMI_Barrier'
  # - strumpack ~slate # strumpack: [test/CMakeFiles/test_HSS_seq.dir/build.make:117: test/test_HSS_seq] Error 1: ld.lld: error: undefined reference due to --no-allow-shlib-undefined: mpi_abort_
  # - tau +mpi +python # libelf: configure: error: installation or configuration problem: C compiler cannot create executables.; papi: papi_internal.c:124:3: error: use of undeclared identifier '_papi_hwi_my_thread'; did you mean '_papi_hwi_read'?
  # - upcxx # upcxx: configure error: User requested --enable-ofi but I don't know how to build ofi programs for your system
  # - variorum # variorum: /opt/cray/pe/cce/15.0.1/binutils/x86_64/x86_64-pc-linux-gnu/bin/ld: /opt/cray/pe/lib64/libpals.so.0: undefined reference to `json_array_append_new@@libjansson.so.4'
  # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu # openblas: ftn-2307 ftn: ERROR in command line: The "-m" option must be followed by 0, 1, 2, 3 or 4.; make[2]: *** [<builtin>: spotrf2.o] Error 1; make[1]: *** [Makefile:27: lapacklib] Error 2; make: *** [Makefile:250: netlib] Error 2
  # - warpx +python # py-scipy: meson.build:82:0: ERROR: Unknown compiler(s): [['/home/gitlab-runner-3/builds/dWfnZWPh/0/spack/spack/lib/spack/env/cce/ftn']]
  # - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu # openblas: ftn-2307 ftn: ERROR in command line: The "-m" option must be followed by 0, 1, 2, 3 or 4.; make[2]: *** [<builtin>: spotrf2.o] Error 1; make[1]: *** [Makefile:27: lapacklib] Error 2; make: *** [Makefile:250: netlib] Error 2

  cdash:
    build-group: E4S Cray
@@ -36,7 +36,7 @@ export QA_DIR=$(realpath $QA_DIR)
cd "$SPACK_ROOT"

# Run bash tests with coverage enabled, but pipe output to /dev/null
# because it seems that kcov seems to undo the script's redirection
# because it seems that kcov undoes the script's redirection
if [ "$COVERAGE" = true ]; then
    kcov "$SPACK_ROOT/coverage" "$QA_DIR/setup-env-test.sh" &> /dev/null
    kcov "$SPACK_ROOT/coverage" "$QA_DIR/completion-test.sh" &> /dev/null
@@ -12,33 +12,36 @@ class GdkPixbuf(MesonPackage):
    GTK+ 2 but it was split off into a separate package in preparation for the change to GTK+ 3."""

    homepage = "https://gitlab.gnome.org/GNOME/gdk-pixbuf"
    url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/2.40/gdk-pixbuf-2.40.0.tar.xz"
    git = "https://gitlab.gnome.org/GNOME/gdk-pixbuf"
    list_url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/"
    url = "https://gitlab.gnome.org/GNOME/gdk-pixbuf/-/archive/2.40.0/gdk-pixbuf-2.40.0.tar.gz"

    # Falling back to the gitlab source since the mirror here seems to be broken
    # url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/2.40/gdk-pixbuf-2.40.0.tar.xz"
    # list_url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/"
    list_depth = 1

    license("LGPL-2.1-or-later", checked_by="wdconinc")

    version("2.42.12", sha256="b9505b3445b9a7e48ced34760c3bcb73e966df3ac94c95a148cb669ab748e3c7")
    version("2.42.12", sha256="d41966831b3d291fcdfe31f683bea4b3f03241d591ddbe550b5db873af3da364")
    # https://nvd.nist.gov/vuln/detail/CVE-2022-48622
    version(
        "2.42.10",
        sha256="ee9b6c75d13ba096907a2e3c6b27b61bcd17f5c7ebeab5a5b439d2f2e39fe44b",
        sha256="87a086c51d9705698b22bd598a795efaccf61e4db3a96f439dcb3cd90506dab8",
        deprecated=True,
    )
    version(
        "2.42.9",
        sha256="28f7958e7bf29a32d4e963556d241d0a41a6786582ff6a5ad11665e0347fc962",
        sha256="226d950375907857b23c5946ae6d30128f08cd75f65f14b14334c7a9fb686e36",
        deprecated=True,
    )
    version(
        "2.42.6",
        sha256="c4a6b75b7ed8f58ca48da830b9fa00ed96d668d3ab4b1f723dcf902f78bde77f",
        sha256="c4f3a84a04bc7c5f4fbd97dce7976ab648c60628f72ad4c7b79edce2bbdb494d",
        deprecated=True,
    )
    version(
        "2.42.2",
        sha256="83c66a1cfd591d7680c144d2922c5955d38b4db336d7cd3ee109f7bcf9afef15",
        sha256="249b977279f761979104d7befbb5ee23f1661e29d19a36da5875f3a97952d13f",
        deprecated=True,
    )
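Note that every checksum changes along with the `url`: the package now fetches GitLab's generated `.tar.gz` archives instead of the GNOME mirror's `.tar.xz` release tarballs, and a different archive necessarily has a different sha256, so each retained version gets a new hash.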
@@ -65,9 +65,15 @@ def cmake_args(self):
            if re_qt.match(dep.name):
                qt_prefix_path.append(self.spec[dep.name].prefix)

        # Now append all qt-* dependency prefixex into a prefix path
        # Now append all qt-* dependency prefixes into a prefix path
        args.append(self.define("QT_ADDITIONAL_PACKAGES_PREFIX_PATH", ":".join(qt_prefix_path)))

        # Make our CMAKE_INSTALL_RPATH redundant:
        # for prefix of current package ($ORIGIN/../lib type of rpaths),
        args.append(self.define("QT_DISABLE_RPATH", True))
        # for prefixes of dependencies
        args.append(self.define("QT_NO_DISABLE_CMAKE_INSTALL_RPATH_USE_LINK_PATH", True))

        return args

    @run_after("install")