Compare commits: hs/fix/det ... hs/feature

5 commits:
  e9ada7a5d6
  14b51ce450
  dc3e124d1d
  225a4ed1ff
  ea823d2308
.github/workflows/audit.yaml (2 changed lines)

@@ -29,7 +29,7 @@ jobs:
  shell: ${{ matrix.system.shell }}
  steps:
  - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: ${{inputs.python_version}}
  - name: Install Python packages
.github/workflows/bootstrap.yml (45 changed lines)

@@ -63,7 +63,7 @@ jobs:
  uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
  with:
  fetch-depth: 0
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: "3.12"
  - name: Bootstrap clingo

@@ -112,10 +112,10 @@ jobs:
  runs-on: ${{ matrix.runner }}
  strategy:
  matrix:
- runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
+ runner: ['macos-13', 'macos-14', "ubuntu-latest"]
  steps:
  - name: Setup macOS
- if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest'}}
+ if: ${{ matrix.runner != 'ubuntu-latest' }}
  run: |
  brew install tree
  # Remove GnuPG since we want to bootstrap it

@@ -124,16 +124,11 @@ jobs:
  if: ${{ matrix.runner == 'ubuntu-latest' }}
  run: |
  sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
- - name: Setup Windows
- if: ${{ matrix.runner == 'windows-latest' }}
- run: |
- Remove-Item -Path (Get-Command gpg).Path
- Remove-Item -Path (Get-Command file).Path
  - name: Checkout
  uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
  with:
  fetch-depth: 0
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: |
  3.8

@@ -142,20 +137,11 @@ jobs:
  3.11
  3.12
- - name: Set bootstrap sources
- env:
- SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
- SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
- run: |
- ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
- spack bootstrap disable github-actions-v0.4
  - name: Disable from source bootstrap
- if: ${{ matrix.runner != 'windows-latest' }}
  run: |
  source share/spack/setup-env.sh
+ spack bootstrap disable github-actions-v0.4
  spack bootstrap disable spack-install
  - name: Bootstrap clingo
- # No binary clingo on Windows yet
- if: ${{ matrix.runner != 'windows-latest' }}
  run: |
  set -e
  for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do

@@ -178,24 +164,7 @@ jobs:
  fi
  done
  - name: Bootstrap GnuPG
- env:
- SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
- SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
- USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
- VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
  run: |
- ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+ source share/spack/setup-env.sh
  spack -d gpg list
- ${{ env.VALIDATE_LAST_EXIT }}
- tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
- - name: Bootstrap File
- env:
- SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
- SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
- USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
- VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
- run: |
- ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
- spack -d python share/spack/qa/bootstrap-file.py
- ${{ env.VALIDATE_LAST_EXIT }}
- tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+ tree ~/.spack/bootstrap/store/
.github/workflows/build-containers.yml (4 changed lines)

@@ -87,7 +87,7 @@ jobs:
  fi
  - name: Upload Dockerfile
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
+ uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
  with:
  name: dockerfiles_${{ matrix.dockerfile[0] }}
  path: dockerfiles

@@ -126,7 +126,7 @@ jobs:
  needs: deploy-images
  steps:
  - name: Merge Artifacts
- uses: actions/upload-artifact/merge@50769540e7f4bd5e21e526ee35c689e35e0d6874
+ uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
  with:
  name: dockerfiles
  pattern: dockerfiles_*
.github/workflows/nightly-win-builds.yml (2 changed lines)

@@ -17,7 +17,7 @@ jobs:
  - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
  with:
  fetch-depth: 0
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: 3.9
  - name: Install Python packages
.github/workflows/unit_tests.yaml (10 changed lines)

@@ -43,7 +43,7 @@ jobs:
  - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
  with:
  fetch-depth: 0
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: ${{ matrix.python-version }}
  - name: Install System packages

@@ -91,7 +91,7 @@ jobs:
  - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
  with:
  fetch-depth: 0
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: '3.11'
  - name: Install System packages

@@ -151,7 +151,7 @@ jobs:
  - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
  with:
  fetch-depth: 0
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: '3.11'
  - name: Install System packages

@@ -188,7 +188,7 @@ jobs:
  - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
  with:
  fetch-depth: 0
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: ${{ matrix.python-version }}
  - name: Install Python packages

@@ -225,7 +225,7 @@ jobs:
  - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
  with:
  fetch-depth: 0
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: 3.9
  - name: Install Python packages
.github/workflows/valid-style.yml (4 changed lines)

@@ -19,7 +19,7 @@ jobs:
  runs-on: ubuntu-latest
  steps:
  - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: '3.11'
  cache: 'pip'

@@ -38,7 +38,7 @@ jobs:
  - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
  with:
  fetch-depth: 0
- - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+ - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
  with:
  python-version: '3.11'
  cache: 'pip'
.gitignore (1 changed line)

@@ -7,6 +7,7 @@
  /var/spack/environments
  /var/spack/repos/*/index.yaml
  /var/spack/repos/*/lock
+ /var/spack/repos/*/packages.zip
  /opt
  # Ignore everything in /etc/spack except /etc/spack/defaults
  /etc/spack/*
@@ -115,6 +115,12 @@ config:
  suppress_gpg_warnings: false

+ # If set to true, Spack will attempt to build any compiler on the spec
+ # that is not already available. If set to False, Spack will only use
+ # compilers already configured in compilers.yaml
+ install_missing_compilers: false

  # If set to true, Spack will always check checksums after downloading
  # archives. If false, Spack skips the checksum step.
  checksum: true
@@ -72,13 +72,3 @@ packages:
  permissions:
  read: world
  write: user
- cray-mpich:
- buildable: false
- cray-mvapich2:
- buildable: false
- fujitsu-mpi:
- buildable: false
- hpcx-mpi:
- buildable: false
- spectrum-mpi:
- buildable: false
@@ -218,7 +218,6 @@ def setup(sphinx):
  ("py:class", "spack.spec.SpecfileReaderBase"),
  ("py:class", "spack.install_test.Pb"),
  ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
  ("py:class", "spack.traverse.EdgeAndDepth"),
  ]

  # The reST default role (used for this markup: `text`) to use for all documents.
@@ -181,6 +181,10 @@ Spec-related modules
  :mod:`spack.parser`
  Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.

+ :mod:`spack.concretize`
+ Contains :class:`~spack.concretize.Concretizer` implementation,
+ which allows site administrators to change Spack's :ref:`concretization-policies`.

  :mod:`spack.version`
  Implements a simple :class:`~spack.version.Version` class with simple
  comparison semantics. Also implements :class:`~spack.version.VersionRange`
@@ -863,7 +863,7 @@ named list ``compilers`` is ``['%gcc', '%clang', '%intel']`` on
  spack:
  definitions:
  - compilers: ['%gcc', '%clang']
- - when: arch.satisfies('target=x86_64:')
+ - when: arch.satisfies('x86_64:')
  compilers: ['%intel']

  .. note::
@@ -663,7 +663,11 @@ build the package.
  When including a bootstrapping phase as in the example above, the result is that
  the bootstrapped compiler packages will be pushed to the binary mirror (and the
- local artifacts mirror) before the actual release specs are built.
+ local artifacts mirror) before the actual release specs are built. In this case,
+ the jobs corresponding to subsequent release specs are configured to
+ ``install_missing_compilers``, so that if spack is asked to install a package
+ with a compiler it doesn't know about, it can be quickly installed from the
+ binary mirror first.

  Since bootstrapping compilers is optional, those items can be left out of the
  environment/stack file, and in that case no bootstrapping will be done (only the
@@ -27,6 +27,8 @@
  from llnl.util.lang import dedupe, memoized
  from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

+ from spack.util.executable import Executable, which

  from ..path import path_to_os_path, system_path_filter

  if sys.platform != "win32":

@@ -51,6 +53,7 @@
  "find_all_headers",
  "find_libraries",
  "find_system_libraries",
+ "fix_darwin_install_name",
  "force_remove",
  "force_symlink",
  "getuid",

@@ -245,6 +248,42 @@ def path_contains_subdirectory(path, root):
  return norm_path.startswith(norm_root)

+ @memoized
+ def file_command(*args):
+ """Creates entry point to `file` system command with provided arguments"""
+ file_cmd = which("file", required=True)
+ for arg in args:
+ file_cmd.add_default_arg(arg)
+ return file_cmd

+ @memoized
+ def _get_mime_type():
+ """Generate method to call `file` system command to aquire mime type
+ for a specified path
+ """
+ if sys.platform == "win32":
+ # -h option (no-dereference) does not exist in Windows
+ return file_command("-b", "--mime-type")
+ else:
+ return file_command("-b", "-h", "--mime-type")

+ def mime_type(filename):
+ """Returns the mime type and subtype of a file.
+
+ Args:
+ filename: file to be analyzed
+
+ Returns:
+ Tuple containing the MIME type and subtype
+ """
+ output = _get_mime_type()(filename, output=str, error=str).strip()
+ tty.debug("==> " + output)
+ type, _, subtype = output.partition("/")
+ return type, subtype

  #: This generates the library filenames that may appear on any OS.
  library_extensions = ["a", "la", "so", "tbd", "dylib"]

@@ -1640,6 +1679,41 @@ def safe_remove(*files_or_dirs):
  raise

+ @system_path_filter
+ def fix_darwin_install_name(path):
+ """Fix install name of dynamic libraries on Darwin to have full path.
+
+ There are two parts of this task:
+
+ 1. Use ``install_name('-id', ...)`` to change install name of a single lib
+ 2. Use ``install_name('-change', ...)`` to change the cross linking between
+ libs. The function assumes that all libraries are in one folder and
+ currently won't follow subfolders.
+
+ Parameters:
+ path (str): directory in which .dylib files are located
+ """
+ libs = glob.glob(join_path(path, "*.dylib"))
+ for lib in libs:
+ # fix install name first:
+ install_name_tool = Executable("install_name_tool")
+ install_name_tool("-id", lib, lib)
+ otool = Executable("otool")
+ long_deps = otool("-L", lib, output=str).split("\n")
+ deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
+ # fix all dependencies:
+ for dep in deps:
+ for loc in libs:
+ # We really want to check for either
+ # dep == os.path.basename(loc) or
+ # dep == join_path(builddir, os.path.basename(loc)),
+ # but we don't know builddir (nor how symbolic links look
+ # in builddir). We thus only compare the basenames.
+ if os.path.basename(dep) == os.path.basename(loc):
+ install_name_tool("-change", dep, loc, lib)
+ break

  def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) -> Optional[str]:
  """Find the first file matching a pattern.
@@ -257,6 +257,40 @@ def _search_duplicate_specs_in_externals(error_cls):
  return errors

+ @config_packages
+ def _deprecated_preferences(error_cls):
+ """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
+ # TODO (v0.23): remove this audit as the attributes will not be allowed in config
+ errors = []
+ packages_yaml = spack.config.CONFIG.get_config("packages")
+
+ def make_error(attribute_name, config_data, summary):
+ s = io.StringIO()
+ s.write("Occurring in the following file:\n")
+ dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
+ syaml.dump_config(dict_view, stream=s, blame=True)
+ return error_cls(summary=summary, details=[s.getvalue()])
+
+ if "all" in packages_yaml and "version" in packages_yaml["all"]:
+ summary = "Using the deprecated 'version' attribute under 'packages:all'"
+ errors.append(make_error("version", packages_yaml["all"], summary))
+
+ for package_name in packages_yaml:
+ if package_name == "all":
+ continue
+
+ package_conf = packages_yaml[package_name]
+ for attribute in ("compiler", "providers", "target"):
+ if attribute not in package_conf:
+ continue
+ summary = (
+ f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'"
+ )
+ errors.append(make_error(attribute, package_conf, summary))
+
+ return errors

  @config_packages
  def _avoid_mismatched_variants(error_cls):
  """Warns if variant preferences have mismatched types or names."""
@@ -54,7 +54,6 @@
  import spack.util.archive
  import spack.util.crypto
  import spack.util.file_cache as file_cache
- import spack.util.filesystem as ssys
  import spack.util.gpg
  import spack.util.parallel
  import spack.util.path

@@ -106,7 +105,7 @@ class BuildCacheDatabase(spack_db.Database):
  record_fields = ("spec", "ref_count", "in_buildcache")

  def __init__(self, root):
- super().__init__(root, lock_cfg=spack_db.NO_LOCK, layout=None)
+ super().__init__(root, lock_cfg=spack_db.NO_LOCK)
  self._write_transaction_impl = llnl.util.lang.nullcontext
  self._read_transaction_impl = llnl.util.lang.nullcontext

@@ -688,7 +687,7 @@ def get_buildfile_manifest(spec):
  # Non-symlinks.
  for rel_path in visitor.files:
  abs_path = os.path.join(root, rel_path)
- m_type, m_subtype = ssys.mime_type(abs_path)
+ m_type, m_subtype = fsys.mime_type(abs_path)

  if relocate.needs_binary_relocation(m_type, m_subtype):
  # Why is this branch not part of needs_binary_relocation? :(

@@ -789,9 +788,7 @@ def sign_specfile(key: str, specfile_path: str) -> str:
  return signed_specfile_path

- def _read_specs_and_push_index(
- file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
- ):
+ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
  """Read all the specs listed in the provided list, using thread given thread parallelism,
  generate the index, and push it to the mirror.

@@ -815,7 +812,7 @@
  else:
  continue

- db.add(fetched_spec)
+ db.add(fetched_spec, None)
  db.mark(fetched_spec, "in_buildcache", True)

  # Now generate the index, compute its hash, and push the two files to

@@ -1768,7 +1765,7 @@ def _oci_update_index(
  for spec_dict in spec_dicts:
  spec = Spec.from_dict(spec_dict)
- db.add(spec)
+ db.add(spec, directory_layout=None)
  db.mark(spec, "in_buildcache", True)

  # Create the index.json file

@@ -2564,8 +2561,9 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
  with spack.util.path.filter_padding():
  tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
  extract_tarball(spec, download_result, force)
+ spec.package.windows_establish_runtime_linkage()
  spack.hooks.post_install(spec, False)
- spack.store.STORE.db.add(spec)
+ spack.store.STORE.db.add(spec, spack.store.STORE.layout)

  def install_single_spec(spec, unsigned=False, force=False):
@@ -9,7 +9,6 @@
  all_core_root_specs,
  ensure_clingo_importable_or_raise,
  ensure_core_dependencies,
- ensure_file_in_path_or_raise,
  ensure_gpg_in_path_or_raise,
  ensure_patchelf_in_path_or_raise,
  )

@@ -20,7 +19,6 @@
  "is_bootstrapping",
  "ensure_bootstrap_configuration",
  "ensure_core_dependencies",
- "ensure_file_in_path_or_raise",
  "ensure_gpg_in_path_or_raise",
  "ensure_clingo_importable_or_raise",
  "ensure_patchelf_in_path_or_raise",
@@ -472,8 +472,7 @@ def ensure_clingo_importable_or_raise() -> None:
  def gnupg_root_spec() -> str:
  """Return the root spec used to bootstrap GnuPG"""
- root_spec_name = "win-gpg" if IS_WINDOWS else "gnupg"
- return _root_spec(f"{root_spec_name}@2.3:")
+ return _root_spec("gnupg@2.3:")

  def ensure_gpg_in_path_or_raise() -> None:

@@ -483,19 +482,6 @@ def ensure_gpg_in_path_or_raise() -> None:
  )

- def file_root_spec() -> str:
- """Return the root spec used to bootstrap file"""
- root_spec_name = "win-file" if IS_WINDOWS else "file"
- return _root_spec(root_spec_name)
-
- def ensure_file_in_path_or_raise() -> None:
- """Ensure file is in the PATH or raise"""
- return ensure_executables_in_path_or_raise(
- executables=["file"], abstract_spec=file_root_spec()
- )

  def patchelf_root_spec() -> str:
  """Return the root spec used to bootstrap patchelf"""
  # 0.13.1 is the last version not to require C++17.

@@ -579,15 +565,14 @@ def ensure_core_dependencies() -> None:
  """Ensure the presence of all the core dependencies."""
  if sys.platform.lower() == "linux":
  ensure_patchelf_in_path_or_raise()
- elif sys.platform == "win32":
- ensure_file_in_path_or_raise()
- ensure_gpg_in_path_or_raise()
+ if not IS_WINDOWS:
+ ensure_gpg_in_path_or_raise()
  ensure_clingo_importable_or_raise()

  def all_core_root_specs() -> List[str]:
  """Return a list of all the core root specs that may be used to bootstrap Spack"""
- return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()]
+ return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]

  def bootstrapping_sources(scope: Optional[str] = None):
@@ -88,7 +88,7 @@ def _core_requirements() -> List[RequiredResponseType]:
  def _buildcache_requirements() -> List[RequiredResponseType]:
  _buildcache_exes = {
- "file": _missing("file", "required to analyze files for buildcaches", system_only=False),
+ "file": _missing("file", "required to analyze files for buildcaches"),
  ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
  }
  if platform.system().lower() == "darwin":
@@ -1553,21 +1553,21 @@ class ModuleChangePropagator:
  _PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")

- def __init__(self, package: spack.package_base.PackageBase) -> None:
+ def __init__(self, package):
  self._set_self_attributes("package", package)
  self._set_self_attributes("current_module", package.module)

  #: Modules for the classes in the MRO up to PackageBase
  modules_in_mro = []
- for cls in package.__class__.__mro__:
- module = getattr(cls, "module", None)
+ for cls in type(package).__mro__:
+ module = cls.module

- if module is None or module is spack.package_base:
- break

- if module is self.current_module:
+ if module == self.current_module:
  continue

+ if module == spack.package_base:
+ break

  modules_in_mro.append(module)
  self._set_self_attributes("modules_in_mro", modules_in_mro)
  self._set_self_attributes("_set_attributes", {})
@@ -2,6 +2,7 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
  import os
  import os.path
  import stat

@@ -548,12 +549,13 @@ def autoreconf(self, pkg, spec, prefix):
  tty.warn("* a custom AUTORECONF phase in the package *")
  tty.warn("*********************************************************")
  with fs.working_dir(self.configure_directory):
+ m = inspect.getmodule(self.pkg)
  # This line is what is needed most of the time
  # --install, --verbose, --force
  autoreconf_args = ["-ivf"]
  autoreconf_args += self.autoreconf_search_path_args
  autoreconf_args += self.autoreconf_extra_args
- self.pkg.module.autoreconf(*autoreconf_args)
+ m.autoreconf(*autoreconf_args)

  @property
  def autoreconf_search_path_args(self):

@@ -577,9 +579,7 @@ def set_configure_or_die(self):
  raise RuntimeError(msg.format(self.configure_directory))

  # Monkey-patch the configure script in the corresponding module
- globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
- globals_for_pkg.configure = Executable(self.configure_abs_path)
- globals_for_pkg.propagate_changes_to_mro()
+ inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path)

  def configure_args(self):
  """Return the list of all the arguments that must be passed to configure,

@@ -596,7 +596,7 @@ def configure(self, pkg, spec, prefix):
  options += self.configure_args()

  with fs.working_dir(self.build_directory, create=True):
- pkg.module.configure(*options)
+ inspect.getmodule(self.pkg).configure(*options)

  def build(self, pkg, spec, prefix):
  """Run "make" on the build targets specified by the builder."""

@@ -604,12 +604,12 @@ def build(self, pkg, spec, prefix):
  params = ["V=1"]
  params += self.build_targets
  with fs.working_dir(self.build_directory):
- pkg.module.make(*params)
+ inspect.getmodule(self.pkg).make(*params)

  def install(self, pkg, spec, prefix):
  """Run "make" on the install targets specified by the builder."""
  with fs.working_dir(self.build_directory):
- pkg.module.make(*self.install_targets)
+ inspect.getmodule(self.pkg).make(*self.install_targets)

  spack.builder.run_after("build")(execute_build_time_tests)
@@ -3,6 +3,8 @@
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+ import inspect
+
  import llnl.util.filesystem as fs

  import spack.builder

@@ -70,7 +72,9 @@ def check_args(self):
  def build(self, pkg, spec, prefix):
  """Runs ``cargo install`` in the source directory"""
  with fs.working_dir(self.build_directory):
- pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
+ inspect.getmodule(pkg).cargo(
+ "install", "--root", "out", "--path", ".", *self.build_args
+ )

  def install(self, pkg, spec, prefix):
  """Copy build files into package prefix."""

@@ -82,4 +86,4 @@ def install(self, pkg, spec, prefix):
  def check(self):
  """Run "cargo test"."""
  with fs.working_dir(self.build_directory):
- self.pkg.module.cargo("test", *self.check_args)
+ inspect.getmodule(self.pkg).cargo("test", *self.check_args)
@@ -3,6 +3,7 @@
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
  import collections.abc
+ import inspect
  import os
  import pathlib
  import platform

@@ -107,11 +108,6 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
  if _supports_compilation_databases(pkg):
  args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))

- # Enable MACOSX_RPATH by default when cmake_minimum_required < 3
- # https://cmake.org/cmake/help/latest/policy/CMP0042.html
- if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
- args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))

  def generator(*names: str, default: Optional[str] = None):
  """The build system generator to use.

@@ -543,24 +539,24 @@ def cmake(self, pkg, spec, prefix):
  options += self.cmake_args()
  options.append(os.path.abspath(self.root_cmakelists_dir))
  with fs.working_dir(self.build_directory, create=True):
- pkg.module.cmake(*options)
+ inspect.getmodule(self.pkg).cmake(*options)

  def build(self, pkg, spec, prefix):
  """Make the build targets"""
  with fs.working_dir(self.build_directory):
  if self.generator == "Unix Makefiles":
- pkg.module.make(*self.build_targets)
+ inspect.getmodule(self.pkg).make(*self.build_targets)
  elif self.generator == "Ninja":
  self.build_targets.append("-v")
- pkg.module.ninja(*self.build_targets)
+ inspect.getmodule(self.pkg).ninja(*self.build_targets)

  def install(self, pkg, spec, prefix):
  """Make the install targets"""
  with fs.working_dir(self.build_directory):
  if self.generator == "Unix Makefiles":
- pkg.module.make(*self.install_targets)
+ inspect.getmodule(self.pkg).make(*self.install_targets)
  elif self.generator == "Ninja":
- pkg.module.ninja(*self.install_targets)
+ inspect.getmodule(self.pkg).ninja(*self.install_targets)

  spack.builder.run_after("build")(execute_build_time_tests)
@@ -145,8 +145,7 @@ def cuda_flags(arch_list):
  conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
  conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
  conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
- conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
- conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
+ conflicts("%clang@18:", when="+cuda ^cuda@:12.6")

  # https://gist.github.com/ax3l/9489132#gistcomment-3860114
  conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -3,6 +3,8 @@
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+ import inspect
+
  import llnl.util.filesystem as fs

  import spack.builder

@@ -80,7 +82,7 @@ def check_args(self):
  def build(self, pkg, spec, prefix):
  """Runs ``go build`` in the source directory"""
  with fs.working_dir(self.build_directory):
- pkg.module.go("build", *self.build_args)
+ inspect.getmodule(pkg).go("build", *self.build_args)

  def install(self, pkg, spec, prefix):
  """Install built binaries into prefix bin."""

@@ -93,4 +95,4 @@ def install(self, pkg, spec, prefix):
  def check(self):
  """Run ``go test .`` in the source directory"""
  with fs.working_dir(self.build_directory):
- self.pkg.module.go("test", *self.check_args)
+ inspect.getmodule(self.pkg).go("test", *self.check_args)
@@ -2,6 +2,7 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
  from typing import List

  import llnl.util.filesystem as fs

@@ -102,12 +103,12 @@ def edit(self, pkg, spec, prefix):
  def build(self, pkg, spec, prefix):
  """Run "make" on the build targets specified by the builder."""
  with fs.working_dir(self.build_directory):
- pkg.module.make(*self.build_targets)
+ inspect.getmodule(self.pkg).make(*self.build_targets)

  def install(self, pkg, spec, prefix):
  """Run "make" on the install targets specified by the builder."""
  with fs.working_dir(self.build_directory):
- pkg.module.make(*self.install_targets)
+ inspect.getmodule(self.pkg).make(*self.install_targets)

  spack.builder.run_after("build")(execute_build_time_tests)
@@ -2,6 +2,7 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
  import os
  from typing import List

@@ -194,19 +195,19 @@ def meson(self, pkg, spec, prefix):
  options += self.std_meson_args
  options += self.meson_args()
  with fs.working_dir(self.build_directory, create=True):
- pkg.module.meson(*options)
+ inspect.getmodule(self.pkg).meson(*options)

  def build(self, pkg, spec, prefix):
  """Make the build targets"""
  options = ["-v"]
  options += self.build_targets
  with fs.working_dir(self.build_directory):
- pkg.module.ninja(*options)
+ inspect.getmodule(self.pkg).ninja(*options)

  def install(self, pkg, spec, prefix):
  """Make the install targets"""
  with fs.working_dir(self.build_directory):
- pkg.module.ninja(*self.install_targets)
+ inspect.getmodule(self.pkg).ninja(*self.install_targets)

  spack.builder.run_after("build")(execute_build_time_tests)
@@ -2,6 +2,7 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
  from typing import List  # novm

  import llnl.util.filesystem as fs

@@ -103,7 +104,7 @@ def msbuild_install_args(self):
  def build(self, pkg, spec, prefix):
  """Run "msbuild" on the build targets specified by the builder."""
  with fs.working_dir(self.build_directory):
- pkg.module.msbuild(
+ inspect.getmodule(self.pkg).msbuild(
  *self.std_msbuild_args,
  *self.msbuild_args(),
  self.define_targets(*self.build_targets),

@@ -113,6 +114,6 @@ def install(self, pkg, spec, prefix):
  """Run "msbuild" on the install targets specified by the builder.
  This is INSTALL by default"""
  with fs.working_dir(self.build_directory):
- pkg.module.msbuild(
+ inspect.getmodule(self.pkg).msbuild(
  *self.msbuild_install_args(), self.define_targets(*self.install_targets)
  )
@@ -2,6 +2,7 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
  from typing import List  # novm

  import llnl.util.filesystem as fs

@@ -131,7 +132,9 @@ def build(self, pkg, spec, prefix):
  if self.makefile_name:
  opts.append("/F{}".format(self.makefile_name))
  with fs.working_dir(self.build_directory):
- pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)
+ inspect.getmodule(self.pkg).nmake(
+ *opts, *self.build_targets, ignore_quotes=self.ignore_quotes
+ )

  def install(self, pkg, spec, prefix):
  """Run "nmake" on the install targets specified by the builder.

@@ -143,4 +146,6 @@ def install(self, pkg, spec, prefix):
  opts.append("/F{}".format(self.makefile_name))
  opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
  with fs.working_dir(self.build_directory):
- pkg.module.nmake(*opts, *self.install_targets, ignore_quotes=self.ignore_quotes)
+ inspect.getmodule(self.pkg).nmake(
+ *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
+ )
@@ -2,6 +2,8 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
+
  import spack.builder
  import spack.package_base
  from spack.directives import build_system, extends

@@ -45,7 +47,7 @@ class OctaveBuilder(BaseBuilder):
  def install(self, pkg, spec, prefix):
  """Install the package from the archive file"""
- pkg.module.octave(
+ inspect.getmodule(self.pkg).octave(
  "--quiet",
  "--norc",
  "--built-in-docstrings-file=/dev/null",
@@ -2,6 +2,7 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
  import os
  from typing import Iterable

@@ -133,7 +134,7 @@ def build_method(self):
  def build_executable(self):
  """Returns the executable method to build the perl package"""
  if self.build_method == "Makefile.PL":
- build_executable = self.pkg.module.make
+ build_executable = inspect.getmodule(self.pkg).make
  elif self.build_method == "Build.PL":
  build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
  return build_executable

@@ -157,7 +158,7 @@ def configure(self, pkg, spec, prefix):
  options = ["Build.PL", "--install_base", prefix]
  options += self.configure_args()

- pkg.module.perl(*options)
+ inspect.getmodule(self.pkg).perl(*options)

  # It is possible that the shebang in the Build script that is created from
  # Build.PL may be too long causing the build to fail. Patching the shebang
@@ -4,6 +4,7 @@
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)

  import functools
+ import inspect
  import operator
  import os
  import re

@@ -227,7 +228,7 @@ def test_imports(self) -> None:
  # Make sure we are importing the installed modules,
  # not the ones in the source directory
- python = self.module.python
+ python = inspect.getmodule(self).python  # type: ignore[union-attr]
  for module in self.import_modules:
  with test_part(
  self,

@@ -314,9 +315,9 @@ def get_external_python_for_prefix(self):
  )

  python_externals_detected = [
- spec
- for spec in python_externals_detection.get("python", [])
- if spec.external_path == self.spec.external_path
+ d.spec
+ for d in python_externals_detection.get("python", [])
+ if d.prefix == self.spec.external_path
  ]
  if python_externals_detected:
  return python_externals_detected[0]
@@ -2,6 +2,8 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
+
  from llnl.util.filesystem import working_dir

  import spack.builder

@@ -64,17 +66,17 @@ def qmake_args(self):
  def qmake(self, pkg, spec, prefix):
  """Run ``qmake`` to configure the project and generate a Makefile."""
  with working_dir(self.build_directory):
- pkg.module.qmake(*self.qmake_args())
+ inspect.getmodule(self.pkg).qmake(*self.qmake_args())

  def build(self, pkg, spec, prefix):
  """Make the build targets"""
  with working_dir(self.build_directory):
- pkg.module.make()
+ inspect.getmodule(self.pkg).make()

  def install(self, pkg, spec, prefix):
  """Make the install targets"""
  with working_dir(self.build_directory):
- pkg.module.make("install")
+ inspect.getmodule(self.pkg).make("install")

  def check(self):
  """Search the Makefile for a ``check:`` target and runs it if found."""
@@ -2,10 +2,10 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
  from typing import Optional, Tuple

  import llnl.util.lang as lang
  from llnl.util.filesystem import mkdirp

  from spack.directives import extends

@@ -37,7 +37,6 @@ def configure_vars(self):
  def install(self, pkg, spec, prefix):
  """Installs an R package."""
- mkdirp(pkg.module.r_lib_dir)

  config_args = self.configure_args()
  config_vars = self.configure_vars()

@@ -45,14 +44,14 @@ def install(self, pkg, spec, prefix):
  args = ["--vanilla", "CMD", "INSTALL"]

  if config_args:
- args.append(f"--configure-args={' '.join(config_args)}")
+ args.append("--configure-args={0}".format(" ".join(config_args)))

  if config_vars:
- args.append(f"--configure-vars={' '.join(config_vars)}")
+ args.append("--configure-vars={0}".format(" ".join(config_vars)))

- args.extend([f"--library={pkg.module.r_lib_dir}", self.stage.source_path])
+ args.extend(["--library={0}".format(self.pkg.module.r_lib_dir), self.stage.source_path])

- pkg.module.R(*args)
+ inspect.getmodule(self.pkg).R(*args)

  class RPackage(Package):

@@ -81,21 +80,27 @@ class RPackage(Package):
  @lang.classproperty
  def homepage(cls):
  if cls.cran:
- return f"https://cloud.r-project.org/package={cls.cran}"
+ return "https://cloud.r-project.org/package=" + cls.cran
  elif cls.bioc:
- return f"https://bioconductor.org/packages/{cls.bioc}"
+ return "https://bioconductor.org/packages/" + cls.bioc

  @lang.classproperty
  def url(cls):
  if cls.cran:
- return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
+ return (
+ "https://cloud.r-project.org/src/contrib/"
+ + cls.cran
+ + "_"
+ + str(list(cls.versions)[0])
+ + ".tar.gz"
+ )

  @lang.classproperty
  def list_url(cls):
  if cls.cran:
- return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
+ return "https://cloud.r-project.org/src/contrib/Archive/" + cls.cran + "/"

  @property
  def git(self):
  if self.bioc:
- return f"https://git.bioconductor.org/packages/{self.bioc}"
+ return "https://git.bioconductor.org/packages/" + self.bioc
@@ -3,6 +3,7 @@
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
  import glob
+ import inspect

  import spack.builder
  import spack.package_base

@@ -51,10 +52,10 @@ def build(self, pkg, spec, prefix):
  gemspecs = glob.glob("*.gemspec")
  rakefiles = glob.glob("Rakefile")
  if gemspecs:
- pkg.module.gem("build", "--norc", gemspecs[0])
+ inspect.getmodule(self.pkg).gem("build", "--norc", gemspecs[0])
  elif rakefiles:
- jobs = pkg.module.make_jobs
- pkg.module.rake("package", "-j{0}".format(jobs))
+ jobs = inspect.getmodule(self.pkg).make_jobs
+ inspect.getmodule(self.pkg).rake("package", "-j{0}".format(jobs))
  else:
  # Some Ruby packages only ship `*.gem` files, so nothing to build
  pass

@@ -69,6 +70,6 @@ def install(self, pkg, spec, prefix):
  # if --install-dir is not used, GEM_PATH is deleted from the
  # environement, and Gems required to build native extensions will
  # not be found. Those extensions are built during `gem install`.
- pkg.module.gem(
+ inspect.getmodule(self.pkg).gem(
  "install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0]
  )
@@ -2,6 +2,8 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
+
  import spack.builder
  import spack.package_base
  from spack.directives import build_system, depends_on

@@ -61,7 +63,8 @@ def build_args(self, spec, prefix):
  def build(self, pkg, spec, prefix):
  """Build the package."""
- pkg.module.scons(*self.build_args(spec, prefix))
+ args = self.build_args(spec, prefix)
+ inspect.getmodule(self.pkg).scons(*args)

  def install_args(self, spec, prefix):
  """Arguments to pass to install."""

@@ -69,7 +72,9 @@ def install_args(self, spec, prefix):
  def install(self, pkg, spec, prefix):
  """Install the package."""
- pkg.module.scons("install", *self.install_args(spec, prefix))
+ args = self.install_args(spec, prefix)
+
+ inspect.getmodule(self.pkg).scons("install", *args)

  def build_test(self):
  """Run unit tests after build.
@@ -2,6 +2,7 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
  import os
  import re

@@ -85,13 +86,14 @@ def import_modules(self):
  def python(self, *args, **kwargs):
  """The python ``Executable``."""
- self.pkg.module.python(*args, **kwargs)
+ inspect.getmodule(self).python(*args, **kwargs)

  def test_imports(self):
  """Attempts to import modules of the installed package."""

  # Make sure we are importing the installed modules,
  # not the ones in the source directory
+ python = inspect.getmodule(self).python
  for module in self.import_modules:
  with spack.install_test.test_part(
  self,

@@ -99,7 +101,7 @@ def test_imports(self):
  purpose="checking import of {0}".format(module),
  work_dir="spack-test",
  ):
- self.python("-c", "import {0}".format(module))
+ python("-c", "import {0}".format(module))

  @spack.builder.builder("sip")

@@ -134,7 +136,7 @@ def configure(self, pkg, spec, prefix):
  """Configure the package."""

  # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
- args = ["--verbose", "--target-dir", pkg.module.python_platlib]
+ args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
  args.extend(self.configure_args())

  # https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766

@@ -153,7 +155,7 @@ def build(self, pkg, spec, prefix):
  args = self.build_args()

  with working_dir(self.build_directory):
- pkg.module.make(*args)
+ inspect.getmodule(self.pkg).make(*args)

  def build_args(self):
  """Arguments to pass to build."""

@@ -164,7 +166,7 @@ def install(self, pkg, spec, prefix):
  args = self.install_args()

  with working_dir(self.build_directory):
- pkg.module.make("install", *args)
+ inspect.getmodule(self.pkg).make("install", *args)

  def install_args(self):
  """Arguments to pass to install."""
@@ -2,6 +2,8 @@
  # Spack Project Developers. See the top-level COPYRIGHT file for details.
  #
  # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+ import inspect
+
  from llnl.util.filesystem import working_dir

  import spack.builder

@@ -88,11 +90,11 @@ def build_directory(self):
  def python(self, *args, **kwargs):
  """The python ``Executable``."""
- self.pkg.module.python(*args, **kwargs)
+ inspect.getmodule(self.pkg).python(*args, **kwargs)

  def waf(self, *args, **kwargs):
  """Runs the waf ``Executable``."""
- jobs = self.pkg.module.make_jobs
+ jobs = inspect.getmodule(self.pkg).make_jobs

  with working_dir(self.build_directory):
  self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
@@ -6,6 +6,7 @@
  import collections.abc
  import copy
  import functools
+ import inspect
  from typing import List, Optional, Tuple

  from llnl.util import lang

@@ -96,10 +97,11 @@ class hierarchy (look at AspellDictPackage for an example of that)
  Args:
  pkg (spack.package_base.PackageBase): package object for which we need a builder
  """
+ package_module = inspect.getmodule(pkg)
  package_buildsystem = buildsystem_name(pkg)
  default_builder_cls = BUILDER_CLS[package_buildsystem]
  builder_cls_name = default_builder_cls.__name__
- builder_cls = getattr(pkg.module, builder_cls_name, None)
+ builder_cls = getattr(package_module, builder_cls_name, None)
  if builder_cls:
  return builder_cls(pkg)
@@ -7,7 +7,6 @@
  import copy
  import os
  import re
- import shlex
  import sys
  from argparse import ArgumentParser, Namespace
  from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union

@@ -19,7 +18,6 @@
  import spack.cmd
  import spack.main
  import spack.paths
  import spack.platforms
  from spack.main import section_descriptions

  description = "list available spack commands"

@@ -141,7 +139,7 @@ def usage(self, usage: str) -> str:
  cmd = self.parser.prog.replace(" ", "-")
  if cmd in self.documented:
- string = f"{string}\n:ref:`More documentation <cmd-{cmd}>`\n"
+ string += "\n:ref:`More documentation <cmd-{0}>`\n".format(cmd)

  return string

@@ -251,27 +249,33 @@ def body(
  Function body.
  """
  if positionals:
- return f"""
+ return """
  if $list_options
  then
- {self.optionals(optionals)}
+ {0}
  else
- {self.positionals(positionals)}
+ {1}
  fi
- """
+ """.format(
+ self.optionals(optionals), self.positionals(positionals)
+ )
  elif subcommands:
- return f"""
+ return """
  if $list_options
  then
- {self.optionals(optionals)}
+ {0}
  else
- {self.subcommands(subcommands)}
+ {1}
  fi
- """
+ """.format(
+ self.optionals(optionals), self.subcommands(subcommands)
+ )
  else:
- return f"""
- {self.optionals(optionals)}
- """
+ return """
+ {0}
+ """.format(
+ self.optionals(optionals)
+ )

  def positionals(self, positionals: Sequence[str]) -> str:
  """Return the syntax for reporting positional arguments.

@@ -300,7 +304,7 @@ def optionals(self, optionals: Sequence[str]) -> str:
  Returns:
  Syntax for optional flags.
  """
- return f'SPACK_COMPREPLY="{" ".join(optionals)}"'
+ return 'SPACK_COMPREPLY="{0}"'.format(" ".join(optionals))

  def subcommands(self, subcommands: Sequence[str]) -> str:
  """Return the syntax for reporting subcommands.

@@ -311,7 +315,7 @@ def subcommands(self, subcommands: Sequence[str]) -> str:
  Returns:
  Syntax for subcommand parsers
  """
- return f'SPACK_COMPREPLY="{" ".join(subcommands)}"'
+ return 'SPACK_COMPREPLY="{0}"'.format(" ".join(subcommands))

  # Map argument destination names to their complete commands

@@ -391,7 +395,7 @@ def _fish_dest_get_complete(prog: str, dest: str) -> Optional[str]:
  subcmd = s[1] if len(s) == 2 else ""

  for (prog_key, pos_key), value in _dest_to_fish_complete.items():
- if subcmd.startswith(prog_key) and re.match(f"^{pos_key}$", dest):
+ if subcmd.startswith(prog_key) and re.match("^" + pos_key + "$", dest):
  return value
  return None

@@ -423,6 +427,24 @@ def format(self, cmd: Command) -> str:
  + self.complete(cmd.prog, positionals, optionals, subcommands)
  )

+ def _quote(self, string: str) -> str:
+ """Quote string and escape special characters if necessary.
+
+ Args:
+ string: Input string.
+
+ Returns:
+ Quoted string.
+ """
+ # Goal here is to match fish_indent behavior
+
+ # Strings without spaces (or other special characters) do not need to be escaped
+ if not any([sub in string for sub in [" ", "'", '"']]):
+ return string
+
+ string = string.replace("'", r"\'")
+ return f"'{string}'"

  def optspecs(
  self,
  prog: str,

@@ -441,7 +463,7 @@ def optspecs(
  optspec_var = "__fish_spack_optspecs_" + prog.replace(" ", "_").replace("-", "_")

  if optionals is None:
- return f"set -g {optspec_var}\n"
+ return "set -g %s\n" % optspec_var

  # Build optspec by iterating over options
  args = []

@@ -468,11 +490,11 @@
  long = [f[2:] for f in flags if f.startswith("--")]

  while len(short) > 0 and len(long) > 0:
- arg = f"{short.pop()}/{long.pop()}{required}"
+ arg = "%s/%s%s" % (short.pop(), long.pop(), required)
  while len(short) > 0:
- arg = f"{short.pop()}/{required}"
+ arg = "%s/%s" % (short.pop(), required)
  while len(long) > 0:
- arg = f"{long.pop()}{required}"
+ arg = "%s%s" % (long.pop(), required)

  args.append(arg)

@@ -481,7 +503,7 @@
  # indicate that such subcommand exists.
  args = " ".join(args)

- return f"set -g {optspec_var} {args}\n"
+ return "set -g %s %s\n" % (optspec_var, args)

  @staticmethod
  def complete_head(

@@ -502,14 +524,12 @@ complete_head(
  subcmd = s[1] if len(s) == 2 else ""

  if index is None:
- return f"complete -c {s[0]} -n '__fish_spack_using_command {subcmd}'"
+ return "complete -c %s -n '__fish_spack_using_command %s'" % (s[0], subcmd)
  elif nargs in [argparse.ZERO_OR_MORE, argparse.ONE_OR_MORE, argparse.REMAINDER]:
- return (
- f"complete -c {s[0]} -n '__fish_spack_using_command_pos_remainder "
- f"{index} {subcmd}'"
- )
+ head = "complete -c %s -n '__fish_spack_using_command_pos_remainder %d %s'"
  else:
- return f"complete -c {s[0]} -n '__fish_spack_using_command_pos {index} {subcmd}'"
+ head = "complete -c %s -n '__fish_spack_using_command_pos %d %s'"
+ return head % (s[0], index, subcmd)

  def complete(
  self,

@@ -577,18 +597,25 @@ def positionals(
  if choices is not None:
  # If there are choices, we provide a completion for all possible values.
- commands.append(f"{head} -f -a {shlex.quote(' '.join(choices))}")
+ commands.append(head + " -f -a %s" % self._quote(" ".join(choices)))
  else:
  # Otherwise, we try to find a predefined completion for it
  value = _fish_dest_get_complete(prog, args)
  if value is not None:
- commands.append(f"{head} {value}")
+ commands.append(head + " " + value)

  return "\n".join(commands) + "\n"

  def prog_comment(self, prog: str) -> str:
- """Return a comment line for the command."""
- return f"\n# {prog}\n"
+ """Return a comment line for the command.
+
+ Args:
+ prog: Program name.
+
+ Returns:
+ Comment line.
+ """
+ return "\n# %s\n" % prog

  def optionals(
  self,

@@ -631,28 +658,28 @@ def optionals(
  for f in flags:
  if f.startswith("--"):
  long = f[2:]
- prefix = f"{prefix} -l {long}"
+ prefix += " -l %s" % long
  elif f.startswith("-"):
  short = f[1:]
  assert len(short) == 1
- prefix = f"{prefix} -s {short}"
+ prefix += " -s %s" % short

  # Check if option require argument.
  # Currently multi-argument options are not supported, so we treat it like one argument.
  if nargs != 0:
- prefix = f"{prefix} -r"
+ prefix += " -r"

  if dest is not None:
  # If there are choices, we provide a completion for all possible values.
- commands.append(f"{prefix} -f -a {shlex.quote(' '.join(dest))}")
+ commands.append(prefix + " -f -a %s" % self._quote(" ".join(dest)))
  else:
  # Otherwise, we try to find a predefined completion for it
  value = _fish_dest_get_complete(prog, dest)
  if value is not None:
- commands.append(f"{prefix} {value}")
+ commands.append(prefix + " " + value)

  if help:
- commands.append(f"{prefix} -d {shlex.quote(help)}")
+ commands.append(prefix + " -d %s" % self._quote(help))

  return "\n".join(commands) + "\n"

@@ -670,11 +697,11 @@ def subcommands(self, prog: str, subcommands: List[Tuple[ArgumentParser, str, st
  head = self.complete_head(prog, 0)

  for _, subcommand, help in subcommands:
- command = f"{head} -f -a {shlex.quote(subcommand)}"
+ command = head + " -f -a %s" % self._quote(subcommand)

  if help is not None and len(help) > 0:
  help = help.split("\n")[0]
- command = f"{command} -d {shlex.quote(help)}"
+ command += " -d %s" % self._quote(help)

  commands.append(command)

@@ -720,7 +747,7 @@ def rst_index(out: IO) -> None:
  for i, cmd in enumerate(sorted(commands)):
  description = description.capitalize() if i == 0 else ""
- ref = f":ref:`{cmd} <spack-{cmd}>`"
+ ref = ":ref:`%s <spack-%s>`" % (cmd, cmd)
  comma = "," if i != len(commands) - 1 else ""
  bar = "| " if i % 8 == 0 else " "
  out.write(line % (description, bar + ref + comma))

@@ -831,10 +858,10 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
  # check header first so we don't open out files unnecessarily
  if args.header and not os.path.exists(args.header):
- tty.die(f"No such file: '{args.header}'")
+ tty.die("No such file: '%s'" % args.header)

  if args.update:
- tty.msg(f"Updating file: {args.update}")
+ tty.msg("Updating file: %s" % args.update)
  with open(args.update, "w") as f:
  prepend_header(args, f)
  formatter(args, f)
@@ -82,7 +82,10 @@ def compiler_find(args):
"""
paths = args.add_paths or None
new_compilers = spack.compilers.find_compilers(
path_hints=paths, scope=args.scope, mixed_toolchain=args.mixed_toolchain
path_hints=paths,
scope=args.scope,
mixed_toolchain=args.mixed_toolchain,
max_workers=args.jobs,
)
if new_compilers:
n = len(new_compilers)

@@ -14,6 +14,7 @@
installation and its deprecator.
"""
import argparse
import os

import llnl.util.tty as tty
from llnl.util.symlink import symlink
@@ -75,7 +76,12 @@ def setup_parser(sp):
)

sp.add_argument(
"-l", "--link-type", type=str, default=None, choices=["soft", "hard"], help="(deprecated)"
"-l",
"--link-type",
type=str,
default="soft",
choices=["soft", "hard"],
help="type of filesystem link to use for deprecation (default soft)",
)

sp.add_argument(
@@ -85,9 +91,6 @@ def setup_parser(sp):

def deprecate(parser, args):
"""Deprecate one spec in favor of another"""
if args.link_type is not None:
tty.warn("The --link-type option is deprecated and will be removed in a future release.")

env = ev.active_environment()
specs = spack.cmd.parse_specs(args.specs)

@@ -141,5 +144,7 @@ def deprecate(parser, args):
if not answer:
tty.die("Will not deprecate any packages.")

link_fn = os.link if args.link_type == "hard" else symlink

for dcate, dcator in zip(all_deprecate, all_deprecators):
dcate.package.do_deprecate(dcator, symlink)
dcate.package.do_deprecate(dcator, link_fn)

@@ -135,7 +135,9 @@ def external_find(args):
candidate_packages = packages_to_search_for(
names=args.packages, tags=args.tags, exclude=args.exclude
)
detected_packages = spack.detection.by_path(candidate_packages, path_hints=args.path)
detected_packages = spack.detection.by_path(
candidate_packages, path_hints=args.path, max_workers=args.jobs
)

new_specs = spack.detection.update_configuration(
detected_packages, scope=args.scope, buildable=not args.not_buildable

@@ -3,8 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import filecmp
import os
import pathlib
import sys
import tempfile
import zipfile
from typing import List, Optional, Tuple

import llnl.util.tty as tty

@@ -12,6 +17,7 @@
import spack.repo
import spack.util.path
from spack.cmd.common import arguments
from spack.util.archive import reproducible_zipfile_from_prefix

description = "manage package source repositories"
section = "config"
@@ -67,6 +73,12 @@ def setup_parser(subparser):
help="configuration scope to modify",
)

# Zip
zip_parser = sp.add_parser("zip", help=repo_zip.__doc__)
zip_parser.add_argument(
"namespace_or_path", help="namespace or path of a Spack package repository"
)


def repo_create(args):
"""create a new package repository"""
@@ -109,31 +121,18 @@ def repo_add(args):
def repo_remove(args):
"""remove a repository from Spack's configuration"""
repos = spack.config.get("repos", scope=args.scope)
namespace_or_path = args.namespace_or_path

# If the argument is a path, remove that repository from config.
canon_path = spack.util.path.canonicalize_path(namespace_or_path)
for repo_path in repos:
repo_canon_path = spack.util.path.canonicalize_path(repo_path)
if canon_path == repo_canon_path:
repos.remove(repo_path)
spack.config.set("repos", repos, args.scope)
tty.msg("Removed repository %s" % repo_path)
return
key, repo = _get_repo(repos, args.namespace_or_path)

# If it is a namespace, remove corresponding repo
for path in repos:
try:
repo = spack.repo.from_path(path)
if repo.namespace == namespace_or_path:
repos.remove(path)
spack.config.set("repos", repos, args.scope)
tty.msg("Removed repository %s with namespace '%s'." % (repo.root, repo.namespace))
return
except spack.repo.RepoError:
continue
if not key:
tty.die(f"No repository with path or namespace: {args.namespace_or_path}")

tty.die("No repository with path or namespace: %s" % namespace_or_path)
repos.remove(key)
spack.config.set("repos", repos, args.scope)
if repo:
tty.msg(f"Removed repository {repo.root} with namespace '{repo.namespace}'")
else:
tty.msg(f"Removed repository {key}")


def repo_list(args):
@@ -147,17 +146,81 @@ def repo_list(args):
continue

if sys.stdout.isatty():
msg = "%d package repositor" % len(repos)
msg += "y." if len(repos) == 1 else "ies."
tty.msg(msg)
tty.msg(f"{len(repos)} package repositor{'y.' if len(repos) == 1 else 'ies.'}")

if not repos:
return

max_ns_len = max(len(r.namespace) for r in repos)
for repo in repos:
fmt = "%%-%ds%%s" % (max_ns_len + 4)
print(fmt % (repo.namespace, repo.root))
print(f"{repo.namespace:<{max_ns_len}} {repo.root}")


def repo_zip(args):
"""zip a package repository to make it immutable and faster to load"""
key, _ = _get_repo(spack.config.get("repos"), args.namespace_or_path)

if not key:
tty.die(f"No repository with path or namespace: {args.namespace_or_path}")

try:
repo = spack.repo.from_path(key)
except spack.repo.RepoError:
tty.die(f"No repository at path: {key}")

def _zip_repo_skip(entry: os.DirEntry, depth: int):
if entry.name == "__pycache__":
return True
if depth == 0 and not os.path.exists(os.path.join(entry.path, "package.py")):
return True
return False

def _zip_repo_path_to_name(path: str) -> str:
# use spack/pkg/<repo>/* prefix and rename `package.py` as `__init__.py`
rel_path = pathlib.PurePath(path).relative_to(repo.packages_path)
if rel_path.name == "package.py":
rel_path = rel_path.with_name("__init__.py")
return str(rel_path)

# Create a zipfile in a temporary file
with tempfile.NamedTemporaryFile(delete=False, mode="wb", dir=repo.root) as f, zipfile.ZipFile(
f, "w", compression=zipfile.ZIP_DEFLATED
) as zip:
reproducible_zipfile_from_prefix(
zip, repo.packages_path, skip=_zip_repo_skip, path_to_name=_zip_repo_path_to_name
)

packages_zip = os.path.join(repo.root, "packages.zip")
try:
# Inform the user whether or not the repo was modified since it was last zipped
if os.path.exists(packages_zip) and filecmp.cmp(f.name, packages_zip):
tty.msg(f"{repo.namespace}: {packages_zip} is up to date")
return
else:
os.rename(f.name, packages_zip)
tty.msg(f"{repo.namespace} was zipped: {packages_zip}")
finally:
try:
os.unlink(f.name)
except OSError:
pass


def _get_repo(repos: List[str], path_or_name) -> Tuple[Optional[str], Optional[spack.repo.Repo]]:
"""Find repo by path or namespace"""
canon_path = spack.util.path.canonicalize_path(path_or_name)
for path in repos:
if canon_path == spack.util.path.canonicalize_path(path):
return path, None

for path in repos:
try:
repo = spack.repo.from_path(path)
except spack.repo.RepoError:
continue
if repo.namespace == path_or_name:
return path, repo
return None, None


def repo(parser, args):
@@ -167,5 +230,6 @@ def repo(parser, args):
"add": repo_add,
"remove": repo_remove,
"rm": repo_remove,
"zip": repo_zip,
}
action[args.repo_command](args)

@@ -243,6 +243,7 @@ def find_compilers(
*,
scope: Optional[str] = None,
mixed_toolchain: bool = False,
max_workers: Optional[int] = None,
) -> List["spack.compiler.Compiler"]:
"""Searches for compiler in the paths given as argument. If any new compiler is found, the
configuration is updated, and the list of new compiler objects is returned.
@@ -253,6 +254,7 @@ def find_compilers(
scope: configuration scope to modify
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
max_workers: number of processes used to search for compilers
"""
import spack.detection

@@ -265,28 +267,30 @@ def find_compilers(
default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)

detected_packages = spack.detection.by_path(compiler_pkgs, path_hints=default_paths)
detected_packages = spack.detection.by_path(
compiler_pkgs, path_hints=default_paths, max_workers=max_workers
)

valid_compilers = {}
for name, detected in detected_packages.items():
compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x)]
compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x.spec)]
if not compilers:
continue
valid_compilers[name] = compilers

def _has_fortran_compilers(x):
if "compilers" not in x.extra_attributes:
if "compilers" not in x.spec.extra_attributes:
return False

return "fortran" in x.extra_attributes["compilers"]
return "fortran" in x.spec.extra_attributes["compilers"]

if mixed_toolchain:
gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
if gccs:
best_gcc = sorted(
gccs, key=lambda x: spack.spec.parse_with_version_concrete(x).version
gccs, key=lambda x: spack.spec.parse_with_version_concrete(x.spec).version
)[-1]
gfortran = best_gcc.extra_attributes["compilers"]["fortran"]
gfortran = best_gcc.spec.extra_attributes["compilers"]["fortran"]
for name in ("llvm", "apple-clang"):
if name not in valid_compilers:
continue
@@ -294,11 +298,11 @@ def _has_fortran_compilers(x):
for candidate in candidates:
if _has_fortran_compilers(candidate):
continue
candidate.extra_attributes["compilers"]["fortran"] = gfortran
candidate.spec.extra_attributes["compilers"]["fortran"] = gfortran

new_compilers = []
for name, detected in valid_compilers.items():
for config in CompilerConfigFactory.from_specs(detected):
for config in CompilerConfigFactory.from_specs([x.spec for x in detected]):
c = _compiler_from_config_entry(config["compiler"])
if c in known_compilers:
continue

@@ -19,23 +19,35 @@
import spack.tengine
import spack.util.path

CHECK_COMPILER_EXISTENCE = True

class Concretizer:
"""(DEPRECATED) Only contains logic to enable/disable compiler existence checks."""

#: Controls whether we check that compiler versions actually exist
#: during concretization. Used for testing and for mirror creation
check_for_compiler_existence = None

def __init__(self):
if Concretizer.check_for_compiler_existence is None:
Concretizer.check_for_compiler_existence = not spack.config.get(
"config:install_missing_compilers", False
)


@contextmanager
def disable_compiler_existence_check():
global CHECK_COMPILER_EXISTENCE
CHECK_COMPILER_EXISTENCE, saved = False, CHECK_COMPILER_EXISTENCE
saved = Concretizer.check_for_compiler_existence
Concretizer.check_for_compiler_existence = False
yield
CHECK_COMPILER_EXISTENCE = saved
Concretizer.check_for_compiler_existence = saved


@contextmanager
def enable_compiler_existence_check():
global CHECK_COMPILER_EXISTENCE
CHECK_COMPILER_EXISTENCE, saved = True, CHECK_COMPILER_EXISTENCE
saved = Concretizer.check_for_compiler_existence
Concretizer.check_for_compiler_existence = True
yield
CHECK_COMPILER_EXISTENCE = saved
Concretizer.check_for_compiler_existence = saved


def find_spec(spec, condition, default=None):

@@ -14,14 +14,12 @@
import llnl.util.tty as tty

import spack.cmd
import spack.compilers
import spack.deptypes as dt
import spack.error
import spack.hash_types as hash_types
import spack.platforms
import spack.repo
import spack.spec
import spack.store
from spack.schema.cray_manifest import schema as manifest_schema

#: Cray systems can store a Spack-compatible description of system
@@ -239,7 +237,7 @@ def read(path, apply_updates):
tty.debug(f"Include this\n{traceback.format_exc()}")
if apply_updates:
for spec in specs.values():
spack.store.STORE.db.add(spec)
spack.store.STORE.db.add(spec, directory_layout=None)


class ManifestValidationError(spack.error.SpackError):
@@ -59,11 +59,7 @@
|
||||
import spack.util.lock as lk
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.version as vn
|
||||
from spack.directory_layout import (
|
||||
DirectoryLayout,
|
||||
DirectoryLayoutError,
|
||||
InconsistentInstallDirectoryError,
|
||||
)
|
||||
from spack.directory_layout import DirectoryLayoutError, InconsistentInstallDirectoryError
|
||||
from spack.error import SpackError
|
||||
from spack.util.crypto import bit_length
|
||||
|
||||
@@ -207,12 +203,12 @@ class InstallRecord:
|
||||
def __init__(
|
||||
self,
|
||||
spec: "spack.spec.Spec",
|
||||
path: Optional[str],
|
||||
path: str,
|
||||
installed: bool,
|
||||
ref_count: int = 0,
|
||||
explicit: bool = False,
|
||||
installation_time: Optional[float] = None,
|
||||
deprecated_for: Optional[str] = None,
|
||||
deprecated_for: Optional["spack.spec.Spec"] = None,
|
||||
in_buildcache: bool = False,
|
||||
origin=None,
|
||||
):
|
||||
@@ -599,11 +595,9 @@ class Database:
|
||||
def __init__(
|
||||
self,
|
||||
root: str,
|
||||
*,
|
||||
upstream_dbs: Optional[List["Database"]] = None,
|
||||
is_upstream: bool = False,
|
||||
lock_cfg: LockConfiguration = DEFAULT_LOCK_CFG,
|
||||
layout: Optional[DirectoryLayout] = None,
|
||||
) -> None:
|
||||
"""Database for Spack installations.
|
||||
|
||||
@@ -626,7 +620,6 @@ def __init__(
|
||||
"""
|
||||
self.root = root
|
||||
self.database_directory = os.path.join(self.root, _DB_DIRNAME)
|
||||
self.layout = layout
|
||||
|
||||
# Set up layout of database files within the db dir
|
||||
self._index_path = os.path.join(self.database_directory, "index.json")
|
||||
@@ -671,6 +664,14 @@ def __init__(
|
||||
|
||||
self.upstream_dbs = list(upstream_dbs) if upstream_dbs else []
|
||||
|
||||
# whether there was an error at the start of a read transaction
|
||||
self._error = None
|
||||
|
||||
# For testing: if this is true, an exception is thrown when missing
|
||||
# dependencies are detected (rather than just printing a warning
|
||||
# message)
|
||||
self._fail_when_missing_deps = False
|
||||
|
||||
self._write_transaction_impl = lk.WriteTransaction
|
||||
self._read_transaction_impl = lk.ReadTransaction
|
||||
|
||||
@@ -773,13 +774,7 @@ def query_local_by_spec_hash(self, hash_key):
|
||||
with self.read_transaction():
|
||||
return self._data.get(hash_key, None)
|
||||
|
||||
def _assign_dependencies(
|
||||
self,
|
||||
spec_reader: Type["spack.spec.SpecfileReaderBase"],
|
||||
hash_key: str,
|
||||
installs: dict,
|
||||
data: Dict[str, InstallRecord],
|
||||
):
|
||||
def _assign_dependencies(self, spec_reader, hash_key, installs, data):
|
||||
# Add dependencies from other records in the install DB to
|
||||
# form a full spec.
|
||||
spec = data[hash_key].spec
|
||||
@@ -792,20 +787,26 @@ def _assign_dependencies(
|
||||
for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
|
||||
yaml_deps
|
||||
):
|
||||
# It is important that we always check upstream installations in the same order,
|
||||
# and that we always check the local installation first: if a downstream Spack
|
||||
# installs a package then dependents in that installation could be using it. If a
|
||||
# hash is installed locally and upstream, there isn't enough information to
|
||||
# determine which one a local package depends on, so the convention ensures that
|
||||
# this isn't an issue.
|
||||
_, record = self.query_by_spec_hash(dhash, data=data)
|
||||
# It is important that we always check upstream installations
|
||||
# in the same order, and that we always check the local
|
||||
# installation first: if a downstream Spack installs a package
|
||||
# then dependents in that installation could be using it.
|
||||
# If a hash is installed locally and upstream, there isn't
|
||||
# enough information to determine which one a local package
|
||||
# depends on, so the convention ensures that this isn't an
|
||||
# issue.
|
||||
upstream, record = self.query_by_spec_hash(dhash, data=data)
|
||||
child = record.spec if record else None
|
||||
|
||||
if not child:
|
||||
tty.warn(
|
||||
f"Missing dependency not in database: "
|
||||
f"{spec.cformat('{name}{/hash:7}')} needs {dname}-{dhash[:7]}"
|
||||
msg = "Missing dependency not in database: " "%s needs %s-%s" % (
|
||||
spec.cformat("{name}{/hash:7}"),
|
||||
dname,
|
||||
dhash[:7],
|
||||
)
|
||||
if self._fail_when_missing_deps:
|
||||
raise MissingDependenciesError(msg)
|
||||
tty.warn(msg)
|
||||
continue
|
||||
|
||||
spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)
|
||||
@@ -845,7 +846,7 @@ def check(cond, msg):
|
||||
):
|
||||
tty.warn(f"Spack database version changed from {version} to {_DB_VERSION}. Upgrading.")
|
||||
|
||||
self.reindex()
|
||||
self.reindex(spack.store.STORE.layout)
|
||||
installs = dict(
|
||||
(k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
|
||||
)
|
||||
@@ -872,8 +873,8 @@ def invalid_record(hash_key, error):
|
||||
# (i.e., its specs are a true Merkle DAG, unlike most specs.)
|
||||
|
||||
# Pass 1: Iterate through database and build specs w/o dependencies
|
||||
data: Dict[str, InstallRecord] = {}
|
||||
installed_prefixes: Set[str] = set()
|
||||
data = {}
|
||||
installed_prefixes = set()
|
||||
for hash_key, rec in installs.items():
|
||||
try:
|
||||
# This constructs a spec DAG from the list of all installs
|
||||
@@ -910,7 +911,7 @@ def invalid_record(hash_key, error):
|
||||
self._data = data
|
||||
self._installed_prefixes = installed_prefixes
|
||||
|
||||
def reindex(self):
|
||||
def reindex(self, directory_layout):
|
||||
"""Build database index from scratch based on a directory layout.
|
||||
|
||||
Locks the DB if it isn't locked already.
|
||||
@@ -925,116 +926,105 @@ def _read_suppress_error():
|
||||
if os.path.isfile(self._index_path):
|
||||
self._read_from_file(self._index_path)
|
||||
except CorruptDatabaseError as e:
|
||||
tty.warn(f"Reindexing corrupt database, error was: {e}")
|
||||
self._error = e
|
||||
self._data = {}
|
||||
self._installed_prefixes = set()
|
||||
|
||||
with lk.WriteTransaction(self.lock, acquire=_read_suppress_error, release=self._write):
|
||||
old_installed_prefixes, self._installed_prefixes = self._installed_prefixes, set()
|
||||
old_data, self._data = self._data, {}
|
||||
transaction = lk.WriteTransaction(
|
||||
self.lock, acquire=_read_suppress_error, release=self._write
|
||||
)
|
||||
|
||||
with transaction:
|
||||
if self._error:
|
||||
tty.warn("Spack database was corrupt. Will rebuild. Error was:", str(self._error))
|
||||
self._error = None
|
||||
|
||||
old_data = self._data
|
||||
old_installed_prefixes = self._installed_prefixes
|
||||
try:
|
||||
self._reindex(old_data)
|
||||
self._construct_from_directory_layout(directory_layout, old_data)
|
||||
except BaseException:
|
||||
# If anything explodes, restore old data, skip write.
|
||||
self._data = old_data
|
||||
self._installed_prefixes = old_installed_prefixes
|
||||
raise
|
||||
|
||||
def _reindex(self, old_data: Dict[str, InstallRecord]):
|
||||
# Specs on the file system are the source of truth for record.spec. The old database values
|
||||
# if available are the source of truth for the rest of the record.
|
||||
assert self.layout, "Database layout must be set to reindex"
|
||||
def _construct_entry_from_directory_layout(
|
||||
self, directory_layout, old_data, spec, deprecator=None
|
||||
):
|
||||
# Try to recover explicit value from old DB, but
|
||||
# default it to True if DB was corrupt. This is
|
||||
# just to be conservative in case a command like
|
||||
# "autoremove" is run by the user after a reindex.
|
||||
tty.debug("RECONSTRUCTING FROM SPEC.YAML: {0}".format(spec))
|
||||
explicit = True
|
||||
inst_time = os.stat(spec.prefix).st_ctime
|
||||
if old_data is not None:
|
||||
old_info = old_data.get(spec.dag_hash())
|
||||
if old_info is not None:
|
||||
explicit = old_info.explicit
|
||||
inst_time = old_info.installation_time
|
||||
|
||||
specs_from_fs = self.layout.all_specs()
|
||||
deprecated_for = self.layout.deprecated_for(specs_from_fs)
|
||||
extra_args = {"explicit": explicit, "installation_time": inst_time}
|
||||
self._add(spec, directory_layout, **extra_args)
|
||||
if deprecator:
|
||||
self._deprecate(spec, deprecator)
|
||||
|
||||
known_specs: List[spack.spec.Spec] = [
|
||||
*specs_from_fs,
|
||||
*(deprecated for _, deprecated in deprecated_for),
|
||||
*(rec.spec for rec in old_data.values()),
|
||||
]
|
||||
def _construct_from_directory_layout(self, directory_layout, old_data):
|
||||
# Read first the `spec.yaml` files in the prefixes. They should be
|
||||
# considered authoritative with respect to DB reindexing, as
|
||||
# entries in the DB may be corrupted in a way that still makes
|
||||
# them readable. If we considered DB entries authoritative
|
||||
# instead, we would perpetuate errors over a reindex.
|
||||
with directory_layout.disable_upstream_check():
|
||||
# Initialize data in the reconstructed DB
|
||||
self._data = {}
|
||||
self._installed_prefixes = set()
|
||||
|
||||
upstream_hashes = {
|
||||
dag_hash for upstream in self.upstream_dbs for dag_hash in upstream._data
|
||||
}
|
||||
upstream_hashes.difference_update(spec.dag_hash() for spec in known_specs)
|
||||
# Start inspecting the installed prefixes
|
||||
processed_specs = set()
|
||||
|
||||
def create_node(edge: spack.spec.DependencySpec, is_upstream: bool):
|
||||
if is_upstream:
|
||||
return
|
||||
for spec in directory_layout.all_specs():
|
||||
self._construct_entry_from_directory_layout(directory_layout, old_data, spec)
|
||||
processed_specs.add(spec)
|
||||
|
||||
self._data[edge.spec.dag_hash()] = InstallRecord(
|
||||
spec=edge.spec.copy(deps=False),
|
||||
path=edge.spec.external_path if edge.spec.external else None,
|
||||
installed=edge.spec.external,
|
||||
)
|
||||
|
||||
# Store all nodes of known specs, excluding ones found in upstreams
|
||||
tr.traverse_breadth_first_with_visitor(
|
||||
known_specs,
|
||||
tr.CoverNodesVisitor(
|
||||
NoUpstreamVisitor(upstream_hashes, create_node), key=tr.by_dag_hash
|
||||
),
|
||||
)
|
||||
|
||||
# Store the prefix and other information for specs were found on the file system
|
||||
for s in specs_from_fs:
|
||||
record = self._data[s.dag_hash()]
|
||||
record.path = s.prefix
|
||||
record.installed = True
|
||||
record.explicit = True # conservative assumption
|
||||
record.installation_time = os.stat(s.prefix).st_ctime
|
||||
|
||||
# Deprecate specs
|
||||
for new, old in deprecated_for:
|
||||
self._data[old.dag_hash()].deprecated_for = new.dag_hash()
|
||||
|
||||
# Copy data we have from the old database
|
||||
for old_record in old_data.values():
|
||||
record = self._data[old_record.spec.dag_hash()]
|
||||
record.explicit = old_record.explicit
|
||||
record.installation_time = old_record.installation_time
|
||||
record.origin = old_record.origin
|
||||
record.deprecated_for = old_record.deprecated_for
|
||||
|
||||
# Warn when the spec has been removed from the file system (i.e. it was not detected)
|
||||
if not record.installed and old_record.installed:
|
||||
tty.warn(
|
||||
f"Spec {old_record.spec.short_spec} was marked installed in the database "
|
||||
"but was not found on the file system. It is now marked as missing."
|
||||
for spec, deprecator in directory_layout.all_deprecated_specs():
|
||||
self._construct_entry_from_directory_layout(
|
||||
directory_layout, old_data, spec, deprecator
|
||||
)
|
||||
processed_specs.add(spec)
|
||||
|
||||
def create_edge(edge: spack.spec.DependencySpec, is_upstream: bool):
|
||||
if not edge.parent:
|
||||
return
|
||||
parent_record = self._data[edge.parent.dag_hash()]
|
||||
if is_upstream:
|
||||
upstream, child_record = self.query_by_spec_hash(edge.spec.dag_hash())
|
||||
assert upstream and child_record, "Internal error: upstream spec not found"
|
||||
else:
|
||||
child_record = self._data[edge.spec.dag_hash()]
|
||||
parent_record.spec._add_dependency(
|
||||
child_record.spec, depflag=edge.depflag, virtuals=edge.virtuals
|
||||
)
|
||||
for key, entry in old_data.items():
|
||||
# We already took care of this spec using
|
||||
# `spec.yaml` from its prefix.
|
||||
if entry.spec in processed_specs:
|
||||
msg = "SKIPPING RECONSTRUCTION FROM OLD DB: {0}"
|
||||
msg += " [already reconstructed from spec.yaml]"
|
||||
tty.debug(msg.format(entry.spec))
|
||||
continue
|
||||
|
||||
# Then store edges
|
||||
tr.traverse_breadth_first_with_visitor(
|
||||
known_specs,
|
||||
tr.CoverEdgesVisitor(
|
||||
NoUpstreamVisitor(upstream_hashes, create_edge), key=tr.by_dag_hash
|
||||
),
|
||||
)
|
||||
# If we arrived here it very likely means that
|
||||
# we have external specs that are not dependencies
|
||||
# of other specs. This may be the case for externally
|
||||
# installed compilers or externally installed
|
||||
# applications.
|
||||
tty.debug("RECONSTRUCTING FROM OLD DB: {0}".format(entry.spec))
|
||||
try:
|
||||
layout = None if entry.spec.external else directory_layout
|
||||
kwargs = {
|
||||
"spec": entry.spec,
|
||||
"directory_layout": layout,
|
||||
"explicit": entry.explicit,
|
||||
"installation_time": entry.installation_time,
|
||||
}
|
||||
self._add(**kwargs)
|
||||
processed_specs.add(entry.spec)
|
||||
except Exception as e:
|
||||
# Something went wrong, so the spec was not restored
|
||||
# from old data
|
||||
tty.debug(e)
|
||||
|
||||
# Finally update the ref counts
|
||||
for record in self._data.values():
|
||||
for dep in record.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
|
||||
dep_record = self._data.get(dep.dag_hash())
|
||||
if dep_record: # dep might be upstream
|
||||
dep_record.ref_count += 1
|
||||
if record.deprecated_for:
|
||||
self._data[record.deprecated_for].ref_count += 1
|
||||
|
||||
self._check_ref_counts()
|
||||
self._check_ref_counts()
|
||||
|
||||
def _check_ref_counts(self):
|
||||
"""Ensure consistency of reference counts in the DB.
|
||||
@@ -1043,7 +1033,7 @@ def _check_ref_counts(self):
|
||||
|
||||
Does no locking.
|
||||
"""
|
||||
counts: Dict[str, int] = {}
|
||||
counts = {}
|
||||
for key, rec in self._data.items():
|
||||
counts.setdefault(key, 0)
|
||||
for dep in rec.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
|
||||
@@ -1127,23 +1117,29 @@ def _read(self):
|
||||
|
||||
def _add(
|
||||
self,
|
||||
spec: "spack.spec.Spec",
|
||||
explicit: bool = False,
|
||||
installation_time: Optional[float] = None,
|
||||
allow_missing: bool = False,
|
||||
spec,
|
||||
directory_layout=None,
|
||||
explicit=False,
|
||||
installation_time=None,
|
||||
allow_missing=False,
|
||||
):
|
||||
"""Add an install record for this spec to the database.
|
||||
|
||||
Also ensures dependencies are present and updated in the DB as either installed or missing.
|
||||
Assumes spec is installed in ``directory_layout.path_for_spec(spec)``.
|
||||
|
||||
Also ensures dependencies are present and updated in the DB as
|
||||
either installed or missing.
|
||||
|
||||
Args:
|
||||
spec: spec to be added
|
||||
spec (spack.spec.Spec): spec to be added
|
||||
directory_layout: layout of the spec installation
|
||||
explicit:
|
||||
Possible values: True, False, any
|
||||
|
||||
A spec that was installed following a specific user request is marked as explicit.
|
||||
If instead it was pulled-in as a dependency of a user requested spec it's
|
||||
considered implicit.
|
||||
A spec that was installed following a specific user
|
||||
request is marked as explicit. If instead it was
|
||||
pulled-in as a dependency of a user requested spec
|
||||
it's considered implicit.
|
||||
|
||||
installation_time:
|
||||
Date and time of installation
|
||||
@@ -1154,42 +1150,48 @@ def _add(
|
||||
raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
|
||||
|
||||
key = spec.dag_hash()
|
||||
spec_pkg_hash = spec._package_hash # type: ignore[attr-defined]
|
||||
spec_pkg_hash = spec._package_hash
|
||||
upstream, record = self.query_by_spec_hash(key)
|
||||
if upstream:
|
||||
return
|
||||
|
||||
# Retrieve optional arguments
|
||||
installation_time = installation_time or _now()
|
||||
|
||||
for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
|
||||
if edge.spec.dag_hash() in self._data:
|
||||
continue
|
||||
# allow missing build-only deps. This prevents excessive
|
||||
# warnings when a spec is installed, and its build dep
|
||||
# is missing a build dep; there's no need to install the
|
||||
# build dep's build dep first, and there's no need to warn
|
||||
# about it missing.
|
||||
dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
|
||||
self._add(
|
||||
edge.spec,
|
||||
directory_layout,
|
||||
explicit=False,
|
||||
installation_time=installation_time,
|
||||
# allow missing build-only deps. This prevents excessive warnings when a spec is
|
||||
# installed, and its build dep is missing a build dep; there's no need to install
|
||||
# the build dep's build dep first, and there's no need to warn about it missing.
|
||||
allow_missing=allow_missing or edge.depflag == dt.BUILD,
|
||||
allow_missing=dep_allow_missing,
|
||||
)
|
||||
|
||||
# Make sure the directory layout agrees whether the spec is installed
|
||||
if not spec.external and self.layout:
|
||||
path = self.layout.path_for_spec(spec)
|
||||
if not spec.external and directory_layout:
|
||||
path = directory_layout.path_for_spec(spec)
|
||||
installed = False
|
||||
try:
|
||||
self.layout.ensure_installed(spec)
|
||||
directory_layout.ensure_installed(spec)
|
||||
installed = True
|
||||
self._installed_prefixes.add(path)
|
||||
except DirectoryLayoutError as e:
|
||||
if not (allow_missing and isinstance(e, InconsistentInstallDirectoryError)):
|
||||
action = "updated" if key in self._data else "registered"
|
||||
tty.warn(
|
||||
f"{spec.short_spec} is being {action} in the database with prefix {path}, "
|
||||
msg = (
|
||||
"{0} is being {1} in the database with prefix {2}, "
|
||||
"but this directory does not contain an installation of "
|
||||
f"the spec, due to: {e}"
|
||||
"the spec, due to: {3}"
|
||||
)
|
||||
action = "updated" if key in self._data else "registered"
|
||||
tty.warn(msg.format(spec.short_spec, action, path, str(e)))
|
||||
elif spec.external_path:
|
||||
path = spec.external_path
|
||||
installed = True
|
||||
@@ -1200,27 +1202,23 @@ def _add(
|
||||
if key not in self._data:
|
||||
# Create a new install record with no deps initially.
|
||||
new_spec = spec.copy(deps=False)
|
||||
self._data[key] = InstallRecord(
|
||||
new_spec,
|
||||
path=path,
|
||||
installed=installed,
|
||||
ref_count=0,
|
||||
explicit=explicit,
|
||||
installation_time=installation_time,
|
||||
origin=None if not hasattr(spec, "origin") else spec.origin,
|
||||
)
|
||||
extra_args = {"explicit": explicit, "installation_time": installation_time}
|
||||
# Commands other than 'spack install' may add specs to the DB,
|
||||
# we can record the source of an installed Spec with 'origin'
|
||||
if hasattr(spec, "origin"):
|
||||
extra_args["origin"] = spec.origin
|
||||
self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
|
||||
|
||||
# Connect dependencies from the DB to the new copy.
|
||||
for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
|
||||
dkey = dep.spec.dag_hash()
|
||||
upstream, record = self.query_by_spec_hash(dkey)
|
||||
assert record, f"Missing dependency {dep.spec.short_spec} in DB"
|
||||
new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
|
||||
if not upstream:
|
||||
record.ref_count += 1
|
||||
|
||||
# Mark concrete once everything is built, and preserve the original hashes of concrete
|
||||
# specs.
|
||||
# Mark concrete once everything is built, and preserve
|
||||
# the original hashes of concrete specs.
|
||||
new_spec._mark_concrete()
|
||||
new_spec._hash = key
|
||||
new_spec._package_hash = spec_pkg_hash
|
||||
@@ -1233,7 +1231,7 @@ def _add(
|
||||
self._data[key].explicit = explicit
|
||||
|
||||
@_autospec
|
||||
def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
|
||||
def add(self, spec, directory_layout, explicit=False):
|
||||
"""Add spec at path to database, locking and reading DB to sync.
|
||||
|
||||
``add()`` will lock and read from the DB on disk.
|
||||
@@ -1242,9 +1240,9 @@ def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
|
||||
# TODO: ensure that spec is concrete?
|
||||
# Entire add is transactional.
|
||||
with self.write_transaction():
|
||||
self._add(spec, explicit=explicit)
|
||||
self._add(spec, directory_layout, explicit=explicit)
|
||||
|
||||
def _get_matching_spec_key(self, spec: "spack.spec.Spec", **kwargs) -> str:
|
||||
def _get_matching_spec_key(self, spec, **kwargs):
|
||||
"""Get the exact spec OR get a single spec that matches."""
|
||||
key = spec.dag_hash()
|
||||
upstream, record = self.query_by_spec_hash(key)
|
||||
@@ -1256,12 +1254,12 @@ def _get_matching_spec_key(self, spec: "spack.spec.Spec", **kwargs) -> str:
|
||||
return key
|
||||
|
||||
@_autospec
|
||||
def get_record(self, spec: "spack.spec.Spec", **kwargs) -> Optional[InstallRecord]:
|
||||
def get_record(self, spec, **kwargs):
|
||||
key = self._get_matching_spec_key(spec, **kwargs)
|
||||
upstream, record = self.query_by_spec_hash(key)
|
||||
return record
|
||||
|
||||
def _decrement_ref_count(self, spec: "spack.spec.Spec") -> None:
|
||||
def _decrement_ref_count(self, spec):
|
||||
key = spec.dag_hash()
|
||||
|
||||
if key not in self._data:
|
||||
@@ -1278,7 +1276,7 @@ def _decrement_ref_count(self, spec: "spack.spec.Spec") -> None:
|
||||
for dep in spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
|
||||
self._decrement_ref_count(dep)
|
||||
|
||||
def _increment_ref_count(self, spec: "spack.spec.Spec") -> None:
|
||||
def _increment_ref_count(self, spec):
|
||||
key = spec.dag_hash()
|
||||
|
||||
if key not in self._data:
|
||||
@@ -1287,14 +1285,14 @@ def _increment_ref_count(self, spec: "spack.spec.Spec") -> None:
|
||||
rec = self._data[key]
|
||||
rec.ref_count += 1
|
||||
|
||||
def _remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
|
||||
def _remove(self, spec):
|
||||
"""Non-locking version of remove(); does real work."""
|
||||
key = self._get_matching_spec_key(spec)
|
||||
rec = self._data[key]
|
||||
|
||||
# This install prefix is now free for other specs to use, even if the
|
||||
# spec is only marked uninstalled.
|
||||
if not rec.spec.external and rec.installed and rec.path:
|
||||
if not rec.spec.external and rec.installed:
|
||||
self._installed_prefixes.remove(rec.path)
|
||||
|
||||
if rec.ref_count > 0:
|
||||
@@ -1318,7 +1316,7 @@ def _remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
|
||||
return rec.spec
|
||||
|
||||
@_autospec
|
||||
def remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
|
||||
def remove(self, spec):
|
||||
"""Removes a spec from the database. To be called on uninstall.
|
||||
|
||||
Reads the database, then:
|
||||
@@ -1333,7 +1331,7 @@ def remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
|
||||
with self.write_transaction():
|
||||
return self._remove(spec)
|
||||
|
||||
def deprecator(self, spec: "spack.spec.Spec") -> Optional["spack.spec.Spec"]:
|
||||
def deprecator(self, spec):
|
||||
"""Return the spec that the given spec is deprecated for, or None"""
|
||||
with self.read_transaction():
|
||||
spec_key = self._get_matching_spec_key(spec)
|
||||
@@ -1344,14 +1342,14 @@ def deprecator(self, spec: "spack.spec.Spec") -> Optional["spack.spec.Spec"]:
|
||||
else:
|
||||
return None
|
||||
|
||||
def specs_deprecated_by(self, spec: "spack.spec.Spec") -> List["spack.spec.Spec"]:
|
||||
def specs_deprecated_by(self, spec):
|
||||
"""Return all specs deprecated in favor of the given spec"""
|
||||
with self.read_transaction():
|
||||
return [
|
||||
rec.spec for rec in self._data.values() if rec.deprecated_for == spec.dag_hash()
|
||||
]
|
||||
|
||||
def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> None:
|
||||
def _deprecate(self, spec, deprecator):
|
||||
spec_key = self._get_matching_spec_key(spec)
|
||||
spec_rec = self._data[spec_key]
|
||||
|
||||
@@ -1369,17 +1367,17 @@ def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") ->
|
||||
self._data[spec_key] = spec_rec
|
||||
|
||||
@_autospec
|
||||
def mark(self, spec: "spack.spec.Spec", key, value) -> None:
|
||||
def mark(self, spec, key, value):
|
||||
"""Mark an arbitrary record on a spec."""
|
||||
with self.write_transaction():
|
||||
return self._mark(spec, key, value)
|
||||
|
||||
def _mark(self, spec: "spack.spec.Spec", key, value) -> None:
|
||||
def _mark(self, spec, key, value):
|
||||
record = self._data[self._get_matching_spec_key(spec)]
|
||||
setattr(record, key, value)
|
||||
|
||||
@_autospec
|
||||
def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> None:
|
||||
def deprecate(self, spec, deprecator):
|
||||
"""Marks a spec as deprecated in favor of its deprecator"""
|
||||
with self.write_transaction():
|
||||
return self._deprecate(spec, deprecator)
|
||||
@@ -1387,16 +1385,16 @@ def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> N
|
||||
@_autospec
|
||||
def installed_relatives(
|
||||
self,
|
||||
spec: "spack.spec.Spec",
|
||||
direction: str = "children",
|
||||
transitive: bool = True,
|
||||
spec,
|
||||
direction="children",
|
||||
transitive=True,
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
|
||||
) -> Set["spack.spec.Spec"]:
|
||||
):
|
||||
"""Return installed specs related to this one."""
|
||||
if direction not in ("parents", "children"):
|
||||
raise ValueError("Invalid direction: %s" % direction)
|
||||
|
||||
relatives: Set[spack.spec.Spec] = set()
|
||||
relatives = set()
|
||||
for spec in self.query(spec):
|
||||
if transitive:
|
||||
to_add = spec.traverse(direction=direction, root=False, deptype=deptype)
|
||||
@@ -1407,13 +1405,17 @@ def installed_relatives(
|
||||
|
||||
for relative in to_add:
|
||||
hash_key = relative.dag_hash()
|
||||
_, record = self.query_by_spec_hash(hash_key)
|
||||
upstream, record = self.query_by_spec_hash(hash_key)
|
||||
if not record:
|
||||
tty.warn(
|
||||
f"Inconsistent state: "
|
||||
f"{'dependent' if direction == 'parents' else 'dependency'} {hash_key} of "
|
||||
f"{spec.dag_hash()} not in DB"
|
||||
reltype = "Dependent" if direction == "parents" else "Dependency"
|
||||
msg = "Inconsistent state! %s %s of %s not in DB" % (
|
||||
reltype,
|
||||
hash_key,
|
||||
spec.dag_hash(),
|
||||
)
|
||||
if self._fail_when_missing_deps:
|
||||
raise MissingDependenciesError(msg)
|
||||
tty.warn(msg)
|
||||
continue
|
||||
|
||||
if not record.installed:
|
||||
@@ -1423,7 +1425,7 @@ def installed_relatives(
|
||||
return relatives
|
||||
|
||||
@_autospec
|
||||
def installed_extensions_for(self, extendee_spec: "spack.spec.Spec"):
|
||||
def installed_extensions_for(self, extendee_spec):
|
||||
"""Returns the specs of all packages that extend the given spec"""
|
||||
for spec in self.query():
|
||||
if spec.package.extends(extendee_spec):
|
||||
@@ -1682,7 +1684,7 @@ def unused_specs(
|
||||
self,
|
||||
root_hashes: Optional[Container[str]] = None,
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.LINK | dt.RUN,
|
||||
) -> List["spack.spec.Spec"]:
|
||||
) -> "List[spack.spec.Spec]":
|
||||
"""Return all specs that are currently installed but not needed by root specs.
|
||||
|
||||
By default, roots are all explicit specs in the database. If a set of root
|
||||
@@ -1726,33 +1728,6 @@ def update_explicit(self, spec, explicit):
|
||||
rec.explicit = explicit
|
||||
|
||||
|
||||
class NoUpstreamVisitor:
|
||||
"""Gives edges to upstream specs, but does follow edges from upstream specs."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
upstream_hashes: Set[str],
|
||||
on_visit: Callable[["spack.spec.DependencySpec", bool], None],
|
||||
):
|
||||
self.upstream_hashes = upstream_hashes
|
||||
self.on_visit = on_visit
|
||||
|
||||
def accept(self, item: tr.EdgeAndDepth) -> bool:
|
||||
self.on_visit(item.edge, self.is_upstream(item))
|
||||
return True
|
||||
|
||||
def is_upstream(self, item: tr.EdgeAndDepth) -> bool:
|
||||
return item.edge.spec.dag_hash() in self.upstream_hashes
|
||||
|
||||
def neighbors(self, item: tr.EdgeAndDepth):
|
||||
# Prune edges from upstream nodes, only follow database tracked dependencies
|
||||
return (
|
||||
[]
|
||||
if self.is_upstream(item)
|
||||
else item.edge.spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES)
|
||||
)
|
||||
|
||||
|
||||
class UpstreamDatabaseLockingError(SpackError):
|
||||
"""Raised when an operation would need to lock an upstream database"""
|
||||
|
||||
|
@@ -2,11 +2,17 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import executable_prefix, set_virtuals_nonbuildable, update_configuration
from .common import (
DetectedPackage,
executable_prefix,
set_virtuals_nonbuildable,
update_configuration,
)
from .path import by_path, executables_in_path
from .test import detection_tests

__all__ = [
"DetectedPackage",
"by_path",
"executables_in_path",
"executable_prefix",
@@ -6,9 +6,9 @@
|
||||
function to update packages.yaml given a list of detected packages.
|
||||
|
||||
Ideally, each detection method should be placed in a specific subpackage
|
||||
and implement at least a function that returns a list of specs.
|
||||
|
||||
The update in packages.yaml can then be done using the function provided here.
|
||||
and implement at least a function that returns a list of DetectedPackage
|
||||
objects. The update in packages.yaml can then be done using the function
|
||||
provided here.
|
||||
|
||||
The module also contains other functions that might be useful across different
|
||||
detection mechanisms.
|
||||
@@ -17,21 +17,40 @@
|
||||
import itertools
|
||||
import os
|
||||
import os.path
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
from typing import Dict, List, Optional, Set, Tuple, Union
|
||||
from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union
|
||||
|
||||
import llnl.util.tty
|
||||
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.operating_systems.windows_os as winOs
|
||||
import spack.spec
|
||||
import spack.util.spack_yaml
|
||||
import spack.util.windows_registry
|
||||
|
||||
|
||||
class DetectedPackage(NamedTuple):
|
||||
"""Information on a package that has been detected."""
|
||||
|
||||
#: Spec that was detected
|
||||
spec: spack.spec.Spec
|
||||
#: Prefix of the spec
|
||||
prefix: str
|
||||
|
||||
def __reduce__(self):
|
||||
return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)
|
||||
|
||||
@staticmethod
|
||||
def restore(
|
||||
spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
|
||||
) -> "DetectedPackage":
|
||||
spec = spack.spec.Spec.from_detection(
|
||||
spec_str=spec_str, external_path=prefix, extra_attributes=extra_attributes
|
||||
)
|
||||
return DetectedPackage(spec=spec, prefix=prefix)
|
||||
|
||||
|
||||
def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
|
||||
"""Returns all the specs mentioned as externals in packages.yaml"""
|
||||
packages_yaml = spack.config.get("packages")
|
||||
@@ -46,7 +65,7 @@ def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
|
||||
|
||||
|
||||
def _pkg_config_dict(
|
||||
external_pkg_entries: List["spack.spec.Spec"],
|
||||
external_pkg_entries: List[DetectedPackage],
|
||||
) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
|
||||
"""Generate a package specific config dict according to the packages.yaml schema.
|
||||
|
||||
@@ -66,19 +85,22 @@ def _pkg_config_dict(
|
||||
pkg_dict = spack.util.spack_yaml.syaml_dict()
|
||||
pkg_dict["externals"] = []
|
||||
for e in external_pkg_entries:
|
||||
if not _spec_is_valid(e):
|
||||
if not _spec_is_valid(e.spec):
|
||||
continue
|
||||
|
||||
external_items: List[Tuple[str, ExternalEntryType]] = [
|
||||
("spec", str(e)),
|
||||
("prefix", pathlib.Path(e.external_path).as_posix()),
|
||||
("spec", str(e.spec)),
|
||||
("prefix", e.prefix),
|
||||
]
|
||||
if e.external_modules:
|
||||
external_items.append(("modules", e.external_modules))
|
||||
if e.spec.external_modules:
|
||||
external_items.append(("modules", e.spec.external_modules))
|
||||
|
||||
if e.extra_attributes:
|
||||
if e.spec.extra_attributes:
|
||||
external_items.append(
|
||||
("extra_attributes", spack.util.spack_yaml.syaml_dict(e.extra_attributes.items()))
|
||||
(
|
||||
"extra_attributes",
|
||||
spack.util.spack_yaml.syaml_dict(e.spec.extra_attributes.items()),
|
||||
)
|
||||
)
|
||||
|
||||
# external_items.extend(e.spec.extra_attributes.items())
|
||||
@@ -199,32 +221,33 @@ def library_prefix(library_dir: str) -> str:
|
||||
|
||||
|
||||
def update_configuration(
|
||||
detected_packages: Dict[str, List["spack.spec.Spec"]],
|
||||
detected_packages: Dict[str, List[DetectedPackage]],
|
||||
scope: Optional[str] = None,
|
||||
buildable: bool = True,
|
||||
) -> List[spack.spec.Spec]:
|
||||
"""Add the packages passed as arguments to packages.yaml
|
||||
|
||||
Args:
|
||||
detected_packages: list of specs to be added
|
||||
detected_packages: list of DetectedPackage objects to be added
|
||||
scope: configuration scope where to add the detected packages
|
||||
buildable: whether the detected packages are buildable or not
|
||||
"""
|
||||
predefined_external_specs = _externals_in_packages_yaml()
|
||||
pkg_to_cfg, all_new_specs = {}, []
|
||||
for package_name, entries in detected_packages.items():
|
||||
new_entries = [s for s in entries if s not in predefined_external_specs]
|
||||
new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]
|
||||
|
||||
pkg_config = _pkg_config_dict(new_entries)
|
||||
external_entries = pkg_config.get("externals", [])
|
||||
assert not isinstance(external_entries, bool), "unexpected value for external entry"
|
||||
|
||||
all_new_specs.extend(new_entries)
|
||||
all_new_specs.extend([x.spec for x in new_entries])
|
||||
if buildable is False:
|
||||
pkg_config["buildable"] = False
|
||||
pkg_to_cfg[package_name] = pkg_config
|
||||
|
||||
pkgs_cfg = spack.config.get("packages", scope=scope)
|
||||
|
||||
pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
|
||||
spack.config.set("packages", pkgs_cfg, scope=scope)
|
||||
|
||||
|
@@ -18,16 +18,13 @@
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty
|
||||
|
||||
import spack.package_base
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.util.elf as elf_utils
|
||||
import spack.util.environment
|
||||
import spack.util.environment as environment
|
||||
import spack.util.ld_so_conf
|
||||
import spack.util.parallel
|
||||
|
||||
from .common import (
|
||||
DetectedPackage,
|
||||
WindowsCompilerExternalPaths,
|
||||
WindowsKitExternalPaths,
|
||||
_convert_to_iterable,
|
||||
@@ -88,24 +85,22 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
|
||||
return path_to_dict(search_paths)
|
||||
|
||||
|
||||
def accept_elf(entry: os.DirEntry, host_compat: Tuple[bool, bool, int]):
|
||||
def accept_elf(path, host_compat):
|
||||
"""Accept an ELF file if the header matches the given compat triplet. In case it's not an ELF
|
||||
(e.g. static library, or some arbitrary file, fall back to is_readable_file)."""
|
||||
# Fast path: assume libraries at least have .so in their basename.
|
||||
# Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
|
||||
if ".so" not in entry.name:
|
||||
return is_readable_file(entry)
|
||||
if ".so" not in os.path.basename(path):
|
||||
return llnl.util.filesystem.is_readable_file(path)
|
||||
try:
|
||||
return host_compat == elf_utils.get_elf_compat(entry.path)
|
||||
return host_compat == elf_utils.get_elf_compat(path)
|
||||
except (OSError, elf_utils.ElfParsingError):
|
||||
return is_readable_file(entry)
|
||||
return llnl.util.filesystem.is_readable_file(path)
|
||||
|
||||
|
||||
def is_readable_file(entry: os.DirEntry) -> bool:
|
||||
return entry.is_file() and os.access(entry.path, os.R_OK)
|
||||
|
||||
|
||||
def system_library_paths() -> List[str]:
|
||||
def libraries_in_ld_and_system_library_path(
|
||||
path_hints: Optional[List[str]] = None,
|
||||
) -> Dict[str, str]:
|
||||
"""Get the paths of all libraries available from ``path_hints`` or the
|
||||
following defaults:
|
||||
|
||||
@@ -119,56 +114,81 @@ def system_library_paths() -> List[str]:
|
||||
(i.e. the basename of the library path).
|
||||
|
||||
There may be multiple paths with the same basename. In this case it is
|
||||
assumed there are two different instances of the library."""
|
||||
assumed there are two different instances of the library.
|
||||
|
||||
search_paths: List[str] = []
|
||||
Args:
|
||||
path_hints: list of paths to be searched. If None the list will be
|
||||
constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
|
||||
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
|
||||
variables as well as the standard system library paths.
|
||||
path_hints (list): list of paths to be searched. If ``None``, the default
|
||||
system paths are used.
|
||||
"""
|
||||
if path_hints:
|
||||
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
|
||||
else:
|
||||
search_paths = []
|
||||
|
||||
if sys.platform == "win32":
|
||||
search_hints = spack.util.environment.get_path("PATH")
|
||||
search_paths.extend(llnl.util.filesystem.search_paths_for_libraries(*search_hints))
|
||||
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
|
||||
# at the search root. Add both of those options to the search scheme
|
||||
search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*search_hints))
|
||||
# if no user provided path was given, add defaults to the search
|
||||
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
|
||||
# SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
|
||||
# location, so we handle that case specifically.
|
||||
search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
|
||||
elif sys.platform == "darwin":
|
||||
search_paths.extend(environment.get_path("DYLD_LIBRARY_PATH"))
|
||||
search_paths.extend(environment.get_path("DYLD_FALLBACK_LIBRARY_PATH"))
|
||||
search_paths.extend(spack.util.ld_so_conf.host_dynamic_linker_search_paths())
|
||||
elif sys.platform.startswith("linux"):
|
||||
search_paths.extend(environment.get_path("LD_LIBRARY_PATH"))
|
||||
# Environment variables
|
||||
if sys.platform == "darwin":
|
||||
search_paths.extend(environment.get_path("DYLD_LIBRARY_PATH"))
|
||||
search_paths.extend(environment.get_path("DYLD_FALLBACK_LIBRARY_PATH"))
|
||||
elif sys.platform.startswith("linux"):
|
||||
search_paths.extend(environment.get_path("LD_LIBRARY_PATH"))
|
||||
|
||||
# Dynamic linker paths
|
||||
search_paths.extend(spack.util.ld_so_conf.host_dynamic_linker_search_paths())
|
||||
|
||||
# Drop redundant paths
|
||||
search_paths = list(filter(os.path.isdir, search_paths))
|
||||
# Drop redundant paths
|
||||
search_paths = list(filter(os.path.isdir, search_paths))
|
||||
|
||||
# Make use we don't doubly list /usr/lib and /lib etc
|
||||
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
|
||||
|
||||
return search_paths
|
||||
|
||||
|
||||
def libraries_in_path(search_paths: List[str]) -> Dict[str, str]:
|
||||
try:
|
||||
host_compat = elf_utils.get_elf_compat(sys.executable)
|
||||
accept = lambda entry: accept_elf(entry, host_compat)
|
||||
accept = lambda path: accept_elf(path, host_compat)
|
||||
except (OSError, elf_utils.ElfParsingError):
|
||||
accept = is_readable_file
|
||||
accept = llnl.util.filesystem.is_readable_file
|
||||
|
||||
path_to_lib = {}
|
||||
# Reverse order of search directories so that a lib in the first
|
||||
# search path entry overrides later entries
|
||||
for search_path in reversed(search_paths):
|
||||
with os.scandir(search_path) as it:
|
||||
for entry in it:
|
||||
if accept(entry):
|
||||
path_to_lib[entry.path] = entry.name
|
||||
for lib in os.listdir(search_path):
|
||||
lib_path = os.path.join(search_path, lib)
|
||||
if accept(lib_path):
|
||||
path_to_lib[lib_path] = lib
|
||||
return path_to_lib
|
||||
|
||||
|
||||
def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[str, str]:
|
||||
"""Get the paths of all libraries available from the system PATH paths.
|
||||
|
||||
For more details, see `libraries_in_ld_and_system_library_path` regarding
|
||||
return type and contents.
|
||||
|
||||
Args:
|
||||
path_hints: list of paths to be searched. If None the list will be
|
||||
constructed based on the set of PATH environment
|
||||
variables as well as the standard system library paths.
|
||||
"""
|
||||
search_hints = (
|
||||
path_hints if path_hints is not None else spack.util.environment.get_path("PATH")
|
||||
)
|
||||
search_paths = llnl.util.filesystem.search_paths_for_libraries(*search_hints)
|
||||
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
|
||||
# at the search root. Add both of those options to the search scheme
|
||||
search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*search_hints))
|
||||
if path_hints is None:
|
||||
# if no user provided path was given, add defaults to the search
|
||||
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
|
||||
# SDK and WGL should be handled by the above; however, on occasion the WDK is in an atypical
|
||||
# location, so we handle that case specifically.
|
||||
search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
|
||||
return path_to_dict(search_paths)
|
||||
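A hypothetical call to the helper above; the path is made up, and on a real system the default (path_hints=None) is derived from PATH plus the Windows Kit locations:

    dlls = libraries_in_windows_paths(["C:\\Program Files\\ExampleSDK"])
    for full_path, file_name in dlls.items():
        print(file_name, "->", full_path)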
|
||||
|
||||
def _group_by_prefix(paths: List[str]) -> Dict[str, Set[str]]:
|
||||
groups = collections.defaultdict(set)
|
||||
for p in paths:
|
||||
@@ -179,13 +199,10 @@ def _group_by_prefix(paths: List[str]) -> Dict[str, Set[str]]:
|
||||
class Finder:
|
||||
"""Inspects the file-system looking for packages. Guesses places where to look using PATH."""
|
||||
|
||||
def __init__(self, paths: Dict[str, str]):
|
||||
self.paths = paths
|
||||
|
||||
def default_path_hints(self) -> List[str]:
|
||||
return []
|
||||
|
||||
def search_patterns(self, *, pkg: Type[spack.package_base.PackageBase]) -> Optional[List[str]]:
|
||||
def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
|
||||
"""Returns the list of patterns used to match candidate files.
|
||||
|
||||
Args:
|
||||
@@ -193,6 +210,15 @@ def search_patterns(self, *, pkg: Type[spack.package_base.PackageBase]) -> Optio
|
||||
"""
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
|
||||
"""Returns a list of candidate files found on the system.
|
||||
|
||||
Args:
|
||||
patterns: search patterns to be used for matching files
|
||||
paths: paths where to search for files
|
||||
"""
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
def prefix_from_path(self, *, path: str) -> str:
|
||||
"""Given a path where a file was found, returns the corresponding prefix.
|
||||
|
||||
@@ -202,8 +228,8 @@ def prefix_from_path(self, *, path: str) -> str:
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
def detect_specs(
|
||||
self, *, pkg: Type[spack.package_base.PackageBase], paths: List[str]
|
||||
) -> List["spack.spec.Spec"]:
|
||||
self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
|
||||
) -> List[DetectedPackage]:
|
||||
"""Given a list of files matching the search patterns, returns a list of detected specs.
|
||||
|
||||
Args:
|
||||
@@ -269,43 +295,52 @@ def detect_specs(
|
||||
warnings.warn(msg)
|
||||
continue
|
||||
|
||||
if not spec.external_path:
|
||||
spec.external_path = prefix
|
||||
if spec.external_path:
|
||||
prefix = spec.external_path
|
||||
|
||||
result.append(spec)
|
||||
result.append(DetectedPackage(spec=spec, prefix=prefix))
|
||||
|
||||
return result
|
||||
|
||||
def find(self, *, pkg_name: str, repository: spack.repo.Repo) -> List[spack.spec.Spec]:
|
||||
def find(
|
||||
self, *, pkg_name: str, repository, initial_guess: Optional[List[str]] = None
|
||||
) -> List[DetectedPackage]:
|
||||
"""For a given package, returns a list of detected specs.
|
||||
|
||||
Args:
|
||||
pkg_name: package being detected
|
||||
repository: repository to retrieve the package
|
||||
initial_guess: initial list of paths to search from the caller. If None, default paths
are searched. If this is an empty list, nothing will be searched.
|
||||
"""
|
||||
pkg_cls = repository.get_pkg_class(pkg_name)
|
||||
patterns = self.search_patterns(pkg=pkg_cls)
|
||||
if not patterns:
|
||||
return []
|
||||
regex = re.compile("|".join(patterns))
|
||||
paths = [path for path, file in self.paths.items() if regex.search(file)]
|
||||
paths.sort()
|
||||
return self.detect_specs(pkg=pkg_cls, paths=paths)
|
||||
if initial_guess is None:
|
||||
initial_guess = self.default_path_hints()
|
||||
initial_guess.extend(common_windows_package_paths(pkg_cls))
|
||||
candidates = self.candidate_files(patterns=patterns, paths=initial_guess)
|
||||
result = self.detect_specs(pkg=pkg_cls, paths=candidates)
|
||||
return result
|
||||
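A sketch of driving a concrete finder with the initial_guess-based signature shown above; the package name and paths are illustrative, and this assumes the DetectedPackage-returning variant of the interface:

    finder = ExecutablesFinder()
    detected = finder.find(
        pkg_name="cmake",
        repository=spack.repo.PATH.ensure_unwrapped(),
        initial_guess=["/usr/bin", "/opt/cmake/bin"],  # bypass the PATH-based default
    )
    for item in detected:
        print(item.spec, item.prefix)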
|
||||
|
||||
class ExecutablesFinder(Finder):
|
||||
@classmethod
|
||||
def in_search_paths(cls, paths: List[str]):
|
||||
return cls(executables_in_path(paths))
|
||||
def default_path_hints(self) -> List[str]:
|
||||
return spack.util.environment.get_path("PATH")
|
||||
|
||||
@classmethod
|
||||
def in_default_paths(cls):
|
||||
return cls.in_search_paths(spack.util.environment.get_path("PATH"))
|
||||
|
||||
def search_patterns(self, *, pkg: Type[spack.package_base.PackageBase]) -> Optional[List[str]]:
|
||||
def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
|
||||
result = []
|
||||
if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
|
||||
return pkg.platform_executables()
|
||||
return None
|
||||
result = pkg.platform_executables()
|
||||
return result
|
||||
|
||||
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
|
||||
executables_by_path = executables_in_path(path_hints=paths)
|
||||
joined_pattern = re.compile(r"|".join(patterns))
|
||||
result = [path for path, exe in executables_by_path.items() if joined_pattern.search(exe)]
|
||||
result.sort()
|
||||
return result
|
||||
|
||||
def prefix_from_path(self, *, path: str) -> str:
|
||||
result = executable_prefix(path)
|
||||
@@ -316,18 +351,29 @@ def prefix_from_path(self, *, path: str) -> str:
|
||||
|
||||
|
||||
class LibrariesFinder(Finder):
|
||||
"""Finds libraries in the provided paths matching package search patterns."""
|
||||
"""Finds libraries on the system, searching by LD_LIBRARY_PATH, LIBRARY_PATH,
|
||||
DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def in_search_paths(cls, paths: List[str]):
|
||||
return cls(libraries_in_path(paths))
|
||||
def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
|
||||
result = []
|
||||
if hasattr(pkg, "libraries"):
|
||||
result = pkg.libraries
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def in_default_paths(cls):
|
||||
return cls.in_search_paths(system_library_paths())
|
||||
|
||||
def search_patterns(self, *, pkg: Type[spack.package_base.PackageBase]) -> Optional[List[str]]:
|
||||
return getattr(pkg, "libraries", None)
|
||||
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
|
||||
libraries_by_path = (
|
||||
libraries_in_ld_and_system_library_path(path_hints=paths)
|
||||
if sys.platform != "win32"
|
||||
else libraries_in_windows_paths(path_hints=paths)
|
||||
)
|
||||
patterns = [re.compile(x) for x in patterns]
|
||||
result = []
|
||||
for compiled_re in patterns:
|
||||
for path, exe in libraries_by_path.items():
|
||||
if compiled_re.search(exe):
|
||||
result.append(path)
|
||||
return result
|
||||
|
||||
def prefix_from_path(self, *, path: str) -> str:
|
||||
result = library_prefix(path)
|
||||
@@ -338,8 +384,11 @@ def prefix_from_path(self, *, path: str) -> str:
|
||||
|
||||
|
||||
def by_path(
|
||||
packages_to_search: Iterable[str], *, path_hints: Optional[List[str]] = None
|
||||
) -> Dict[str, List[spack.spec.Spec]]:
|
||||
packages_to_search: Iterable[str],
|
||||
*,
|
||||
path_hints: Optional[List[str]] = None,
|
||||
max_workers: Optional[int] = None,
|
||||
) -> Dict[str, List[DetectedPackage]]:
|
||||
"""Return the list of packages that have been detected on the system, keyed by
|
||||
unqualified package name.
|
||||
|
||||
@@ -347,26 +396,31 @@ def by_path(
|
||||
packages_to_search: list of packages to be detected. Each package can be either unqualified
|
||||
or fully qualified
|
||||
path_hints: initial list of paths to be searched
|
||||
max_workers: maximum number of workers to search for packages in parallel
|
||||
"""
|
||||
import spack.repo
|
||||
|
||||
# TODO: Packages should be able to define both .libraries and .executables in the future
|
||||
# TODO: determine_spec_details should get all relevant libraries and executables in one call
|
||||
if path_hints is None:
|
||||
exe_finder = ExecutablesFinder.in_default_paths()
|
||||
lib_finder = LibrariesFinder.in_default_paths()
|
||||
else:
|
||||
exe_finder = ExecutablesFinder.in_search_paths(path_hints)
|
||||
lib_finder = LibrariesFinder.in_search_paths(path_hints)
|
||||
|
||||
executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
|
||||
detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
|
||||
|
||||
result = collections.defaultdict(list)
|
||||
repository = spack.repo.PATH.ensure_unwrapped()
|
||||
with spack.util.parallel.make_concurrent_executor() as executor:
|
||||
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
|
||||
for pkg in packages_to_search:
|
||||
executable_future = executor.submit(
|
||||
exe_finder.find, pkg_name=pkg, repository=repository
|
||||
executables_finder.find,
|
||||
pkg_name=pkg,
|
||||
initial_guess=path_hints,
|
||||
repository=repository,
|
||||
)
|
||||
library_future = executor.submit(
|
||||
libraries_finder.find,
|
||||
pkg_name=pkg,
|
||||
initial_guess=path_hints,
|
||||
repository=repository,
|
||||
)
|
||||
library_future = executor.submit(lib_finder.find, pkg_name=pkg, repository=repository)
|
||||
detected_specs_by_package[pkg] = executable_future, library_future
|
||||
|
||||
for pkg_name, futures in detected_specs_by_package.items():
|
||||
@@ -382,7 +436,7 @@ def by_path(
|
||||
)
|
||||
except Exception as e:
|
||||
llnl.util.tty.debug(
|
||||
f"[EXTERNAL DETECTION] Skipping {pkg_name} due to: {e.__class__}: {e}"
|
||||
f"[EXTERNAL DETECTION] Skipping {pkg_name}: exception occured {e}"
|
||||
)
|
||||
|
||||
return result
|
||||
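A sketch of the parallel entry point above, assuming the max_workers variant of the signature; the package names are examples, and each detected entry carries a spec and the prefix it was found in:

    detected = by_path(["cmake", "openssl"], path_hints=None, max_workers=2)
    for name, entries in detected.items():
        for entry in entries:
            print(name, entry.spec, entry.prefix)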
|
@@ -68,7 +68,7 @@ def execute(self) -> List[spack.spec.Spec]:
|
||||
with self._mock_layout() as path_hints:
|
||||
entries = by_path([self.test.pkg_name], path_hints=path_hints)
|
||||
_, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
|
||||
specs = set(entries[unqualified_name])
|
||||
specs = set(x.spec for x in entries[unqualified_name])
|
||||
return list(specs)
|
||||
|
||||
@contextlib.contextmanager
|
||||
|
@@ -4,14 +4,17 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import errno
|
||||
import glob
|
||||
import os
|
||||
import posixpath
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.symlink import readlink
|
||||
|
||||
import spack.config
|
||||
@@ -20,8 +23,13 @@
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.error import SpackError
|
||||
|
||||
# Note: posixpath is used here as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage
|
||||
default_projections = {
|
||||
"all": "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
|
||||
"all": posixpath.join(
|
||||
"{architecture}", "{compiler.name}-{compiler.version}", "{name}-{version}-{hash}"
|
||||
)
|
||||
}
|
||||
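The comment above is the reason for posixpath.join: it always joins with forward slashes, even on Windows, which is what spack.spec.Spec.format expects at this stage. For example:

    import posixpath

    posixpath.join("{architecture}", "{compiler.name}-{compiler.version}", "{name}-{version}-{hash}")
    # -> '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'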
|
||||
|
||||
@@ -31,42 +39,6 @@ def _check_concrete(spec):
|
||||
raise ValueError("Specs passed to a DirectoryLayout must be concrete!")
|
||||
|
||||
|
||||
def _get_spec(prefix: str) -> Optional["spack.spec.Spec"]:
|
||||
"""Returns a spec if the prefix contains a spec file in the .spack subdir"""
|
||||
for f in ("spec.json", "spec.yaml"):
|
||||
try:
|
||||
return spack.spec.Spec.from_specfile(os.path.join(prefix, ".spack", f))
|
||||
except Exception:
|
||||
continue
|
||||
return None
def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
|
||||
stack = [root]
|
||||
specs = []
|
||||
|
||||
while stack:
|
||||
prefix = stack.pop()
|
||||
|
||||
spec = _get_spec(prefix)
|
||||
|
||||
if spec:
|
||||
spec.prefix = prefix
|
||||
specs.append(spec)
|
||||
continue
|
||||
|
||||
try:
|
||||
scandir = os.scandir(prefix)
|
||||
except OSError:
|
||||
continue
|
||||
|
||||
with scandir as entries:
|
||||
for entry in entries:
|
||||
if entry.is_dir(follow_symlinks=False):
|
||||
stack.append(entry.path)
|
||||
return specs
|
||||
|
||||
|
||||
class DirectoryLayout:
|
||||
"""A directory layout is used to associate unique paths with specs.
|
||||
Different installations are going to want different layouts for their
|
||||
@@ -180,9 +152,20 @@ def read_spec(self, path):
|
||||
def spec_file_path(self, spec):
|
||||
"""Gets full path to spec file"""
|
||||
_check_concrete(spec)
|
||||
# Attempts to convert to JSON if possible.
|
||||
# Otherwise just returns the YAML.
|
||||
yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
|
||||
json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
|
||||
return yaml_path if os.path.exists(yaml_path) else json_path
|
||||
if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
|
||||
self.write_spec(spec, json_path)
|
||||
try:
|
||||
os.remove(yaml_path)
|
||||
except OSError as err:
|
||||
tty.debug("Could not remove deprecated {0}".format(yaml_path))
|
||||
tty.debug(err)
|
||||
elif os.path.exists(yaml_path):
|
||||
return yaml_path
|
||||
return json_path
|
||||
|
||||
def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
|
||||
"""Gets full path to spec file for deprecated spec
|
||||
@@ -216,7 +199,23 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
|
||||
deprecated_spec.dag_hash() + "_" + self.spec_file_name,
|
||||
)
|
||||
|
||||
return yaml_path if os.path.exists(yaml_path) else json_path
|
||||
if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
|
||||
self.write_spec(deprecated_spec, json_path)
|
||||
try:
|
||||
os.remove(yaml_path)
|
||||
except (IOError, OSError) as err:
|
||||
tty.debug("Could not remove deprecated {0}".format(yaml_path))
|
||||
tty.debug(err)
|
||||
elif os.path.exists(yaml_path):
|
||||
return yaml_path
|
||||
|
||||
return json_path
|
||||
|
||||
@contextmanager
|
||||
def disable_upstream_check(self):
|
||||
self.check_upstream = False
|
||||
yield
|
||||
self.check_upstream = True
|
||||
|
||||
def metadata_path(self, spec):
|
||||
return os.path.join(spec.prefix, self.metadata_dir)
|
||||
@@ -272,6 +271,53 @@ def ensure_installed(self, spec):
|
||||
"Spec file in %s does not match hash!" % spec_file_path
|
||||
)
|
||||
|
||||
def all_specs(self):
|
||||
if not os.path.isdir(self.root):
|
||||
return []
|
||||
|
||||
specs = []
|
||||
for _, path_scheme in self.projections.items():
|
||||
path_elems = ["*"] * len(path_scheme.split(posixpath.sep))
|
||||
# NOTE: Does not validate filename extension; should happen later
|
||||
path_elems += [self.metadata_dir, "spec.json"]
|
||||
pattern = os.path.join(self.root, *path_elems)
|
||||
spec_files = glob.glob(pattern)
|
||||
if not spec_files: # we're probably looking at legacy yaml...
|
||||
path_elems += [self.metadata_dir, "spec.yaml"]
|
||||
pattern = os.path.join(self.root, *path_elems)
|
||||
spec_files = glob.glob(pattern)
|
||||
specs.extend([self.read_spec(s) for s in spec_files])
|
||||
return specs
|
||||
|
||||
def all_deprecated_specs(self):
|
||||
if not os.path.isdir(self.root):
|
||||
return []
|
||||
|
||||
deprecated_specs = set()
|
||||
for _, path_scheme in self.projections.items():
|
||||
path_elems = ["*"] * len(path_scheme.split(posixpath.sep))
|
||||
# NOTE: Does not validate filename extension; should happen later
|
||||
path_elems += [
|
||||
self.metadata_dir,
|
||||
self.deprecated_dir,
|
||||
"*_spec.*",
|
||||
] # + self.spec_file_name]
|
||||
pattern = os.path.join(self.root, *path_elems)
|
||||
spec_files = glob.glob(pattern)
|
||||
get_depr_spec_file = lambda x: os.path.join(
|
||||
os.path.dirname(os.path.dirname(x)), self.spec_file_name
|
||||
)
|
||||
deprecated_specs |= set(
|
||||
(self.read_spec(s), self.read_spec(get_depr_spec_file(s))) for s in spec_files
|
||||
)
|
||||
return deprecated_specs
|
||||
|
||||
def specs_by_hash(self):
|
||||
by_hash = {}
|
||||
for spec in self.all_specs():
|
||||
by_hash[spec.dag_hash()] = spec
|
||||
return by_hash
|
||||
|
||||
def path_for_spec(self, spec):
|
||||
"""Return absolute path from the root to a directory for the spec."""
|
||||
_check_concrete(spec)
|
||||
@@ -337,35 +383,6 @@ def remove_install_directory(self, spec, deprecated=False):
|
||||
raise e
|
||||
path = os.path.dirname(path)
|
||||
|
||||
def all_specs(self) -> List["spack.spec.Spec"]:
|
||||
"""Returns a list of all specs detected in self.root, detected by `.spack` directories.
|
||||
Their prefix is set to the directory containing the `.spack` directory. Note that these
|
||||
specs may follow a different layout than the current layout if it was changed after
|
||||
installation."""
|
||||
return specs_from_metadata_dirs(self.root)
|
||||
|
||||
def deprecated_for(
|
||||
self, specs: List["spack.spec.Spec"]
|
||||
) -> List[Tuple["spack.spec.Spec", "spack.spec.Spec"]]:
|
||||
"""Returns a list of tuples of specs (new, old) where new is deprecated for old"""
|
||||
spec_with_deprecated = []
|
||||
for spec in specs:
|
||||
try:
|
||||
deprecated = os.scandir(
|
||||
os.path.join(str(spec.prefix), self.metadata_dir, self.deprecated_dir)
|
||||
)
|
||||
except OSError:
|
||||
continue
|
||||
|
||||
with deprecated as entries:
|
||||
for entry in entries:
|
||||
try:
|
||||
deprecated_spec = spack.spec.Spec.from_specfile(entry.path)
|
||||
spec_with_deprecated.append((spec, deprecated_spec))
|
||||
except Exception:
|
||||
continue
|
||||
return spec_with_deprecated
|
||||
|
||||
|
||||
class DirectoryLayoutError(SpackError):
|
||||
"""Superclass for directory layout errors."""
|
||||
|
@@ -58,8 +58,9 @@
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.schema.env import TOP_LEVEL_KEY
|
||||
from spack.spec import Spec
|
||||
from spack.spec_list import SpecList
|
||||
from spack.spec_list import InvalidSpecConstraintError, SpecList
|
||||
from spack.util.path import substitute_path_variables
|
||||
from spack.variant import UnknownVariantError
|
||||
|
||||
#: environment variable used to indicate the active environment
|
||||
spack_env_var = "SPACK_ENV"
|
||||
@@ -1624,10 +1625,10 @@ def _concretize_separately(self, tests=False):
|
||||
|
||||
# Concretize any new user specs that we haven't concretized yet
|
||||
args, root_specs, i = [], [], 0
|
||||
for uspec in self.user_specs:
|
||||
for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
|
||||
if uspec not in old_concretized_user_specs:
|
||||
root_specs.append(uspec)
|
||||
args.append((i, str(uspec), tests))
|
||||
args.append((i, [str(x) for x in uspec_constraints], tests))
|
||||
i += 1
|
||||
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
@@ -2507,11 +2508,52 @@ def display_specs(specs):
|
||||
print(tree_string)
|
||||
|
||||
|
||||
def _concretize_from_constraints(spec_constraints, tests=False):
|
||||
# Accept only valid constraints from list and concretize spec
|
||||
# Get the named spec even if out of order
|
||||
root_spec = [s for s in spec_constraints if s.name]
|
||||
if len(root_spec) != 1:
|
||||
m = "The constraints %s are not a valid spec " % spec_constraints
|
||||
m += "concretization target. all specs must have a single name "
|
||||
m += "constraint for concretization."
|
||||
raise InvalidSpecConstraintError(m)
|
||||
spec_constraints.remove(root_spec[0])
|
||||
|
||||
invalid_constraints = []
|
||||
while True:
|
||||
# Attach all anonymous constraints to one named spec
|
||||
s = root_spec[0].copy()
|
||||
for c in spec_constraints:
|
||||
if c not in invalid_constraints:
|
||||
s.constrain(c)
|
||||
try:
|
||||
return s.concretized(tests=tests)
|
||||
except spack.spec.InvalidDependencyError as e:
|
||||
invalid_deps_string = ["^" + d for d in e.invalid_deps]
|
||||
invalid_deps = [
|
||||
c
|
||||
for c in spec_constraints
|
||||
if any(c.satisfies(invd) for invd in invalid_deps_string)
|
||||
]
|
||||
if len(invalid_deps) != len(invalid_deps_string):
|
||||
raise e
|
||||
invalid_constraints.extend(invalid_deps)
|
||||
except UnknownVariantError as e:
|
||||
invalid_variants = e.unknown_variants
|
||||
inv_variant_constraints = [
|
||||
c for c in spec_constraints if any(name in c.variants for name in invalid_variants)
|
||||
]
|
||||
if len(inv_variant_constraints) != len(invalid_variants):
|
||||
raise e
|
||||
invalid_constraints.extend(inv_variant_constraints)
|
||||
|
||||
|
||||
def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
|
||||
index, spec_str, tests = packed_arguments
|
||||
index, spec_constraints, tests = packed_arguments
|
||||
spec_constraints = [Spec(x) for x in spec_constraints]
|
||||
with tty.SuppressOutput(msg_enabled=False):
|
||||
start = time.time()
|
||||
spec = Spec(spec_str).concretized(tests=tests)
|
||||
spec = _concretize_from_constraints(spec_constraints, tests)
|
||||
return index, spec, time.time() - start
|
||||
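A minimal sketch of how the constraint-based path above is exercised (the spec strings are illustrative): exactly one constraint names a package, the anonymous ones are folded into it, and invalid dependency or variant constraints are dropped on retry:

    constraints = [Spec("hdf5"), Spec("+mpi"), Spec("%gcc@12")]  # one named spec, two anonymous
    concrete = _concretize_from_constraints(constraints, tests=False)
    print(concrete)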
|
||||
|
||||
|
@@ -276,6 +276,52 @@ def _do_fake_install(pkg: "spack.package_base.PackageBase") -> None:
|
||||
dump_packages(pkg.spec, packages_dir)
|
||||
|
||||
|
||||
def _packages_needed_to_bootstrap_compiler(
|
||||
compiler: "spack.spec.CompilerSpec", architecture: "spack.spec.ArchSpec", pkgs: list
|
||||
) -> List[Tuple["spack.package_base.PackageBase", bool]]:
|
||||
"""
|
||||
Return a list of packages required to bootstrap ``pkg``'s compiler
|
||||
|
||||
Checks Spack's compiler configuration for a compiler that
|
||||
matches the package spec.
|
||||
|
||||
Args:
|
||||
compiler: the compiler to bootstrap
|
||||
architecture: the architecture for which to bootstrap the compiler
|
||||
pkgs: the packages that may need their compiler installed
|
||||
|
||||
Return:
|
||||
list of tuples of (package, bool) for concretized compiler-related
packages that need to be installed; the bool specifies whether the
package is the bootstrap compiler (``True``) or one of its dependencies
(``False``). The list will be empty if there are no compilers.
|
||||
"""
|
||||
tty.debug(f"Bootstrapping {compiler} compiler")
|
||||
compilers = spack.compilers.compilers_for_spec(compiler, arch_spec=architecture)
|
||||
if compilers:
|
||||
return []
|
||||
|
||||
dep = spack.compilers.pkg_spec_for_compiler(compiler)
|
||||
|
||||
# Set the architecture for the compiler package in a way that allows the
|
||||
# concretizer to back off if needed for the older bootstrapping compiler
|
||||
dep.constrain(f"platform={str(architecture.platform)}")
|
||||
dep.constrain(f"os={str(architecture.os)}")
|
||||
dep.constrain(f"target={architecture.target.microarchitecture.family.name}:")
|
||||
# concrete CompilerSpec has less info than concrete Spec
|
||||
# concretize as Spec to add that information
|
||||
dep.concretize()
|
||||
# mark compiler as depended-on by the packages that use it
|
||||
for pkg in pkgs:
|
||||
dep._dependents.add(
|
||||
spack.spec.DependencySpec(pkg.spec, dep, depflag=dt.BUILD, virtuals=())
|
||||
)
|
||||
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
|
||||
|
||||
packages.append((dep.package, True))
|
||||
return packages
|
||||
|
||||
|
||||
def _hms(seconds: int) -> str:
|
||||
"""
|
||||
Convert seconds to hours, minutes, seconds
|
||||
@@ -405,7 +451,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
|
||||
|
||||
# Add to the DB
|
||||
tty.debug(f"{pre} registering into DB")
|
||||
spack.store.STORE.db.add(spec, explicit=explicit)
|
||||
spack.store.STORE.db.add(spec, None, explicit=explicit)
|
||||
|
||||
|
||||
def _process_binary_cache_tarball(
|
||||
@@ -442,12 +488,13 @@ def _process_binary_cache_tarball(
|
||||
|
||||
with timer.measure("install"), spack.util.path.filter_padding():
|
||||
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
|
||||
pkg.windows_establish_runtime_linkage()
|
||||
|
||||
if hasattr(pkg, "_post_buildcache_install_hook"):
|
||||
pkg._post_buildcache_install_hook()
|
||||
|
||||
pkg.installed_from_binary_cache = True
|
||||
spack.store.STORE.db.add(pkg.spec, explicit=explicit)
|
||||
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
|
||||
return True
|
||||
|
||||
|
||||
@@ -921,6 +968,26 @@ def __init__(
|
||||
if package_id(d) != self.pkg_id
|
||||
)
|
||||
|
||||
# Handle bootstrapped compiler
|
||||
#
|
||||
# The bootstrapped compiler is not a dependency in the spec, but it is
|
||||
# a dependency of the build task. Here we add it to self.dependencies
|
||||
compiler_spec = self.pkg.spec.compiler
|
||||
arch_spec = self.pkg.spec.architecture
|
||||
strict = spack.concretize.Concretizer().check_for_compiler_existence
|
||||
if (
|
||||
not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
|
||||
and not strict
|
||||
):
|
||||
# The compiler is in the queue, identify it as dependency
|
||||
dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
|
||||
dep.constrain(f"platform={str(arch_spec.platform)}")
|
||||
dep.constrain(f"os={str(arch_spec.os)}")
|
||||
dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
|
||||
dep.concretize()
|
||||
dep_id = package_id(dep)
|
||||
self.dependencies.add(dep_id)
|
||||
|
||||
# List of uninstalled dependencies, which is used to establish
|
||||
# the priority of the build task.
|
||||
#
|
||||
@@ -1099,6 +1166,53 @@ def __str__(self) -> str:
|
||||
installed = f"installed ({len(self.installed)}) = {self.installed}"
|
||||
return f"{self.pid}: {requests}; {tasks}; {installed}; {failed}"
|
||||
|
||||
def _add_bootstrap_compilers(
|
||||
self,
|
||||
compiler: "spack.spec.CompilerSpec",
|
||||
architecture: "spack.spec.ArchSpec",
|
||||
pkgs: List["spack.package_base.PackageBase"],
|
||||
request: BuildRequest,
|
||||
all_deps,
|
||||
) -> None:
|
||||
"""
|
||||
Add bootstrap compilers and dependencies to the build queue.
|
||||
|
||||
Args:
|
||||
compiler: the compiler to bootstrap
|
||||
architecture: the architecture for which to bootstrap the compiler
|
||||
pkgs: the package list with possible compiler dependencies
|
||||
request: the associated install request
|
||||
all_deps (defaultdict(set)): dictionary of all dependencies and
|
||||
associated dependents
|
||||
"""
|
||||
packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
|
||||
for comp_pkg, is_compiler in packages:
|
||||
pkgid = package_id(comp_pkg.spec)
|
||||
if pkgid not in self.build_tasks:
|
||||
self._add_init_task(comp_pkg, request, is_compiler, all_deps)
|
||||
elif is_compiler:
|
||||
# ensure it's queued as a compiler
|
||||
self._modify_existing_task(pkgid, "compiler", True)
|
||||
|
||||
def _modify_existing_task(self, pkgid: str, attr, value) -> None:
|
||||
"""
|
||||
Update a task in-place to modify its behavior.
|
||||
|
||||
Currently used to update the ``compiler`` field on tasks
|
||||
that were originally created as a dependency of a compiler,
|
||||
but are compilers in their own right.
|
||||
|
||||
For example, ``intel-oneapi-compilers-classic`` depends on
|
||||
``intel-oneapi-compilers``, which can cause the latter to be
|
||||
queued first as a non-compiler, and only later as a compiler.
|
||||
"""
|
||||
for i, tup in enumerate(self.build_pq):
|
||||
key, task = tup
|
||||
if task.pkg_id == pkgid:
|
||||
tty.debug(f"Modifying task for {pkgid} to treat it as a compiler", level=2)
|
||||
setattr(task, attr, value)
|
||||
self.build_pq[i] = (key, task)
|
||||
|
||||
def _add_init_task(
|
||||
self,
|
||||
pkg: "spack.package_base.PackageBase",
|
||||
@@ -1428,7 +1542,42 @@ def _add_tasks(self, request: BuildRequest, all_deps):
|
||||
tty.warn(f"Installation request refused: {str(err)}")
|
||||
return
|
||||
|
||||
install_compilers = spack.config.get("config:install_missing_compilers", False)
|
||||
|
||||
install_deps = request.install_args.get("install_deps")
|
||||
# Bootstrap compilers first
|
||||
if install_deps and install_compilers:
|
||||
packages_per_compiler: Dict[
|
||||
"spack.spec.CompilerSpec",
|
||||
Dict["spack.spec.ArchSpec", List["spack.package_base.PackageBase"]],
|
||||
] = {}
|
||||
|
||||
for dep in request.traverse_dependencies():
|
||||
dep_pkg = dep.package
|
||||
compiler = dep_pkg.spec.compiler
|
||||
arch = dep_pkg.spec.architecture
|
||||
if compiler not in packages_per_compiler:
|
||||
packages_per_compiler[compiler] = {}
|
||||
|
||||
if arch not in packages_per_compiler[compiler]:
|
||||
packages_per_compiler[compiler][arch] = []
|
||||
|
||||
packages_per_compiler[compiler][arch].append(dep_pkg)
|
||||
|
||||
compiler = request.pkg.spec.compiler
|
||||
arch = request.pkg.spec.architecture
|
||||
|
||||
if compiler not in packages_per_compiler:
|
||||
packages_per_compiler[compiler] = {}
|
||||
|
||||
if arch not in packages_per_compiler[compiler]:
|
||||
packages_per_compiler[compiler][arch] = []
|
||||
|
||||
packages_per_compiler[compiler][arch].append(request.pkg)
|
||||
|
||||
for compiler, archs in packages_per_compiler.items():
|
||||
for arch, packages in archs.items():
|
||||
self._add_bootstrap_compilers(compiler, arch, packages, request, all_deps)
|
||||
|
||||
if install_deps:
|
||||
for dep in request.traverse_dependencies():
|
||||
@@ -1460,6 +1609,10 @@ def _add_tasks(self, request: BuildRequest, all_deps):
|
||||
fail_fast = bool(request.install_args.get("fail_fast"))
|
||||
self.fail_fast = self.fail_fast or fail_fast
|
||||
|
||||
def _add_compiler_package_to_config(self, pkg: "spack.package_base.PackageBase") -> None:
|
||||
compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
|
||||
spack.compilers.find_compilers([compiler_search_prefix])
|
||||
|
||||
def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
|
||||
"""
|
||||
Perform the installation of the requested spec and/or dependency
|
||||
@@ -1487,6 +1640,8 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
|
||||
if use_cache:
|
||||
if _install_from_cache(pkg, explicit, unsigned):
|
||||
self._update_installed(task)
|
||||
if task.compiler:
|
||||
self._add_compiler_package_to_config(pkg)
|
||||
return
|
||||
elif cache_only:
|
||||
raise InstallError("No binary found when cache-only was specified", pkg=pkg)
|
||||
@@ -1514,8 +1669,11 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
|
||||
)
|
||||
# Note: PARENT of the build process adds the new package to
|
||||
# the database, so that we don't need to re-read from file.
|
||||
spack.store.STORE.db.add(pkg.spec, explicit=explicit)
|
||||
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
|
||||
|
||||
# If a compiler, ensure it is added to the configuration
|
||||
if task.compiler:
|
||||
self._add_compiler_package_to_config(pkg)
|
||||
except spack.build_environment.StopPhase as e:
|
||||
# A StopPhase exception means that do_install was asked to
|
||||
# stop early from clients, and is not an error at this point
|
||||
@@ -1916,6 +2074,10 @@ def install(self) -> None:
|
||||
path = spack.util.path.debug_padded_filter(pkg.prefix)
|
||||
_print_installed_pkg(path)
|
||||
|
||||
# It's an already installed compiler, add it to the config
|
||||
if task.compiler:
|
||||
self._add_compiler_package_to_config(pkg)
|
||||
|
||||
else:
|
||||
# At this point we've failed to get a write or a read
|
||||
# lock, which means another process has taken a write
|
||||
|
@@ -104,7 +104,6 @@
|
||||
from spack.spec import InvalidSpecDetected, Spec
|
||||
from spack.util.cpus import determine_number_of_jobs
|
||||
from spack.util.executable import *
|
||||
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type
|
||||
from spack.variant import (
|
||||
any_combination_of,
|
||||
auto_or_any_combination_of,
|
||||
|
@@ -16,6 +16,7 @@
|
||||
import glob
|
||||
import hashlib
|
||||
import importlib
|
||||
import inspect
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
@@ -116,10 +117,11 @@ def preferred_version(pkg: "PackageBase"):
|
||||
Arguments:
|
||||
pkg: The package whose versions are to be assessed.
|
||||
"""
|
||||
from spack.solver.asp import concretization_version_order
|
||||
|
||||
version, _ = max(pkg.versions.items(), key=concretization_version_order)
|
||||
return version
|
||||
# Here we sort first on the fact that a version is marked
|
||||
# as preferred in the package, then on the fact that the
|
||||
# version is not develop, then lexicographically
|
||||
key_fn = lambda v: (pkg.versions[v].get("preferred", False), not v.isdevelop(), v)
|
||||
return max(pkg.versions, key=key_fn)
|
||||
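A toy illustration of the ordering that key function encodes, with plain tuples standing in for Spack's Version objects: the preferred flag dominates, then non-develop beats develop, then natural ordering breaks ties:

    # (preferred?, not develop?, version) -- the largest tuple under max() wins
    candidates = {
        "develop": (False, False, "develop"),
        "2.1.0": (False, True, "2.1.0"),
        "2.0.0": (True, True, "2.0.0"),  # explicitly marked preferred in the package
    }
    print(max(candidates, key=lambda v: candidates[v]))  # -> 2.0.0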
|
||||
|
||||
class WindowsRPath:
|
||||
@@ -1742,7 +1744,7 @@ def _has_make_target(self, target):
|
||||
bool: True if 'target' is found, else False
|
||||
"""
|
||||
# Prevent altering LC_ALL for 'make' outside this function
|
||||
make = copy.deepcopy(self.module.make)
|
||||
make = copy.deepcopy(inspect.getmodule(self).make)
|
||||
|
||||
# Use English locale for missing target message comparison
|
||||
make.add_default_env("LC_ALL", "C")
|
||||
@@ -1792,7 +1794,7 @@ def _if_make_target_execute(self, target, *args, **kwargs):
|
||||
"""
|
||||
if self._has_make_target(target):
|
||||
# Execute target
|
||||
self.module.make(target, *args, **kwargs)
|
||||
inspect.getmodule(self).make(target, *args, **kwargs)
|
||||
|
||||
def _has_ninja_target(self, target):
|
||||
"""Checks to see if 'target' is a valid target in a Ninja build script.
|
||||
@@ -1803,7 +1805,7 @@ def _has_ninja_target(self, target):
|
||||
Returns:
|
||||
bool: True if 'target' is found, else False
|
||||
"""
|
||||
ninja = self.module.ninja
|
||||
ninja = inspect.getmodule(self).ninja
|
||||
|
||||
# Check if we have a Ninja build script
|
||||
if not os.path.exists("build.ninja"):
|
||||
@@ -1832,7 +1834,7 @@ def _if_ninja_target_execute(self, target, *args, **kwargs):
|
||||
"""
|
||||
if self._has_ninja_target(target):
|
||||
# Execute target
|
||||
self.module.ninja(target, *args, **kwargs)
|
||||
inspect.getmodule(self).ninja(target, *args, **kwargs)
|
||||
|
||||
def _get_needed_resources(self):
|
||||
# We use intersects here cause it would also work if self.spec is abstract
|
||||
|
@@ -20,7 +20,8 @@
|
||||
import spack.repo
|
||||
import spack.stage
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.util.crypto import Checker, checksum
|
||||
import spack.zipcache
|
||||
from spack.util.crypto import Checker, checksum_stream
|
||||
from spack.util.executable import which, which_string
|
||||
|
||||
|
||||
@@ -193,9 +194,20 @@ def __init__(
|
||||
# Cannot use pkg.package_dir because it's a property and we have
|
||||
# classes, not instances.
|
||||
pkg_dir = os.path.abspath(os.path.dirname(cls.module.__file__))
|
||||
path = os.path.join(pkg_dir, self.relative_path)
|
||||
if os.path.exists(path):
|
||||
abs_path = path
|
||||
path = pathlib.Path(os.path.join(pkg_dir, self.relative_path))
|
||||
|
||||
if "packages.zip" in path.parts:
|
||||
# check if it exists in the zip file.
|
||||
idx = path.parts.index("packages.zip")
|
||||
zip_path = str(pathlib.PurePath(*path.parts[: idx + 1]))
|
||||
entry_path = str(pathlib.PurePath(*path.parts[idx + 1 :]))
|
||||
|
||||
_, namelist = spack.zipcache.get(zip_path)
|
||||
if entry_path in namelist:
|
||||
abs_path = str(path)
|
||||
break
|
||||
elif path.exists():
|
||||
abs_path = str(path)
|
||||
break
|
||||
|
||||
if abs_path is None:
|
||||
@@ -215,7 +227,19 @@ def sha256(self) -> str:
|
||||
The sha256 of the patch file.
|
||||
"""
|
||||
if self._sha256 is None and self.path is not None:
|
||||
self._sha256 = checksum(hashlib.sha256, self.path)
|
||||
path = pathlib.PurePath(self.path)
|
||||
if "packages.zip" in path.parts:
|
||||
# split in path to packages.zip and the path within the zip
|
||||
idx = path.parts.index("packages.zip")
|
||||
zip_path = str(pathlib.PurePath(*path.parts[: idx + 1]))
|
||||
entry_path = str(pathlib.PurePath(*path.parts[idx + 1 :]))
|
||||
zip, _ = spack.zipcache.get(zip_path)
|
||||
f = zip.open(entry_path, "r")
|
||||
else:
|
||||
f = open(self.path, "rb")
|
||||
self._sha256 = checksum_stream(hashlib.sha256, f)
|
||||
f.close()
|
||||
|
||||
assert isinstance(self._sha256, str)
|
||||
return self._sha256
|
||||
|
||||
|
@@ -12,6 +12,7 @@
|
||||
import macholib.mach_o
|
||||
import macholib.MachO
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
@@ -24,7 +25,6 @@
|
||||
import spack.store
|
||||
import spack.util.elf as elf
|
||||
import spack.util.executable as executable
|
||||
import spack.util.filesystem as ssys
|
||||
import spack.util.path
|
||||
|
||||
from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer
|
||||
@@ -664,7 +664,7 @@ def is_binary(filename):
|
||||
Returns:
|
||||
True or False
|
||||
"""
|
||||
m_type, _ = ssys.mime_type(filename)
|
||||
m_type, _ = fs.mime_type(filename)
|
||||
|
||||
msg = "[{0}] -> ".format(filename)
|
||||
if m_type == "application":
|
||||
@@ -692,7 +692,7 @@ def fixup_macos_rpath(root, filename):
|
||||
True if fixups were applied, else False
|
||||
"""
|
||||
abspath = os.path.join(root, filename)
|
||||
if ssys.mime_type(abspath) != ("application", "x-mach-binary"):
|
||||
if fs.mime_type(abspath) != ("application", "x-mach-binary"):
|
||||
return False
|
||||
|
||||
# Get Mach-O header commands
|
||||
|
@@ -26,7 +26,8 @@
|
||||
import types
|
||||
import uuid
|
||||
import warnings
|
||||
from typing import Any, Dict, Generator, List, Optional, Set, Tuple, Type, Union
|
||||
import zipimport
|
||||
from typing import IO, Any, Dict, Generator, List, Optional, Set, Tuple, Type, Union
|
||||
|
||||
import llnl.path
|
||||
import llnl.util.filesystem as fs
|
||||
@@ -46,6 +47,7 @@
|
||||
import spack.util.naming as nm
|
||||
import spack.util.path
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.zipcache
|
||||
|
||||
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
|
||||
ROOT_PYTHON_NAMESPACE = "spack.pkg"
|
||||
@@ -100,32 +102,6 @@ def get_data(self, path):
|
||||
return self.prepend.encode() + b"\n" + data
|
||||
|
||||
|
||||
class RepoLoader(_PrependFileLoader):
|
||||
"""Loads a Python module associated with a package in specific repository"""
|
||||
|
||||
#: Code in ``_package_prepend`` is prepended to imported packages.
|
||||
#:
|
||||
#: Spack packages are expected to call `from spack.package import *`
|
||||
#: themselves, but we are allowing a deprecation period before breaking
|
||||
#: external repos that don't do this yet.
|
||||
_package_prepend = "from spack.package import *"
|
||||
|
||||
def __init__(self, fullname, repo, package_name):
|
||||
self.repo = repo
|
||||
self.package_name = package_name
|
||||
self.package_py = repo.filename_for_package_name(package_name)
|
||||
self.fullname = fullname
|
||||
super().__init__(self.fullname, self.package_py, prepend=self._package_prepend)
|
||||
|
||||
|
||||
class SpackNamespaceLoader:
|
||||
def create_module(self, spec):
|
||||
return SpackNamespace(spec.name)
|
||||
|
||||
def exec_module(self, module):
|
||||
module.__loader__ = self
|
||||
|
||||
|
||||
class ReposFinder:
|
||||
"""MetaPathFinder class that loads a Python module corresponding to a Spack package.
|
||||
|
||||
@@ -165,10 +141,11 @@ def find_spec(self, fullname, python_path, target=None):
|
||||
if not fullname.startswith(ROOT_PYTHON_NAMESPACE):
|
||||
return None
|
||||
|
||||
loader = self.compute_loader(fullname)
|
||||
if loader is None:
|
||||
result = self.compute_loader(fullname)
|
||||
if result is None:
|
||||
return None
|
||||
return importlib.util.spec_from_loader(fullname, loader)
|
||||
loader, actual_fullname = result
|
||||
return importlib.util.spec_from_loader(actual_fullname, loader)
|
||||
|
||||
def compute_loader(self, fullname):
|
||||
# namespaces are added to repo, and package modules are leaves.
|
||||
@@ -187,16 +164,28 @@ def compute_loader(self, fullname):
|
||||
# With 2 nested conditionals we can call "repo.real_name" only once
|
||||
package_name = repo.real_name(module_name)
|
||||
if package_name:
|
||||
return RepoLoader(fullname, repo, package_name)
|
||||
# Annoyingly, there is a many-to-one mapping from package module to file; we have to
# figure out how to deal with this properly.
|
||||
if repo.zipimporter:
|
||||
return repo.zipimporter, f"{namespace}.{package_name}"
|
||||
else:
|
||||
return (
|
||||
_PrependFileLoader(
|
||||
fullname=fullname,
|
||||
path=repo.filename_for_package_name(package_name),
|
||||
prepend="from spack.package import *",
|
||||
),
|
||||
fullname,
|
||||
)
|
||||
|
||||
# We are importing a full namespace like 'spack.pkg.builtin'
|
||||
if fullname == repo.full_namespace:
|
||||
return SpackNamespaceLoader()
|
||||
return SpackNamespaceLoader(), fullname
|
||||
|
||||
# No repo provides the namespace, but it is a valid prefix of
|
||||
# something in the RepoPath.
|
||||
if is_repo_path and self.current_repository.by_namespace.is_prefix(fullname):
|
||||
return SpackNamespaceLoader()
|
||||
return SpackNamespaceLoader(), fullname
|
||||
|
||||
return None
|
||||
|
||||
@@ -207,6 +196,7 @@ def compute_loader(self, fullname):
|
||||
repo_config_name = "repo.yaml" # Top-level filename for repo config.
|
||||
repo_index_name = "index.yaml" # Top-level filename for repository index.
|
||||
packages_dir_name = "packages" # Top-level repo directory containing pkgs.
|
||||
packages_zip_name = "packages.zip" # Top-level filename for zipped packages.
|
||||
package_file_name = "package.py" # Filename for packages in a repository.
|
||||
|
||||
#: Guaranteed unused default value for some functions.
|
||||
@@ -216,9 +206,9 @@ def compute_loader(self, fullname):
|
||||
def packages_path():
|
||||
"""Get the test repo if it is active, otherwise the builtin repo."""
|
||||
try:
|
||||
return spack.repo.PATH.get_repo("builtin.mock").packages_path
|
||||
except spack.repo.UnknownNamespaceError:
|
||||
return spack.repo.PATH.get_repo("builtin").packages_path
|
||||
return PATH.get_repo("builtin.mock").packages_path
|
||||
except UnknownNamespaceError:
|
||||
return PATH.get_repo("builtin").packages_path
|
||||
|
||||
|
||||
class GitExe:
|
||||
@@ -374,6 +364,36 @@ def __getattr__(self, name):
|
||||
return getattr(self, name)
|
||||
|
||||
|
||||
class EvenFasterPackageChecker(collections.abc.Mapping):
|
||||
def __init__(self, zip_path):
|
||||
# The path of the repository managed by this instance
|
||||
self.zipfile, self.namelist = spack.zipcache.get(zip_path)
|
||||
self.invalidate()
|
||||
|
||||
def invalidate(self):
|
||||
self.mtime = os.stat(self.zipfile.filename).st_mtime
|
||||
self.pkgs = {
|
||||
f.rstrip("/"): self.mtime
|
||||
for f in self.namelist
|
||||
if f.endswith("/") and f.count("/") == 1 and f != "./"
|
||||
}
|
||||
|
||||
def last_mtime(self):
|
||||
return self.mtime
|
||||
|
||||
def modified_since(self, since: float) -> List[str]:
|
||||
return list(self.pkgs) if self.mtime > since else []
|
||||
|
||||
def __getitem__(self, item):
|
||||
return self.pkgs[item]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.pkgs)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.pkgs)
|
||||
|
||||
|
||||
class FastPackageChecker(collections.abc.Mapping):
|
||||
"""Cache that maps package names to the stats obtained on the
|
||||
'package.py' files associated with them.
|
||||
@@ -588,14 +608,11 @@ class RepoIndex:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
package_checker: FastPackageChecker,
|
||||
package_checker: Union[FastPackageChecker, EvenFasterPackageChecker],
|
||||
namespace: str,
|
||||
cache: "spack.caches.FileCacheType",
|
||||
):
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
if sys.platform == "win32":
|
||||
self.packages_path = llnl.path.convert_to_posix_path(self.packages_path)
|
||||
self.namespace = namespace
|
||||
|
||||
self.indexers: Dict[str, Indexer] = {}
|
||||
@@ -896,6 +913,9 @@ def dirname_for_package_name(self, pkg_name: str) -> str:
|
||||
def filename_for_package_name(self, pkg_name: str) -> str:
|
||||
return self.repo_for_pkg(pkg_name).filename_for_package_name(pkg_name)
|
||||
|
||||
def open_package(self, pkg_name: str) -> IO[bytes]:
|
||||
return self.repo_for_pkg(pkg_name).open_package(pkg_name)
|
||||
|
||||
def exists(self, pkg_name: str) -> bool:
|
||||
"""Whether package with the give name exists in the path's repos.
|
||||
|
||||
@@ -1009,9 +1029,14 @@ def check(condition, msg):
|
||||
self._names = self.full_namespace.split(".")
|
||||
|
||||
packages_dir = config.get("subdirectory", packages_dir_name)
|
||||
packages_zip = os.path.join(self.root, "packages.zip")
|
||||
self.zipimporter = (
|
||||
zipimport.zipimporter(packages_zip) if os.path.exists(packages_zip) else None
|
||||
)
|
||||
self.packages_path = os.path.join(self.root, packages_dir)
|
||||
check(
|
||||
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
|
||||
self.zipimporter or os.path.isdir(self.packages_path),
|
||||
f"No '{self.packages_path}' or '{packages_zip} found in '{root}'",
|
||||
)
|
||||
|
||||
# Class attribute overrides by package name
|
||||
@@ -1021,7 +1046,9 @@ def check(condition, msg):
|
||||
self._finder: Optional[RepoPath] = None
|
||||
|
||||
# Maps that goes from package name to corresponding file stat
|
||||
self._fast_package_checker: Optional[FastPackageChecker] = None
|
||||
self._fast_package_checker: Optional[
|
||||
Union[EvenFasterPackageChecker, FastPackageChecker]
|
||||
] = None
|
||||
|
||||
# Indexes for this repository, computed lazily
|
||||
self._repo_index: Optional[RepoIndex] = None
|
||||
@@ -1119,14 +1146,20 @@ def dump_provenance(self, spec: "spack.spec.Spec", path: str) -> None:
|
||||
f"Repository {self.namespace} does not contain package {spec.fullname}."
|
||||
)
|
||||
|
||||
package_path = self.filename_for_package_name(spec.name)
|
||||
if not os.path.exists(package_path):
|
||||
try:
|
||||
package_py = self.open_package(spec.name)
|
||||
except OSError:
|
||||
# Spec has no files (e.g., package, patches) to copy
|
||||
tty.debug(f"{spec.name} does not have a package to dump")
|
||||
return
|
||||
|
||||
# Install patch files needed by the (concrete) package.
|
||||
fs.mkdirp(path)
|
||||
|
||||
# Install the package.py file itself.
|
||||
with package_py as f, open(os.path.join(path, package_file_name), "wb") as g:
|
||||
shutil.copyfileobj(f, g)
|
||||
|
||||
if spec.concrete:
|
||||
for patch in itertools.chain.from_iterable(spec.package.patches.values()):
|
||||
if patch.path:
|
||||
@@ -1135,9 +1168,6 @@ def dump_provenance(self, spec: "spack.spec.Spec", path: str) -> None:
|
||||
else:
|
||||
warnings.warn(f"Patch file did not exist: {patch.path}")
|
||||
|
||||
# Install the package.py file itself.
|
||||
fs.install(self.filename_for_package_name(spec.name), path)
|
||||
|
||||
@property
|
||||
def index(self) -> RepoIndex:
|
||||
"""Construct the index for this repo lazily."""
|
||||
@@ -1194,10 +1224,22 @@ def filename_for_package_name(self, pkg_name: str) -> str:
|
||||
pkg_dir = self.dirname_for_package_name(pkg_name)
|
||||
return os.path.join(pkg_dir, package_file_name)
|
||||
|
||||
def open_package(self, pkg_name: str) -> IO[bytes]:
|
||||
"""Open the package.py file for a package in this repo."""
|
||||
if self.zipimporter:
|
||||
zip, _ = spack.zipcache.get(self.zipimporter.archive)
|
||||
_, unqualified_name = self.partition_package_name(pkg_name)
|
||||
return zip.open(f"{unqualified_name}/__init__.py")
|
||||
else:
|
||||
return open(self.filename_for_package_name(pkg_name), "rb")
|
||||
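A sketch of using the accessor above; it returns a binary file object whether the repo is backed by a packages directory or a packages.zip (the repo object and package name are assumed):

    with repo.open_package("zlib") as f:
        source = f.read().decode("utf-8")
    print(source.splitlines()[0])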
|
||||
@property
|
||||
def _pkg_checker(self) -> FastPackageChecker:
|
||||
def _pkg_checker(self) -> Union[FastPackageChecker, EvenFasterPackageChecker]:
|
||||
if self._fast_package_checker is None:
|
||||
self._fast_package_checker = FastPackageChecker(self.packages_path)
|
||||
if self.zipimporter:
|
||||
self._fast_package_checker = EvenFasterPackageChecker(self.zipimporter.archive)
|
||||
else:
|
||||
self._fast_package_checker = FastPackageChecker(self.packages_path)
|
||||
return self._fast_package_checker
|
||||
|
||||
def all_package_names(self, include_virtuals: bool = False) -> List[str]:
|
||||
@@ -1230,16 +1272,7 @@ def all_package_classes(self) -> Generator[Type["spack.package_base.PackageBase"
|
||||
|
||||
def exists(self, pkg_name: str) -> bool:
|
||||
"""Whether a package with the supplied name exists."""
|
||||
if pkg_name is None:
|
||||
return False
|
||||
|
||||
# if the FastPackageChecker is already constructed, use it
|
||||
if self._fast_package_checker:
|
||||
return pkg_name in self._pkg_checker
|
||||
|
||||
# if not, check for the package.py file
|
||||
path = self.filename_for_package_name(pkg_name)
|
||||
return os.path.exists(path)
|
||||
return pkg_name is not None and pkg_name in self._pkg_checker
|
||||
|
||||
def last_mtime(self):
|
||||
"""Time a package file in this repo was last updated."""
|
||||
@@ -1507,6 +1540,14 @@ def use_repositories(
|
||||
PATH = saved
|
||||
|
||||
|
||||
class SpackNamespaceLoader:
|
||||
def create_module(self, spec):
|
||||
return SpackNamespace(spec.name)
|
||||
|
||||
def exec_module(self, module):
|
||||
module.__loader__ = self
|
||||
|
||||
|
||||
class MockRepositoryBuilder:
|
||||
"""Build a mock repository in a directory"""
|
||||
|
||||
|
@@ -116,7 +116,7 @@ def rewire_node(spec, explicit):
|
||||
# spec being added to look for mismatches)
|
||||
spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
|
||||
# add to database, not sure about explicit
|
||||
spack.store.STORE.db.add(spec, explicit=explicit)
|
||||
spack.store.STORE.db.add(spec, spack.store.STORE.layout, explicit=explicit)
|
||||
|
||||
# run post install hooks
|
||||
spack.hooks.post_install(spec, explicit)
|
||||
|
@@ -3,28 +3,22 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""This module contains jsonschema files for all of Spack's YAML formats."""
|
||||
import typing
|
||||
import warnings
|
||||
|
||||
import llnl.util.lang
|
||||
|
||||
|
||||
class DeprecationMessage(typing.NamedTuple):
|
||||
message: str
|
||||
error: bool
|
||||
|
||||
|
||||
# jsonschema is imported lazily as it is heavy to import
|
||||
# and increases the start-up time
|
||||
def _make_validator():
|
||||
import jsonschema
|
||||
|
||||
import spack.parser
|
||||
|
||||
def _validate_spec(validator, is_spec, instance, schema):
|
||||
"""Check if the attributes on instance are valid specs."""
|
||||
import jsonschema
|
||||
|
||||
import spack.parser
|
||||
|
||||
if not validator.is_type(instance, "object"):
|
||||
return
|
||||
|
||||
@@ -38,31 +32,27 @@ def _deprecated_properties(validator, deprecated, instance, schema):
|
||||
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
|
||||
return
|
||||
|
||||
if not deprecated:
|
||||
return
|
||||
|
||||
deprecations = {
|
||||
name: DeprecationMessage(message=x["message"], error=x["error"])
|
||||
for x in deprecated
|
||||
for name in x["names"]
|
||||
}
|
||||
|
||||
# Get a list of the deprecated properties, return if there is none
|
||||
issues = [entry for entry in instance if entry in deprecations]
|
||||
if not issues:
|
||||
deprecated_properties = [x for x in instance if x in deprecated["properties"]]
|
||||
if not deprecated_properties:
|
||||
return
|
||||
|
||||
# Process issues
|
||||
errors = []
|
||||
for name in issues:
|
||||
msg = deprecations[name].message.format(name=name)
|
||||
if deprecations[name].error:
|
||||
errors.append(msg)
|
||||
else:
|
||||
warnings.warn(msg)
|
||||
# Retrieve the template message
|
||||
msg_str_or_func = deprecated["message"]
|
||||
if isinstance(msg_str_or_func, str):
|
||||
msg = msg_str_or_func.format(properties=deprecated_properties)
|
||||
else:
|
||||
msg = msg_str_or_func(instance, deprecated_properties)
|
||||
if msg is None:
|
||||
return
|
||||
|
||||
if errors:
|
||||
yield jsonschema.ValidationError("\n".join(errors))
|
||||
is_error = deprecated["error"]
|
||||
if not is_error:
|
||||
warnings.warn(msg)
|
||||
else:
|
||||
import jsonschema
|
||||
|
||||
yield jsonschema.ValidationError(msg)
|
||||
|
||||
return jsonschema.validators.extend(
|
||||
jsonschema.Draft4Validator,
|
||||
|
@@ -75,6 +75,7 @@
|
||||
"verify_ssl": {"type": "boolean"},
|
||||
"ssl_certs": {"type": "string"},
|
||||
"suppress_gpg_warnings": {"type": "boolean"},
|
||||
"install_missing_compilers": {"type": "boolean"},
|
||||
"debug": {"type": "boolean"},
|
||||
"checksum": {"type": "boolean"},
|
||||
"deprecated": {"type": "boolean"},
|
||||
@@ -95,21 +96,12 @@
|
||||
"binary_index_ttl": {"type": "integer", "minimum": 0},
|
||||
"aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}},
|
||||
},
|
||||
"deprecatedProperties": [
|
||||
{
|
||||
"names": ["concretizer"],
|
||||
"message": "Spack supports only clingo as a concretizer from v0.23. "
|
||||
"The config:concretizer config option is ignored.",
|
||||
"error": False,
|
||||
},
|
||||
{
|
||||
"names": ["install_missing_compilers"],
|
||||
"message": "The config:install_missing_compilers option has been deprecated in "
|
||||
"Spack v0.23, and is currently ignored. It will be removed from config in "
|
||||
"Spack v0.25.",
|
||||
"error": False,
|
||||
},
|
||||
],
|
||||
"deprecatedProperties": {
|
||||
"properties": ["concretizer"],
|
||||
"message": "Spack supports only clingo as a concretizer from v0.23. "
|
||||
"The config:concretizer config option is ignored.",
|
||||
"error": False,
|
||||
},
|
||||
}
|
||||
}
|
||||
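Per the validator change earlier in this diff, "message" may also be a callable taking (instance, deprecated_properties) and returning a string, or None to suppress the diagnostic. A hypothetical schema fragment using that form (property names invented for illustration):

    example_schema = {
        "type": "object",
        "properties": {"old_option": {}, "new_option": {"type": "string"}},
        "deprecatedProperties": {
            "properties": ["old_option"],
            "message": lambda instance, props: f"ignoring deprecated keys: {', '.join(props)}",
            "error": False,
        },
    }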
|
||||
|
@@ -109,6 +109,7 @@
|
||||
"require": requirements,
|
||||
"prefer": prefer_and_conflict,
|
||||
"conflict": prefer_and_conflict,
|
||||
"version": {}, # Here only to warn users on ignored properties
|
||||
"target": {
|
||||
"type": "array",
|
||||
"default": [],
|
||||
@@ -139,6 +140,14 @@
|
||||
},
|
||||
"variants": variants,
|
||||
},
|
||||
"deprecatedProperties": {
|
||||
"properties": ["version"],
|
||||
"message": "setting version preferences in the 'all' section of packages.yaml "
|
||||
"is deprecated and will be removed in v0.23\n\n\tThese preferences "
|
||||
"will be ignored by Spack. You can set them only in package-specific sections "
|
||||
"of the same file.\n",
|
||||
"error": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
"patternProperties": {
|
||||
@@ -156,11 +165,14 @@
|
||||
# version strings
|
||||
"items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
|
||||
},
|
||||
"target": {}, # Here only to warn users on ignored properties
|
||||
"compiler": {}, # Here only to warn users on ignored properties
|
||||
"buildable": {"type": "boolean", "default": True},
|
||||
"permissions": permissions,
|
||||
# If 'get_full_repo' is promoted to a Package-level
|
||||
# attribute, it could be useful to set it here
|
||||
"package_attributes": package_attributes,
|
||||
"providers": {}, # Here only to warn users on ignored properties
|
||||
"variants": variants,
|
||||
"externals": {
|
||||
"type": "array",
|
||||
@@ -192,6 +204,18 @@
|
||||
},
|
||||
},
|
||||
},
|
||||
"deprecatedProperties": {
|
||||
"properties": ["target", "compiler", "providers"],
|
||||
"message": "setting 'compiler:', 'target:' or 'provider:' preferences in "
|
||||
"a package-specific section of packages.yaml is deprecated, and will be "
|
||||
"removed in v0.23.\n\n\tThese preferences will be ignored by Spack, and "
|
||||
"can be set only in the 'all' section of the same file. "
|
||||
"You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, "
|
||||
"including files:lines where the deprecated attributes are used.\n\n"
|
||||
"\tUse requirements to enforce conditions on specific packages: "
|
||||
f"{REQUIREMENT_URL}\n",
|
||||
"error": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
@@ -579,7 +579,7 @@ def _is_checksummed_version(version_info: Tuple[GitOrStandardVersion, dict]):
return _is_checksummed_git_version(version)


def concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict]):
def _concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict]):
"""Version order key for concretization, where preferred > not preferred,
not deprecated > deprecated, finite > any infinite component; only if all are
the same, do we use default version ordering."""
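Whichever name the function ends up with, the docstring's contract is a sort key. A minimal sketch of that key, with toy data standing in for real version objects (names below are illustrative, not Spack's API):

# Illustrative only; the real helper receives (GitOrStandardVersion, dict) pairs.
def version_order_key(version_info):
    version, info = version_info
    return (
        info.get("preferred", False),        # preferred > not preferred
        not info.get("deprecated", False),   # not deprecated > deprecated
        not getattr(version, "isdevelop", lambda: False)(),  # finite > infinite
        version,                             # only then the usual version ordering
    )

# toy data: plain tuples stand in for Version objects
versions = {(1, 2): {"preferred": True}, (2, 0): {"deprecated": True}, (1, 9): {}}
print(sorted(versions.items(), key=version_order_key, reverse=True))
# -> preferred 1.2 first, deprecated 2.0 last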
@@ -1022,102 +1022,6 @@ def __iter__(self):
ConditionSpecCache = Dict[str, Dict[ConditionSpecKey, ConditionIdFunctionPair]]


class ConstraintOrigin(enum.Enum):
"""Generates identifiers that can be passed into the solver attached
to constraints, and then later retrieved to determine the origin of
those constraints when ``SpecBuilder`` creates Specs from the solve
result.
"""

DEPENDS_ON = 1
REQUIRE = 2

@staticmethod
def _SUFFIXES() -> Dict["ConstraintOrigin", str]:
return {ConstraintOrigin.DEPENDS_ON: "_dep", ConstraintOrigin.REQUIRE: "_req"}

@staticmethod
def append_type_suffix(pkg_id: str, kind: "ConstraintOrigin") -> str:
"""Given a package identifier and a constraint kind, generate a string ID."""
suffix = ConstraintOrigin._SUFFIXES()[kind]
return f"{pkg_id}{suffix}"

@staticmethod
def strip_type_suffix(source: str) -> Tuple[int, Optional[str]]:
"""Take a combined package/type ID generated by
``append_type_suffix``, and extract the package ID and
an associated weight.
"""
if not source:
return -1, None
for kind, suffix in ConstraintOrigin._SUFFIXES().items():
if source.endswith(suffix):
return kind.value, source[: -len(suffix)]
return -1, source

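Taken together, `append_type_suffix` and `strip_type_suffix` define a small tagging scheme for the IDs handed to the solver. A short usage sketch, assuming the class above is in scope:

tagged = ConstraintOrigin.append_type_suffix("openmpi", ConstraintOrigin.DEPENDS_ON)
print(tagged)                                          # -> "openmpi_dep"
print(ConstraintOrigin.strip_type_suffix(tagged))      # -> (1, "openmpi")
print(ConstraintOrigin.strip_type_suffix("literal"))   # no suffix -> (-1, "literal")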
class SourceContext:
"""Tracks context in which a Spec's clause-set is generated (i.e.
with ``SpackSolverSetup.spec_clauses``).

Facts generated for the spec may include this context.
"""

def __init__(self):
# This can be "literal" for constraints that come from a user
# spec (e.g. from the command line); it can be the output of
# `ConstraintOrigin.append_type_suffix`; the default is "none"
# (which means it isn't important to keep track of the source
# in that case).
self.source = "none"


class ConditionIdContext(SourceContext):
"""Derived from a ``ConditionContext``: for clause-sets generated by
imposed/required specs, stores an associated transform.

This is primarily used for tracking whether we are generating clauses
in the context of a required spec, or for an imposed spec.

Is not a subclass of ``ConditionContext`` because it exists in a
lower-level context with less information.
"""

def __init__(self):
super().__init__()
self.transform = None


class ConditionContext(SourceContext):
"""Tracks context in which a condition (i.e. ``SpackSolverSetup.condition``)
is generated (e.g. for a `depends_on`).

This may modify the required/imposed specs generated as relevant
for the context.
"""

def __init__(self):
super().__init__()
# transformation applied to facts from the required spec. Defaults
# to leave facts as they are.
self.transform_required = None
# transformation applied to facts from the imposed spec. Defaults
# to removing "node" and "virtual_node" facts.
self.transform_imposed = None

def requirement_context(self) -> ConditionIdContext:
ctxt = ConditionIdContext()
ctxt.source = self.source
ctxt.transform = self.transform_required
return ctxt

def impose_context(self) -> ConditionIdContext:
ctxt = ConditionIdContext()
ctxt.source = self.source
ctxt.transform = self.transform_imposed
return ctxt

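A hedged sketch of how these context objects are wired up for a `depends_on`-style condition before being handed to `SpackSolverSetup.condition()`; the transform callable and package name below are placeholders, not Spack's real ones:

# Placeholder transform: drop "node"-like clauses from an imposed spec's clause set.
def drop_node_facts(spec, clauses):
    return [c for c in clauses if c.args[0] not in ("node", "virtual_node")]

ctx = ConditionContext()
ctx.source = ConstraintOrigin.append_type_suffix("mpileaks", ConstraintOrigin.DEPENDS_ON)
ctx.transform_imposed = drop_node_facts

req_ctx = ctx.requirement_context()   # transform is None, same source tag
imp_ctx = ctx.impose_context()        # transform is drop_node_facts, same source tag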
class SpackSolverSetup:
"""Class to set up and run a Spack concretization solve."""

@@ -1293,9 +1197,8 @@ def compiler_facts(self):
|
||||
if compiler.compiler_obj is not None:
|
||||
c = compiler.compiler_obj
|
||||
for flag_type, flags in c.flags.items():
|
||||
flag_group = " ".join(flags)
|
||||
for flag in flags:
|
||||
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag, flag_group))
|
||||
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
|
||||
|
||||
if compiler.available:
|
||||
self.gen.fact(fn.compiler_available(compiler_id))
|
||||
@@ -1472,7 +1375,7 @@ def _get_condition_id(
|
||||
named_cond: spack.spec.Spec,
|
||||
cache: ConditionSpecCache,
|
||||
body: bool,
|
||||
context: ConditionIdContext,
|
||||
transform: Optional[TransformFunction] = None,
|
||||
) -> int:
|
||||
"""Get the id for one half of a condition (either a trigger or an imposed constraint).
|
||||
|
||||
@@ -1486,15 +1389,15 @@ def _get_condition_id(
|
||||
"""
|
||||
pkg_cache = cache[named_cond.name]
|
||||
|
||||
named_cond_key = (str(named_cond), context.transform)
|
||||
named_cond_key = (str(named_cond), transform)
|
||||
result = pkg_cache.get(named_cond_key)
|
||||
if result:
|
||||
return result[0]
|
||||
|
||||
cond_id = next(self._id_counter)
|
||||
requirements = self.spec_clauses(named_cond, body=body, context=context)
|
||||
if context.transform:
|
||||
requirements = context.transform(named_cond, requirements)
|
||||
requirements = self.spec_clauses(named_cond, body=body)
|
||||
if transform:
|
||||
requirements = transform(named_cond, requirements)
|
||||
pkg_cache[named_cond_key] = (cond_id, requirements)
|
||||
|
||||
return cond_id
|
||||
@@ -1505,7 +1408,8 @@ def condition(
|
||||
imposed_spec: Optional[spack.spec.Spec] = None,
|
||||
name: Optional[str] = None,
|
||||
msg: Optional[str] = None,
|
||||
context: Optional[ConditionContext] = None,
|
||||
transform_required: Optional[TransformFunction] = None,
|
||||
transform_imposed: Optional[TransformFunction] = remove_node,
|
||||
):
|
||||
"""Generate facts for a dependency or virtual provider condition.
|
||||
|
||||
@@ -1514,8 +1418,10 @@ def condition(
|
||||
imposed_spec: the constraints that are imposed when this condition is triggered
|
||||
name: name for `required_spec` (required if required_spec is anonymous, ignored if not)
|
||||
msg: description of the condition
|
||||
context: if provided, indicates how to modify the clause-sets for the required/imposed
|
||||
specs based on the type of constraint they are generated for (e.g. `depends_on`)
|
||||
transform_required: transformation applied to facts from the required spec. Defaults
|
||||
to leave facts as they are.
|
||||
transform_imposed: transformation applied to facts from the imposed spec. Defaults
|
||||
to removing "node" and "virtual_node" facts.
|
||||
Returns:
|
||||
int: id of the condition created by this function
|
||||
"""
|
||||
@@ -1523,19 +1429,14 @@ def condition(
|
||||
if not name:
|
||||
raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")
|
||||
|
||||
if not context:
|
||||
context = ConditionContext()
|
||||
context.transform_imposed = remove_node
|
||||
|
||||
with spec_with_name(required_spec, name):
|
||||
# Check if we can emit the requirements before updating the condition ID counter.
|
||||
# In this way, if a condition can't be emitted but the exception is handled in the
|
||||
# caller, we won't emit partial facts.
|
||||
|
||||
condition_id = next(self._id_counter)
|
||||
requirement_context = context.requirement_context()
|
||||
trigger_id = self._get_condition_id(
|
||||
required_spec, cache=self._trigger_cache, body=True, context=requirement_context
|
||||
required_spec, cache=self._trigger_cache, body=True, transform=transform_required
|
||||
)
|
||||
self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
|
||||
self.gen.fact(fn.condition_reason(condition_id, msg))
|
||||
@@ -1545,9 +1446,8 @@ def condition(
|
||||
if not imposed_spec:
|
||||
return condition_id
|
||||
|
||||
impose_context = context.impose_context()
|
||||
effect_id = self._get_condition_id(
|
||||
imposed_spec, cache=self._effect_cache, body=False, context=impose_context
|
||||
imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
|
||||
)
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
|
||||
@@ -1555,8 +1455,8 @@ def condition(
|
||||
|
||||
return condition_id
|
||||
|
||||
def impose(self, condition_id, imposed_spec, node=True, body=False):
|
||||
imposed_constraints = self.spec_clauses(imposed_spec, body=body)
|
||||
def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
|
||||
imposed_constraints = self.spec_clauses(imposed_spec, body=body, required_from=name)
|
||||
for pred in imposed_constraints:
|
||||
# imposed "node"-like conditions are no-ops
|
||||
if not node and pred.args[0] in ("node", "virtual_node"):
|
||||
@@ -1628,14 +1528,14 @@ def dependency_holds(input_spec, requirements):
|
||||
if t & depflag
|
||||
]
|
||||
|
||||
context = ConditionContext()
|
||||
context.source = ConstraintOrigin.append_type_suffix(
|
||||
pkg.name, ConstraintOrigin.DEPENDS_ON
|
||||
self.condition(
|
||||
cond,
|
||||
dep.spec,
|
||||
name=pkg.name,
|
||||
msg=msg,
|
||||
transform_required=track_dependencies,
|
||||
transform_imposed=dependency_holds,
|
||||
)
|
||||
context.transform_required = track_dependencies
|
||||
context.transform_imposed = dependency_holds
|
||||
|
||||
self.condition(cond, dep.spec, name=pkg.name, msg=msg, context=context)
|
||||
|
||||
self.gen.newline()
|
||||
|
||||
@@ -1713,21 +1613,17 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
|
||||
when_spec = spack.spec.Spec(pkg_name)
|
||||
|
||||
try:
|
||||
context = ConditionContext()
|
||||
context.source = ConstraintOrigin.append_type_suffix(
|
||||
pkg_name, ConstraintOrigin.REQUIRE
|
||||
)
|
||||
if not virtual:
|
||||
context.transform_imposed = remove_node
|
||||
# else: for virtuals we want to emit "node" and
|
||||
# "virtual_node" in imposed specs
|
||||
# With virtual we want to emit "node" and "virtual_node" in imposed specs
|
||||
transform: Optional[TransformFunction] = remove_node
|
||||
if virtual:
|
||||
transform = None
|
||||
|
||||
member_id = self.condition(
|
||||
required_spec=when_spec,
|
||||
imposed_spec=spec,
|
||||
name=pkg_name,
|
||||
transform_imposed=transform,
|
||||
msg=f"{input_spec} is a requirement for package {pkg_name}",
|
||||
context=context,
|
||||
)
|
||||
except Exception as e:
|
||||
# Do not raise if the rule comes from the 'all' subsection, since usability
|
||||
@@ -1782,10 +1678,6 @@ def external_packages(self):
|
||||
if pkg_name not in spack.repo.PATH:
|
||||
continue
|
||||
|
||||
# This package is not among possible dependencies
|
||||
if pkg_name not in self.pkgs:
|
||||
continue
|
||||
|
||||
# Check if the external package is buildable. If it is
|
||||
# not then "external(<pkg>)" is a fact, unless we can
|
||||
# reuse an already installed spec.
|
||||
@@ -1826,9 +1718,7 @@ def external_imposition(input_spec, requirements):
|
||||
]
|
||||
|
||||
try:
|
||||
context = ConditionContext()
|
||||
context.transform_imposed = external_imposition
|
||||
self.condition(spec, spec, msg=msg, context=context)
|
||||
self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
|
||||
except (spack.error.SpecError, RuntimeError) as e:
|
||||
warnings.warn(f"while setting up external spec {spec}: {e}")
|
||||
continue
|
||||
@@ -1903,7 +1793,6 @@ def spec_clauses(
|
||||
expand_hashes: bool = False,
|
||||
concrete_build_deps=False,
|
||||
required_from: Optional[str] = None,
|
||||
context: Optional[SourceContext] = None,
|
||||
) -> List[AspFunction]:
|
||||
"""Wrap a call to `_spec_clauses()` into a try/except block with better error handling.
|
||||
|
||||
@@ -1919,7 +1808,6 @@ def spec_clauses(
|
||||
transitive=transitive,
|
||||
expand_hashes=expand_hashes,
|
||||
concrete_build_deps=concrete_build_deps,
|
||||
context=context,
|
||||
)
|
||||
except RuntimeError as exc:
|
||||
msg = str(exc)
|
||||
@@ -1936,7 +1824,6 @@ def _spec_clauses(
|
||||
transitive: bool = True,
|
||||
expand_hashes: bool = False,
|
||||
concrete_build_deps: bool = False,
|
||||
context: Optional[SourceContext] = None,
|
||||
) -> List[AspFunction]:
|
||||
"""Return a list of clauses for a spec mandates are true.
|
||||
|
||||
@@ -1948,8 +1835,6 @@ def _spec_clauses(
|
||||
expand_hashes: if True, descend into hashes of concrete specs (default False)
|
||||
concrete_build_deps: if False, do not include pure build deps of concrete specs
|
||||
(as they have no effect on runtime constraints)
|
||||
context: tracks what constraint this clause set is generated for (e.g. a
|
||||
`depends_on` constraint in a package.py file)
|
||||
|
||||
Normally, if called with ``transitive=True``, ``spec_clauses()`` just generates
|
||||
hashes for the dependency requirements of concrete specs. If ``expand_hashes``
|
||||
@@ -2036,19 +1921,13 @@ def _spec_clauses(
|
||||
self.compiler_version_constraints.add(spec.compiler)
|
||||
|
||||
# compiler flags
|
||||
source = context.source if context else "none"
|
||||
for flag_type, flags in spec.compiler_flags.items():
|
||||
flag_group = " ".join(flags)
|
||||
for flag in flags:
|
||||
clauses.append(
|
||||
f.node_flag(spec.name, fn.node_flag(flag_type, flag, flag_group, source))
|
||||
)
|
||||
clauses.append(f.node_flag(spec.name, flag_type, flag))
|
||||
if not spec.concrete and flag.propagate is True:
|
||||
clauses.append(
|
||||
f.propagate(
|
||||
spec.name,
|
||||
fn.node_flag(flag_type, flag, flag_group, source),
|
||||
fn.edge_types("link", "run"),
|
||||
spec.name, fn.node_flag(flag_type, flag), fn.edge_types("link", "run")
|
||||
)
|
||||
)
|
||||
|
||||
@@ -2130,7 +2009,6 @@ def _spec_clauses(
|
||||
body=body,
|
||||
expand_hashes=expand_hashes,
|
||||
concrete_build_deps=concrete_build_deps,
|
||||
context=context,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -2148,7 +2026,7 @@ def define_package_versions_and_validate_preferences(
|
||||
# like being a "develop" version or being preferred exist only at a
|
||||
# package.py level, sort them in this partial list here
|
||||
package_py_versions = sorted(
|
||||
pkg_cls.versions.items(), key=concretization_version_order, reverse=True
|
||||
pkg_cls.versions.items(), key=_concretization_version_order, reverse=True
|
||||
)
|
||||
|
||||
if require_checksum and pkg_cls.has_code:
|
||||
@@ -2731,6 +2609,10 @@ def define_runtime_constraints(self):
|
||||
continue
|
||||
|
||||
current_libc = compiler.compiler_obj.default_libc
|
||||
# If this is a compiler yet to be built (config:install_missing_compilers:true)
|
||||
# infer libc from the Python process
|
||||
if not current_libc and compiler.compiler_obj.cc is None:
|
||||
current_libc = spack.util.libc.libc_from_current_python_process()
|
||||
|
||||
if using_libc_compatibility() and current_libc:
|
||||
recorder("*").depends_on(
|
||||
@@ -2762,9 +2644,7 @@ def literal_specs(self, specs):
|
||||
effect_id, requirements = cache[imposed_spec_key]
|
||||
else:
|
||||
effect_id = next(self._id_counter)
|
||||
context = SourceContext()
|
||||
context.source = "literal"
|
||||
requirements = self.spec_clauses(spec, context=context)
|
||||
requirements = self.spec_clauses(spec)
|
||||
root_name = spec.name
|
||||
for clause in requirements:
|
||||
clause_name = clause.args[0]
|
||||
@@ -3152,7 +3032,7 @@ def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerPar
|
||||
Args:
|
||||
input_specs: specs to be concretized
|
||||
"""
|
||||
strict = spack.concretize.CHECK_COMPILER_EXISTENCE
|
||||
strict = spack.concretize.Concretizer().check_for_compiler_existence
|
||||
default_os = str(spack.platforms.host().default_os)
|
||||
default_target = str(archspec.cpu.host().family)
|
||||
for s in traverse.traverse_nodes(input_specs):
|
||||
@@ -3464,6 +3344,7 @@ def __init__(self, specs, hash_lookup=None):
|
||||
self._result = None
|
||||
self._command_line_specs = specs
|
||||
self._flag_sources = collections.defaultdict(lambda: set())
|
||||
self._flag_compiler_defaults = set()
|
||||
|
||||
# Pass in as arguments reusable specs and plug them in
|
||||
# from this dictionary during reconstruction
|
||||
@@ -3521,10 +3402,14 @@ def node_compiler_version(self, node, compiler, version):
|
||||
self._specs[node].compiler = spack.spec.CompilerSpec(compiler)
|
||||
self._specs[node].compiler.versions = vn.VersionList([vn.Version(version)])
|
||||
|
||||
def node_flag(self, node, node_flag):
|
||||
self._specs[node].compiler_flags.add_flag(
|
||||
node_flag.flag_type, node_flag.flag, False, node_flag.flag_group, node_flag.source
|
||||
)
|
||||
def node_flag_compiler_default(self, node):
|
||||
self._flag_compiler_defaults.add(node)
|
||||
|
||||
def node_flag(self, node, flag_type, flag):
|
||||
self._specs[node].compiler_flags.add_flag(flag_type, flag, False)
|
||||
|
||||
def node_flag_source(self, node, flag_type, source):
|
||||
self._flag_sources[(node, flag_type)].add(source)
|
||||
|
||||
def external_spec_selected(self, node, idx):
|
||||
"""This means that the external spec and index idx has been selected for this package."""
|
||||
@@ -3565,23 +3450,15 @@ def virtual_on_edge(self, parent_node, provider_node, virtual):
dependencies[0].update_virtuals((virtual,))

def reorder_flags(self):
"""For each spec, determine the order of compiler flags applied to it.
"""Order compiler flags on specs in predefined order.

We order flags so that any node's flags will take priority over
those of its dependents. That is, the deepest node in the DAG's
flags will appear last on the compile line, in the order they
were specified.

The solver determines which flags are on nodes; this routine
imposes order afterwards. The order is:

1. Flags applied in compiler definitions should come first
2. Flags applied by dependents are ordered topologically (with a
dependency on `traverse` to resolve the partial order into a
stable total order)
3. Flags from requirements are then applied (requirements always
come from the package and never a parent)
4. Command-line flags should come last

Additionally, for each source (requirements, compiler, command line, and
dependents), flags from that source should retain their order and grouping:
e.g. for `y cflags="-z -a"` "-z" and "-a" should never have any intervening
flags inserted, and should always appear in that order.
imposes order afterwards.
"""
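# --- Illustrative aside (not part of the diff): the ordering contract in the docstring
# --- above, reduced to a toy example; flag values and sources here are made up.
# Compiler flags first, then dependent-imposed flags (topological order), then
# requirement flags, then command-line flags; each group keeps its internal order.
from_compiler = ["-O2"]
from_dependents = ["-fPIC"]      # parent-imposed, ordered by DAG traversal
from_requirements = ["-g"]
from_command_line = ["-Wall"]
ordered = from_compiler + from_dependents + from_requirements + from_command_line
print(ordered)   # -> ['-O2', '-fPIC', '-g', '-Wall']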
# reverse compilers so we get highest priority compilers that share a spec
|
||||
compilers = dict(
|
||||
@@ -3596,78 +3473,40 @@ def reorder_flags(self):
|
||||
flagmap_from_compiler = compilers[spec.compiler].flags
|
||||
|
||||
for flag_type in spec.compiler_flags.valid_compiler_flags():
|
||||
from_compiler = flagmap_from_compiler.get(flag_type, [])
|
||||
from_sources = []
|
||||
|
||||
# order is determined by the DAG. A spec's flags come after any of its ancestors
|
||||
# on the compile line
|
||||
node = SpecBuilder.make_node(pkg=spec.name)
|
||||
|
||||
ordered_flags = []
|
||||
|
||||
# 1. Put compiler flags first
|
||||
from_compiler = tuple(flagmap_from_compiler.get(flag_type, []))
|
||||
extend_flag_list(ordered_flags, from_compiler)
|
||||
|
||||
# 2. Add all sources (the compiler is one of them, so skip any
|
||||
# flag group that matches it exactly)
|
||||
flag_groups = set()
|
||||
for flag in self._specs[node].compiler_flags.get(flag_type, []):
|
||||
flag_groups.add(
|
||||
spack.spec.CompilerFlag(
|
||||
flag.flag_group,
|
||||
propagate=flag.propagate,
|
||||
flag_group=flag.flag_group,
|
||||
source=flag.source,
|
||||
)
|
||||
source_key = (node, flag_type)
|
||||
if source_key in self._flag_sources:
|
||||
order = [
|
||||
SpecBuilder.make_node(pkg=s.name)
|
||||
for s in spec.traverse(order="post", direction="parents")
|
||||
]
|
||||
sorted_sources = sorted(
|
||||
self._flag_sources[source_key], key=lambda s: order.index(s)
|
||||
)
|
||||
|
||||
# For flags that are applied by dependents, put flags from parents
|
||||
# before children; we depend on the stability of traverse() to
|
||||
# achieve a stable flag order for flags introduced in this manner.
|
||||
topo_order = list(s.name for s in spec.traverse(order="post", direction="parents"))
|
||||
lex_order = list(sorted(flag_groups))
|
||||
|
||||
def _order_index(flag_group):
|
||||
source = flag_group.source
|
||||
# Note: if 'require: ^dependency cflags=...' is ever possible,
|
||||
# this will topologically sort for require as well
|
||||
type_index, pkg_source = ConstraintOrigin.strip_type_suffix(source)
|
||||
if pkg_source in topo_order:
|
||||
major_index = topo_order.index(pkg_source)
|
||||
# If for x->y, x has multiple depends_on declarations that
|
||||
# are activated, and each adds cflags to y, we fall back on
|
||||
# alphabetical ordering to maintain a total order
|
||||
minor_index = lex_order.index(flag_group)
|
||||
else:
|
||||
major_index = len(topo_order) + lex_order.index(flag_group)
|
||||
minor_index = 0
|
||||
return (type_index, major_index, minor_index)
|
||||
|
||||
prioritized_groups = sorted(flag_groups, key=lambda x: _order_index(x))
|
||||
|
||||
for grp in prioritized_groups:
|
||||
grp_flags = tuple(
|
||||
x for (x, y) in spack.compiler.tokenize_flags(grp.flag_group)
|
||||
)
|
||||
if grp_flags == from_compiler:
|
||||
continue
|
||||
as_compiler_flags = list(
|
||||
spack.spec.CompilerFlag(
|
||||
x,
|
||||
propagate=grp.propagate,
|
||||
flag_group=grp.flag_group,
|
||||
source=grp.source,
|
||||
)
|
||||
for x in grp_flags
|
||||
)
|
||||
extend_flag_list(ordered_flags, as_compiler_flags)
|
||||
|
||||
# 3. Now put cmd-line flags last
|
||||
if node.pkg in cmd_specs:
|
||||
cmd_flags = cmd_specs[node.pkg].compiler_flags.get(flag_type, [])
|
||||
extend_flag_list(ordered_flags, cmd_flags)
|
||||
# add flags from each source, lowest to highest precedence
|
||||
for node in sorted_sources:
|
||||
all_src_flags = list()
|
||||
per_pkg_sources = [self._specs[node]]
|
||||
if node.pkg in cmd_specs:
|
||||
per_pkg_sources.append(cmd_specs[node.pkg])
|
||||
for source in per_pkg_sources:
|
||||
all_src_flags.extend(source.compiler_flags.get(flag_type, []))
|
||||
extend_flag_list(from_sources, all_src_flags)
|
||||
|
||||
# compiler flags from compilers config are lowest precedence
|
||||
ordered_compiler_flags = list(llnl.util.lang.dedupe(from_compiler + from_sources))
|
||||
compiler_flags = spec.compiler_flags.get(flag_type, [])
|
||||
msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_flags))
|
||||
assert set(compiler_flags) == set(ordered_flags), msg
|
||||
|
||||
spec.compiler_flags.update({flag_type: ordered_flags})
|
||||
msg = f"{set(compiler_flags)} does not equal {set(ordered_compiler_flags)}"
|
||||
assert set(compiler_flags) == set(ordered_compiler_flags), msg
|
||||
|
||||
spec.compiler_flags.update({flag_type: ordered_compiler_flags})
|
||||
|
||||
def deprecated(self, node: NodeArgument, version: str) -> None:
|
||||
tty.warn(f'using "{node.pkg}@{version}" which is a deprecated version')
|
||||
@@ -3731,9 +3570,10 @@ def build_specs(self, function_tuples):
|
||||
continue
|
||||
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
# do not bother calling actions on it
|
||||
# do not bother calling actions on it except for node_flag_source,
|
||||
# since node_flag_source is tracking information not in the spec itself
|
||||
spec = self._specs.get(args[0])
|
||||
if spec and spec.concrete:
|
||||
if spec and spec.concrete and name != "node_flag_source":
|
||||
continue
|
||||
|
||||
action(*args)
|
||||
@@ -3793,8 +3633,7 @@ def _develop_specs_from_env(spec, env):
|
||||
assert spec.variants["dev_path"].value == path, error_msg
|
||||
else:
|
||||
spec.variants.setdefault("dev_path", spack.variant.SingleValuedVariant("dev_path", path))
|
||||
|
||||
assert spec.satisfies(dev_info["spec"])
|
||||
spec.constrain(dev_info["spec"])
|
||||
|
||||
|
||||
def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
|
||||
|
@@ -43,7 +43,9 @@
|
||||
internal_error("Only nodes can have node_compiler_version").
|
||||
:- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode),
|
||||
internal_error("variant_value true for a non-node").
|
||||
:- attr("node_flag", PackageNode, _), not attr("node", PackageNode),
|
||||
:- attr("node_flag_compiler_default", PackageNode), not attr("node", PackageNode),
|
||||
internal_error("node_flag_compiler_default true for non-node").
|
||||
:- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode),
|
||||
internal_error("node_flag assigned for non-node").
|
||||
:- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode),
|
||||
internal_error("external_spec_selected for non-node").
|
||||
@@ -51,6 +53,10 @@
|
||||
internal_error("non-node depends on something").
|
||||
:- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode),
|
||||
internal_error("something depends_on a non-node").
|
||||
:- attr("node_flag_source", Node, _, _), not attr("node", Node),
|
||||
internal_error("node_flag_source assigned for a non-node").
|
||||
:- attr("node_flag_source", _, _, SourceNode), not attr("node", SourceNode),
|
||||
internal_error("node_flag_source assigned with a non-node source").
|
||||
:- attr("virtual_node", VirtualNode), not provider(_, VirtualNode),
|
||||
internal_error("virtual node with no provider").
|
||||
:- provider(_, VirtualNode), not attr("virtual_node", VirtualNode),
|
||||
@@ -148,6 +154,7 @@ unification_set(SetID, VirtualNode)
|
||||
% TODO: literals, at the moment, can only influence the "root" unification set. This needs to be extended later.
|
||||
|
||||
% Node attributes that have multiple node arguments (usually, only the first argument is a node)
|
||||
multiple_nodes_attribute("node_flag_source").
|
||||
multiple_nodes_attribute("depends_on").
|
||||
multiple_nodes_attribute("virtual_on_edge").
|
||||
multiple_nodes_attribute("provider_set").
|
||||
@@ -385,6 +392,7 @@ trigger_condition_holds(ID, RequestorNode) :-
|
||||
attr(Name, node(X, A1), A2, A3) : condition_requirement(ID, Name, A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name);
|
||||
attr(Name, node(X, A1), A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4), condition_nodes(ID, PackageNode, node(X, A1));
|
||||
% Special cases
|
||||
attr("node_flag_source", node(X, A1), A2, node(Y, A3)) : condition_requirement(ID, "node_flag_source", A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), condition_nodes(ID, PackageNode, node(Y, A3));
|
||||
not cannot_hold(ID, PackageNode).
|
||||
|
||||
condition_holds(ConditionID, node(X, Package))
|
||||
@@ -432,6 +440,13 @@ attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constrai
|
||||
attr(Name, node(X, A1), A2, A3) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name).
|
||||
attr(Name, node(X, A1), A2, A3, A4) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3, A4), imposed_nodes(ID, PackageNode, node(X, A1)).
|
||||
|
||||
% For node flag sources we need to look at the condition_set of the source, since it is the dependent
|
||||
% of the package on which I want to impose the constraint
|
||||
attr("node_flag_source", node(X, A1), A2, node(Y, A3))
|
||||
:- impose(ID, node(X, A1)),
|
||||
imposed_constraint(ID, "node_flag_source", A1, A2, A3),
|
||||
condition_set(node(Y, A3), node(X, A1)).
|
||||
|
||||
% Provider set is relevant only for literals, since it's the only place where `^[virtuals=foo] bar`
|
||||
% might appear in the HEAD of a rule
|
||||
attr("provider_set", node(min_dupe_id, Provider), node(min_dupe_id, Virtual))
|
||||
@@ -472,8 +487,8 @@ virtual_condition_holds(node(Y, A2), Virtual)
|
||||
% we cannot have additional flag values when we are working with concrete specs
|
||||
:- attr("node", node(ID, Package)),
|
||||
attr("hash", node(ID, Package), Hash),
|
||||
attr("node_flag", node(ID, Package), node_flag(FlagType, Flag, _, _)),
|
||||
not imposed_constraint(Hash, "node_flag", Package, node_flag(FlagType, Flag, _, _)),
|
||||
attr("node_flag", node(ID, Package), FlagType, Flag),
|
||||
not imposed_constraint(Hash, "node_flag", Package, FlagType, Flag),
|
||||
internal_error("imposed hash without imposing all flag values").
|
||||
|
||||
#defined condition/2.
|
||||
@@ -774,15 +789,22 @@ required_provider(Provider, Virtual)
|
||||
|
||||
:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider.
|
||||
|
||||
% TODO: the following choice rule allows the solver to add compiler
|
||||
% TODO: the following two choice rules allow the solver to add compiler
|
||||
% flags if their only source is from a requirement. This is overly-specific
|
||||
% and should use a more-generic approach like in https://github.com/spack/spack/pull/37180
|
||||
|
||||
{ attr("node_flag", node(ID, Package), NodeFlag) } :-
|
||||
{ attr("node_flag", node(ID, Package), FlagType, FlagValue) } :-
|
||||
requirement_group_member(ConditionID, Package, RequirementID),
|
||||
activate_requirement(node(ID, Package), RequirementID),
|
||||
pkg_fact(Package, condition_effect(ConditionID, EffectID)),
|
||||
imposed_constraint(EffectID, "node_flag_set", Package, NodeFlag).
|
||||
imposed_constraint(EffectID, "node_flag_set", Package, FlagType, FlagValue).
|
||||
|
||||
{ attr("node_flag_source", node(NodeID1, Package1), FlagType, node(NodeID2, Package2)) } :-
|
||||
requirement_group_member(ConditionID, Package1, RequirementID),
|
||||
activate_requirement(node(NodeID1, Package1), RequirementID),
|
||||
pkg_fact(Package1, condition_effect(ConditionID, EffectID)),
|
||||
imposed_constraint(EffectID, "node_flag_source", Package1, FlagType, Package2),
|
||||
imposed_nodes(EffectID, node(NodeID2, Package2), node(NodeID1, Package1)).
|
||||
|
||||
requirement_weight(node(ID, Package), Group, W) :-
|
||||
W = #min {
|
||||
@@ -1028,22 +1050,23 @@ variant_is_propagated(PackageNode, Variant) :-
|
||||
% 1. The same flag type is not set on this node
|
||||
% 2. This node has the same compiler as the propagation source
|
||||
|
||||
propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag, FlagGroup, Source), SourceNode) :-
|
||||
propagate(node(PackageID, Package), node_flag(FlagType, Flag, FlagGroup, Source), _),
|
||||
not attr("node_flag_set", node(PackageID, Package), node_flag(FlagType, _, _, "literal")),
|
||||
propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag), SourceNode) :-
|
||||
propagate(node(PackageID, Package), node_flag(FlagType, Flag), _),
|
||||
not attr("node_flag_set", node(PackageID, Package), FlagType, _),
|
||||
% Same compiler as propagation source
|
||||
node_compiler(node(PackageID, Package), CompilerID),
|
||||
node_compiler(SourceNode, CompilerID),
|
||||
attr("propagate", SourceNode, node_flag(FlagType, Flag, FlagGroup, Source), _),
|
||||
attr("propagate", SourceNode, node_flag(FlagType, Flag), _),
|
||||
node(PackageID, Package) != SourceNode,
|
||||
not runtime(Package).
|
||||
|
||||
attr("node_flag", PackageNode, NodeFlag) :- propagated_flag(PackageNode, NodeFlag, _).
|
||||
attr("node_flag", PackageNode, FlagType, Flag) :- propagated_flag(PackageNode, node_flag(FlagType, Flag), _).
|
||||
attr("node_flag_source", PackageNode, FlagType, SourceNode) :- propagated_flag(PackageNode, node_flag(FlagType, _), SourceNode).
|
||||
|
||||
% Cannot propagate the same flag from two distinct sources
|
||||
error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
|
||||
propagated_flag(node(ID, Package), node_flag(FlagType, _, _, _), node(_, Source1)),
|
||||
propagated_flag(node(ID, Package), node_flag(FlagType, _, _, _), node(_, Source2)),
|
||||
propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source1)),
|
||||
propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source2)),
|
||||
Source1 < Source2.
|
||||
|
||||
%----
|
||||
@@ -1319,7 +1342,7 @@ node_compiler_weight(node(ID, Package), 100)
|
||||
not compiler_weight(CompilerID, _).
|
||||
|
||||
% For the time being, be strict and reuse only if the compiler matches one we have on the system
|
||||
error(100, "Compiler {1}@{2} requested for {0} cannot be found.", Package, Compiler, Version)
|
||||
error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version)
|
||||
:- attr("node_compiler_version", node(ID, Package), Compiler, Version),
|
||||
not node_compiler(node(ID, Package), _).
|
||||
|
||||
@@ -1330,18 +1353,32 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found.", Package, Compi
|
||||
% Compiler flags
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
% remember where flags came from
|
||||
attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag_set", PackageNode, FlagType, _).
|
||||
attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag", PackageNode, FlagType, _), attr("hash", PackageNode, _).
|
||||
|
||||
% compiler flags from compilers.yaml are put on nodes if compiler matches
|
||||
attr("node_flag", PackageNode, node_flag(FlagType, Flag, FlagGroup, CompilerID))
|
||||
:- compiler_flag(CompilerID, FlagType, Flag, FlagGroup),
|
||||
attr("node_flag", PackageNode, FlagType, Flag)
|
||||
:- compiler_flag(CompilerID, FlagType, Flag),
|
||||
node_compiler(PackageNode, CompilerID),
|
||||
flag_type(FlagType),
|
||||
compiler_id(CompilerID),
|
||||
compiler_name(CompilerID, CompilerName),
|
||||
compiler_version(CompilerID, Version).
|
||||
|
||||
attr("node_flag", PackageNode, NodeFlag) :- attr("node_flag_set", PackageNode, NodeFlag).
|
||||
attr("node_flag_compiler_default", PackageNode)
|
||||
:- not attr("node_flag_set", PackageNode, FlagType, _),
|
||||
compiler_flag(CompilerID, FlagType, Flag),
|
||||
node_compiler(PackageNode, CompilerID),
|
||||
flag_type(FlagType),
|
||||
compiler_id(CompilerID),
|
||||
compiler_name(CompilerID, CompilerName),
|
||||
compiler_version(CompilerID, Version).
|
||||
|
||||
#defined compiler_flag/4.
|
||||
% Flag set to something
|
||||
attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_set", PackageNode, FlagType, Flag).
|
||||
|
||||
#defined compiler_flag/3.
|
||||
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
|
@@ -230,13 +230,6 @@ class NodeArgument(NamedTuple):
|
||||
pkg: str
|
||||
|
||||
|
||||
class NodeFlag(NamedTuple):
|
||||
flag_type: str
|
||||
flag: str
|
||||
flag_group: str
|
||||
source: str
|
||||
|
||||
|
||||
def intermediate_repr(sym):
|
||||
"""Returns an intermediate representation of clingo models for Spack's spec builder.
|
||||
|
||||
@@ -255,13 +248,6 @@ def intermediate_repr(sym):
|
||||
return NodeArgument(
|
||||
id=intermediate_repr(sym.arguments[0]), pkg=intermediate_repr(sym.arguments[1])
|
||||
)
|
||||
elif sym.name == "node_flag":
|
||||
return NodeFlag(
|
||||
flag_type=intermediate_repr(sym.arguments[0]),
|
||||
flag=intermediate_repr(sym.arguments[1]),
|
||||
flag_group=intermediate_repr(sym.arguments[2]),
|
||||
source=intermediate_repr(sym.arguments[3]),
|
||||
)
|
||||
except RuntimeError:
|
||||
# This happens when using clingo w/ CFFI and trying to access ".name" for symbols
|
||||
# that are not functions
|
||||
|
@@ -13,7 +13,6 @@
|
||||
#show attr/2.
|
||||
#show attr/3.
|
||||
#show attr/4.
|
||||
#show attr/5.
|
||||
|
||||
% names of optimization criteria
|
||||
#show opt_criterion/2.
|
||||
|
@@ -781,49 +781,17 @@ class CompilerFlag(str):
propagate (bool): if ``True`` the flag value will
be passed to the package's dependencies. If
``False`` it will not
flag_group (str): if this flag was introduced along
with several flags via a single source, then
this will store all such flags
source (str): identifies the type of constraint that
introduced this flag (e.g. if a package has
``depends_on(... cflags=-g)``, then the ``source``
for "-g" would indicate ``depends_on``).
"""

def __new__(cls, value, **kwargs):
obj = str.__new__(cls, value)
obj.propagate = kwargs.pop("propagate", False)
obj.flag_group = kwargs.pop("flag_group", value)
obj.source = kwargs.pop("source", None)
return obj


_valid_compiler_flags = ["cflags", "cxxflags", "fflags", "ldflags", "ldlibs", "cppflags"]


def _shared_subset_pair_iterate(container1, container2):
"""
[0, a, c, d, f]
[a, d, e, f]

yields [(a, a), (d, d), (f, f)]

no repeated elements
"""
a_idx, b_idx = 0, 0
max_a, max_b = len(container1), len(container2)
while a_idx < max_a and b_idx < max_b:
if container1[a_idx] == container2[b_idx]:
yield (container1[a_idx], container2[b_idx])
a_idx += 1
b_idx += 1
else:
while container1[a_idx] < container2[b_idx]:
a_idx += 1
while container1[a_idx] > container2[b_idx]:
b_idx += 1

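A quick check of the pairing behaviour documented above (both inputs must already be sorted), using strings as the toy elements:

a = ["0", "a", "c", "d", "f"]
b = ["a", "d", "e", "f"]
print(list(_shared_subset_pair_iterate(a, b)))
# -> [('a', 'a'), ('d', 'd'), ('f', 'f')]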
class FlagMap(lang.HashableMap):
|
||||
__slots__ = ("spec",)
|
||||
|
||||
@@ -832,9 +800,23 @@ def __init__(self, spec):
|
||||
self.spec = spec
|
||||
|
||||
def satisfies(self, other):
|
||||
return all(f in self and set(self[f]) >= set(other[f]) for f in other)
|
||||
return all(f in self and self[f] == other[f] for f in other)
|
||||
|
||||
def intersects(self, other):
|
||||
common_types = set(self) & set(other)
|
||||
for flag_type in common_types:
|
||||
if not self[flag_type] or not other[flag_type]:
|
||||
# At least one of the two is empty
|
||||
continue
|
||||
|
||||
if self[flag_type] != other[flag_type]:
|
||||
return False
|
||||
|
||||
if not all(
|
||||
f1.propagate == f2.propagate for f1, f2 in zip(self[flag_type], other[flag_type])
|
||||
):
|
||||
# At least one propagation flag didn't match
|
||||
return False
|
||||
return True
|
||||
|
||||
def constrain(self, other):
|
||||
@@ -842,28 +824,28 @@ def constrain(self, other):
|
||||
|
||||
Return whether the spec changed.
|
||||
"""
|
||||
if other.spec and other.spec._concrete:
|
||||
for k in self:
|
||||
if k not in other:
|
||||
raise UnsatisfiableCompilerFlagSpecError(self[k], "<absent>")
|
||||
|
||||
changed = False
|
||||
for flag_type in other:
|
||||
if flag_type not in self:
|
||||
self[flag_type] = other[flag_type]
|
||||
for k in other:
|
||||
if k in self and not set(self[k]) <= set(other[k]):
|
||||
raise UnsatisfiableCompilerFlagSpecError(
|
||||
" ".join(f for f in self[k]), " ".join(f for f in other[k])
|
||||
)
|
||||
elif k not in self:
|
||||
self[k] = other[k]
|
||||
changed = True
|
||||
else:
|
||||
extra_other = set(other[flag_type]) - set(self[flag_type])
|
||||
if extra_other:
|
||||
self[flag_type] = list(self[flag_type]) + list(
|
||||
x for x in other[flag_type] if x in extra_other
|
||||
)
|
||||
changed = True
|
||||
|
||||
# Next, if any flags in other propagate, we force them to propagate in our case
|
||||
shared = list(sorted(set(other[flag_type]) - extra_other))
|
||||
for x, y in _shared_subset_pair_iterate(shared, sorted(self[flag_type])):
|
||||
if x.propagate:
|
||||
y.propagate = True
|
||||
|
||||
# TODO: what happens if flag groups with a partial (but not complete)
|
||||
# intersection specify different behaviors for flag propagation?
|
||||
|
||||
# Check that the propagation values match
|
||||
if self[k] == other[k]:
|
||||
for i in range(len(other[k])):
|
||||
if self[k][i].propagate != other[k][i].propagate:
|
||||
raise UnsatisfiableCompilerFlagSpecError(
|
||||
self[k][i].propagate, other[k][i].propagate
|
||||
)
|
||||
return changed
|
||||
|
||||
@staticmethod
|
||||
@@ -876,7 +858,7 @@ def copy(self):
|
||||
clone[name] = compiler_flag
|
||||
return clone
|
||||
|
||||
def add_flag(self, flag_type, value, propagation, flag_group=None, source=None):
|
||||
def add_flag(self, flag_type, value, propagation):
|
||||
"""Stores the flag's value in CompilerFlag and adds it
|
||||
to the FlagMap
|
||||
|
||||
@@ -887,8 +869,7 @@ def add_flag(self, flag_type, value, propagation, flag_group=None, source=None):
|
||||
propagation (bool): if ``True`` the flag value will be passed to
|
||||
the packages' dependencies. If ``False`` it will not be passed
|
||||
"""
|
||||
flag_group = flag_group or value
|
||||
flag = CompilerFlag(value, propagate=propagation, flag_group=flag_group, source=source)
|
||||
flag = CompilerFlag(value, propagate=propagation)
|
||||
|
||||
if flag_type not in self:
|
||||
self[flag_type] = [flag]
|
||||
@@ -1571,9 +1552,7 @@ def _get_dependency(self, name):
|
||||
raise spack.error.SpecError(err_msg.format(name, len(deps)))
|
||||
return deps[0]
|
||||
|
||||
def edges_from_dependents(
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL
|
||||
) -> List[DependencySpec]:
|
||||
def edges_from_dependents(self, name=None, depflag: dt.DepFlag = dt.ALL):
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to parents.
|
||||
|
||||
@@ -1583,9 +1562,7 @@ def edges_from_dependents(
|
||||
"""
|
||||
return [d for d in self._dependents.select(parent=name, depflag=depflag)]
|
||||
|
||||
def edges_to_dependencies(
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL
|
||||
) -> List[DependencySpec]:
|
||||
def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL):
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to children.
|
||||
|
||||
@@ -1684,9 +1661,8 @@ def _add_flag(self, name, value, propagate):
|
||||
elif name in valid_flags:
|
||||
assert self.compiler_flags is not None
|
||||
flags_and_propagation = spack.compiler.tokenize_flags(value, propagate)
|
||||
flag_group = " ".join(x for (x, y) in flags_and_propagation)
|
||||
for flag, propagation in flags_and_propagation:
|
||||
self.compiler_flags.add_flag(name, flag, propagation, flag_group)
|
||||
self.compiler_flags.add_flag(name, flag, propagation)
|
||||
else:
|
||||
# FIXME:
|
||||
# All other flags represent variants. 'foo=true' and 'foo=false'
|
||||
@@ -3911,43 +3887,43 @@ def format_attribute(match_object: Match) -> str:
|
||||
for idx, part in enumerate(parts):
|
||||
if not part:
|
||||
raise SpecFormatStringError("Format string attributes must be non-empty")
|
||||
elif part.startswith("_"):
|
||||
if part.startswith("_"):
|
||||
raise SpecFormatStringError("Attempted to format private attribute")
|
||||
elif isinstance(current, VariantMap):
|
||||
# subscript instead of getattr for variant names
|
||||
try:
|
||||
current = current[part]
|
||||
except KeyError:
|
||||
raise SpecFormatStringError(f"Variant '{part}' does not exist")
|
||||
else:
|
||||
# aliases
|
||||
if part == "arch":
|
||||
part = "architecture"
|
||||
elif part == "version" and not current.versions.concrete:
|
||||
# version (singular) requires a concrete versions list. Avoid
|
||||
# pedantic errors by using versions (plural) when not concrete.
|
||||
# These two are not entirely equivalent for pkg@=1.2.3:
|
||||
# - version prints '1.2.3'
|
||||
# - versions prints '=1.2.3'
|
||||
part = "versions"
|
||||
try:
|
||||
current = getattr(current, part)
|
||||
except AttributeError:
|
||||
raise SpecFormatStringError(
|
||||
f"Attempted to format attribute {attribute}. "
|
||||
f"Spec {'.'.join(parts[:idx])} has no attribute {part}"
|
||||
)
|
||||
if isinstance(current, vn.VersionList) and current == vn.any_version:
|
||||
# don't print empty version lists
|
||||
if part == "variants" and isinstance(current, VariantMap):
|
||||
# subscript instead of getattr for variant names
|
||||
current = current[part]
|
||||
else:
|
||||
# aliases
|
||||
if part == "arch":
|
||||
part = "architecture"
|
||||
elif part == "version":
|
||||
# version (singular) requires a concrete versions list. Avoid
|
||||
# pedantic errors by using versions (plural) when not concrete.
|
||||
# These two are not entirely equivalent for pkg@=1.2.3:
|
||||
# - version prints '1.2.3'
|
||||
# - versions prints '=1.2.3'
|
||||
if not current.versions.concrete:
|
||||
part = "versions"
|
||||
try:
|
||||
current = getattr(current, part)
|
||||
except AttributeError:
|
||||
parent = ".".join(parts[:idx])
|
||||
m = "Attempted to format attribute %s." % attribute
|
||||
m += "Spec %s has no attribute %s" % (parent, part)
|
||||
raise SpecFormatStringError(m)
|
||||
if isinstance(current, vn.VersionList):
|
||||
if current == vn.any_version:
|
||||
# don't print empty version lists
|
||||
return ""
|
||||
|
||||
if callable(current):
|
||||
raise SpecFormatStringError("Attempted to format callable object")
|
||||
|
||||
if current is None:
|
||||
# not printing anything
|
||||
return ""
|
||||
|
||||
if callable(current):
|
||||
raise SpecFormatStringError("Attempted to format callable object")
|
||||
|
||||
if current is None:
|
||||
# not printing anything
|
||||
return ""
|
||||
|
||||
# Set color codes for various attributes
|
||||
color = None
|
||||
if "architecture" in parts:
|
||||
@@ -4706,10 +4682,6 @@ def _load(cls, data):
|
||||
|
||||
return hash_dict[root_spec_hash]["node_spec"]
|
||||
|
||||
@classmethod
|
||||
def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
|
||||
raise NotImplementedError("Subclasses must implement this method.")
|
||||
|
||||
|
||||
class SpecfileV1(SpecfileReaderBase):
|
||||
@classmethod
|
||||
|
@@ -916,7 +916,7 @@ def interactive_version_filter(
|
||||
orig_url_dict = url_dict # only copy when using editor to modify
|
||||
print_header = True
|
||||
VERSION_COLOR = spack.spec.VERSION_COLOR
|
||||
while True:
|
||||
while sys.stdin.isatty():
|
||||
if print_header:
|
||||
has_filter = version_filter != VersionList([":"])
|
||||
header = []
|
||||
@@ -933,7 +933,9 @@ def interactive_version_filter(
|
||||
num_new = sum(1 for v in sorted_and_filtered if v not in known_versions)
|
||||
header.append(f"{llnl.string.plural(num_new, 'new version')}")
|
||||
if has_filter:
|
||||
header.append(colorize(f"Filtered by {VERSION_COLOR}@@{version_filter}@."))
|
||||
header.append(
|
||||
colorize(f"Filtered by {VERSION_COLOR}@@{version_filter}@. (clear with c)")
|
||||
)
|
||||
|
||||
version_with_url = [
|
||||
colorize(
|
||||
|
@@ -173,12 +173,7 @@ def __init__(
|
||||
self.hash_length = hash_length
|
||||
self.upstreams = upstreams
|
||||
self.lock_cfg = lock_cfg
|
||||
self.layout = spack.directory_layout.DirectoryLayout(
|
||||
root, projections=projections, hash_length=hash_length
|
||||
)
|
||||
self.db = spack.database.Database(
|
||||
root, upstream_dbs=upstreams, lock_cfg=lock_cfg, layout=self.layout
|
||||
)
|
||||
self.db = spack.database.Database(root, upstream_dbs=upstreams, lock_cfg=lock_cfg)
|
||||
|
||||
timeout_format_str = (
|
||||
f"{str(lock_cfg.package_timeout)}s" if lock_cfg.package_timeout else "No timeout"
|
||||
@@ -192,9 +187,13 @@ def __init__(
|
||||
self.root, default_timeout=lock_cfg.package_timeout
|
||||
)
|
||||
|
||||
self.layout = spack.directory_layout.DirectoryLayout(
|
||||
root, projections=projections, hash_length=hash_length
|
||||
)
|
||||
|
||||
def reindex(self) -> None:
|
||||
"""Convenience function to reindex the store DB with its own layout."""
|
||||
return self.db.reindex()
|
||||
return self.db.reindex(self.layout)
|
||||
|
||||
def __reduce__(self):
|
||||
return Store, (
|
||||
@@ -262,7 +261,7 @@ def restore(token):
|
||||
|
||||
|
||||
def _construct_upstream_dbs_from_install_roots(
|
||||
install_roots: List[str],
|
||||
install_roots: List[str], _test: bool = False
|
||||
) -> List[spack.database.Database]:
|
||||
accumulated_upstream_dbs: List[spack.database.Database] = []
|
||||
for install_root in reversed(install_roots):
|
||||
@@ -272,6 +271,7 @@ def _construct_upstream_dbs_from_install_roots(
|
||||
is_upstream=True,
|
||||
upstream_dbs=upstream_dbs,
|
||||
)
|
||||
next_db._fail_when_missing_deps = _test
|
||||
next_db._read()
|
||||
accumulated_upstream_dbs.insert(0, next_db)
|
||||
|
||||
|
@@ -592,7 +592,7 @@ def test_setting_attributes(self, default_mock_concretization):
|
||||
|
||||
# We can also propagate the settings to classes in the MRO
|
||||
module_wrapper.propagate_changes_to_mro()
|
||||
for cls in s.package.__class__.__mro__:
|
||||
for cls in type(s.package).__mro__:
|
||||
current_module = cls.module
|
||||
if current_module == spack.package_base:
|
||||
break
|
||||
|
@@ -379,8 +379,9 @@ def test_buildcache_create_install(
|
||||
def test_correct_specs_are_pushed(
|
||||
things_to_install, expected, tmpdir, monkeypatch, default_mock_concretization, temporary_store
|
||||
):
|
||||
# Concretize dttop and add it to the temporary database (without prefixes)
|
||||
spec = default_mock_concretization("dttop")
|
||||
spec.package.do_install(fake=True)
|
||||
temporary_store.db.add(spec, directory_layout=None)
|
||||
slash_hash = f"/{spec.dag_hash()}"
|
||||
|
||||
class DontUpload(spack.binary_distribution.Uploader):
|
||||
|
@@ -591,12 +591,14 @@ def test_config_prefer_upstream(
|
||||
"""
|
||||
|
||||
mock_db_root = str(tmpdir_factory.mktemp("mock_db_root"))
|
||||
prepared_db = spack.database.Database(mock_db_root, layout=gen_mock_layout("/a/"))
|
||||
prepared_db = spack.database.Database(mock_db_root)
|
||||
|
||||
upstream_layout = gen_mock_layout("/a/")
|
||||
|
||||
for spec in ["hdf5 +mpi", "hdf5 ~mpi", "boost+debug~icu+graph", "dependency-install", "patch"]:
|
||||
dep = spack.spec.Spec(spec)
|
||||
dep.concretize()
|
||||
prepared_db.add(dep)
|
||||
prepared_db.add(dep, upstream_layout)
|
||||
|
||||
downstream_db_root = str(tmpdir_factory.mktemp("mock_downstream_db_root"))
|
||||
db_for_test = spack.database.Database(downstream_db_root, upstream_dbs=[prepared_db])
|
||||
|
@@ -18,6 +18,8 @@
|
||||
develop = SpackCommand("develop")
|
||||
env = SpackCommand("env")
|
||||
|
||||
pytestmark = pytest.mark.not_on_windows("does not run on windows")
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mutable_mock_env_path", "mock_packages", "mock_fetch", "mutable_config")
|
||||
class TestDevelop:
|
||||
|
@@ -2336,6 +2336,103 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir):
|
||||
assert mpileaks_spec not in after_conc
|
||||
|
||||
|
||||
def test_stack_concretize_extraneous_deps(tmpdir, mock_packages):
|
||||
# FIXME: The new concretizer doesn't handle yet soft
|
||||
# FIXME: constraints for stacks
|
||||
# FIXME: This now works for statically-determinable invalid deps
|
||||
# FIXME: But it still does not work for dynamically determined invalid deps
|
||||
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
spack:
|
||||
definitions:
|
||||
- packages: [libelf, mpileaks]
|
||||
- install:
|
||||
- matrix:
|
||||
- [$packages]
|
||||
- ['^zmpi', '^mpich']
|
||||
specs:
|
||||
- $install
|
||||
"""
|
||||
)
|
||||
with tmpdir.as_cwd():
|
||||
env("create", "test", "./spack.yaml")
|
||||
with ev.read("test"):
|
||||
concretize()
|
||||
|
||||
test = ev.read("test")
|
||||
|
||||
for user, concrete in test.concretized_specs():
|
||||
assert concrete.concrete
|
||||
assert not user.concrete
|
||||
if user.name == "libelf":
|
||||
assert not concrete.satisfies("^mpi")
|
||||
elif user.name == "mpileaks":
|
||||
assert concrete.satisfies("^mpi")
|
||||
|
||||
|
||||
def test_stack_concretize_extraneous_variants(tmpdir, mock_packages):
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
spack:
|
||||
definitions:
|
||||
- packages: [libelf, mpileaks]
|
||||
- install:
|
||||
- matrix:
|
||||
- [$packages]
|
||||
- ['~shared', '+shared']
|
||||
specs:
|
||||
- $install
|
||||
"""
|
||||
)
|
||||
with tmpdir.as_cwd():
|
||||
env("create", "test", "./spack.yaml")
|
||||
with ev.read("test"):
|
||||
concretize()
|
||||
|
||||
test = ev.read("test")
|
||||
|
||||
for user, concrete in test.concretized_specs():
|
||||
assert concrete.concrete
|
||||
assert not user.concrete
|
||||
if user.name == "libelf":
|
||||
assert "shared" not in concrete.variants
|
||||
if user.name == "mpileaks":
|
||||
assert concrete.variants["shared"].value == user.variants["shared"].value
|
||||
|
||||
|
||||
def test_stack_concretize_extraneous_variants_with_dash(tmpdir, mock_packages):
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
spack:
|
||||
definitions:
|
||||
- packages: [libelf, mpileaks]
|
||||
- install:
|
||||
- matrix:
|
||||
- [$packages]
|
||||
- ['shared=False', '+shared-libs']
|
||||
specs:
|
||||
- $install
|
||||
"""
|
||||
)
|
||||
with tmpdir.as_cwd():
|
||||
env("create", "test", "./spack.yaml")
|
||||
with ev.read("test"):
|
||||
concretize()
|
||||
|
||||
ev.read("test")
|
||||
|
||||
# Regression test for handling of variants with dashes in them
|
||||
# will fail before this point if code regresses
|
||||
assert True
|
||||
|
||||
|
||||
def test_stack_definition_extension(tmpdir):
|
||||
filename = str(tmpdir.join("spack.yaml"))
|
||||
with open(filename, "w") as f:
|
||||
@@ -4204,6 +4301,9 @@ def test_env_include_mixed_views(tmp_path, mutable_mock_env_path, mutable_config
|
||||
{''.join(includes)}
|
||||
specs:
|
||||
- mpileaks
|
||||
packages:
|
||||
mpileaks:
|
||||
compiler: [gcc]
|
||||
"""
|
||||
)
|
||||
|
||||
|
@@ -44,7 +44,7 @@ def test_find_external_single_package(mock_executable):
|
||||
|
||||
assert len(specs_by_package) == 1 and "cmake" in specs_by_package
|
||||
detected_spec = specs_by_package["cmake"]
|
||||
assert len(detected_spec) == 1 and detected_spec[0] == Spec("cmake@1.foo")
|
||||
assert len(detected_spec) == 1 and detected_spec[0].spec == Spec("cmake@1.foo")
|
||||
|
||||
|
||||
def test_find_external_two_instances_same_package(mock_executable):
|
||||
@@ -61,10 +61,10 @@ def test_find_external_two_instances_same_package(mock_executable):
|
||||
)
|
||||
|
||||
assert len(detected_specs) == 2
|
||||
spec_to_path = {s: s.external_path for s in detected_specs}
|
||||
spec_to_path = {e.spec: e.prefix for e in detected_specs}
|
||||
assert spec_to_path[Spec("cmake@1.foo")] == (
|
||||
spack.detection.executable_prefix(str(cmake1.parent))
|
||||
), spec_to_path
|
||||
)
|
||||
assert spec_to_path[Spec("cmake@3.17.2")] == (
|
||||
spack.detection.executable_prefix(str(cmake2.parent))
|
||||
)
|
||||
@@ -72,8 +72,12 @@ def test_find_external_two_instances_same_package(mock_executable):
|
||||
|
||||
def test_find_external_update_config(mutable_config):
|
||||
entries = [
|
||||
Spec.from_detection("cmake@1.foo", external_path="/x/y1"),
|
||||
Spec.from_detection("cmake@3.17.2", external_path="/x/y2"),
|
||||
spack.detection.DetectedPackage(
|
||||
Spec.from_detection("cmake@1.foo", external_path="/x/y1/"), "/x/y1/"
|
||||
),
|
||||
spack.detection.DetectedPackage(
|
||||
Spec.from_detection("cmake@3.17.2", external_path="/x/y2/"), "/x/y2/"
|
||||
),
|
||||
]
|
||||
pkg_to_entries = {"cmake": entries}
|
||||
|
||||
@@ -84,8 +88,8 @@ def test_find_external_update_config(mutable_config):
|
||||
cmake_cfg = pkgs_cfg["cmake"]
|
||||
cmake_externals = cmake_cfg["externals"]
|
||||
|
||||
assert {"spec": "cmake@1.foo", "prefix": "/x/y1"} in cmake_externals
|
||||
assert {"spec": "cmake@3.17.2", "prefix": "/x/y2"} in cmake_externals
|
||||
assert {"spec": "cmake@1.foo", "prefix": "/x/y1/"} in cmake_externals
|
||||
assert {"spec": "cmake@3.17.2", "prefix": "/x/y2/"} in cmake_externals
|
||||
|
||||
|
||||
def test_get_executables(working_env, mock_executable):
|
||||
@@ -225,15 +229,19 @@ def test_find_external_merge(mutable_config, mutable_mock_repo, tmp_path):
|
||||
"""Checks that 'spack find external' doesn't overwrite an existing spec in packages.yaml."""
|
||||
pkgs_cfg_init = {
|
||||
"find-externals1": {
|
||||
"externals": [{"spec": "find-externals1@1.1", "prefix": "/preexisting-prefix"}],
|
||||
"externals": [{"spec": "find-externals1@1.1", "prefix": "/preexisting-prefix/"}],
|
||||
"buildable": False,
|
||||
}
|
||||
}
|
||||
|
||||
mutable_config.update_config("packages", pkgs_cfg_init)
|
||||
entries = [
|
||||
Spec.from_detection("find-externals1@1.1", external_path="/x/y1"),
|
||||
Spec.from_detection("find-externals1@1.2", external_path="/x/y2"),
|
||||
spack.detection.DetectedPackage(
|
||||
Spec.from_detection("find-externals1@1.1", external_path="/x/y1/"), "/x/y1/"
|
||||
),
|
||||
spack.detection.DetectedPackage(
|
||||
Spec.from_detection("find-externals1@1.2", external_path="/x/y2/"), "/x/y2/"
|
||||
),
|
||||
]
|
||||
pkg_to_entries = {"find-externals1": entries}
|
||||
scope = spack.config.default_modify_scope("packages")
|
||||
@@ -243,8 +251,8 @@ def test_find_external_merge(mutable_config, mutable_mock_repo, tmp_path):
|
||||
pkg_cfg = pkgs_cfg["find-externals1"]
|
||||
pkg_externals = pkg_cfg["externals"]
|
||||
|
||||
assert {"spec": "find-externals1@1.1", "prefix": "/preexisting-prefix"} in pkg_externals
|
||||
assert {"spec": "find-externals1@1.2", "prefix": "/x/y2"} in pkg_externals
|
||||
assert {"spec": "find-externals1@1.1", "prefix": "/preexisting-prefix/"} in pkg_externals
|
||||
assert {"spec": "find-externals1@1.2", "prefix": "/x/y2/"} in pkg_externals
|
||||
|
||||
|
||||
def test_list_detectable_packages(mutable_config, mutable_mock_repo):
|
||||
@@ -270,7 +278,7 @@ def _determine_variants(cls, exes, version_str):
|
||||
|
||||
assert len(detected_specs) == 1
|
||||
|
||||
gcc = detected_specs[0]
|
||||
gcc = detected_specs[0].spec
|
||||
assert gcc.name == "gcc"
|
||||
assert gcc.external_path == os.path.sep + os.path.join("opt", "gcc", "bin")
|
||||
|
||||
|
@@ -334,6 +334,7 @@ def test_find_command_basic_usage(database):
assert "mpileaks" in output


@pytest.mark.not_on_windows("envirnment is not yet supported on windows")
@pytest.mark.regression("9875")
def test_find_prefix_in_env(
mutable_mock_env_path, install_mockery, mock_fetch, mock_packages, mock_archive

@@ -16,6 +16,8 @@
add = spack.main.SpackCommand("add")
install = spack.main.SpackCommand("install")

pytestmark = pytest.mark.not_on_windows("does not run on windows")


@pytest.mark.db
def test_gc_without_build_dependency(mutable_database):
@@ -19,6 +19,7 @@
|
||||
|
||||
import spack.cmd.common.arguments
|
||||
import spack.cmd.install
|
||||
import spack.compilers as compilers
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.hash_types as ht
|
||||
@@ -28,7 +29,7 @@
|
||||
from spack.error import SpackError
|
||||
from spack.main import SpackCommand
|
||||
from spack.parser import SpecSyntaxError
|
||||
from spack.spec import Spec
|
||||
from spack.spec import CompilerSpec, Spec
|
||||
|
||||
install = SpackCommand("install")
|
||||
env = SpackCommand("env")
|
||||
@@ -915,6 +916,68 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
|
||||
assert "foo: No such file or directory" in content
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
|
||||
def test_compiler_bootstrap(
|
||||
install_mockery, mock_packages, mock_fetch, mock_archive, mutable_config, monkeypatch
|
||||
):
|
||||
monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
|
||||
spack.config.set("config:install_missing_compilers", True)
|
||||
assert CompilerSpec("gcc@=12.0") not in compilers.all_compiler_specs()
|
||||
|
||||
# Test succeeds if it does not raise an error
|
||||
install("pkg-a%gcc@=12.0")
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("Binary mirrors not supported on windows")
|
||||
def test_compiler_bootstrap_from_binary_mirror(
|
||||
install_mockery, mock_packages, mock_fetch, mock_archive, mutable_config, monkeypatch, tmpdir
|
||||
):
|
||||
"""
|
||||
Make sure installing compiler from buildcache registers compiler
|
||||
"""
|
||||
|
||||
# Create a temp mirror directory for buildcache usage
|
||||
mirror_dir = tmpdir.join("mirror_dir")
|
||||
mirror_url = "file://{0}".format(mirror_dir.strpath)
|
||||
|
||||
# Install a compiler, because we want to put it in a buildcache
|
||||
install("gcc@=10.2.0")
|
||||
|
||||
# Put installed compiler in the buildcache
|
||||
buildcache("push", "-u", "-f", mirror_dir.strpath, "gcc@10.2.0")
|
||||
|
||||
# Now uninstall the compiler
|
||||
uninstall("-y", "gcc@10.2.0")
|
||||
|
||||
monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
|
||||
spack.config.set("config:install_missing_compilers", True)
|
||||
assert CompilerSpec("gcc@=10.2.0") not in compilers.all_compiler_specs()
|
||||
|
||||
# Configure the mirror where we put that buildcache w/ the compiler
|
||||
mirror("add", "test-mirror", mirror_url)
|
||||
|
||||
# Now make sure that when the compiler is installed from binary mirror,
|
||||
# it also gets configured as a compiler. Test succeeds if it does not
|
||||
# raise an error
|
||||
install("--no-check-signature", "--cache-only", "--only", "dependencies", "pkg-b%gcc@=10.2.0")
|
||||
install("--no-cache", "--only", "package", "pkg-b%gcc@10.2.0")
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
|
||||
@pytest.mark.regression("16221")
|
||||
def test_compiler_bootstrap_already_installed(
|
||||
install_mockery, mock_packages, mock_fetch, mock_archive, mutable_config, monkeypatch
|
||||
):
|
||||
monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
|
||||
spack.config.set("config:install_missing_compilers", True)
|
||||
|
||||
assert CompilerSpec("gcc@=12.0") not in compilers.all_compiler_specs()
|
||||
|
||||
# Test succeeds if it does not raise an error
|
||||
install("gcc@=12.0")
|
||||
install("pkg-a%gcc@=12.0")
|
||||
|
||||
|
||||
def test_install_fails_no_args(tmpdir):
|
||||
# ensure no spack.yaml in directory
|
||||
with tmpdir.as_cwd():
|
||||
|
@@ -2,10 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import shutil
import os

import spack.store
from spack.database import Database
from spack.main import SpackCommand

install = SpackCommand("install")
@@ -24,55 +23,31 @@ def test_reindex_basic(mock_packages, mock_archive, mock_fetch, install_mockery)
assert spack.store.STORE.db.query() == all_installed


def _clear_db(tmp_path):
empty_db = Database(str(tmp_path))
with empty_db.write_transaction():
pass
shutil.rmtree(spack.store.STORE.db.database_directory)
shutil.copytree(empty_db.database_directory, spack.store.STORE.db.database_directory)
# force a re-read of the database
assert len(spack.store.STORE.db.query()) == 0
def test_reindex_db_deleted(mock_packages, mock_archive, mock_fetch, install_mockery, tmp_path):
|
||||
def test_reindex_db_deleted(mock_packages, mock_archive, mock_fetch, install_mockery):
|
||||
install("libelf@0.8.13")
|
||||
install("libelf@0.8.12")
|
||||
|
||||
all_installed = spack.store.STORE.db.query()
|
||||
|
||||
_clear_db(tmp_path)
|
||||
|
||||
os.remove(spack.store.STORE.db._index_path)
|
||||
reindex()
|
||||
|
||||
assert spack.store.STORE.db.query() == all_installed
|
||||
|
||||
|
||||
def test_reindex_with_deprecated_packages(
|
||||
mock_packages, mock_archive, mock_fetch, install_mockery, tmp_path
|
||||
mock_packages, mock_archive, mock_fetch, install_mockery
|
||||
):
|
||||
install("libelf@0.8.13")
|
||||
install("libelf@0.8.12")
|
||||
|
||||
deprecate("-y", "libelf@0.8.12", "libelf@0.8.13")
|
||||
|
||||
db = spack.store.STORE.db
|
||||
|
||||
all_installed = db.query(installed=any)
|
||||
non_deprecated = db.query(installed=True)
|
||||
|
||||
_clear_db(tmp_path)
|
||||
all_installed = spack.store.STORE.db.query(installed=any)
|
||||
non_deprecated = spack.store.STORE.db.query(installed=True)
|
||||
|
||||
os.remove(spack.store.STORE.db._index_path)
|
||||
reindex()
|
||||
|
||||
assert db.query(installed=any) == all_installed
|
||||
assert db.query(installed=True) == non_deprecated
|
||||
|
||||
old_libelf = db.query_local_by_spec_hash(
|
||||
db.query_local("libelf@0.8.12", installed=any)[0].dag_hash()
|
||||
)
|
||||
new_libelf = db.query_local_by_spec_hash(
|
||||
db.query_local("libelf@0.8.13", installed=True)[0].dag_hash()
|
||||
)
|
||||
assert old_libelf.deprecated_for == new_libelf.spec.dag_hash()
|
||||
assert new_libelf.deprecated_for is None
|
||||
assert new_libelf.ref_count == 1
|
||||
assert spack.store.STORE.db.query(installed=any) == all_installed
|
||||
assert spack.store.STORE.db.query(installed=True) == non_deprecated
|
||||
|
@@ -50,6 +50,7 @@ def fake_stage(pkg, mirror_only=False):
|
||||
return expected_path
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("PermissionError")
|
||||
def test_stage_path(check_stage_path):
|
||||
"""Verify that --path only works with single specs."""
|
||||
stage("--path={0}".format(check_stage_path), "trivial-install-test-package")
|
||||
|
@@ -2,6 +2,10 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.environment as ev
|
||||
import spack.spec
|
||||
from spack.main import SpackCommand
|
||||
@@ -10,6 +14,8 @@
|
||||
env = SpackCommand("env")
|
||||
concretize = SpackCommand("concretize")
|
||||
|
||||
pytestmark = pytest.mark.not_on_windows("does not run on windows")
|
||||
|
||||
|
||||
def test_undevelop(tmpdir, mutable_config, mock_packages, mutable_mock_env_path):
|
||||
# setup environment
|
||||
|
@@ -205,6 +205,7 @@ def _warn(*args, **kwargs):
|
||||
|
||||
# Note: I want to use https://docs.pytest.org/en/7.1.x/how-to/skipping.html#skip-all-test-functions-of-a-class-or-module
|
||||
# the style formatter insists on separating these two lines.
|
||||
@pytest.mark.not_on_windows("Envs unsupported on Windows")
|
||||
class TestUninstallFromEnv:
|
||||
"""Tests an installation with two environments e1 and e2, which each have
|
||||
shared package installations:
|
||||
|
@@ -13,7 +13,6 @@
|
||||
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.compiler
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
@@ -401,6 +400,14 @@ def test_spec_flags_maintain_order(self, mutable_config, gcc11_with_flags):
|
||||
s.compiler_flags[x] == ["-O0", "-g"] for x in ("cflags", "cxxflags", "fflags")
|
||||
)
|
||||
|
||||
@pytest.mark.xfail(reason="Broken, needs to be fixed")
|
||||
def test_compiler_flags_from_compiler_and_dependent(self):
|
||||
client = Spec("cmake-client %clang@12.2.0 platform=test os=fe target=fe cflags==-g")
|
||||
client.concretize()
|
||||
cmake = client["cmake"]
|
||||
for spec in [client, cmake]:
|
||||
assert spec.compiler_flags["cflags"] == ["-O3", "-g"]
|
||||
|
||||
def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12_with_flags):
|
||||
mutable_config.set("compilers", [clang12_with_flags])
|
||||
# Correct arch to use test compiler that has flags
|
||||
@@ -434,13 +441,6 @@ def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12
|
||||
["hypre cflags='-g'", "^openblas cflags='-O3'"],
|
||||
["^openblas cflags='-g'"],
|
||||
),
|
||||
# Setting propagation on parent and dependency -> the
|
||||
# dependency propagation flags override
|
||||
(
|
||||
"hypre cflags=='-g' ^openblas cflags=='-O3'",
|
||||
["hypre cflags='-g'", "^openblas cflags='-O3'"],
|
||||
["^openblas cflags='-g'"],
|
||||
),
|
||||
# Propagation doesn't go across build dependencies
|
||||
(
|
||||
"cmake-client cflags=='-O2 -g'",
|
||||
@@ -648,6 +648,20 @@ def test_external_package(self):
|
||||
assert "externalprereq" not in spec
|
||||
assert spec["externaltool"].compiler.satisfies("gcc")
|
||||
|
||||
def test_external_package_module(self):
|
||||
# No tcl modules on darwin/linux machines
|
||||
# and Windows does not (currently) allow for bash calls
|
||||
# TODO: improved way to check for this.
|
||||
platform = spack.platforms.real_host().name
|
||||
if platform == "darwin" or platform == "linux" or platform == "windows":
|
||||
return
|
||||
|
||||
spec = Spec("externalmodule")
|
||||
spec.concretize()
|
||||
assert spec["externalmodule"].external_modules == ["external-module"]
|
||||
assert "externalprereq" not in spec
|
||||
assert spec["externalmodule"].compiler.satisfies("gcc")
|
||||
|
||||
def test_nobuild_package(self):
|
||||
"""Test that a non-buildable package raise an error if no specs
|
||||
in packages.yaml are compatible with the request.
|
||||
@@ -761,15 +775,15 @@ def test_regression_issue_7239(self):
|
||||
s = Spec("mpileaks")
|
||||
s.concretize()
|
||||
|
||||
assert llnl.util.lang.ObjectWrapper not in s.__class__.__mro__
|
||||
assert llnl.util.lang.ObjectWrapper not in type(s).__mro__
|
||||
|
||||
# Spec wrapped in a build interface
|
||||
build_interface = s["mpileaks"]
|
||||
assert llnl.util.lang.ObjectWrapper in build_interface.__class__.__mro__
|
||||
assert llnl.util.lang.ObjectWrapper in type(build_interface).__mro__
|
||||
|
||||
# Mimics asking the build interface from a build interface
|
||||
build_interface = s["mpileaks"]["mpileaks"]
|
||||
assert llnl.util.lang.ObjectWrapper in build_interface.__class__.__mro__
|
||||
assert llnl.util.lang.ObjectWrapper in type(build_interface).__mro__
|
||||
|
||||
@pytest.mark.regression("7705")
|
||||
def test_regression_issue_7705(self):
|
||||
@@ -1287,7 +1301,7 @@ def mock_fn(*args, **kwargs):
|
||||
return [first_spec]
|
||||
|
||||
if mock_db:
|
||||
temporary_store.db.add(first_spec)
|
||||
temporary_store.db.add(first_spec, None)
|
||||
else:
|
||||
monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", mock_fn)
|
||||
|
||||
@@ -1352,7 +1366,7 @@ def test_no_reuse_when_variant_condition_does_not_hold(self, mutable_database, m
|
||||
def test_reuse_with_flags(self, mutable_database, mutable_config):
|
||||
spack.config.set("concretizer:reuse", True)
|
||||
spec = Spec("pkg-a cflags=-g cxxflags=-g").concretized()
|
||||
spec.package.do_install(fake=True)
|
||||
spack.store.STORE.db.add(spec, None)
|
||||
|
||||
testspec = Spec("pkg-a cflags=-g")
|
||||
testspec.concretize()
|
||||
@@ -2095,13 +2109,11 @@ def test_external_python_extension_find_dependency_from_detection(self, monkeypa
|
||||
"""Test that python extensions have access to a python dependency
|
||||
|
||||
when python isn't otherwise in the DAG"""
|
||||
python_spec = Spec("python@=detected")
|
||||
prefix = os.path.sep + "fake"
|
||||
python_spec = Spec.from_detection("python@=detected", external_path=prefix)
|
||||
|
||||
def find_fake_python(classes, path_hints):
|
||||
return {
|
||||
"python": [Spec.from_detection("python@=detected", external_path=path_hints[0])]
|
||||
}
|
||||
return {"python": [spack.detection.DetectedPackage(python_spec, prefix=path_hints[0])]}
|
||||
|
||||
monkeypatch.setattr(spack.detection, "by_path", find_fake_python)
|
||||
external_conf = {
|
||||
@@ -2116,8 +2128,7 @@ def find_fake_python(classes, path_hints):
|
||||
|
||||
assert "python" in spec["py-extension1"]
|
||||
assert spec["python"].prefix == prefix
|
||||
assert spec["python"].external
|
||||
assert spec["python"].satisfies(python_spec)
|
||||
assert spec["python"] == python_spec
|
||||
|
||||
def test_external_python_extension_find_unified_python(self):
|
||||
"""Test that python extensions use the same python as other specs in unified env"""
|
||||
@@ -2375,6 +2386,26 @@ def test_externals_with_platform_explicitly_set(self, tmp_path):
s = Spec("mpich").concretized()
assert s.external

@pytest.mark.regression("43875")
def test_concretize_missing_compiler(self, mutable_config, monkeypatch):
"""Tests that Spack can concretize a spec with a missing compiler when the
option is active.
"""

def _default_libc(self):
if self.cc is None:
return None
return Spec("glibc@=2.28")

monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
monkeypatch.setattr(spack.compiler.Compiler, "default_libc", property(_default_libc))
monkeypatch.setattr(
spack.util.libc, "libc_from_current_python_process", lambda: Spec("glibc@=2.28")
)
mutable_config.set("config:install_missing_compilers", True)
s = Spec("pkg-a %gcc@=13.2.0").concretized()
assert s.satisfies("%gcc@13.2.0")
@pytest.mark.regression("43267")
|
||||
def test_spec_with_build_dep_from_json(self, tmp_path):
|
||||
"""Tests that we can correctly concretize a spec, when we express its dependency as a
|
||||
@@ -2928,7 +2959,7 @@ def test_concretization_version_order():
|
||||
result = [
|
||||
v
|
||||
for v, _ in sorted(
|
||||
versions, key=spack.solver.asp.concretization_version_order, reverse=True
|
||||
versions, key=spack.solver.asp._concretization_version_order, reverse=True
|
||||
)
|
||||
]
|
||||
assert result == [
|
||||
|
@@ -8,6 +8,8 @@
|
||||
import spack.solver.asp
|
||||
import spack.spec
|
||||
|
||||
pytestmark = [pytest.mark.not_on_windows("Windows uses old concretizer")]
|
||||
|
||||
version_error_messages = [
|
||||
"Cannot satisfy 'fftw@:1.0' and 'fftw@1.1:",
|
||||
" required because quantum-espresso depends on fftw@:1.0",
|
||||
|
@@ -19,6 +19,8 @@
|
||||
from spack.test.conftest import create_test_repo
|
||||
from spack.util.url import path_to_file_url
|
||||
|
||||
pytestmark = [pytest.mark.not_on_windows("Windows uses old concretizer")]
|
||||
|
||||
|
||||
def update_packages_config(conf_str):
|
||||
conf = syaml.load_config(conf_str)
|
||||
|
@@ -27,6 +27,7 @@ def test_listing_possible_os():
|
||||
assert expected_os in output
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("test unsupported on Windows")
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.requires_executables("git")
|
||||
def test_bootstrap_phase(minimal_configuration, config_dumper, capsys):
|
||||
|
@@ -7,7 +7,6 @@
import functools
import json
import os
import re
import shutil
import sys

@@ -41,21 +40,20 @@
@pytest.fixture()
def upstream_and_downstream_db(tmpdir, gen_mock_layout):
mock_db_root = str(tmpdir.mkdir("mock_db_root"))
upstream_layout = gen_mock_layout("/a/")
upstream_write_db = spack.database.Database(mock_db_root, layout=upstream_layout)
upstream_db = spack.database.Database(mock_db_root, is_upstream=True, layout=upstream_layout)
upstream_write_db = spack.database.Database(mock_db_root)
upstream_db = spack.database.Database(mock_db_root, is_upstream=True)
# Generate initial DB file to avoid reindex
with open(upstream_write_db._index_path, "w") as db_file:
upstream_write_db._write_to_file(db_file)
upstream_layout = gen_mock_layout("/a/")

downstream_db_root = str(tmpdir.mkdir("mock_downstream_db_root"))
downstream_db = spack.database.Database(
downstream_db_root, upstream_dbs=[upstream_db], layout=gen_mock_layout("/b/")
)
downstream_db = spack.database.Database(downstream_db_root, upstream_dbs=[upstream_db])
with open(downstream_db._index_path, "w") as db_file:
downstream_db._write_to_file(db_file)
downstream_layout = gen_mock_layout("/b/")

yield upstream_write_db, upstream_db, downstream_db
yield upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout

@pytest.mark.parametrize(
@@ -71,14 +69,14 @@ def upstream_and_downstream_db(tmpdir, gen_mock_layout):
|
||||
def test_query_by_install_tree(
|
||||
install_tree, result, upstream_and_downstream_db, mock_packages, monkeypatch, config
|
||||
):
|
||||
up_write_db, up_db, down_db = upstream_and_downstream_db
|
||||
up_write_db, up_db, up_layout, down_db, down_layout = upstream_and_downstream_db
|
||||
|
||||
# Set the upstream DB to contain "pkg-c" and downstream to contain "pkg-b")
|
||||
b = spack.spec.Spec("pkg-b").concretized()
|
||||
c = spack.spec.Spec("pkg-c").concretized()
|
||||
up_write_db.add(c)
|
||||
up_write_db.add(c, up_layout)
|
||||
up_db._read()
|
||||
down_db.add(b)
|
||||
down_db.add(b, down_layout)
|
||||
|
||||
specs = down_db.query(install_tree=install_tree.format(u=up_db.root, d=down_db.root))
|
||||
assert [s.name for s in specs] == result
|
||||
@@ -88,7 +86,9 @@ def test_spec_installed_upstream(
|
||||
upstream_and_downstream_db, mock_custom_repository, config, monkeypatch
|
||||
):
|
||||
"""Test whether Spec.installed_upstream() works."""
|
||||
upstream_write_db, upstream_db, downstream_db = upstream_and_downstream_db
|
||||
upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout = (
|
||||
upstream_and_downstream_db
|
||||
)
|
||||
|
||||
# a known installed spec should say that it's installed
|
||||
with spack.repo.use_repositories(mock_custom_repository):
|
||||
@@ -96,7 +96,7 @@ def test_spec_installed_upstream(
|
||||
assert not spec.installed
|
||||
assert not spec.installed_upstream
|
||||
|
||||
upstream_write_db.add(spec)
|
||||
upstream_write_db.add(spec, upstream_layout)
|
||||
upstream_db._read()
|
||||
|
||||
monkeypatch.setattr(spack.store.STORE, "db", downstream_db)
|
||||
@@ -112,7 +112,9 @@ def test_spec_installed_upstream(
|
||||
|
||||
@pytest.mark.usefixtures("config")
|
||||
def test_installed_upstream(upstream_and_downstream_db, tmpdir):
|
||||
upstream_write_db, upstream_db, downstream_db = upstream_and_downstream_db
|
||||
upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout = (
|
||||
upstream_and_downstream_db
|
||||
)
|
||||
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
|
||||
builder.add_package("x")
|
||||
@@ -123,7 +125,7 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):
|
||||
with spack.repo.use_repositories(builder.root):
|
||||
spec = spack.spec.Spec("w").concretized()
|
||||
for dep in spec.traverse(root=False):
|
||||
upstream_write_db.add(dep)
|
||||
upstream_write_db.add(dep, upstream_layout)
|
||||
upstream_db._read()
|
||||
|
||||
for dep in spec.traverse(root=False):
|
||||
@@ -133,11 +135,11 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):
|
||||
upstream_db.get_by_hash(dep.dag_hash())
|
||||
|
||||
new_spec = spack.spec.Spec("w").concretized()
|
||||
downstream_db.add(new_spec)
|
||||
downstream_db.add(new_spec, downstream_layout)
|
||||
for dep in new_spec.traverse(root=False):
|
||||
upstream, record = downstream_db.query_by_spec_hash(dep.dag_hash())
|
||||
assert upstream
|
||||
assert record.path == upstream_db.layout.path_for_spec(dep)
|
||||
assert record.path == upstream_layout.path_for_spec(dep)
|
||||
upstream, record = downstream_db.query_by_spec_hash(new_spec.dag_hash())
|
||||
assert not upstream
|
||||
assert record.installed
|
||||
@@ -146,32 +148,32 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):
|
||||
downstream_db._check_ref_counts()
|
||||
|
||||
|
||||
def test_removed_upstream_dep(upstream_and_downstream_db, tmpdir, capsys, config):
|
||||
upstream_write_db, upstream_db, downstream_db = upstream_and_downstream_db
|
||||
@pytest.mark.usefixtures("config")
|
||||
def test_removed_upstream_dep(upstream_and_downstream_db, tmpdir):
|
||||
upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout = (
|
||||
upstream_and_downstream_db
|
||||
)
|
||||
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
|
||||
builder.add_package("z")
|
||||
builder.add_package("y", dependencies=[("z", None, None)])
|
||||
|
||||
with spack.repo.use_repositories(builder):
|
||||
y = spack.spec.Spec("y").concretized()
|
||||
z = y["z"]
|
||||
spec = spack.spec.Spec("y").concretized()
|
||||
|
||||
# add dependency to upstream, dependents to downstream
|
||||
upstream_write_db.add(z)
|
||||
upstream_db._read()
|
||||
downstream_db.add(y)
|
||||
|
||||
# remove the dependency from the upstream DB
|
||||
upstream_write_db.remove(z)
|
||||
upstream_write_db.add(spec["z"], upstream_layout)
|
||||
upstream_db._read()
|
||||
|
||||
# then rereading the downstream DB should warn about the missing dep
|
||||
downstream_db._read_from_file(downstream_db._index_path)
|
||||
assert (
|
||||
f"Missing dependency not in database: y/{y.dag_hash(7)} needs z"
|
||||
in capsys.readouterr().err
|
||||
)
|
||||
new_spec = spack.spec.Spec("y").concretized()
|
||||
downstream_db.add(new_spec, downstream_layout)
|
||||
|
||||
upstream_write_db.remove(new_spec["z"])
|
||||
upstream_db._read()
|
||||
|
||||
new_downstream = spack.database.Database(downstream_db.root, upstream_dbs=[upstream_db])
|
||||
new_downstream._fail_when_missing_deps = True
|
||||
with pytest.raises(spack.database.MissingDependenciesError):
|
||||
new_downstream._read()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("config")
|
||||
@@ -180,7 +182,9 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
|
||||
DB. When a package is recorded as installed in both, the results should
|
||||
refer to the downstream DB.
|
||||
"""
|
||||
upstream_write_db, upstream_db, downstream_db = upstream_and_downstream_db
|
||||
upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout = (
|
||||
upstream_and_downstream_db
|
||||
)
|
||||
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
|
||||
builder.add_package("x")
|
||||
@@ -188,8 +192,8 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
|
||||
with spack.repo.use_repositories(builder.root):
|
||||
spec = spack.spec.Spec("x").concretized()
|
||||
|
||||
downstream_db.add(spec)
|
||||
upstream_write_db.add(spec)
|
||||
downstream_db.add(spec, downstream_layout)
|
||||
upstream_write_db.add(spec, upstream_layout)
|
||||
upstream_db._read()
|
||||
|
||||
upstream, record = downstream_db.query_by_spec_hash(spec.dag_hash())
|
||||
@@ -203,22 +207,33 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
|
||||
try:
|
||||
orig_db = spack.store.STORE.db
|
||||
spack.store.STORE.db = downstream_db
|
||||
assert queried_spec.prefix == downstream_db.layout.path_for_spec(spec)
|
||||
assert queried_spec.prefix == downstream_layout.path_for_spec(spec)
|
||||
finally:
|
||||
spack.store.STORE.db = orig_db
|
||||
|
||||
|
||||
def test_cannot_write_upstream(tmp_path, mock_packages, config):
|
||||
@pytest.mark.usefixtures("config", "temporary_store")
|
||||
def test_cannot_write_upstream(tmpdir, gen_mock_layout):
|
||||
roots = [str(tmpdir.mkdir(x)) for x in ["a", "b"]]
|
||||
layouts = [gen_mock_layout(x) for x in ["/ra/", "/rb/"]]
|
||||
|
||||
builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
|
||||
builder.add_package("x")
|
||||
|
||||
# Instantiate the database that will be used as the upstream DB and make
|
||||
# sure it has an index file
|
||||
with spack.database.Database(str(tmp_path)).write_transaction():
|
||||
upstream_db_independent = spack.database.Database(roots[1])
|
||||
with upstream_db_independent.write_transaction():
|
||||
pass
|
||||
|
||||
# Create it as an upstream
|
||||
db = spack.database.Database(str(tmp_path), is_upstream=True)
|
||||
upstream_dbs = spack.store._construct_upstream_dbs_from_install_roots([roots[1]], _test=True)
|
||||
|
||||
with pytest.raises(spack.database.ForbiddenLockError):
|
||||
db.add(spack.spec.Spec("pkg-a").concretized())
|
||||
with spack.repo.use_repositories(builder.root):
|
||||
spec = spack.spec.Spec("x")
|
||||
spec.concretize()
|
||||
|
||||
with pytest.raises(spack.database.ForbiddenLockError):
|
||||
upstream_dbs[0].add(spec, layouts[1])
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("config", "temporary_store")
|
||||
@@ -233,17 +248,17 @@ def test_recursive_upstream_dbs(tmpdir, gen_mock_layout):
|
||||
|
||||
with spack.repo.use_repositories(builder.root):
|
||||
spec = spack.spec.Spec("x").concretized()
|
||||
db_c = spack.database.Database(roots[2], layout=layouts[2])
|
||||
db_c.add(spec["z"])
|
||||
db_c = spack.database.Database(roots[2])
|
||||
db_c.add(spec["z"], layouts[2])
|
||||
|
||||
db_b = spack.database.Database(roots[1], upstream_dbs=[db_c], layout=layouts[1])
|
||||
db_b.add(spec["y"])
|
||||
db_b = spack.database.Database(roots[1], upstream_dbs=[db_c])
|
||||
db_b.add(spec["y"], layouts[1])
|
||||
|
||||
db_a = spack.database.Database(roots[0], upstream_dbs=[db_b, db_c], layout=layouts[0])
|
||||
db_a.add(spec["x"])
|
||||
db_a = spack.database.Database(roots[0], upstream_dbs=[db_b, db_c])
|
||||
db_a.add(spec["x"], layouts[0])
|
||||
|
||||
upstream_dbs_from_scratch = spack.store._construct_upstream_dbs_from_install_roots(
|
||||
[roots[1], roots[2]]
|
||||
[roots[1], roots[2]], _test=True
|
||||
)
|
||||
db_a_from_scratch = spack.database.Database(
|
||||
roots[0], upstream_dbs=upstream_dbs_from_scratch
|
||||
@@ -351,7 +366,7 @@ def _check_db_sanity(database):
|
||||
_check_merkleiness()
|
||||
|
||||
|
||||
def _check_remove_and_add_package(database: spack.database.Database, spec):
|
||||
def _check_remove_and_add_package(database, spec):
|
||||
"""Remove a spec from the DB, then add it and make sure everything's
|
||||
still ok once it is added. This checks that it was
|
||||
removed, that it's back when added again, and that ref
|
||||
@@ -371,7 +386,7 @@ def _check_remove_and_add_package(database: spack.database.Database, spec):
|
||||
assert concrete_spec not in remaining
|
||||
|
||||
# add it back and make sure everything is ok.
|
||||
database.add(concrete_spec)
|
||||
database.add(concrete_spec, spack.store.STORE.layout)
|
||||
installed = database.query()
|
||||
assert concrete_spec in installed
|
||||
assert installed == original
|
||||
@@ -381,7 +396,7 @@ def _check_remove_and_add_package(database: spack.database.Database, spec):
|
||||
database._check_ref_counts()
|
||||
|
||||
|
||||
def _mock_install(spec: str):
|
||||
def _mock_install(spec):
|
||||
s = spack.spec.Spec(spec).concretized()
|
||||
s.package.do_install(fake=True)
|
||||
|
||||
@@ -621,7 +636,7 @@ def test_080_root_ref_counts(mutable_database):
|
||||
assert mutable_database.get_record("mpich").ref_count == 1
|
||||
|
||||
# Put the spec back
|
||||
mutable_database.add(rec.spec)
|
||||
mutable_database.add(rec.spec, spack.store.STORE.layout)
|
||||
|
||||
# record is present again
|
||||
assert len(mutable_database.query("mpileaks ^mpich", installed=any)) == 1
|
||||
@@ -983,12 +998,9 @@ def test_reindex_removed_prefix_is_not_installed(mutable_database, mock_store, c
|
||||
# Reindex should pick up libelf as a dependency of libdwarf
|
||||
spack.store.STORE.reindex()
|
||||
|
||||
# Reindexing should warn about libelf not found on the filesystem
|
||||
assert re.search(
|
||||
"libelf@0.8.13.+ was marked installed in the database "
|
||||
"but was not found on the file system",
|
||||
capfd.readouterr().err,
|
||||
)
|
||||
# Reindexing should warn about libelf not being found on the filesystem
|
||||
err = capfd.readouterr()[1]
|
||||
assert "this directory does not contain an installation of the spec" in err
|
||||
|
||||
# And we should still have libelf in the database, but not installed.
|
||||
assert not mutable_database.query_one("libelf", installed=True)
|
||||
@@ -1105,9 +1117,9 @@ def test_database_construction_doesnt_use_globals(tmpdir, config, nullify_global
|
||||
def test_database_read_works_with_trailing_data(tmp_path, default_mock_concretization):
|
||||
# Populate a database
|
||||
root = str(tmp_path)
|
||||
db = spack.database.Database(root, layout=None)
|
||||
db = spack.database.Database(root)
|
||||
spec = default_mock_concretization("pkg-a")
|
||||
db.add(spec)
|
||||
db.add(spec, directory_layout=None)
|
||||
specs_in_db = db.query_local()
|
||||
assert spec in specs_in_db
|
||||
|
||||
@@ -1128,53 +1140,3 @@ def test_database_errors_with_just_a_version_key(tmp_path):
|
||||
|
||||
with pytest.raises(spack.database.InvalidDatabaseVersionError):
|
||||
spack.database.Database(root).query_local()
|
||||
|
||||
|
||||
def test_reindex_with_upstreams(tmp_path, monkeypatch, mock_packages, config):
|
||||
# Reindexing should not put install records of upstream entries into the local database. Here
|
||||
# we install `mpileaks` locally with dependencies in the upstream. And we even install
|
||||
# `mpileaks` with the same hash in the upstream. After reindexing, `mpileaks` should still be
|
||||
# in the local db, and `callpath` should not.
|
||||
mpileaks = spack.spec.Spec("mpileaks").concretized()
|
||||
callpath = mpileaks.dependencies("callpath")[0]
|
||||
|
||||
upstream_store = spack.store.create(
|
||||
{"config": {"install_tree": {"root": str(tmp_path / "upstream")}}}
|
||||
)
|
||||
monkeypatch.setattr(spack.store, "STORE", upstream_store)
|
||||
callpath.package.do_install(fake=True)
|
||||
|
||||
local_store = spack.store.create(
|
||||
{
|
||||
"config": {"install_tree": {"root": str(tmp_path / "local")}},
|
||||
"upstreams": {"my-upstream": {"install_tree": str(tmp_path / "upstream")}},
|
||||
}
|
||||
)
|
||||
monkeypatch.setattr(spack.store, "STORE", local_store)
|
||||
mpileaks.package.do_install(fake=True)
|
||||
|
||||
# Sanity check that callpath is from upstream.
|
||||
assert not local_store.db.query_local("callpath")
|
||||
assert local_store.db.query("callpath")
|
||||
|
||||
# Install mpileaks also upstream with the same hash to ensure that determining upstreamness
|
||||
# checks local installs before upstream databases, even when the local database is being
|
||||
# reindexed.
|
||||
monkeypatch.setattr(spack.store, "STORE", upstream_store)
|
||||
mpileaks.package.do_install(fake=True)
|
||||
|
||||
# Delete the local database
|
||||
shutil.rmtree(local_store.db.database_directory)
|
||||
|
||||
# Create a new instance s.t. we don't have cached specs in memory
|
||||
reindexed_local_store = spack.store.create(
|
||||
{
|
||||
"config": {"install_tree": {"root": str(tmp_path / "local")}},
|
||||
"upstreams": {"my-upstream": {"install_tree": str(tmp_path / "upstream")}},
|
||||
}
|
||||
)
|
||||
reindexed_local_store.db.reindex()
|
||||
|
||||
assert not reindexed_local_store.db.query_local("callpath")
|
||||
assert reindexed_local_store.db.query("callpath") == [callpath]
|
||||
assert reindexed_local_store.db.query_local("mpileaks") == [mpileaks]
|
||||
|
@@ -11,7 +11,11 @@
def test_detection_update_config(mutable_config):
# mock detected package
detected_packages = collections.defaultdict(list)
detected_packages["cmake"] = [spack.spec.Spec("cmake@3.27.5", external_path="/usr/bin")]
detected_packages["cmake"] = [
spack.detection.common.DetectedPackage(
spec=spack.spec.Spec("cmake@3.27.5"), prefix="/usr/bin"
)
]

# update config for new package
spack.detection.common.update_configuration(detected_packages)
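The hunk above shows the `DetectedPackage`-based entry format these tests now build. Below is a minimal sketch of that calling pattern, kept deliberately close to the lines in this diff; the import layout and the behaviour of `update_configuration` outside this test fixture are assumptions, not details verified against the Spack sources.

# Sketch only: restates the DetectedPackage/update_configuration usage from the
# hunk above; anything beyond what the diff shows is assumed.
import collections

import spack.detection.common
import spack.spec

detected_packages = collections.defaultdict(list)
detected_packages["cmake"] = [
    spack.detection.common.DetectedPackage(
        spec=spack.spec.Spec("cmake@3.27.5"), prefix="/usr/bin"
    )
]
# Record the detected external under the "cmake" entry of packages.yaml.
spack.detection.common.update_configuration(detected_packages)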
@@ -860,33 +860,3 @@ def test_env_view_on_non_empty_dir_errors(tmp_path, config, mock_packages, tempo
|
||||
env.install_all(fake=True)
|
||||
with pytest.raises(ev.SpackEnvironmentError, match="because it is a non-empty dir"):
|
||||
env.regenerate_views()
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"matrix_line", [("^zmpi", "^mpich"), ("~shared", "+shared"), ("shared=False", "+shared-libs")]
|
||||
)
|
||||
@pytest.mark.regression("40791")
|
||||
def test_stack_enforcement_is_strict(tmp_path, matrix_line, config, mock_packages):
|
||||
"""Ensure that constraints in matrices are applied strictly after expansion, to avoid
|
||||
inconsistencies between abstract user specs and concrete specs.
|
||||
"""
|
||||
manifest = tmp_path / "spack.yaml"
|
||||
manifest.write_text(
|
||||
f"""\
|
||||
spack:
|
||||
definitions:
|
||||
- packages: [libelf, mpileaks]
|
||||
- install:
|
||||
- matrix:
|
||||
- [$packages]
|
||||
- [{", ".join(item for item in matrix_line)}]
|
||||
specs:
|
||||
- $install
|
||||
concretizer:
|
||||
unify: false
|
||||
"""
|
||||
)
|
||||
# Here we raise different exceptions depending on whether we solve serially or not
|
||||
with pytest.raises(Exception):
|
||||
with ev.Environment(tmp_path) as e:
|
||||
e.concretize()
|
||||
|
@@ -1,333 +0,0 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import pytest
|
||||
|
||||
import spack.build_systems.generic
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.package_base
|
||||
import spack.repo
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.version
|
||||
from spack.spec import Spec
|
||||
from spack.test.conftest import create_test_repo
|
||||
|
||||
"""
|
||||
These tests include the following package DAGs:
|
||||
|
||||
Firstly, w, x, y where w and x apply cflags to y.
|
||||
|
||||
w
|
||||
|\
|
||||
x |
|
||||
|/
|
||||
y
|
||||
|
||||
Secondly, v, y which where v does not apply cflags to y - this is for testing
|
||||
mixing with compiler flag propagation in the absence of compiler flags applied
|
||||
by dependents.
|
||||
|
||||
v
|
||||
|
|
||||
y
|
||||
|
||||
Finally, a diamond dag to check that the topological order is resolved into
|
||||
a total order:
|
||||
|
||||
t
|
||||
|\
|
||||
u x
|
||||
|/
|
||||
y
|
||||
"""
|
||||
|
||||
_pkgx = (
|
||||
"x",
|
||||
"""\
|
||||
class X(Package):
|
||||
version("1.1")
|
||||
version("1.0")
|
||||
|
||||
variant("activatemultiflag", default=False)
|
||||
depends_on('y cflags="-d1"', when="~activatemultiflag")
|
||||
depends_on('y cflags="-d1 -d2"', when="+activatemultiflag")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgy = (
|
||||
"y",
|
||||
"""\
|
||||
class Y(Package):
|
||||
version("2.1")
|
||||
version("2.0")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgw = (
|
||||
"w",
|
||||
"""\
|
||||
class W(Package):
|
||||
version("3.1")
|
||||
version("3.0")
|
||||
|
||||
variant("moveflaglater", default=False)
|
||||
|
||||
depends_on('x +activatemultiflag')
|
||||
depends_on('y cflags="-d0"', when="~moveflaglater")
|
||||
depends_on('y cflags="-d3"', when="+moveflaglater")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgv = (
|
||||
"v",
|
||||
"""\
|
||||
class V(Package):
|
||||
version("4.1")
|
||||
version("4.0")
|
||||
|
||||
depends_on("y")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgt = (
|
||||
"t",
|
||||
"""\
|
||||
class T(Package):
|
||||
version("5.0")
|
||||
|
||||
depends_on("u")
|
||||
depends_on("x+activatemultiflag")
|
||||
depends_on("y cflags='-c1 -c2'")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgu = (
|
||||
"u",
|
||||
"""\
|
||||
class U(Package):
|
||||
version("6.0")
|
||||
|
||||
depends_on("y cflags='-e1 -e2'")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _create_test_repo(tmpdir, mutable_config):
|
||||
yield create_test_repo(tmpdir, [_pkgt, _pkgu, _pkgv, _pkgw, _pkgx, _pkgy])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(_create_test_repo, monkeypatch, mock_stage):
|
||||
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
def update_concretize_scope(conf_str, section):
|
||||
conf = syaml.load_config(conf_str)
|
||||
spack.config.set(section, conf[section], scope="concretize")
|
||||
|
||||
|
||||
def test_mix_spec_and_requirements(concretize_scope, test_repo):
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
require: cflags="-c"
|
||||
"""
|
||||
update_concretize_scope(conf_str, "packages")
|
||||
|
||||
s1 = Spec('y cflags="-a"').concretized()
|
||||
assert s1.satisfies('cflags="-a -c"')
|
||||
|
||||
|
||||
def test_mix_spec_and_dependent(concretize_scope, test_repo):
|
||||
s1 = Spec('x ^y cflags="-a"').concretized()
|
||||
assert s1["y"].satisfies('cflags="-a -d1"')
|
||||
|
||||
|
||||
def _compiler_cfg_one_entry_with_cflags(cflags):
|
||||
return f"""\
|
||||
compilers::
|
||||
- compiler:
|
||||
spec: gcc@12.100.100
|
||||
paths:
|
||||
cc: /usr/bin/fake-gcc
|
||||
cxx: /usr/bin/fake-g++
|
||||
f77: null
|
||||
fc: null
|
||||
flags:
|
||||
cflags: {cflags}
|
||||
operating_system: debian6
|
||||
modules: []
|
||||
"""
|
||||
|
||||
|
||||
def test_mix_spec_and_compiler_cfg(concretize_scope, test_repo):
|
||||
conf_str = _compiler_cfg_one_entry_with_cflags("-Wall")
|
||||
update_concretize_scope(conf_str, "compilers")
|
||||
|
||||
s1 = Spec('y %gcc@12.100.100 cflags="-O2"').concretized()
|
||||
assert s1.satisfies('cflags="-Wall -O2"')
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"cmd_flags,req_flags,cmp_flags,dflags,expected_order",
|
||||
[
|
||||
("-a -b", "-c", None, False, "-c -a -b"),
|
||||
("-x7 -x4", "-x5 -x6", None, False, "-x5 -x6 -x7 -x4"),
|
||||
("-x7 -x4", "-x5 -x6", "-x3 -x8", False, "-x3 -x8 -x5 -x6 -x7 -x4"),
|
||||
("-x7 -x4", "-x5 -x6", "-x3 -x8", True, "-x3 -x8 -d1 -d2 -x5 -x6 -x7 -x4"),
|
||||
("-x7 -x4", None, "-x3 -x8", False, "-x3 -x8 -x7 -x4"),
|
||||
("-x7 -x4", None, "-x3 -x8", True, "-x3 -x8 -d1 -d2 -x7 -x4"),
|
||||
# The remaining test cover cases of intersection
|
||||
("-a -b", "-a -c", None, False, "-c -a -b"),
|
||||
("-a -b", None, "-a -c", False, "-c -a -b"),
|
||||
("-a -b", "-a -c", "-a -d", False, "-d -c -a -b"),
|
||||
("-a -d2 -d1", "-d2 -c", "-d1 -b", True, "-b -c -a -d2 -d1"),
|
||||
("-a", "-d0 -d2 -c", "-d1 -b", True, "-b -d1 -d0 -d2 -c -a"),
|
||||
],
|
||||
)
|
||||
def test_flag_order_and_grouping(
|
||||
concretize_scope, test_repo, cmd_flags, req_flags, cmp_flags, dflags, expected_order
|
||||
):
|
||||
"""Check consistent flag ordering and grouping on a package "y"
|
||||
with flags introduced from a variety of sources.
|
||||
|
||||
The ordering rules are explained in ``asp.SpecBuilder.reorder_flags``.
|
||||
"""
|
||||
if req_flags:
|
||||
conf_str = f"""\
|
||||
packages:
|
||||
y:
|
||||
require: cflags="{req_flags}"
|
||||
"""
|
||||
update_concretize_scope(conf_str, "packages")
|
||||
|
||||
if cmp_flags:
|
||||
conf_str = _compiler_cfg_one_entry_with_cflags(cmp_flags)
|
||||
update_concretize_scope(conf_str, "compilers")
|
||||
|
||||
compiler_spec = ""
|
||||
if cmp_flags:
|
||||
compiler_spec = "%gcc@12.100.100"
|
||||
|
||||
if dflags:
|
||||
spec_str = f"x+activatemultiflag {compiler_spec} ^y"
|
||||
expected_dflags = "-d1 -d2"
|
||||
else:
|
||||
spec_str = f"y {compiler_spec}"
|
||||
expected_dflags = None
|
||||
|
||||
if cmd_flags:
|
||||
spec_str += f' cflags="{cmd_flags}"'
|
||||
|
||||
root_spec = Spec(spec_str).concretized()
|
||||
spec = root_spec["y"]
|
||||
satisfy_flags = " ".join(x for x in [cmd_flags, req_flags, cmp_flags, expected_dflags] if x)
|
||||
assert spec.satisfies(f'cflags="{satisfy_flags}"')
|
||||
assert spec.compiler_flags["cflags"] == expected_order.split()
|
||||
|
||||
|
||||
def test_two_dependents_flag_mixing(concretize_scope, test_repo):
|
||||
root_spec1 = Spec("w~moveflaglater").concretized()
|
||||
spec1 = root_spec1["y"]
|
||||
assert spec1.compiler_flags["cflags"] == "-d0 -d1 -d2".split()
|
||||
|
||||
root_spec2 = Spec("w+moveflaglater").concretized()
|
||||
spec2 = root_spec2["y"]
|
||||
assert spec2.compiler_flags["cflags"] == "-d3 -d1 -d2".split()
|
||||
|
||||
|
||||
def test_propagate_and_compiler_cfg(concretize_scope, test_repo):
|
||||
conf_str = _compiler_cfg_one_entry_with_cflags("-f2")
|
||||
update_concretize_scope(conf_str, "compilers")
|
||||
|
||||
root_spec = Spec("v %gcc@12.100.100 cflags=='-f1'").concretized()
|
||||
assert root_spec["y"].satisfies("cflags='-f1 -f2'")
|
||||
|
||||
|
||||
# Note: setting flags on a dependency overrides propagation, which
|
||||
# is tested in test/concretize.py:test_compiler_flag_propagation
|
||||
|
||||
|
||||
def test_propagate_and_pkg_dep(concretize_scope, test_repo):
|
||||
root_spec1 = Spec("x ~activatemultiflag cflags=='-f1'").concretized()
|
||||
assert root_spec1["y"].satisfies("cflags='-f1 -d1'")
|
||||
|
||||
|
||||
def test_propagate_and_require(concretize_scope, test_repo):
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
require: cflags="-f2"
|
||||
"""
|
||||
update_concretize_scope(conf_str, "packages")
|
||||
|
||||
root_spec1 = Spec("v cflags=='-f1'").concretized()
|
||||
assert root_spec1["y"].satisfies("cflags='-f1 -f2'")
|
||||
|
||||
# Next, check that a requirement does not "undo" a request for
|
||||
# propagation from the command-line spec
|
||||
conf_str = """\
|
||||
packages:
|
||||
v:
|
||||
require: cflags="-f1"
|
||||
"""
|
||||
update_concretize_scope(conf_str, "packages")
|
||||
|
||||
root_spec2 = Spec("v cflags=='-f1'").concretized()
|
||||
assert root_spec2["y"].satisfies("cflags='-f1'")
|
||||
|
||||
# Note: requirements cannot enforce propagation: any attempt to do
|
||||
# so will generate a concretization error; this likely relates to
|
||||
# the note about #37180 in concretize.lp
|
||||
|
||||
|
||||
def test_dev_mix_flags(tmp_path, concretize_scope, mutable_mock_env_path, test_repo):
|
||||
src_dir = tmp_path / "x-src"
|
||||
|
||||
env_content = f"""\
|
||||
spack:
|
||||
specs:
|
||||
- y %gcc@12.100.100 cflags=='-fsanitize=address'
|
||||
develop:
|
||||
y:
|
||||
spec: y cflags=='-fsanitize=address'
|
||||
path: {src_dir}
|
||||
"""
|
||||
|
||||
conf_str = _compiler_cfg_one_entry_with_cflags("-f1")
|
||||
update_concretize_scope(conf_str, "compilers")
|
||||
|
||||
manifest_file = tmp_path / ev.manifest_name
|
||||
manifest_file.write_text(env_content)
|
||||
e = ev.create("test", manifest_file)
|
||||
with e:
|
||||
e.concretize()
|
||||
e.write()
|
||||
|
||||
(result,) = list(j for i, j in e.concretized_specs() if j.name == "y")
|
||||
|
||||
assert result["y"].satisfies("cflags='-fsanitize=address -f1'")
|
||||
|
||||
|
||||
def test_diamond_dep_flag_mixing(concretize_scope, test_repo):
|
||||
"""A diamond where each dependent applies flags to the bottom
|
||||
dependency. The goal is to ensure that the flag ordering is
|
||||
(a) topological and (b) repeatable for elements not subject to
|
||||
this partial ordering (i.e. the flags for the left and right
|
||||
nodes of the diamond always appear in the same order).
|
||||
`Spec.traverse` is responsible for handling both of these needs.
|
||||
"""
|
||||
root_spec1 = Spec("t").concretized()
|
||||
spec1 = root_spec1["y"]
|
||||
assert spec1.satisfies('cflags="-c1 -c2 -d1 -d2 -e1 -e2"')
|
||||
assert spec1.compiler_flags["cflags"] == "-c1 -c2 -e1 -e2 -d1 -d2".split()
|
@@ -11,8 +11,6 @@

import llnl.util.filesystem as fs

import spack.config
import spack.database
import spack.error
import spack.mirror
import spack.patch
@@ -257,8 +255,8 @@ def install_upstream(tmpdir_factory, gen_mock_layout, install_mockery):
installs are using the upstream installs).
"""
mock_db_root = str(tmpdir_factory.mktemp("mock_db_root"))
prepared_db = spack.database.Database(mock_db_root)
upstream_layout = gen_mock_layout("/a/")
prepared_db = spack.database.Database(mock_db_root, layout=upstream_layout)
spack.config.CONFIG.push_scope(
spack.config.InternalConfigScope(
name="install-upstream-fixture",
@@ -268,7 +266,8 @@ def install_upstream(tmpdir_factory, gen_mock_layout, install_mockery):

def _install_upstream(*specs):
for spec_str in specs:
prepared_db.add(Spec(spec_str).concretized())
s = spack.spec.Spec(spec_str).concretized()
prepared_db.add(s, upstream_layout)
downstream_root = str(tmpdir_factory.mktemp("mock_downstream_db_root"))
return downstream_root, upstream_layout
@@ -281,7 +280,7 @@ def test_installed_upstream_external(install_upstream, mock_fetch):
|
||||
"""
|
||||
store_root, _ = install_upstream("externaltool")
|
||||
with spack.store.use_store(store_root):
|
||||
dependent = Spec("externaltest")
|
||||
dependent = spack.spec.Spec("externaltest")
|
||||
dependent.concretize()
|
||||
|
||||
new_dependency = dependent["externaltool"]
|
||||
@@ -300,8 +299,8 @@ def test_installed_upstream(install_upstream, mock_fetch):
|
||||
"""
|
||||
store_root, upstream_layout = install_upstream("dependency-install")
|
||||
with spack.store.use_store(store_root):
|
||||
dependency = Spec("dependency-install").concretized()
|
||||
dependent = Spec("dependent-install").concretized()
|
||||
dependency = spack.spec.Spec("dependency-install").concretized()
|
||||
dependent = spack.spec.Spec("dependent-install").concretized()
|
||||
|
||||
new_dependency = dependent["dependency-install"]
|
||||
assert new_dependency.installed_upstream
|
||||
@@ -608,7 +607,7 @@ def test_install_from_binary_with_missing_patch_succeeds(
|
||||
s.to_json(f)
|
||||
|
||||
# And register it in the database
|
||||
temporary_store.db.add(s, explicit=True)
|
||||
temporary_store.db.add(s, directory_layout=temporary_store.layout, explicit=True)
|
||||
|
||||
# Push it to a binary cache
|
||||
mirror = spack.mirror.Mirror.from_local_path(str(tmp_path / "my_build_cache"))
|
||||
|
@@ -12,6 +12,8 @@
|
||||
import py
|
||||
import pytest
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lock as ulk
|
||||
import llnl.util.tty as tty
|
||||
@@ -433,6 +435,76 @@ def test_fake_install(install_mockery):
|
||||
assert os.path.isdir(pkg.prefix.lib)
|
||||
|
||||
|
||||
def test_packages_needed_to_bootstrap_compiler_none(install_mockery):
|
||||
spec = spack.spec.Spec("trivial-install-test-package")
|
||||
spec.concretize()
|
||||
assert spec.concrete
|
||||
|
||||
packages = inst._packages_needed_to_bootstrap_compiler(
|
||||
spec.compiler, spec.architecture, [spec.package]
|
||||
)
|
||||
assert not packages
|
||||
|
||||
|
||||
@pytest.mark.xfail(reason="fails when assuming Spec.package can only be called on concrete specs")
|
||||
def test_packages_needed_to_bootstrap_compiler_packages(install_mockery, monkeypatch):
|
||||
spec = spack.spec.Spec("trivial-install-test-package")
|
||||
spec.concretize()
|
||||
|
||||
def _conc_spec(compiler):
|
||||
return spack.spec.Spec("pkg-a").concretized()
|
||||
|
||||
# Ensure we can get past functions that are precluding obtaining
|
||||
# packages.
|
||||
monkeypatch.setattr(spack.compilers, "compilers_for_spec", _none)
|
||||
monkeypatch.setattr(spack.compilers, "pkg_spec_for_compiler", _conc_spec)
|
||||
monkeypatch.setattr(spack.spec.Spec, "concretize", _noop)
|
||||
|
||||
packages = inst._packages_needed_to_bootstrap_compiler(
|
||||
spec.compiler, spec.architecture, [spec.package]
|
||||
)
|
||||
assert packages
|
||||
|
||||
|
||||
def test_update_tasks_for_compiler_packages_as_compiler(mock_packages, config, monkeypatch):
|
||||
spec = spack.spec.Spec("trivial-install-test-package").concretized()
|
||||
installer = inst.PackageInstaller([spec.package], {})
|
||||
|
||||
# Add a task to the queue
|
||||
installer._add_init_task(spec.package, installer.build_requests[0], False, {})
|
||||
|
||||
# monkeypatch to make the list of compilers be what we test
|
||||
def fake_package_list(compiler, architecture, pkgs):
|
||||
return [(spec.package, True)]
|
||||
|
||||
monkeypatch.setattr(inst, "_packages_needed_to_bootstrap_compiler", fake_package_list)
|
||||
|
||||
installer._add_bootstrap_compilers("fake", "fake", "fake", None, {})
|
||||
|
||||
# Check that the only task is now a compiler task
|
||||
assert len(installer.build_pq) == 1
|
||||
assert installer.build_pq[0][1].compiler
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
str(archspec.cpu.host().family) != "x86_64",
|
||||
reason="OneAPI compiler is not supported on other architectures",
|
||||
)
|
||||
def test_bootstrapping_compilers_with_different_names_from_spec(
|
||||
install_mockery, mutable_config, mock_fetch, archspec_host_is_spack_test_host
|
||||
):
|
||||
"""Tests that, when we bootstrap '%oneapi' we can translate it to the
|
||||
'intel-oneapi-compilers' package.
|
||||
"""
|
||||
with spack.config.override("config:install_missing_compilers", True):
|
||||
with spack.concretize.disable_compiler_existence_check():
|
||||
spec = spack.spec.Spec("trivial-install-test-package%oneapi@=22.2.0").concretized()
|
||||
spec.package.do_install()
|
||||
assert (
|
||||
spack.spec.CompilerSpec("oneapi@=22.2.0") in spack.compilers.all_compiler_specs()
|
||||
)
|
||||
|
||||
|
||||
def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_packages):
|
||||
"""Test happy path for dump_packages with dependencies."""
|
||||
|
||||
@@ -624,6 +696,26 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch):
|
||||
assert inst.package_id(dep) in installer.installed
|
||||
|
||||
|
||||
def test_add_bootstrap_compilers(install_mockery, monkeypatch):
|
||||
from collections import defaultdict
|
||||
|
||||
def _pkgs(compiler, architecture, pkgs):
|
||||
spec = spack.spec.Spec("mpi").concretized()
|
||||
return [(spec.package, True)]
|
||||
|
||||
installer = create_installer(["trivial-install-test-package"], {})
|
||||
request = installer.build_requests[0]
|
||||
all_deps = defaultdict(set)
|
||||
|
||||
monkeypatch.setattr(inst, "_packages_needed_to_bootstrap_compiler", _pkgs)
|
||||
installer._add_bootstrap_compilers("fake", "fake", [request.pkg], request, all_deps)
|
||||
|
||||
ids = list(installer.build_tasks)
|
||||
assert len(ids) == 1
|
||||
task = installer.build_tasks[ids[0]]
|
||||
assert task.compiler
|
||||
|
||||
|
||||
def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
|
||||
"""Test of _prepare_for_install's early return for installed task path."""
|
||||
installer = create_installer(["dependent-install"], {})
|
||||
@@ -637,6 +729,18 @@ def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
|
||||
installer._prepare_for_install(task)
|
||||
|
||||
|
||||
def test_installer_init_requests(install_mockery):
|
||||
"""Test of installer initial requests."""
|
||||
spec_name = "dependent-install"
|
||||
with spack.config.override("config:install_missing_compilers", True):
|
||||
installer = create_installer([spec_name], {})
|
||||
|
||||
# There is only one explicit request in this case
|
||||
assert len(installer.build_requests) == 1
|
||||
request = installer.build_requests[0]
|
||||
assert request.pkg.name == spec_name
|
||||
|
||||
|
||||
def test_install_task_use_cache(install_mockery, monkeypatch):
|
||||
installer = create_installer(["trivial-install-test-package"], {})
|
||||
request = installer.build_requests[0]
|
||||
|
@@ -286,7 +286,6 @@ def compilers(compiler, arch_spec):
|
||||
|
||||
|
||||
# TODO (post-34236): Remove when remove deprecated run_test(), etc.
|
||||
@pytest.mark.not_on_windows("echo not available on Windows")
|
||||
@pytest.mark.parametrize(
|
||||
"msg,installed,purpose,expected",
|
||||
[
|
||||
|
@@ -105,21 +105,25 @@ def test_schema_validation(meta_schema, config_name):

def test_deprecated_properties(module_suffixes_schema):
# Test that an error is reported when 'error: True'
msg_fmt = r"{name} is deprecated"
module_suffixes_schema["deprecatedProperties"] = [
{"names": ["tcl"], "message": msg_fmt, "error": True}
]
msg_fmt = r"deprecated properties detected [properties={properties}]"
module_suffixes_schema["deprecatedProperties"] = {
"properties": ["tcl"],
"message": msg_fmt,
"error": True,
}
v = spack.schema.Validator(module_suffixes_schema)
data = {"tcl": {"all": {"suffixes": {"^python": "py"}}}}

expected_match = "tcl is deprecated"
expected_match = "deprecated properties detected"
with pytest.raises(jsonschema.ValidationError, match=expected_match):
v.validate(data)

# Test that just a warning is reported when 'error: False'
module_suffixes_schema["deprecatedProperties"] = [
{"names": ["tcl"], "message": msg_fmt, "error": False}
]
module_suffixes_schema["deprecatedProperties"] = {
"properties": ["tcl"],
"message": msg_fmt,
"error": False,
}
v = spack.schema.Validator(module_suffixes_schema)
data = {"tcl": {"all": {"suffixes": {"^python": "py"}}}}
# The next validation doesn't raise anymore
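The hunk above moves `deprecatedProperties` from a list of entries keyed by "names" to a single dict keyed by "properties". A hedged sketch of the new shape follows, using only the keys this test exercises; how the surrounding `module_suffixes_schema` fixture is constructed is an assumption.

# Sketch only: the dict form of "deprecatedProperties" exercised by the test
# above; the enclosing schema object is assumed to come from the fixture.
module_suffixes_schema["deprecatedProperties"] = {
    "properties": ["tcl"],
    "message": r"deprecated properties detected [properties={properties}]",
    # error=True raises jsonschema.ValidationError on validation;
    # error=False only emits a warning.
    "error": True,
}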
@@ -245,65 +245,6 @@ def test_abstract_specs_can_constrain_each_other(self, lhs, rhs, expected):
        assert c1 == c2
        assert c1 == expected

    @pytest.mark.parametrize(
        "lhs,rhs,expected_lhs,expected_rhs,propagated_lhs,propagated_rhs",
        [
            (
                'mpich cppflags="-O3"',
                'mpich cppflags="-O2"',
                'mpich cppflags="-O3 -O2"',
                'mpich cppflags="-O2 -O3"',
                [],
                [],
            ),
            (
                'mpich cflags="-O3 -g"',
                'mpich cflags=="-O3"',
                'mpich cflags="-O3 -g"',
                'mpich cflags=="-O3 -g"',
                [("cflags", "-O3")],
                [("cflags", "-O3")],
            ),
        ],
    )
    def test_constrain_compiler_flags(
        self, lhs, rhs, expected_lhs, expected_rhs, propagated_lhs, propagated_rhs
    ):
        """Constraining is asymmetric for compiler flags. Also note that
        Spec equality does not account for flag propagation, so the checks
        here are manual.
        """
        lhs, rhs, expected_lhs, expected_rhs = (
            Spec(lhs),
            Spec(rhs),
            Spec(expected_lhs),
            Spec(expected_rhs),
        )

        assert lhs.intersects(rhs)
        assert rhs.intersects(lhs)

        c1, c2 = lhs.copy(), rhs.copy()
        c1.constrain(rhs)
        c2.constrain(lhs)

        assert c1 == expected_lhs
        assert c2 == expected_rhs
        for x in [c1, c2]:
            assert x.satisfies(lhs)
            assert x.satisfies(rhs)

        def _propagated_flags(_spec):
            result = set()
            for flagtype in _spec.compiler_flags:
                for flag in _spec.compiler_flags[flagtype]:
                    if flag.propagate:
                        result.add((flagtype, flag))
            return result

        assert set(propagated_lhs) <= _propagated_flags(c1)
        assert set(propagated_rhs) <= _propagated_flags(c2)
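The docstring above is the key point of this test: constraining is asymmetric for compiler flags. Taken straight from the first parametrize row, the behavior it encodes can be sketched as follows; the expected strings come from the test data, not from an independent run:

```python
from spack.spec import Spec

lhs, rhs = Spec('mpich cppflags="-O3"'), Spec('mpich cppflags="-O2"')

# Constraining is asymmetric: each side keeps its own flags first and appends
# the other side's, so the two results differ only in flag order.
c1 = lhs.copy()
c1.constrain(rhs)   # expected per the test data: mpich cppflags="-O3 -O2"

c2 = rhs.copy()
c2.constrain(lhs)   # expected per the test data: mpich cppflags="-O2 -O3"
```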
    def test_constrain_specs_by_hash(self, default_mock_concretization, database):
        """Test that Specs specified only by their hashes can constrain each other."""
        mpich_dag_hash = "/" + database.query_one("mpich").dag_hash()
@@ -370,11 +311,14 @@ def test_concrete_specs_which_satisfies_abstract(self, lhs, rhs, default_mock_co
            ("mpich~~foo", "mpich++foo"),
            ("mpich++foo", "mpich~~foo"),
            ("mpich foo==True", "mpich foo==False"),
            ('mpich cppflags="-O3"', 'mpich cppflags="-O2"'),
            ('mpich cppflags="-O3"', 'mpich cppflags=="-O3"'),
            ("libelf@0:2.0", "libelf@2.1:3"),
            ("libelf@0:2.5%gcc@4.8:4.9", "libelf@2.1:3%gcc@4.5:4.7"),
            ("libelf+debug", "libelf~debug"),
            ("libelf+debug~foo", "libelf+debug+foo"),
            ("libelf debug=True", "libelf debug=False"),
            ('libelf cppflags="-O3"', 'libelf cppflags="-O2"'),
            ("libelf platform=test target=be os=be", "libelf target=fe os=fe"),
            ("namespace=builtin.mock", "namespace=builtin"),
        ],
@@ -403,6 +347,10 @@ def test_constraining_abstract_specs_with_empty_intersection(self, lhs, rhs):
            ("mpich", "mpich++foo"),
            ("mpich", "mpich~~foo"),
            ("mpich", "mpich foo==1"),
            # Flag semantics currently differ from those of other variants
            ("mpich", 'mpich cflags="-O3"'),
            ("mpich cflags=-O3", 'mpich cflags="-O3 -Ofast"'),
            ("mpich cflags=-O2", 'mpich cflags="-O3"'),
            ("multivalue-variant foo=bar", "multivalue-variant +foo"),
            ("multivalue-variant foo=bar", "multivalue-variant ~foo"),
            ("multivalue-variant fee=bar", "multivalue-variant fee=baz"),
@@ -741,13 +689,6 @@ def test_spec_formatting(self, default_mock_concretization):
            ("{/hash}", "/", lambda s: "/" + s.dag_hash()),
        ]

        variants_segments = [
            ("{variants.debug}", spec, "debug"),
            ("{variants.foo}", spec, "foo"),
            ("{^pkg-a.variants.bvv}", spec["pkg-a"], "bvv"),
            ("{^pkg-a.variants.foo}", spec["pkg-a"], "foo"),
        ]

        other_segments = [
            ("{spack_root}", spack.paths.spack_root),
            ("{spack_install}", spack.store.STORE.layout.root),
@@ -775,12 +716,6 @@ def check_prop(check_spec, fmt_str, prop, getter):
            callpath, fmt_str = depify("callpath", named_str, sigil)
            assert spec.format(fmt_str) == getter(callpath)

        for named_str, test_spec, variant_name in variants_segments:
            assert test_spec.format(named_str) == str(test_spec.variants[variant_name])
            assert test_spec.format(named_str[:-1] + ".value}") == str(
                test_spec.variants[variant_name].value
            )

        for named_str, expected in other_segments:
            actual = spec.format(named_str)
            assert expected == actual
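For anyone skimming the formatting hunks: the `{/hash}` row kept above and the variants loop encode the following `Spec.format` behavior. The spec construction below is illustrative only (the test gets its spec from a fixture):

```python
from spack.spec import Spec

spec = Spec("mpileaks").concretized()  # illustrative; the test uses a fixture spec

# Per the table row above, "{/hash}" renders a slash followed by the DAG hash.
assert spec.format("{/hash}") == "/" + spec.dag_hash()

# Per the loop above, "{variants.<name>}" renders str(spec.variants[<name>]),
# and "{variants.<name>.value}" renders the raw variant value.
```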
@@ -840,7 +775,6 @@ def test_spec_formatting_sigil_mismatches(self, default_mock_concretization, fmt
            r"{dag_hash}",
            r"{foo}",
            r"{+variants.debug}",
            r"{variants.this_variant_does_not_exist}",
        ],
    )
    def test_spec_formatting_bad_formats(self, default_mock_concretization, fmt_str):
@@ -1517,8 +1451,8 @@ def test_abstract_contains_semantic(lhs, rhs, expected, mock_packages):
        (CompilerSpec, "gcc@5", "gcc@5-tag", (True, False, True)),
        # Flags (flags are a map, so for convenience we initialize a full Spec)
        # Note: the semantic is that of sv variants, not mv variants
        (Spec, "cppflags=-foo", "cppflags=-bar", (True, False, False)),
        (Spec, "cppflags='-bar -foo'", "cppflags=-bar", (True, True, False)),
        (Spec, "cppflags=-foo", "cppflags=-bar", (False, False, False)),
        (Spec, "cppflags='-bar -foo'", "cppflags=-bar", (False, False, False)),
        (Spec, "cppflags=-foo", "cppflags=-foo", (True, True, True)),
        (Spec, "cppflags=-foo", "cflags=-foo", (True, False, False)),
        # Versions
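A hedged reading of this hunk: the tuples appear to encode `(lhs.intersects(rhs), lhs.satisfies(rhs), rhs.satisfies(lhs))`, and the `(False, False, False)` rows look like the replacements for the earlier `(True, ...)` rows. Both readings are assumptions, since the rendered diff has lost its +/- markers. Under that reading the change amounts to:

```python
from spack.spec import Spec

# Assumption: under the updated rows, compiler flags with different values are
# treated as disjoint, so these pairs no longer intersect:
Spec("cppflags=-foo").intersects(Spec("cppflags=-bar"))         # expected: False
Spec("cppflags='-bar -foo'").intersects(Spec("cppflags=-bar"))  # expected: False

# Identical flags still match in every direction, per the unchanged row:
Spec("cppflags=-foo").satisfies(Spec("cppflags=-foo"))          # expected: True
```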
@@ -864,8 +864,8 @@ def test_ambiguous_hash(mutable_database):

    assert x1 != x2  # doesn't hold when only the dag hash is modified.

    mutable_database.add(x1)
    mutable_database.add(x2)
    mutable_database.add(x1, directory_layout=None)
    mutable_database.add(x2, directory_layout=None)

    # ambiguity in first hash character
    s1 = SpecParser("/x").next_spec()
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import os
import sys

import pytest

@@ -316,11 +315,8 @@ def test_test_part_pass(install_mockery, mock_fetch, mock_test_stage):
    name = "test_echo"
    msg = "nothing"
    with spack.install_test.test_part(pkg, name, "echo"):
        if sys.platform == "win32":
            print(msg)
        else:
            echo = which("echo")
            echo(msg)
        echo = which("echo")
        echo(msg)

    for part_name, status in pkg.tester.test_parts.items():
        assert part_name.endswith(name)
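Together with the `not_on_windows` marker seen earlier in this diff, the change above trades the runtime `sys.platform` branch for skipping the whole test on Windows. A hedged sketch of that pattern; the test name and the `which` import are illustrative, not part of the diff:

```python
import pytest

from spack.util.executable import which  # illustrative import, not taken from the hunk


@pytest.mark.not_on_windows("echo not available on Windows")
def test_echo_part():
    # No win32 branch needed: the marker skips this test on Windows entirely.
    echo = which("echo")
    echo("nothing")
```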
@@ -12,6 +12,7 @@
from spack.spec import Spec


@pytest.mark.not_on_windows("Not supported on Windows (yet)")
def test_remove_extensions_ordered(install_mockery, mock_fetch, tmpdir):
    view_dir = str(tmpdir.join("view"))
    layout = DirectoryLayout(view_dir)