Compare commits

..

6 Commits

Author SHA1 Message Date
Adrien Bernede
b2e526678a Merge branch 'develop' into woptim/extend-commit-fetch 2023-02-27 16:15:46 +01:00
Adrien M. BERNEDE
ecdde4a7fb Remove shallow clone fetch 2023-02-27 11:48:58 +01:00
Adrien M. BERNEDE
cbf2cb1a49 Fix wrong syntax 2023-02-27 11:36:06 +01:00
Adrien M. BERNEDE
5de57e6450 Missing repository arg 2023-02-27 11:29:09 +01:00
Adrien M. BERNEDE
a5d71af83a Attempt at getting the commit with a fetch 2023-02-27 11:27:23 +01:00
Adrien M. BERNEDE
9331d47808 Add a step that forces the fetch of the specific commit
This will fetch even if the commit is on a PR from a fork
2023-02-23 16:38:58 +01:00
301 changed files with 2527 additions and 4827 deletions

View File

@@ -20,10 +20,9 @@ concretizer:
# needed to reach a solution increases noticeably with the number of targets
# considered.
targets:
# Determine whether we want to target specific or generic
# microarchitectures. Valid values are: "microarchitectures" or "generic".
# An example of "microarchitectures" would be "skylake" or "bulldozer",
# while an example of "generic" would be "aarch64" or "x86_64_v4".
# Determine whether we want to target specific or generic microarchitectures.
# An example of the first kind might be for instance "skylake" or "bulldozer",
# while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
granularity: microarchitectures
# If "false" allow targets that are incompatible with the current host (for
# instance concretize with target "icelake" while running on "haswell").
@@ -34,4 +33,4 @@ concretizer:
# environments can always be activated. When "false" perform concretization separately
# on each root spec, allowing different versions and variants of the same package in
# an environment.
unify: true
unify: true

File diff suppressed because it is too large Load Diff

93
lib/spack/env/cc vendored
View File

@@ -427,48 +427,6 @@ isystem_include_dirs_list=""
libs_list=""
other_args_list=""
# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no
parse_Wl() {
# drop -Wl
shift
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
rp="$1"
wl_expect_rpath=no
else
rp=""
case "$1" in
-rpath=*)
rp="${1#-rpath=}"
;;
--rpath=*)
rp="${1#--rpath=}"
;;
-rpath|--rpath)
wl_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append other_args_list "-Wl,$1"
;;
esac
fi
if [ -n "$rp" ]; then
if system_dir "$rp"; then
append system_rpath_dirs_list "$rp"
else
append rpath_dirs_list "$rp"
fi
fi
shift
done
# By lack of local variables, always set this to empty string.
rp=""
}
while [ $# -ne 0 ]; do
@@ -568,9 +526,54 @@ while [ $# -ne 0 ]; do
append other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
parse_Wl $1
unset IFS
arg="${1#-Wl,}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
case "$arg" in
-rpath=*) rp="${arg#-rpath=}" ;;
--rpath=*) rp="${arg#--rpath=}" ;;
-rpath,*) rp="${arg#-rpath,}" ;;
--rpath,*) rp="${arg#--rpath,}" ;;
-rpath|--rpath)
shift; arg="$1"
case "$arg" in
-Wl,*)
rp="${arg#-Wl,}"
;;
*)
die "-Wl,-rpath was not followed by -Wl,*"
;;
esac
;;
"$dtags_to_strip")
: # We want to remove explicitly this flag
;;
*)
append other_args_list "-Wl,$arg"
;;
esac
;;
-Xlinker,*)
arg="${1#-Xlinker,}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
case "$arg" in
-rpath=*) rp="${arg#-rpath=}" ;;
--rpath=*) rp="${arg#--rpath=}" ;;
-rpath|--rpath)
shift; arg="$1"
case "$arg" in
-Xlinker,*)
rp="${arg#-Xlinker,}"
;;
*)
die "-Xlinker,-rpath was not followed by -Xlinker,*"
;;
esac
;;
*)
append other_args_list "-Xlinker,$arg"
;;
esac
;;
-Xlinker)
if [ "$2" = "-rpath" ]; then

View File

@@ -16,6 +16,7 @@
import sys
import tempfile
from contextlib import contextmanager
from sys import platform as _platform
from typing import Callable, List, Match, Optional, Tuple, Union
from llnl.util import tty
@@ -25,7 +26,9 @@
from spack.util.executable import Executable, which
from spack.util.path import path_to_os_path, system_path_filter
if sys.platform != "win32":
is_windows = _platform == "win32"
if not is_windows:
import grp
import pwd
else:
@@ -151,7 +154,7 @@ def lookup(name):
def getuid():
if sys.platform == "win32":
if is_windows:
import ctypes
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
@@ -164,7 +167,7 @@ def getuid():
@system_path_filter
def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists
if sys.platform == "win32":
if is_windows:
# Windows path existence checks will sometimes fail on junctions/links/symlinks
# so check for that case
if os.path.exists(dst) or os.path.islink(dst):
@@ -193,7 +196,7 @@ def _get_mime_type():
"""Generate method to call `file` system command to aquire mime type
for a specified path
"""
if sys.platform == "win32":
if is_windows:
# -h option (no-dereference) does not exist in Windows
return file_command("-b", "--mime-type")
else:
@@ -548,7 +551,7 @@ def get_owner_uid(path, err_msg=None):
else:
p_stat = os.stat(path)
if sys.platform != "win32":
if _platform != "win32":
owner_uid = p_stat.st_uid
else:
sid = win32security.GetFileSecurity(
@@ -581,7 +584,7 @@ def group_ids(uid=None):
Returns:
(list of int): gids of groups the user is a member of
"""
if sys.platform == "win32":
if is_windows:
tty.warn("Function is not supported on Windows")
return []
@@ -601,7 +604,7 @@ def group_ids(uid=None):
@system_path_filter(arg_slice=slice(1))
def chgrp(path, group, follow_symlinks=True):
"""Implement the bash chgrp function on a single path"""
if sys.platform == "win32":
if is_windows:
raise OSError("Function 'chgrp' is not supported on Windows")
if isinstance(group, str):
@@ -1128,7 +1131,7 @@ def open_if_filename(str_or_file, mode="r"):
@system_path_filter
def touch(path):
"""Creates an empty file at the specified path."""
if sys.platform == "win32":
if is_windows:
perms = os.O_WRONLY | os.O_CREAT
else:
perms = os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY
@@ -1190,7 +1193,7 @@ def temp_cwd():
yield tmp_dir
finally:
kwargs = {}
if sys.platform == "win32":
if is_windows:
kwargs["ignore_errors"] = False
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
shutil.rmtree(tmp_dir, **kwargs)
@@ -1435,7 +1438,7 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0):
try:
isdir = f.is_dir()
except OSError as e:
if sys.platform == "win32" and hasattr(e, "winerror") and e.winerror == 5 and islink:
if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
# if path is a symlink, determine destination and
# evaluate file vs directory
link_target = resolve_link_target_relative_to_the_link(f)
@@ -1544,11 +1547,11 @@ def readonly_file_handler(ignore_errors=False):
"""
def error_remove_readonly(func, path, exc):
if sys.platform != "win32":
if not is_windows:
raise RuntimeError("This method should only be invoked on Windows")
excvalue = exc[1]
if (
sys.platform == "win32"
is_windows
and func in (os.rmdir, os.remove, os.unlink)
and excvalue.errno == errno.EACCES
):
@@ -1578,7 +1581,7 @@ def remove_linked_tree(path):
# Windows readonly files cannot be removed by Python
# directly.
if sys.platform == "win32":
if is_windows:
kwargs["ignore_errors"] = False
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
@@ -2092,7 +2095,7 @@ def names(self):
# on non Windows platform
# Windows valid library extensions are:
# ['.dll', '.lib']
valid_exts = [".dll", ".lib"] if sys.platform == "win32" else [".dylib", ".so", ".a"]
valid_exts = [".dll", ".lib"] if is_windows else [".dylib", ".so", ".a"]
for ext in valid_exts:
i = name.rfind(ext)
if i != -1:
@@ -2240,7 +2243,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
message = message.format(find_libraries.__name__, type(libraries))
raise TypeError(message)
if sys.platform == "win32":
if is_windows:
static_ext = "lib"
# For linking (runtime=False) you need the .lib files regardless of
# whether you are doing a shared or static link
@@ -2272,7 +2275,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
# finally search all of root recursively. The search stops when the first
# match is found.
common_lib_dirs = ["lib", "lib64"]
if sys.platform == "win32":
if is_windows:
common_lib_dirs.extend(["bin", "Lib"])
for subdir in common_lib_dirs:
@@ -2407,7 +2410,7 @@ def _link(self, path, dest_dir):
# For py2 compatibility, we have to catch the specific Windows error code
# associate with trying to create a file that already exists (winerror 183)
except OSError as e:
if sys.platform == "win32" and e.winerror == 183:
if e.winerror == 183:
# We have either already symlinked or we are encountering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(dest_file)

View File

@@ -5,13 +5,15 @@
import errno
import os
import shutil
import sys
import tempfile
from os.path import exists, join
from sys import platform as _platform
from llnl.util import lang
if sys.platform == "win32":
is_windows = _platform == "win32"
if is_windows:
from win32file import CreateHardLink
@@ -21,7 +23,7 @@ def symlink(real_path, link_path):
On Windows, use junctions if os.symlink fails.
"""
if sys.platform != "win32":
if not is_windows:
os.symlink(real_path, link_path)
elif _win32_can_symlink():
# Windows requires target_is_directory=True when the target is a dir.
@@ -97,7 +99,7 @@ def _win32_is_junction(path):
if os.path.islink(path):
return False
if sys.platform == "win32":
if is_windows:
import ctypes.wintypes
GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW

View File

@@ -25,7 +25,7 @@ def architecture_compatible(self, target, constraint):
return (
not target.architecture
or not constraint.architecture
or target.architecture.intersects(constraint.architecture)
or target.architecture.satisfies(constraint.architecture)
)
@memoized
@@ -104,7 +104,7 @@ def compiler_compatible(self, parent, child, **kwargs):
for cversion in child.compiler.versions:
# For a few compilers use specialized comparisons.
# Otherwise match on version match.
if pversion.intersects(cversion):
if pversion.satisfies(cversion):
return True
elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
pversion, cversion

View File

@@ -721,7 +721,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
dependency_pkg_cls = None
try:
dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
assert any(v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions))
except Exception:
summary = (
"{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"

View File

@@ -6,8 +6,6 @@
import codecs
import collections
import hashlib
import io
import itertools
import json
import multiprocessing.pool
import os
@@ -22,8 +20,7 @@
import urllib.parse
import urllib.request
import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
from contextlib import closing
from urllib.error import HTTPError, URLError
import ruamel.yaml as yaml
@@ -742,31 +739,34 @@ def get_buildfile_manifest(spec):
return data
def prefixes_to_hashes(spec):
return {
str(s.prefix): s.dag_hash()
for s in itertools.chain(
spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
)
}
def get_buildinfo_dict(spec, rel=False):
"""Create metadata for a tarball"""
def write_buildinfo_file(spec, workdir, rel=False):
"""
Create a cache file containing information
required for the relocation
"""
manifest = get_buildfile_manifest(spec)
return {
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
"relative_rpaths": rel,
"buildpath": spack.store.layout.root,
"spackprefix": spack.paths.prefix,
"relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
"relocate_textfiles": manifest["text_to_relocate"],
"relocate_binaries": manifest["binary_to_relocate"],
"relocate_links": manifest["link_to_relocate"],
"hardlinks_deduped": manifest["hardlinks_deduped"],
"prefix_to_hash": prefixes_to_hashes(spec),
}
prefix_to_hash = dict()
prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
deps = spack.build_environment.get_rpath_deps(spec.package)
for d in deps + spec.dependencies(deptype="run"):
prefix_to_hash[str(d.prefix)] = d.dag_hash()
# Create buildinfo data and write it to disk
buildinfo = {}
buildinfo["sbang_install_path"] = spack.hooks.sbang.sbang_install_path()
buildinfo["relative_rpaths"] = rel
buildinfo["buildpath"] = spack.store.layout.root
buildinfo["spackprefix"] = spack.paths.prefix
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
buildinfo["relocate_textfiles"] = manifest["text_to_relocate"]
buildinfo["relocate_binaries"] = manifest["binary_to_relocate"]
buildinfo["relocate_links"] = manifest["link_to_relocate"]
buildinfo["hardlinks_deduped"] = manifest["hardlinks_deduped"]
buildinfo["prefix_to_hash"] = prefix_to_hash
filename = buildinfo_file_name(workdir)
with open(filename, "w") as outfile:
outfile.write(syaml.dump(buildinfo, default_flow_style=True))
def tarball_directory_name(spec):
@@ -1139,68 +1139,6 @@ def generate_key_index(key_prefix, tmpdir=None):
shutil.rmtree(tmpdir)
@contextmanager
def gzip_compressed_tarfile(path):
"""Create a reproducible, compressed tarfile"""
# Create gzip compressed tarball of the install prefix
# 1) Use explicit empty filename and mtime 0 for gzip header reproducibility.
# If the filename="" is dropped, Python will use fileobj.name instead.
# This should effectively mimic `gzip --no-name`.
# 2) On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
with open(path, "wb") as fileobj, closing(
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
yield tar
def deterministic_tarinfo(tarinfo: tarfile.TarInfo):
# We only add files, symlinks, hardlinks, and directories
# No character devices, block devices and FIFOs should ever enter a tarball.
if tarinfo.isdev():
return None
# For distribution, it makes no sense to include user/group data; since (a) they don't exist
# on other machines, and (b) they lead to surprises as `tar x` run as root will change
# ownership if it can. We want to extract as the current user. By setting owner to root,
# root will extract as root, and non-privileged user will extract as themselves.
tarinfo.uid = 0
tarinfo.gid = 0
tarinfo.uname = ""
tarinfo.gname = ""
# Reset mtime to epoch time, our prefixes are not truly immutable, so files may get
# touched; as long as the content does not change, this ensures we get stable tarballs.
tarinfo.mtime = 0
# Normalize mode
if tarinfo.isfile() or tarinfo.islnk():
# If user can execute, use 0o755; else 0o644
# This is to avoid potentially unsafe world writable & executable files that may get
# extracted when Python or tar is run with privileges
tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755
else: # symbolic link and directories
tarinfo.mode = 0o755
return tarinfo
def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
# Serialize buildinfo for the tarball
bstring = syaml.dump(data, default_flow_style=True).encode("utf-8")
tarinfo = tarfile.TarInfo(name=path)
tarinfo.size = len(bstring)
tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))
def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
with gzip_compressed_tarfile(tarfile_path) as tar:
tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo)
tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
def _build_tarball(
spec,
out_url,
@@ -1279,26 +1217,39 @@ def _build_tarball(
os.remove(temp_tarfile_path)
else:
binaries_dir = spec.prefix
mkdirp(os.path.join(workdir, ".spack"))
# create info for later relocation and create tar
buildinfo = get_buildinfo_dict(spec, relative)
write_buildinfo_file(spec, workdir, relative)
# optionally make the paths in the binaries relative to each other
# in the spack install tree before creating tarball
try:
if relative:
make_package_relative(workdir, spec, buildinfo, allow_root)
make_package_relative(workdir, spec, allow_root)
elif not allow_root:
ensure_package_relocatable(buildinfo, binaries_dir)
ensure_package_relocatable(workdir, binaries_dir)
except Exception as e:
shutil.rmtree(workdir)
shutil.rmtree(tarfile_dir)
shutil.rmtree(tmpdir)
tty.die(e)
_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
# create gzip compressed tarball of the install prefix
# On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
with closing(tarfile.open(tarfile_path, "w:gz", compresslevel=6)) as tar:
tar.add(name=binaries_dir, arcname=pkg_dir)
if not relative:
# Add buildinfo file
buildinfo_path = buildinfo_file_name(workdir)
buildinfo_arcname = buildinfo_file_name(pkg_dir)
tar.add(name=buildinfo_path, arcname=buildinfo_arcname)
# remove copy of install directory
if relative:
shutil.rmtree(workdir)
shutil.rmtree(workdir)
# get the sha256 checksum of the tarball
checksum = checksum_tarball(tarfile_path)
@@ -1585,12 +1536,13 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
return None
def make_package_relative(workdir, spec, buildinfo, allow_root):
def make_package_relative(workdir, spec, allow_root):
"""
Change paths in binaries to relative paths. Change absolute symlinks
to relative symlinks.
"""
prefix = spec.prefix
buildinfo = read_buildinfo_file(workdir)
old_layout_root = buildinfo["buildpath"]
orig_path_names = list()
cur_path_names = list()
@@ -1614,8 +1566,9 @@ def make_package_relative(workdir, spec, buildinfo, allow_root):
relocate.make_link_relative(cur_path_names, orig_path_names)
def ensure_package_relocatable(buildinfo, binaries_dir):
def ensure_package_relocatable(workdir, binaries_dir):
"""Check if package binaries are relocatable."""
buildinfo = read_buildinfo_file(workdir)
binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
relocate.ensure_binaries_are_relocatable(binaries)

View File

@@ -208,7 +208,7 @@ def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, tes
# This will be None for things that don't depend on python
python_spec = item.get("python", None)
# Skip specs which are not compatible
if not abstract_spec.intersects(candidate_spec):
if not abstract_spec.satisfies(candidate_spec):
continue
if python_spec is not None and python_spec not in abstract_spec:

View File

@@ -69,13 +69,13 @@
from spack.installer import InstallError
from spack.util.cpus import cpus_available
from spack.util.environment import (
SYSTEM_DIRS,
EnvironmentModifications,
env_flag,
filter_system_paths,
get_path,
inspect_path,
is_system_path,
system_dirs,
validate,
)
from spack.util.executable import Executable
@@ -397,7 +397,7 @@ def set_compiler_environment_variables(pkg, env):
env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
env.set("SPACK_SYSTEM_DIRS", ":".join(system_dirs))
compiler.setup_custom_environment(pkg, env)
@@ -423,14 +423,6 @@ def set_wrapper_variables(pkg, env):
compiler = pkg.compiler
env.extend(spack.schema.environment.parse(compiler.environment))
# Before setting up PATH to Spack compiler wrappers, make sure compiler is in PATH
# This ensures that non-wrapped executables from the compiler bin directory are available
bindirs = dedupe(
[os.path.dirname(c) for c in [compiler.cc, compiler.cxx, compiler.fc, compiler.f77]]
)
for bindir in bindirs:
env.prepend_path("PATH", bindir)
if compiler.extra_rpaths:
extra_rpaths = ":".join(compiler.extra_rpaths)
env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
@@ -493,13 +485,7 @@ def update_compiler_args_for_dep(dep):
query = pkg.spec[dep.name]
dep_link_dirs = list()
try:
# In some circumstances (particularly for externals) finding
# libraries packages can be time consuming, so indicate that
# we are performing this operation (and also report when it
# finishes).
tty.debug("Collecting libraries for {0}".format(dep.name))
dep_link_dirs.extend(query.libs.directories)
tty.debug("Libraries for {0} have been collected.".format(dep.name))
except NoLibrariesError:
tty.debug("No libraries found for {0}".format(dep.name))
@@ -786,9 +772,7 @@ def setup_package(pkg, dirty, context="build"):
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods)
tty.debug("setup_package: grabbing modifications from dependencies")
env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
tty.debug("setup_package: collected all modifications from dependencies")
# architecture specific setup
platform = spack.platforms.by_name(pkg.spec.architecture.platform)
@@ -796,7 +780,6 @@ def setup_package(pkg, dirty, context="build"):
platform.setup_platform_environment(pkg, env_mods)
if context == "build":
tty.debug("setup_package: setup build environment for root")
builder = spack.builder.create(pkg)
builder.setup_build_environment(env_mods)
@@ -807,7 +790,6 @@ def setup_package(pkg, dirty, context="build"):
" includes and omit it when invoked with '--cflags'."
)
elif context == "test":
tty.debug("setup_package: setup test environment for root")
env_mods.extend(
inspect_path(
pkg.spec.prefix,
@@ -824,7 +806,6 @@ def setup_package(pkg, dirty, context="build"):
# Load modules on an already clean environment, just before applying Spack's
# own environment modifications. This ensures Spack controls CC/CXX/... variables.
if need_compiler:
tty.debug("setup_package: loading compiler modules")
for mod in pkg.compiler.modules:
load_module(mod)
@@ -962,7 +943,6 @@ def default_modifications_for_dep(dep):
_make_runnable(dep, env)
def add_modifications_for_dep(dep):
tty.debug("Adding env modifications for {0}".format(dep.name))
# Some callers of this function only want the custom modifications.
# For callers that want both custom and default modifications, we want
# to perform the default modifications here (this groups custom
@@ -988,7 +968,6 @@ def add_modifications_for_dep(dep):
builder.setup_dependent_build_environment(env, spec)
else:
dpkg.setup_dependent_run_environment(env, spec)
tty.debug("Added env modifications for {0}".format(dep.name))
# Note that we want to perform environment modifications in a fixed order.
# The Spec.traverse method provides this: i.e. in addition to

View File

@@ -361,7 +361,7 @@ def append_dep(s, d):
def _spec_matches(spec, match_string):
return spec.intersects(match_string)
return spec.satisfies(match_string)
def _remove_attributes(src_dict, dest_dict):
@@ -490,28 +490,16 @@ def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
def get_spec_filter_list(env, affected_pkgs):
"""Given a list of package names and an active/concretized
environment, return the set of all concrete specs from the
environment that could have been affected by changing the
list of packages.
If a ``dependent_traverse_depth`` is given, it is used to limit
upward (in the parent direction) traversal of specs of touched
packages. E.g. if 1 is provided, then only direct dependents
of touched package specs are traversed to produce specs that
could have been affected by changing the package, while if 0 is
provided, only the changed specs themselves are traversed. If ``None``
is given, upward traversal of touched package specs is done all
the way to the environment roots. Providing a negative number
results in no traversals at all, yielding an empty set.
Arguments:
env (spack.environment.Environment): Active concrete environment
affected_pkgs (List[str]): Affected package names
dependent_traverse_depth: Optional integer to limit dependent
traversal, or None to disable the limit.
Returns:
@@ -528,11 +516,10 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
visited = set()
dag_hash = lambda s: s.dag_hash()
for match in env_matches:
for dep_level, parent in match.traverse(direction="parents", key=dag_hash, depth=True):
if dependent_traverse_depth is None or dep_level <= dependent_traverse_depth:
affected_specs.update(
parent.traverse(direction="children", visited=visited, key=dag_hash)
)
for parent in match.traverse(direction="parents", key=dag_hash):
affected_specs.update(
parent.traverse(direction="children", visited=visited, key=dag_hash)
)
return affected_specs
@@ -593,18 +580,6 @@ def generate_gitlab_ci_yaml(
cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
build_group = cdash_handler.build_group if cdash_handler else None
dependent_depth = os.environ.get("SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH", None)
if dependent_depth is not None:
try:
dependent_depth = int(dependent_depth)
except (TypeError, ValueError):
tty.warn(
"Unrecognized value ({0}) ".format(dependent_depth),
"provide forSPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH, ",
"ignoring it.",
)
dependent_depth = None
prune_untouched_packages = False
spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
@@ -620,9 +595,7 @@ def generate_gitlab_ci_yaml(
tty.debug("affected pkgs:")
for p in affected_pkgs:
tty.debug(" {0}".format(p))
affected_specs = get_spec_filter_list(
env, affected_pkgs, dependent_traverse_depth=dependent_depth
)
affected_specs = get_spec_filter_list(env, affected_pkgs)
tty.debug("all affected specs:")
for s in affected_specs:
tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
@@ -965,7 +938,7 @@ def generate_gitlab_ci_yaml(
bs_arch = c_spec.architecture
bs_arch_family = bs_arch.target.microarchitecture.family
if (
c_spec.intersects(compiler_pkg_spec)
c_spec.satisfies(compiler_pkg_spec)
and bs_arch_family == spec_arch_family
):
# We found the bootstrap compiler this release spec

View File

@@ -498,11 +498,11 @@ def list_fn(args):
if not args.allarch:
arch = spack.spec.Spec.default_arch()
specs = [s for s in specs if s.intersects(arch)]
specs = [s for s in specs if s.satisfies(arch)]
if args.specs:
constraints = set(args.specs)
specs = [s for s in specs if any(s.intersects(c) for c in constraints)]
specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
if sys.stdout.isatty():
builds = len(specs)
tty.msg("%s." % plural(builds, "cached build"))

View File

@@ -39,14 +39,19 @@
compiler flags:
@g{cflags="flags"} cppflags, cflags, cxxflags,
fflags, ldflags, ldlibs
@g{==} propagate flags to package dependencies
@g{cflags=="flags"} propagate flags to package dependencies
cppflags, cflags, cxxflags, fflags,
ldflags, ldlibs
variants:
@B{+variant} enable <variant>
@B{++variant} propagate enable <variant>
@r{-variant} or @r{~variant} disable <variant>
@r{--variant} or @r{~~variant} propagate disable <variant>
@B{variant=value} set non-boolean <variant> to <value>
@B{variant==value} propagate non-boolean <variant> to <value>
@B{variant=value1,value2,value3} set multi-value <variant> values
@B{++}, @r{--}, @r{~~}, @B{==} propagate variants to package dependencies
@B{variant==value1,value2,value3} propagate multi-value <variant> values
architecture variants:
@m{platform=platform} linux, darwin, cray, etc.

View File

@@ -283,7 +283,7 @@ def print_tests(pkg):
c_names = ("gcc", "intel", "intel-parallel-studio", "pgi")
if pkg.name in c_names:
v_names.extend(["c", "cxx", "fortran"])
if pkg.spec.intersects("llvm+clang"):
if pkg.spec.satisfies("llvm+clang"):
v_names.extend(["c", "cxx"])
# TODO Refactor END

View File

@@ -335,7 +335,7 @@ def not_excluded_fn(args):
exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
def not_excluded(x):
return not any(x.satisfies(y) for y in exclude_specs)
return not any(x.satisfies(y, strict=True) for y in exclude_specs)
return not_excluded

View File

@@ -26,6 +26,7 @@
description = "run spack's unit tests (wrapper around pytest)"
section = "developer"
level = "long"
is_windows = sys.platform == "win32"
def setup_parser(subparser):
@@ -211,7 +212,7 @@ def unit_test(parser, args, unknown_args):
# mock configuration used by unit tests
# Note: skip on windows here because for the moment,
# clingo is wholly unsupported from bootstrap
if sys.platform != "win32":
if not is_windows:
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
if pytest is None:

View File

@@ -28,6 +28,8 @@
__all__ = ["Compiler"]
is_windows = sys.platform == "win32"
@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
@@ -596,7 +598,7 @@ def search_regexps(cls, language):
suffixes = [""]
# Windows compilers generally have an extension of some sort
# as do most files on Windows, handle that case here
if sys.platform == "win32":
if is_windows:
ext = r"\.(?:exe|bat)"
cls_suf = [suf + ext for suf in cls.suffixes]
ext_suf = [ext]

View File

@@ -84,7 +84,7 @@ def _to_dict(compiler):
d = {}
d["spec"] = str(compiler.spec)
d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
d["flags"] = dict((fname, " ".join(fvals)) for fname, fvals in compiler.flags.items())
d["flags"] = dict((fname, fvals) for fname, fvals in compiler.flags)
d["flags"].update(
dict(
(attr, getattr(compiler, attr, None))

View File

@@ -134,7 +134,7 @@ def _valid_virtuals_and_externals(self, spec):
externals = spec_externals(cspec)
for ext in externals:
if ext.intersects(spec):
if ext.satisfies(spec):
usable.append(ext)
# If nothing is in the usable list now, it's because we aren't
@@ -200,7 +200,7 @@ def concretize_version(self, spec):
# List of versions we could consider, in sorted order
pkg_versions = spec.package_class.versions
usable = [v for v in pkg_versions if any(v.intersects(sv) for sv in spec.versions)]
usable = [v for v in pkg_versions if any(v.satisfies(sv) for sv in spec.versions)]
yaml_prefs = PackagePrefs(spec.name, "version")
@@ -344,7 +344,7 @@ def concretize_architecture(self, spec):
new_target_arch = spack.spec.ArchSpec((None, None, str(new_target)))
curr_target_arch = spack.spec.ArchSpec((None, None, str(curr_target)))
if not new_target_arch.intersects(curr_target_arch):
if not new_target_arch.satisfies(curr_target_arch):
# new_target is an incorrect guess based on preferences
# and/or default
valid_target_ranges = str(curr_target).split(",")

View File

@@ -1525,7 +1525,7 @@ def _query(
if not (start_date < inst_date < end_date):
continue
if query_spec is any or rec.spec.satisfies(query_spec):
if query_spec is any or rec.spec.satisfies(query_spec, strict=True):
results.append(rec.spec)
return results

View File

@@ -29,6 +29,7 @@
import spack.util.spack_yaml
import spack.util.windows_registry
is_windows = sys.platform == "win32"
#: Information on a package that has been detected
DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])
@@ -183,7 +184,7 @@ def library_prefix(library_dir):
elif "lib" in lowered_components:
idx = lowered_components.index("lib")
return os.sep.join(components[:idx])
elif sys.platform == "win32" and "bin" in lowered_components:
elif is_windows and "bin" in lowered_components:
idx = lowered_components.index("bin")
return os.sep.join(components[:idx])
else:
@@ -259,13 +260,13 @@ def find_windows_compiler_bundled_packages():
class WindowsKitExternalPaths(object):
if sys.platform == "win32":
if is_windows:
plat_major_ver = str(winOs.windows_version()[0])
@staticmethod
def find_windows_kit_roots():
"""Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
if sys.platform != "win32":
if not is_windows:
return []
program_files = os.environ["PROGRAMFILES(x86)"]
kit_base = os.path.join(
@@ -358,7 +359,7 @@ def compute_windows_program_path_for_package(pkg):
pkg (spack.package_base.PackageBase): package for which
Program Files location is to be computed
"""
if sys.platform != "win32":
if not is_windows:
return []
# note windows paths are fine here as this method should only ever be invoked
# to interact with Windows
@@ -378,7 +379,7 @@ def compute_windows_user_path_for_package(pkg):
installs see:
https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8
"""
if sys.platform != "win32":
if not is_windows:
return []
# Current user directory

View File

@@ -31,6 +31,8 @@
path_to_dict,
)
is_windows = sys.platform == "win32"
def common_windows_package_paths():
paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
@@ -55,7 +57,7 @@ def executables_in_path(path_hints):
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
if sys.platform == "win32":
if is_windows:
path_hints.extend(common_windows_package_paths())
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
return path_to_dict(search_paths)
@@ -147,7 +149,7 @@ def by_library(packages_to_check, path_hints=None):
path_to_lib_name = (
libraries_in_ld_and_system_library_path(path_hints=path_hints)
if sys.platform != "win32"
if not is_windows
else libraries_in_windows_paths(path_hints)
)

View File

@@ -21,6 +21,7 @@
import spack.util.spack_json as sjson
from spack.error import SpackError
is_windows = sys.platform == "win32"
# Note: Posixpath is used here as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path seperators at this stage
@@ -345,7 +346,7 @@ def remove_install_directory(self, spec, deprecated=False):
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
if sys.platform == "win32":
if is_windows:
kwargs = {
"ignore_errors": False,
"onerror": fs.readonly_file_handler(ignore_errors=False),

View File

@@ -349,8 +349,7 @@ def _is_dev_spec_and_has_changed(spec):
def _spec_needs_overwrite(spec, changed_dev_specs):
"""Check whether the current spec needs to be overwritten because either it has
changed itself or one of its dependencies have changed
"""
changed itself or one of its dependencies have changed"""
# if it's not installed, we don't need to overwrite it
if not spec.installed:
return False
@@ -2314,7 +2313,7 @@ def _concretize_from_constraints(spec_constraints, tests=False):
invalid_deps = [
c
for c in spec_constraints
if any(c.satisfies(invd) for invd in invalid_deps_string)
if any(c.satisfies(invd, strict=True) for invd in invalid_deps_string)
]
if len(invalid_deps) != len(invalid_deps_string):
raise e

View File

@@ -28,6 +28,7 @@
import os.path
import re
import shutil
import sys
import urllib.parse
from typing import List, Optional
@@ -52,6 +53,7 @@
#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
is_windows = sys.platform == "win32"
CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE = (
"The contents of {subject} look like {content_type}. Either the URL"
@@ -860,6 +862,9 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
)
with working_dir(dest):
# By defaults, on all references are fetched by the clone
fetch_args = ["fetch", "origin", commit]
git(*fetch_args)
checkout_args = ["checkout", commit]
if not debug:
checkout_args.insert(1, "--quiet")
@@ -1501,7 +1506,7 @@ def _from_merged_attrs(fetcher, pkg, version):
return fetcher(**attrs)
def for_package_version(pkg, version=None):
def for_package_version(pkg, version):
"""Determine a fetch strategy based on the arguments supplied to
version() in the package description."""
@@ -1512,18 +1517,8 @@ def for_package_version(pkg, version=None):
check_pkg_attributes(pkg)
if version is not None:
assert not pkg.spec.concrete, "concrete specs should not pass the 'version=' argument"
# Specs are initialized with the universe range, if no version information is given,
# so here we make sure we always match the version passed as argument
if not isinstance(version, spack.version.VersionBase):
version = spack.version.Version(version)
version_list = spack.version.VersionList()
version_list.add(version)
pkg.spec.versions = version_list
else:
version = pkg.version
if not isinstance(version, spack.version.VersionBase):
version = spack.version.Version(version)
# if it's a commit, we must use a GitFetchStrategy
if isinstance(version, spack.version.GitVersion):

View File

@@ -30,7 +30,8 @@
#: Groupdb does not exist on Windows, prevent imports
#: on supported systems
if sys.platform != "win32":
is_windows = sys.platform == "win32"
if not is_windows:
import grp
#: Spack itself also limits the shebang line to at most 4KB, which should be plenty.

View File

@@ -84,6 +84,9 @@
#: queue invariants).
STATUS_REMOVED = "removed"
is_windows = sys.platform == "win32"
is_osx = sys.platform == "darwin"
class InstallAction(object):
#: Don't perform an install
@@ -166,9 +169,9 @@ def _do_fake_install(pkg):
if not pkg.name.startswith("lib"):
library = "lib" + library
plat_shared = ".dll" if sys.platform == "win32" else ".so"
plat_static = ".lib" if sys.platform == "win32" else ".a"
dso_suffix = ".dylib" if sys.platform == "darwin" else plat_shared
plat_shared = ".dll" if is_windows else ".so"
plat_static = ".lib" if is_windows else ".a"
dso_suffix = ".dylib" if is_osx else plat_shared
# Install fake command
fs.mkdirp(pkg.prefix.bin)

View File

@@ -575,7 +575,7 @@ def setup_main_options(args):
if args.debug:
spack.util.debug.register_interrupt_handler()
spack.config.set("config:debug", True, scope="command_line")
spack.util.environment.TRACING_ENABLED = True
spack.util.environment.tracing_enabled = True
if args.timestamp:
tty.set_timestamp(True)

View File

@@ -492,7 +492,7 @@ def get_matching_versions(specs, num_versions=1):
break
# Generate only versions that satisfy the spec.
if spec.concrete or v.intersects(spec.versions):
if spec.concrete or v.satisfies(spec.versions):
s = spack.spec.Spec(pkg.name)
s.versions = VersionList([v])
s.variants = spec.variants.copy()

View File

@@ -207,7 +207,7 @@ def merge_config_rules(configuration, spec):
# evaluated in order of appearance in the module file
spec_configuration = module_specific_configuration.pop("all", {})
for constraint, action in module_specific_configuration.items():
if spec.satisfies(constraint):
if spec.satisfies(constraint, strict=True):
if hasattr(constraint, "override") and constraint.override:
spec_configuration = {}
update_dictionary_extending_lists(spec_configuration, action)

View File

@@ -71,7 +71,7 @@ def guess_core_compilers(name, store=False):
# A compiler is considered to be a core compiler if any of the
# C, C++ or Fortran compilers reside in a system directory
is_system_compiler = any(
os.path.dirname(x) in spack.util.environment.SYSTEM_DIRS
os.path.dirname(x) in spack.util.environment.system_dirs
for x in compiler["paths"].values()
if x is not None
)

View File

@@ -92,6 +92,9 @@
_spack_configure_argsfile = "spack-configure-args.txt"
is_windows = sys.platform == "win32"
def deprecated_version(pkg, version):
"""Return True if the version is deprecated, False otherwise.
@@ -162,7 +165,7 @@ def windows_establish_runtime_linkage(self):
Performs symlinking to incorporate rpath dependencies to Windows runtime search paths
"""
if sys.platform == "win32":
if is_windows:
self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
self.win_rpath.add_rpath(*self.win_add_rpath())
self.win_rpath.establish_link()
@@ -207,7 +210,7 @@ def to_windows_exe(exe):
plat_exe = []
if hasattr(cls, "executables"):
for exe in cls.executables:
if sys.platform == "win32":
if is_windows:
exe = to_windows_exe(exe)
plat_exe.append(exe)
return plat_exe
@@ -1197,7 +1200,7 @@ def _make_fetcher(self):
# one element (the root package). In case there are resources
# associated with the package, append their fetcher to the
# composite.
root_fetcher = fs.for_package_version(self)
root_fetcher = fs.for_package_version(self, self.version)
fetcher = fs.FetchStrategyComposite() # Composite fetcher
fetcher.append(root_fetcher) # Root fetcher is always present
resources = self._get_needed_resources()
@@ -1308,7 +1311,7 @@ def provides(self, vpkg_name):
True if this package provides a virtual package with the specified name
"""
return any(
any(self.spec.intersects(c) for c in constraints)
any(self.spec.satisfies(c) for c in constraints)
for s, constraints in self.provided.items()
if s.name == vpkg_name
)
@@ -1614,7 +1617,7 @@ def content_hash(self, content=None):
# TODO: resources
if self.spec.versions.concrete:
try:
source_id = fs.for_package_version(self).source_id()
source_id = fs.for_package_version(self, self.version).source_id()
except (fs.ExtrapolationError, fs.InvalidArgsError):
# ExtrapolationError happens if the package has no fetchers defined.
# InvalidArgsError happens when there are version directives with args,
@@ -1777,7 +1780,7 @@ def _get_needed_resources(self):
# conflict with the spec, so we need to invoke
# when_spec.satisfies(self.spec) vs.
# self.spec.satisfies(when_spec)
if when_spec.intersects(self.spec):
if when_spec.satisfies(self.spec, strict=False):
resources.extend(resource_list)
# Sorts the resources by the length of the string representing their
# destination. Since any nested resource must contain another
@@ -2398,7 +2401,7 @@ def rpath(self):
# on Windows, libraries of runtime interest are typically
# stored in the bin directory
if sys.platform == "win32":
if is_windows:
rpaths = [self.prefix.bin]
rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
else:

View File

@@ -73,7 +73,7 @@ def __call__(self, spec):
# integer is the index of the first spec in order that satisfies
# spec, or it's a number larger than any position in the order.
match_index = next(
(i for i, s in enumerate(spec_order) if spec.intersects(s)), len(spec_order)
(i for i, s in enumerate(spec_order) if spec.satisfies(s)), len(spec_order)
)
if match_index < len(spec_order) and spec_order[match_index] == spec:
# If this is called with multiple specs that all satisfy the same
@@ -185,7 +185,7 @@ def _package(maybe_abstract_spec):
),
extra_attributes=entry.get("extra_attributes", {}),
)
if external_spec.intersects(spec):
if external_spec.satisfies(spec):
external_specs.append(external_spec)
# Defensively copy returned specs

View File

@@ -37,7 +37,7 @@
def slingshot_network():
return os.path.exists("/opt/cray/pe") and os.path.exists("/lib64/libcxi.so")
return os.path.exists("/lib64/libcxi.so")
def _target_name_from_craype_target_name(name):

View File

@@ -10,7 +10,7 @@ def get_projection(projections, spec):
"""
all_projection = None
for spec_like, projection in projections.items():
if spec.satisfies(spec_like):
if spec.satisfies(spec_like, strict=True):
return projection
elif spec_like == "all":
all_projection = projection

View File

@@ -72,7 +72,7 @@ def providers_for(self, virtual_spec):
# Add all the providers that satisfy the vpkg spec.
if virtual_spec.name in self.providers:
for p_spec, spec_set in self.providers[virtual_spec.name].items():
if p_spec.intersects(virtual_spec, deps=False):
if p_spec.satisfies(virtual_spec, deps=False):
result.update(spec_set)
# Return providers in order. Defensively copy.
@@ -186,7 +186,7 @@ def update(self, spec):
provider_spec = provider_spec_readonly.copy()
provider_spec.compiler_flags = spec.compiler_flags.copy()
if spec.intersects(provider_spec, deps=False):
if spec.satisfies(provider_spec, deps=False):
provided_name = provided_spec.name
provider_map = self.providers.setdefault(provided_name, {})

View File

@@ -501,7 +501,7 @@ def _compute_specs_from_answer_set(self):
key = providers[0]
candidate = answer.get(key)
if candidate and candidate.intersects(input_spec):
if candidate and candidate.satisfies(input_spec):
self._concrete_specs.append(answer[key])
self._concrete_specs_by_input[input_spec] = answer[key]
else:
@@ -1402,12 +1402,7 @@ def flag_defaults(self):
# flags from compilers.yaml
compilers = all_compilers_in_config()
seen = set()
for compiler in compilers:
# if there are multiple with the same spec, only use the first
if compiler.spec in seen:
continue
seen.add(compiler.spec)
for name, flags in compiler.flags.items():
for flag in flags:
self.gen.fact(
@@ -1878,7 +1873,7 @@ def define_version_constraints(self):
for pkg_name, versions in sorted(self.version_constraints):
# version must be *one* of the ones the spec allows.
allowed_versions = [
v for v in sorted(self.possible_versions[pkg_name]) if v.intersects(versions)
v for v in sorted(self.possible_versions[pkg_name]) if v.satisfies(versions)
]
# This is needed to account for a variable number of
@@ -2292,8 +2287,7 @@ def reorder_flags(self):
The solver determines wihch flags are on nodes; this routine
imposes order afterwards.
"""
# reverse compilers so we get highest priority compilers that share a spec
compilers = dict((c.spec, c) for c in reversed(all_compilers_in_config()))
compilers = dict((c.spec, c) for c in all_compilers_in_config())
cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())
for spec in self._specs.values():

View File

@@ -54,6 +54,7 @@
import itertools
import os
import re
import sys
import warnings
from typing import Tuple
@@ -117,6 +118,7 @@
"SpecDeprecatedError",
]
is_windows = sys.platform == "win32"
#: Valid pattern for an identifier in Spack
identifier_re = r"\w[\w-]*"
@@ -191,7 +193,9 @@ def __call__(self, match):
@lang.lazy_lexicographic_ordering
class ArchSpec(object):
"""Aggregate the target platform, the operating system and the target microarchitecture."""
"""Aggregate the target platform, the operating system and the target
microarchitecture into an architecture spec..
"""
@staticmethod
def _return_arch(os_tag, target_tag):
@@ -360,11 +364,17 @@ def target_or_none(t):
self._target = value
def satisfies(self, other: "ArchSpec") -> bool:
"""Return True if all concrete specs matching self also match other, otherwise False.
def satisfies(self, other, strict=False):
"""Predicate to check if this spec satisfies a constraint.
Args:
other: spec to be satisfied
other (ArchSpec or str): constraint on the current instance
strict (bool): if ``False`` the function checks if the current
instance *might* eventually satisfy the constraint. If
``True`` it check if the constraint is satisfied right now.
Returns:
True if the constraint is satisfied, False otherwise.
"""
other = self._autospec(other)
@@ -372,69 +382,47 @@ def satisfies(self, other: "ArchSpec") -> bool:
for attribute in ("platform", "os"):
other_attribute = getattr(other, attribute)
self_attribute = getattr(self, attribute)
if other_attribute and self_attribute != other_attribute:
return False
if strict or self.concrete:
if other_attribute and self_attribute != other_attribute:
return False
else:
if other_attribute and self_attribute and self_attribute != other_attribute:
return False
return self._target_satisfies(other, strict=True)
# Check target
return self.target_satisfies(other, strict=strict)
def intersects(self, other: "ArchSpec") -> bool:
"""Return True if there exists at least one concrete spec that matches both
self and other, otherwise False.
This operation is commutative, and if two specs intersect it means that one
can constrain the other.
Args:
other: spec to be checked for compatibility
"""
other = self._autospec(other)
# Check platform and os
for attribute in ("platform", "os"):
other_attribute = getattr(other, attribute)
self_attribute = getattr(self, attribute)
if other_attribute and self_attribute and self_attribute != other_attribute:
return False
return self._target_satisfies(other, strict=False)
def _target_satisfies(self, other: "ArchSpec", strict: bool) -> bool:
if strict is True:
need_to_check = bool(other.target)
else:
need_to_check = bool(other.target and self.target)
def target_satisfies(self, other, strict):
need_to_check = (
bool(other.target) if strict or self.concrete else bool(other.target and self.target)
)
# If there's no need to check we are fine
if not need_to_check:
return True
# other_target is there and strict=True
# self is not concrete, but other_target is there and strict=True
if self.target is None:
return False
return bool(self._target_intersection(other))
return bool(self.target_intersection(other))
def _target_constrain(self, other: "ArchSpec") -> bool:
if not other._target_satisfies(self, strict=False):
def target_constrain(self, other):
if not other.target_satisfies(self, strict=False):
raise UnsatisfiableArchitectureSpecError(self, other)
if self.target_concrete:
return False
elif other.target_concrete:
self.target = other.target
return True
# Compute the intersection of every combination of ranges in the lists
results = self._target_intersection(other)
attribute_str = ",".join(results)
results = self.target_intersection(other)
# Do we need to dedupe here?
self.target = ",".join(results)
if self.target == attribute_str:
return False
self.target = attribute_str
return True
def _target_intersection(self, other):
def target_intersection(self, other):
results = []
if not self.target or not other.target:
@@ -478,7 +466,7 @@ def _target_intersection(self, other):
results.append("%s:%s" % (n_min, n_max))
return results
def constrain(self, other: "ArchSpec") -> bool:
def constrain(self, other):
"""Projects all architecture fields that are specified in the given
spec onto the instance spec if they're missing from the instance
spec.
@@ -493,7 +481,7 @@ def constrain(self, other: "ArchSpec") -> bool:
"""
other = self._autospec(other)
if not other.intersects(self):
if not other.satisfies(self):
raise UnsatisfiableArchitectureSpecError(other, self)
constrained = False
@@ -503,7 +491,7 @@ def constrain(self, other: "ArchSpec") -> bool:
setattr(self, attr, ovalue)
constrained = True
constrained |= self._target_constrain(other)
self.target_constrain(other)
return constrained
@@ -519,9 +507,7 @@ def concrete(self):
@property
def target_concrete(self):
"""True if the target is not a range or list."""
return (
self.target is not None and ":" not in str(self.target) and "," not in str(self.target)
)
return ":" not in str(self.target) and "," not in str(self.target)
def to_dict(self):
d = syaml.syaml_dict(
@@ -607,31 +593,11 @@ def _autospec(self, compiler_spec_like):
return compiler_spec_like
return CompilerSpec(compiler_spec_like)
def intersects(self, other: "CompilerSpec") -> bool:
"""Return True if all concrete specs matching self also match other, otherwise False.
For compiler specs this means that the name of the compiler must be the same for
self and other, and that the versions ranges should intersect.
Args:
other: spec to be satisfied
"""
def satisfies(self, other, strict=False):
other = self._autospec(other)
return self.name == other.name and self.versions.intersects(other.versions)
return self.name == other.name and self.versions.satisfies(other.versions, strict=strict)
def satisfies(self, other: "CompilerSpec") -> bool:
"""Return True if all concrete specs matching self also match other, otherwise False.
For compiler specs this means that the name of the compiler must be the same for
self and other, and that the version range of self is a subset of that of other.
Args:
other: spec to be satisfied
"""
other = self._autospec(other)
return self.name == other.name and self.versions.satisfies(other.versions)
def constrain(self, other: "CompilerSpec") -> bool:
def constrain(self, other):
"""Intersect self's versions with other.
Return whether the CompilerSpec changed.
@@ -639,7 +605,7 @@ def constrain(self, other: "CompilerSpec") -> bool:
other = self._autospec(other)
# ensure that other will actually constrain this spec.
if not other.intersects(self):
if not other.satisfies(self):
raise UnsatisfiableCompilerSpecError(other, self)
return self.versions.intersect(other.versions)
@@ -772,25 +738,24 @@ def __init__(self, spec):
super(FlagMap, self).__init__()
self.spec = spec
def satisfies(self, other):
return all(f in self and self[f] == other[f] for f in other)
def intersects(self, other):
common_types = set(self) & set(other)
for flag_type in common_types:
if not self[flag_type] or not other[flag_type]:
# At least one of the two is empty
continue
if self[flag_type] != other[flag_type]:
return False
def satisfies(self, other, strict=False):
if strict or (self.spec and self.spec._concrete):
return all(f in self and set(self[f]) == set(other[f]) for f in other)
else:
if not all(
f1.propagate == f2.propagate for f1, f2 in zip(self[flag_type], other[flag_type])
set(self[f]) == set(other[f]) for f in other if (other[f] != [] and f in self)
):
# At least one propagation flag didn't match
return False
return True
# Check that the propagation values match
for flag_type in other:
if not all(
other[flag_type][i].propagate == self[flag_type][i].propagate
for i in range(len(other[flag_type]))
if flag_type in self
):
return False
return True
def constrain(self, other):
"""Add all flags in other that aren't in self to self.
@@ -2648,9 +2613,9 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
# it's possible to build that configuration with Spack
continue
for conflict_spec, when_list in x.package_class.conflicts.items():
if x.satisfies(conflict_spec):
if x.satisfies(conflict_spec, strict=True):
for when_spec, msg in when_list:
if x.satisfies(when_spec):
if x.satisfies(when_spec, strict=True):
when = when_spec.copy()
when.name = x.name
matches.append((x, conflict_spec, when, msg))
@@ -2702,7 +2667,7 @@ def inject_patches_variant(root):
# Add any patches from the package to the spec.
patches = []
for cond, patch_list in s.package_class.patches.items():
if s.satisfies(cond):
if s.satisfies(cond, strict=True):
for patch in patch_list:
patches.append(patch)
if patches:
@@ -2720,7 +2685,7 @@ def inject_patches_variant(root):
patches = []
for cond, dependency in pkg_deps[dspec.spec.name].items():
for pcond, patch_list in dependency.patches.items():
if dspec.parent.satisfies(cond) and dspec.spec.satisfies(pcond):
if dspec.parent.satisfies(cond, strict=True) and dspec.spec.satisfies(pcond):
patches.extend(patch_list)
if patches:
all_patches = spec_to_patches.setdefault(id(dspec.spec), [])
@@ -2978,7 +2943,7 @@ def _evaluate_dependency_conditions(self, name):
# evaluate when specs to figure out constraints on the dependency.
dep = None
for when_spec, dependency in conditions.items():
if self.satisfies(when_spec):
if self.satisfies(when_spec, strict=True):
if dep is None:
dep = dp.Dependency(self.name, Spec(name), type=())
try:
@@ -3013,7 +2978,7 @@ def _find_provider(self, vdep, provider_index):
# result.
for provider in providers:
for spec in providers:
if spec is not provider and provider.intersects(spec):
if spec is not provider and provider.satisfies(spec):
providers.remove(spec)
# Can't have multiple providers for the same thing in one spec.
if len(providers) > 1:
@@ -3330,15 +3295,9 @@ def update_variant_validate(self, variant_name, values):
pkg_variant.validate_or_raise(self.variants[variant_name], pkg_cls)
def constrain(self, other, deps=True):
"""Intersect self with other in-place. Return True if self changed, False otherwise.
"""Merge the constraints of other with self.
Args:
other: constraint to be added to self
deps: if False, constrain only the root node, otherwise constrain dependencies
as well.
Raises:
spack.error.UnsatisfiableSpecError: when self cannot be constrained
Returns True if the spec changed as a result, False if not.
"""
# If we are trying to constrain a concrete spec, either the spec
# already satisfies the constraint (and the method returns False)
@@ -3418,9 +3377,6 @@ def constrain(self, other, deps=True):
if deps:
changed |= self._constrain_dependencies(other)
if other.concrete and not self.concrete and other.satisfies(self):
self._finalize_concretization()
return changed
def _constrain_dependencies(self, other):
@@ -3433,7 +3389,7 @@ def _constrain_dependencies(self, other):
# TODO: might want more detail than this, e.g. specific deps
# in violation. if this becomes a priority get rid of this
# check and be more specific about what's wrong.
if not other._intersects_dependencies(self):
if not other.satisfies_dependencies(self):
raise UnsatisfiableDependencySpecError(other, self)
if any(not d.name for d in other.traverse(root=False)):
@@ -3495,49 +3451,58 @@ def _autospec(self, spec_like):
return spec_like
return Spec(spec_like)
def intersects(self, other: "Spec", deps: bool = True) -> bool:
"""Return True if there exists at least one concrete spec that matches both
self and other, otherwise False.
def satisfies(self, other, deps=True, strict=False):
"""Determine if this spec satisfies all constraints of another.
This operation is commutative, and if two specs intersect it means that one
can constrain the other.
There are two senses for satisfies, depending on the ``strict``
argument.
Args:
other: spec to be checked for compatibility
deps: if True check compatibility of dependency nodes too, if False only check root
* ``strict=False``: the left-hand side and right-hand side have
non-empty intersection. For example ``zlib`` satisfies
``zlib@1.2.3`` and ``zlib@1.2.3`` satisfies ``zlib``. In this
sense satisfies is a commutative operation: ``x.satisfies(y)``
if and only if ``y.satisfies(x)``.
* ``strict=True``: the left-hand side is a subset of the right-hand
side. For example ``zlib@1.2.3`` satisfies ``zlib``, but ``zlib``
does not satisfy ``zlib@1.2.3``. In this sense satisfies is not
commutative: the left-hand side should be at least as constrained
as the right-hand side.
"""
other = self._autospec(other)
if other.concrete and self.concrete:
return self.dag_hash() == other.dag_hash()
# Optimizations for right-hand side concrete:
# 1. For subset (strict=True) tests this means the left-hand side must
# be the same singleton with identical hash. Notice that package hashes
# can be different for otherwise indistinguishable concrete Spec objects.
# 2. For non-empty intersection (strict=False) we only have a fast path
# when the left-hand side is also concrete.
if other.concrete:
if strict:
return self.concrete and self.dag_hash() == other.dag_hash()
elif self.concrete:
return self.dag_hash() == other.dag_hash()
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
if self.virtual and other.virtual:
# Two virtual specs intersect only if there are providers for both
lhs = spack.repo.path.providers_for(str(self))
rhs = spack.repo.path.providers_for(str(other))
intersection = [s for s in lhs if any(s.intersects(z) for z in rhs)]
return bool(intersection)
# A provider can satisfy a virtual dependency.
elif self.virtual or other.virtual:
virtual_spec, non_virtual_spec = (self, other) if self.virtual else (other, self)
# A concrete provider can satisfy a virtual dependency.
if not self.virtual and other.virtual:
try:
# Here we might get an abstract spec
pkg_cls = spack.repo.path.get_pkg_class(non_virtual_spec.fullname)
pkg = pkg_cls(non_virtual_spec)
pkg_cls = spack.repo.path.get_pkg_class(self.fullname)
pkg = pkg_cls(self)
except spack.repo.UnknownEntityError:
# If we can't get package info on this spec, don't treat
# it as a provider of this vdep.
return False
if pkg.provides(virtual_spec.name):
if pkg.provides(other.name):
for provided, when_specs in pkg.provided.items():
if any(
non_virtual_spec.intersects(when, deps=False) for when in when_specs
self.satisfies(when, deps=False, strict=strict) for when in when_specs
):
if provided.intersects(virtual_spec):
if provided.satisfies(other):
return True
return False
@@ -3548,41 +3513,75 @@ def intersects(self, other: "Spec", deps: bool = True) -> bool:
and self.namespace != other.namespace
):
return False
if self.versions and other.versions:
if not self.versions.intersects(other.versions):
if not self.versions.satisfies(other.versions, strict=strict):
return False
if self.compiler and other.compiler:
if not self.compiler.intersects(other.compiler):
return False
if not self.variants.intersects(other.variants):
elif strict and (self.versions or other.versions):
return False
if self.architecture and other.architecture:
if not self.architecture.intersects(other.architecture):
# None indicates no constraints when not strict.
if self.compiler and other.compiler:
if not self.compiler.satisfies(other.compiler, strict=strict):
return False
elif strict and (other.compiler and not self.compiler):
return False
if not self.compiler_flags.intersects(other.compiler_flags):
var_strict = strict
if (not self.name) or (not other.name):
var_strict = True
if not self.variants.satisfies(other.variants, strict=var_strict):
return False
# Architecture satisfaction is currently just string equality.
# If not strict, None means unconstrained.
if self.architecture and other.architecture:
if not self.architecture.satisfies(other.architecture, strict):
return False
elif strict and (other.architecture and not self.architecture):
return False
if not self.compiler_flags.satisfies(other.compiler_flags, strict=strict):
return False
# If we need to descend into dependencies, do it, otherwise we're done.
if deps:
return self._intersects_dependencies(other)
deps_strict = strict
if self._concrete and not other.name:
# We're dealing with existing specs
deps_strict = True
return self.satisfies_dependencies(other, strict=deps_strict)
else:
return True
def _intersects_dependencies(self, other):
def satisfies_dependencies(self, other, strict=False):
"""
This checks constraints on common dependencies against each other.
"""
other = self._autospec(other)
if not other._dependencies or not self._dependencies:
# one spec *could* eventually satisfy the other
# If there are no constraints to satisfy, we're done.
if not other._dependencies:
return True
if strict:
# if we have no dependencies, we can't satisfy any constraints.
if not self._dependencies:
return False
# use list to prevent double-iteration
selfdeps = list(self.traverse(root=False))
otherdeps = list(other.traverse(root=False))
if not all(any(d.satisfies(dep, strict=True) for d in selfdeps) for dep in otherdeps):
return False
elif not self._dependencies:
# if not strict, this spec *could* eventually satisfy the
# constraints on other.
return True
# Handle first-order constraints directly
for name in self.common_dependencies(other):
if not self[name].intersects(other[name], deps=False):
if not self[name].satisfies(other[name], deps=False):
return False
# For virtual dependencies, we need to dig a little deeper.
@@ -3610,89 +3609,6 @@ def _intersects_dependencies(self, other):
return True
def satisfies(self, other: "Spec", deps: bool = True) -> bool:
"""Return True if all concrete specs matching self also match other, otherwise False.
Args:
other: spec to be satisfied
deps: if True descend to dependencies, otherwise only check root node
"""
other = self._autospec(other)
if other.concrete:
# The left-hand side must be the same singleton with identical hash. Notice that
# package hashes can be different for otherwise indistinguishable concrete Spec
# objects.
return self.concrete and self.dag_hash() == other.dag_hash()
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
# A concrete provider can satisfy a virtual dependency.
if not self.virtual and other.virtual:
try:
# Here we might get an abstract spec
pkg_cls = spack.repo.path.get_pkg_class(self.fullname)
pkg = pkg_cls(self)
except spack.repo.UnknownEntityError:
# If we can't get package info on this spec, don't treat
# it as a provider of this vdep.
return False
if pkg.provides(other.name):
for provided, when_specs in pkg.provided.items():
if any(self.satisfies(when, deps=False) for when in when_specs):
if provided.intersects(other):
return True
return False
# namespaces either match, or other doesn't require one.
if (
other.namespace is not None
and self.namespace is not None
and self.namespace != other.namespace
):
return False
if not self.versions.satisfies(other.versions):
return False
if self.compiler and other.compiler:
if not self.compiler.satisfies(other.compiler):
return False
elif other.compiler and not self.compiler:
return False
if not self.variants.satisfies(other.variants):
return False
if self.architecture and other.architecture:
if not self.architecture.satisfies(other.architecture):
return False
elif other.architecture and not self.architecture:
return False
if not self.compiler_flags.satisfies(other.compiler_flags):
return False
# If we need to descend into dependencies, do it, otherwise we're done.
if not deps:
return True
# If there are no constraints to satisfy, we're done.
if not other._dependencies:
return True
# If we have no dependencies, we can't satisfy any constraints.
if not self._dependencies:
return False
# If we arrived here, then rhs is abstract. At the moment we don't care about the edge
# structure of an abstract DAG - hence the deps=False parameter.
return all(
any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False))
for rhs in other.traverse(root=False)
)
def virtual_dependencies(self):
"""Return list of any virtual deps in this spec."""
return [spec for spec in self.traverse() if spec.virtual]

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import functools
import warnings
import archspec.cpu
@@ -32,6 +33,14 @@ def _impl(self, other):
return _impl
#: Translation table from archspec deprecated names
_DEPRECATED_ARCHSPEC_NAMES = {
"graviton": "cortex_a72",
"graviton2": "neoverse_n1",
"graviton3": "neoverse_v1",
}
class Target(object):
def __init__(self, name, module_name=None):
"""Target models microarchitectures and their compatibility.
@@ -43,6 +52,10 @@ def __init__(self, name, module_name=None):
like Cray (e.g. craype-compiler)
"""
if not isinstance(name, archspec.cpu.Microarchitecture):
if name in _DEPRECATED_ARCHSPEC_NAMES:
msg = "'target={}' is deprecated, use 'target={}' instead"
name, old_name = _DEPRECATED_ARCHSPEC_NAMES[name], name
warnings.warn(msg.format(old_name, name))
name = archspec.cpu.TARGETS.get(name, archspec.cpu.generic_microarchitecture(name))
self.microarchitecture = name
self.module_name = module_name

View File

@@ -183,7 +183,7 @@ def test_optimization_flags_with_custom_versions(
def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constraint_tuple):
architecture = spack.spec.ArchSpec(architecture_tuple)
constraint = spack.spec.ArchSpec(constraint_tuple)
assert not architecture.satisfies(constraint)
assert not architecture.satisfies(constraint, strict=True)
@pytest.mark.parametrize(

View File

@@ -2,13 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import filecmp
import glob
import io
import os
import platform
import sys
import tarfile
import urllib.error
import urllib.request
import urllib.response
@@ -954,81 +952,3 @@ def fake_build_tarball(node, push_url, **kwargs):
bindist.push([spec], push_url, include_root=root, include_dependencies=deps)
assert packages_to_push == expected
def test_reproducible_tarball_is_reproducible(tmpdir):
p = tmpdir.mkdir("prefix")
p.mkdir("bin")
p.mkdir(".spack")
app = p.join("bin", "app")
tarball_1 = str(tmpdir.join("prefix-1.tar.gz"))
tarball_2 = str(tmpdir.join("prefix-2.tar.gz"))
with open(app, "w") as f:
f.write("hello world")
buildinfo = {"metadata": "yes please"}
# Create a tarball with a certain mtime of bin/app
os.utime(app, times=(0, 0))
bindist._do_create_tarball(tarball_1, binaries_dir=p, pkg_dir="pkg", buildinfo=buildinfo)
# Do it another time with different mtime of bin/app
os.utime(app, times=(10, 10))
bindist._do_create_tarball(tarball_2, binaries_dir=p, pkg_dir="pkg", buildinfo=buildinfo)
# They should be bitwise identical:
assert filecmp.cmp(tarball_1, tarball_2, shallow=False)
# Sanity check for contents:
with tarfile.open(tarball_1, mode="r") as f:
for m in f.getmembers():
assert m.uid == m.gid == m.mtime == 0
assert m.uname == m.gname == ""
assert set(f.getnames()) == {
"pkg",
"pkg/bin",
"pkg/bin/app",
"pkg/.spack",
"pkg/.spack/binary_distribution",
}
def test_tarball_normalized_permissions(tmpdir):
p = tmpdir.mkdir("prefix")
p.mkdir("bin")
p.mkdir("share")
p.mkdir(".spack")
app = p.join("bin", "app")
data = p.join("share", "file")
tarball = str(tmpdir.join("prefix.tar.gz"))
# Everyone can write & execute. This should turn into 0o755 when the tarball is
# extracted (on a different system).
with open(app, "w", opener=lambda path, flags: os.open(path, flags, 0o777)) as f:
f.write("hello world")
# User doesn't have execute permissions, but group/world have; this should also
# turn into 0o644 (user read/write, group&world only read).
with open(data, "w", opener=lambda path, flags: os.open(path, flags, 0o477)) as f:
f.write("hello world")
bindist._do_create_tarball(tarball, binaries_dir=p, pkg_dir="pkg", buildinfo={})
with tarfile.open(tarball) as tar:
path_to_member = {member.name: member for member in tar.getmembers()}
# directories should have 0o755
assert path_to_member["pkg"].mode == 0o755
assert path_to_member["pkg/bin"].mode == 0o755
assert path_to_member["pkg/.spack"].mode == 0o755
# executable-by-user files should be 0o755
assert path_to_member["pkg/bin/app"].mode == 0o755
# not-executable-by-user files should be 0o644
assert path_to_member["pkg/share/file"].mode == 0o644

View File

@@ -127,13 +127,13 @@ def test_static_to_shared_library(build_environment):
"linux": (
"/bin/mycc -shared"
" -Wl,--disable-new-dtags"
" -Wl,-soname -Wl,{2} -Wl,--whole-archive {0}"
" -Wl,-soname,{2} -Wl,--whole-archive {0}"
" -Wl,--no-whole-archive -o {1}"
),
"darwin": (
"/bin/mycc -dynamiclib"
" -Wl,--disable-new-dtags"
" -install_name {1} -Wl,-force_load -Wl,{0} -o {1}"
" -install_name {1} -Wl,-force_load,{0} -o {1}"
),
}

View File

@@ -16,7 +16,7 @@
import spack.config
import spack.spec
from spack.paths import build_env_path
from spack.util.environment import SYSTEM_DIRS, set_env
from spack.util.environment import set_env, system_dirs
from spack.util.executable import Executable, ProcessError
#
@@ -160,7 +160,7 @@ def wrapper_environment(working_env):
SPACK_DEBUG_LOG_ID="foo-hashabc",
SPACK_COMPILER_SPEC="gcc@4.4.7",
SPACK_SHORT_SPEC="foo@1.2 arch=linux-rhel6-x86_64 /hashabc",
SPACK_SYSTEM_DIRS=":".join(SYSTEM_DIRS),
SPACK_SYSTEM_DIRS=":".join(system_dirs),
SPACK_CC_RPATH_ARG="-Wl,-rpath,",
SPACK_CXX_RPATH_ARG="-Wl,-rpath,",
SPACK_F77_RPATH_ARG="-Wl,-rpath,",
@@ -342,16 +342,6 @@ def test_fc_flags(wrapper_environment, wrapper_flags):
)
def test_Wl_parsing(wrapper_environment):
check_args(
cc,
["-Wl,-rpath,/a,--enable-new-dtags,-rpath=/b,--rpath", "-Wl,/c"],
[real_cc]
+ target_args
+ ["-Wl,--disable-new-dtags", "-Wl,-rpath,/a", "-Wl,-rpath,/b", "-Wl,-rpath,/c"],
)
def test_dep_rpath(wrapper_environment):
"""Ensure RPATHs for root package are added."""
check_args(cc, test_args, [real_cc] + target_args + common_compile_args)

View File

@@ -408,36 +408,19 @@ def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
touched = ["libdwarf"]
# Make sure we return the correct set of possibly affected specs,
# given a dependent traversal depth and the fact that the touched
# package is libdwarf. Passing traversal depth of None or something
# equal to or larger than the greatest depth in the graph are
# equivalent and result in traversal of all specs from the touched
# package to the root. Passing negative traversal depth results in
# no spec traversals. Passing any other number yields differing
# numbers of possibly affected specs.
# traversing both directions from libdwarf in the graphs depicted
# above (and additionally including dependencies of dependents of
# libdwarf) results in the following possibly affected env specs:
# mpileaks, callpath, dyninst, libdwarf, libelf, and mpich.
# Unaffected specs are hypre and it's dependencies.
full_set = set(["mpileaks", "mpich", "callpath", "dyninst", "libdwarf", "libelf"])
empty_set = set([])
depth_2_set = set(["mpich", "callpath", "dyninst", "libdwarf", "libelf"])
depth_1_set = set(["dyninst", "libdwarf", "libelf"])
depth_0_set = set(["libdwarf", "libelf"])
affected_specs = ci.get_spec_filter_list(e1, touched)
affected_pkg_names = set([s.name for s in affected_specs])
expected_affected_pkg_names = set(
["mpileaks", "mpich", "callpath", "dyninst", "libdwarf", "libelf"]
)
expectations = {
None: full_set,
3: full_set,
100: full_set,
-1: empty_set,
0: depth_0_set,
1: depth_1_set,
2: depth_2_set,
}
for key, val in expectations.items():
affected_specs = ci.get_spec_filter_list(e1, touched, dependent_traverse_depth=key)
affected_pkg_names = set([s.name for s in affected_specs])
print(f"{key}: {affected_pkg_names}")
assert affected_pkg_names == val
assert affected_pkg_names == expected_affected_pkg_names
@pytest.mark.regression("29947")

View File

@@ -1755,12 +1755,6 @@ def test_ci_generate_prune_untouched(
mirror_url
)
)
# Dependency graph rooted at callpath
# callpath -> dyninst -> libelf
# -> libdwarf -> libelf
# -> mpich
with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
outputfile = str(tmpdir.join(".gitlab-ci.yml"))
@@ -1771,96 +1765,19 @@ def fake_compute_affected(r1=None, r2=None):
def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
return False
env_hashes = {}
with ev.read("test") as active_env:
with ev.read("test"):
monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected)
monkeypatch.setattr(ci, "get_stack_changed", fake_stack_changed)
active_env.concretize()
for s in active_env.all_specs():
env_hashes[s.name] = s.dag_hash()
ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
print(contents)
yaml_contents = syaml.load(contents)
generated_hashes = []
for ci_key in yaml_contents.keys():
if ci_key.startswith("(specs)"):
generated_hashes.append(
yaml_contents[ci_key]["variables"]["SPACK_JOB_SPEC_DAG_HASH"]
)
assert env_hashes["archive-files"] not in generated_hashes
for spec_name in ["callpath", "dyninst", "mpich", "libdwarf", "libelf"]:
assert env_hashes[spec_name] in generated_hashes
def test_ci_generate_prune_env_vars(
tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment, monkeypatch
):
"""Make sure environment variables controlling untouched spec
pruning behave as expected."""
os.environ.update(
{
"SPACK_PRUNE_UNTOUCHED": "TRUE", # enables pruning of untouched specs
}
)
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
f.write(
"""\
spack:
specs:
- libelf
gitlab-ci:
mappings:
- match:
- arch=test-debian6-core2
runner-attributes:
tags:
- donotcare
image: donotcare
"""
)
with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
def fake_compute_affected(r1=None, r2=None):
return ["libdwarf"]
def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
return False
expected_depth_param = None
def check_get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
assert dependent_traverse_depth == expected_depth_param
return set()
monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected)
monkeypatch.setattr(ci, "get_stack_changed", fake_stack_changed)
monkeypatch.setattr(ci, "get_spec_filter_list", check_get_spec_filter_list)
expectations = {"-1": -1, "0": 0, "True": None}
for key, val in expectations.items():
with ev.read("test"):
os.environ.update({"SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH": key})
expected_depth_param = val
# Leaving out the mirror in the spack.yaml above means the
# pipeline generation command will fail, pretty much immediately.
# But for this test, we only care how the environment variables
# for pruning are handled, the faster the better. So allow the
# spack command to fail.
ci_cmd("generate", fail_on_error=False)
if "archive-files" in ci_key:
print("Error: archive-files should have been pruned")
assert False
def test_ci_subcommands_without_mirror(

View File

@@ -82,8 +82,8 @@ def test_change_match_spec():
change("--match-spec", "mpileaks@2.2", "mpileaks@2.3")
assert not any(x.intersects("mpileaks@2.2") for x in e.user_specs)
assert any(x.intersects("mpileaks@2.3") for x in e.user_specs)
assert not any(x.satisfies("mpileaks@2.2") for x in e.user_specs)
assert any(x.satisfies("mpileaks@2.3") for x in e.user_specs)
def test_change_multiple_matches():
@@ -97,8 +97,8 @@ def test_change_multiple_matches():
change("--match-spec", "mpileaks", "-a", "mpileaks%gcc")
assert all(x.intersects("%gcc") for x in e.user_specs if x.name == "mpileaks")
assert any(x.intersects("%clang") for x in e.user_specs if x.name == "libelf")
assert all(x.satisfies("%gcc") for x in e.user_specs if x.name == "mpileaks")
assert any(x.satisfies("%clang") for x in e.user_specs if x.name == "libelf")
def test_env_add_virtual():
@@ -111,7 +111,7 @@ def test_env_add_virtual():
hashes = e.concretized_order
assert len(hashes) == 1
spec = e.specs_by_hash[hashes[0]]
assert spec.intersects("mpi")
assert spec.satisfies("mpi")
def test_env_add_nonexistant_fails():
@@ -687,7 +687,7 @@ def test_env_with_config():
with e:
e.concretize()
assert any(x.intersects("mpileaks@2.2") for x in e._get_environment_specs())
assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
def test_with_config_bad_include():
@@ -1630,9 +1630,9 @@ def test_stack_concretize_extraneous_deps(tmpdir, config, mock_packages):
assert concrete.concrete
assert not user.concrete
if user.name == "libelf":
assert not concrete.satisfies("^mpi")
assert not concrete.satisfies("^mpi", strict=True)
elif user.name == "mpileaks":
assert concrete.satisfies("^mpi")
assert concrete.satisfies("^mpi", strict=True)
def test_stack_concretize_extraneous_variants(tmpdir, config, mock_packages):

View File

@@ -16,6 +16,8 @@
from spack.main import SpackCommand
from spack.spec import Spec
is_windows = sys.platform == "win32"
@pytest.fixture
def executables_found(monkeypatch):
@@ -37,7 +39,7 @@ def _win_exe_ext():
def define_plat_exe(exe):
if sys.platform == "win32":
if is_windows:
exe += ".bat"
return exe

View File

@@ -276,7 +276,7 @@ def test_install_commit(mock_git_version_info, install_mockery, mock_packages, m
assert filename in installed
with open(spec.prefix.bin.join(filename), "r") as f:
content = f.read().strip()
assert content == "[0]" # contents are weird for another test
assert content == "[]" # contents are weird for another test
def test_install_overwrite_multiple(

View File

@@ -7,6 +7,7 @@
from spack.main import SpackCommand
is_windows = sys.platform == "win32"
resource = SpackCommand("resource")
#: these are hashes used in mock packages
@@ -22,7 +23,7 @@
"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c",
"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730",
]
if sys.platform != "win32"
if not is_windows
else [
"abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
"1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd",
@@ -67,7 +68,7 @@ def test_resource_list_only_hashes(mock_packages, capfd):
def test_resource_show(mock_packages, capfd):
test_hash = (
"c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8"
if sys.platform != "win32"
if not is_windows
else "3c5b65abcd6a3b2c714dbf7c31ff65fe3748a1adc371f030c283007ca5534f11"
)
with capfd.disabled():

View File

@@ -14,6 +14,8 @@
import spack.extensions
import spack.main
is_windows = sys.platform == "win32"
class Extension:
"""Helper class to simplify the creation of simple command extension
@@ -272,7 +274,7 @@ def test_variable_in_extension_path(config, working_env):
os.environ["_MY_VAR"] = os.path.join("my", "var")
ext_paths = [os.path.join("~", "${_MY_VAR}", "spack-extension-1")]
# Home env variable is USERPROFILE on Windows
home_env = "USERPROFILE" if sys.platform == "win32" else "HOME"
home_env = "USERPROFILE" if is_windows else "HOME"
expected_ext_paths = [
os.path.join(os.environ[home_env], os.environ["_MY_VAR"], "spack-extension-1")
]

View File

@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import os
import sys
@@ -26,6 +25,8 @@
from spack.spec import Spec
from spack.version import ver
is_windows = sys.platform == "win32"
def check_spec(abstract, concrete):
if abstract.versions.concrete:
@@ -293,11 +294,11 @@ def test_concretize_with_provides_when(self):
we ask for some advanced version.
"""
repo = spack.repo.path
assert not any(s.intersects("mpich2@:1.0") for s in repo.providers_for("mpi@2.1"))
assert not any(s.intersects("mpich2@:1.1") for s in repo.providers_for("mpi@2.2"))
assert not any(s.intersects("mpich@:1") for s in repo.providers_for("mpi@2"))
assert not any(s.intersects("mpich@:1") for s in repo.providers_for("mpi@3"))
assert not any(s.intersects("mpich2") for s in repo.providers_for("mpi@3"))
assert not any(s.satisfies("mpich2@:1.0") for s in repo.providers_for("mpi@2.1"))
assert not any(s.satisfies("mpich2@:1.1") for s in repo.providers_for("mpi@2.2"))
assert not any(s.satisfies("mpich@:1") for s in repo.providers_for("mpi@2"))
assert not any(s.satisfies("mpich@:1") for s in repo.providers_for("mpi@3"))
assert not any(s.satisfies("mpich2") for s in repo.providers_for("mpi@3"))
def test_provides_handles_multiple_providers_of_same_version(self):
""" """
@@ -331,24 +332,6 @@ def test_compiler_flags_from_compiler_and_dependent(self):
for spec in [client, cmake]:
assert spec.compiler_flags["cflags"] == ["-O3", "-g"]
def test_compiler_flags_differ_identical_compilers(self):
# Correct arch to use test compiler that has flags
spec = Spec("a %clang@12.2.0 platform=test os=fe target=fe")
# Get the compiler that matches the spec (
compiler = spack.compilers.compiler_for_spec("clang@12.2.0", spec.architecture)
# Clear cache for compiler config since it has its own cache mechanism outside of config
spack.compilers._cache_config_file = []
# Configure spack to have two identical compilers with different flags
default_dict = spack.compilers._to_dict(compiler)
different_dict = copy.deepcopy(default_dict)
different_dict["compiler"]["flags"] = {"cflags": "-O2"}
with spack.config.override("compilers", [different_dict]):
spec.concretize()
assert spec.satisfies("cflags=-O2")
def test_concretize_compiler_flag_propagate(self):
spec = Spec("hypre cflags=='-g' ^openblas")
spec.concretize()
@@ -1155,7 +1138,7 @@ def test_custom_compiler_version(self):
def test_all_patches_applied(self):
uuidpatch = (
"a60a42b73e03f207433c5579de207c6ed61d58e4d12dd3b5142eb525728d89ea"
if sys.platform != "win32"
if not is_windows
else "d0df7988457ec999c148a4a2af25ce831bfaad13954ba18a4446374cb0aef55e"
)
localpatch = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
@@ -1462,7 +1445,7 @@ def test_concrete_specs_are_not_modified_on_reuse(
with spack.config.override("concretizer:reuse", True):
s = spack.spec.Spec(spec_str).concretized()
assert s.installed is expect_installed
assert s.satisfies(spec_str)
assert s.satisfies(spec_str, strict=True)
@pytest.mark.regression("26721,19736")
def test_sticky_variant_in_package(self):

View File

@@ -54,6 +54,8 @@
from spack.util.pattern import Bunch
from spack.util.web import FetchError
is_windows = sys.platform == "win32"
def ensure_configuration_fixture_run_before(request):
"""Ensure that fixture mutating the configuration run before the one where
@@ -157,9 +159,7 @@ def latest_commit():
return git("rev-list", "-n1", "HEAD", output=str, error=str).strip()
# Add two commits on main branch
# A commit without a previous version counts as "0"
write_file(filename, "[0]")
write_file(filename, "[]")
git("add", filename)
commit("first commit")
commits.append(latest_commit())
@@ -621,7 +621,7 @@ def ensure_debug(monkeypatch):
tty.set_debug(current_debug_level)
@pytest.fixture(autouse=sys.platform == "win32", scope="session")
@pytest.fixture(autouse=is_windows, scope="session")
def platform_config():
spack.config.add_default_platform_scope(spack.platforms.real_host().name)
@@ -633,7 +633,7 @@ def default_config():
This ensures we can test the real default configuration without having
tests fail when the user overrides the defaults that we test against."""
defaults_path = os.path.join(spack.paths.etc_path, "defaults")
if sys.platform == "win32":
if is_windows:
defaults_path = os.path.join(defaults_path, "windows")
with spack.config.use_configuration(defaults_path) as defaults_config:
yield defaults_config
@@ -690,7 +690,7 @@ def configuration_dir(tmpdir_factory, linux_os):
tmpdir.ensure("user", dir=True)
# Slightly modify config.yaml and compilers.yaml
if sys.platform == "win32":
if is_windows:
locks = False
else:
locks = True
@@ -1675,11 +1675,11 @@ def mock_executable(tmpdir):
"""
import jinja2
shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF"
shebang = "#!/bin/sh\n" if not is_windows else "@ECHO OFF"
def _factory(name, output, subdir=("bin",)):
f = tmpdir.ensure(*subdir, dir=True).join(name)
if sys.platform == "win32":
if is_windows:
f += ".bat"
t = jinja2.Template("{{ shebang }}{{ output }}\n")
f.write(t.render(shebang=shebang, output=output))

View File

@@ -33,6 +33,8 @@
from spack.schema.database_index import schema
from spack.util.executable import Executable
is_windows = sys.platform == "win32"
pytestmark = pytest.mark.db
@@ -449,7 +451,7 @@ def test_005_db_exists(database):
lock_file = os.path.join(database.root, ".spack-db", "lock")
assert os.path.exists(str(index_file))
# Lockfiles not currently supported on Windows
if sys.platform != "win32":
if not is_windows:
assert os.path.exists(str(lock_file))
with open(index_file) as fd:

View File

@@ -86,13 +86,13 @@ def test_env_change_spec_in_definition(tmpdir, mock_packages, config, mutable_mo
e.concretize()
e.write()
assert any(x.intersects("mpileaks@2.1%gcc") for x in e.user_specs)
assert any(x.satisfies("mpileaks@2.1%gcc") for x in e.user_specs)
e.change_existing_spec(spack.spec.Spec("mpileaks@2.2"), list_name="desired_specs")
e.write()
assert any(x.intersects("mpileaks@2.2%gcc") for x in e.user_specs)
assert not any(x.intersects("mpileaks@2.1%gcc") for x in e.user_specs)
assert any(x.satisfies("mpileaks@2.2%gcc") for x in e.user_specs)
assert not any(x.satisfies("mpileaks@2.1%gcc") for x in e.user_specs)
def test_env_change_spec_in_matrix_raises_error(

View File

@@ -230,6 +230,16 @@ def test_path_manipulation(env):
assert os.environ["PATH_LIST_WITH_DUPLICATES"].count("/duplicate") == 1
def test_extra_arguments(env):
"""Tests that we can attach extra arguments to any command."""
env.set("A", "dummy value", who="Pkg1")
for x in env:
assert "who" in x.args
env.apply_modifications()
assert "dummy value" == os.environ["A"]
def test_extend(env):
"""Tests that we can construct a list of environment modifications
starting from another list.

View File

@@ -24,6 +24,8 @@
import spack.store
import spack.util.lock as lk
is_windows = sys.platform == "win32"
def _mock_repo(root, namespace):
"""Create an empty repository at the specified root
@@ -526,7 +528,7 @@ def _repoerr(repo, name):
# The call to install_tree will raise the exception since not mocking
# creation of dependency package files within *install* directories.
with pytest.raises(IOError, match=path if sys.platform != "win32" else ""):
with pytest.raises(IOError, match=path if not is_windows else ""):
inst.dump_packages(spec, path)
# Now try the error path, which requires the mock directory structure
@@ -877,7 +879,7 @@ def _chgrp(path, group, follow_symlinks=True):
metadatadir = spack.store.layout.metadata_path(spec)
# Regex matching with Windows style paths typically fails
# so we skip the match check here
if sys.platform == "win32":
if is_windows:
metadatadir = None
# Should fail with a "not a directory" error
with pytest.raises(OSError, match=metadatadir):

View File

@@ -11,8 +11,9 @@
import spack.paths
from spack.compiler import _parse_non_system_link_dirs
is_windows = sys.platform == "win32"
drive = ""
if sys.platform == "win32":
if is_windows:
match = re.search(r"[A-Za-z]:", spack.paths.test_path)
if match:
drive = match.group()
@@ -209,7 +210,7 @@ def test_obscure_parsing_rules():
]
# TODO: add a comment explaining why this happens
if sys.platform == "win32":
if is_windows:
paths.remove(os.path.join(root, "second", "path"))
check_link_paths("obscure-parsing-rules.txt", paths)

View File

@@ -13,6 +13,8 @@
import spack.paths
is_windows = sys.platform == "win32"
@pytest.fixture()
def library_list():
@@ -26,7 +28,7 @@ def library_list():
"/dir3/libz.so",
"libmpi.so.20.10.1", # shared object libraries may be versioned
]
if sys.platform != "win32"
if not is_windows
else [
"/dir1/liblapack.lib",
"/dir2/libpython3.6.dll",
@@ -57,10 +59,10 @@ def header_list():
# TODO: Remove below when llnl.util.filesystem.find_libraries becomes spec aware
plat_static_ext = "lib" if sys.platform == "win32" else "a"
plat_static_ext = "lib" if is_windows else "a"
plat_shared_ext = "dll" if sys.platform == "win32" else "so"
plat_shared_ext = "dll" if is_windows else "so"
plat_apple_shared_ext = "dylib"
@@ -76,8 +78,7 @@ def test_joined_and_str(self, library_list):
expected = " ".join(
[
"/dir1/liblapack.%s" % plat_static_ext,
"/dir2/libpython3.6.%s"
% (plat_apple_shared_ext if sys.platform != "win32" else "dll"),
"/dir2/libpython3.6.%s" % (plat_apple_shared_ext if not is_windows else "dll"),
"/dir1/libblas.%s" % plat_static_ext,
"/dir3/libz.%s" % plat_shared_ext,
"libmpi.%s.20.10.1" % plat_shared_ext,
@@ -92,8 +93,7 @@ def test_joined_and_str(self, library_list):
expected = ";".join(
[
"/dir1/liblapack.%s" % plat_static_ext,
"/dir2/libpython3.6.%s"
% (plat_apple_shared_ext if sys.platform != "win32" else "dll"),
"/dir2/libpython3.6.%s" % (plat_apple_shared_ext if not is_windows else "dll"),
"/dir1/libblas.%s" % plat_static_ext,
"/dir3/libz.%s" % plat_shared_ext,
"libmpi.%s.20.10.1" % plat_shared_ext,

View File

@@ -62,7 +62,8 @@
import llnl.util.multiproc as mp
from llnl.util.filesystem import getuid, touch
if sys.platform != "win32":
is_windows = sys.platform == "win32"
if not is_windows:
import fcntl
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@@ -126,7 +127,7 @@ def make_readable(*paths):
# stat.S_IREAD constants or a corresponding integer value). All other
# bits are ignored."
for path in paths:
if sys.platform != "win32":
if not is_windows:
mode = 0o555 if os.path.isdir(path) else 0o444
else:
mode = stat.S_IREAD
@@ -135,7 +136,7 @@ def make_readable(*paths):
def make_writable(*paths):
for path in paths:
if sys.platform != "win32":
if not is_windows:
mode = 0o755 if os.path.isdir(path) else 0o744
else:
mode = stat.S_IWRITE
@@ -615,7 +616,7 @@ def test_read_lock_read_only_dir_writable_lockfile(lock_dir, lock_path):
pass
@pytest.mark.skipif(False if sys.platform == "win32" else getuid() == 0, reason="user is root")
@pytest.mark.skipif(False if is_windows else getuid() == 0, reason="user is root")
def test_read_lock_no_lockfile(lock_dir, lock_path):
"""read-only directory, no lockfile (so can't create)."""
with read_only(lock_dir):

View File

@@ -237,7 +237,7 @@ def test_guess_core_compilers(self, factory, module_configuration, monkeypatch):
module_configuration("missing_core_compilers")
# Our mock paths must be detected as system paths
monkeypatch.setattr(spack.util.environment, "SYSTEM_DIRS", ["/path/to"])
monkeypatch.setattr(spack.util.environment, "system_dirs", ["/path/to"])
# We don't want to really write into user configuration
# when running tests

View File

@@ -16,12 +16,6 @@
from spack.version import VersionChecksumError
def pkg_factory(name):
"""Return a package object tied to an abstract spec"""
pkg_cls = spack.repo.path.get_pkg_class(name)
return pkg_cls(Spec(name))
@pytest.mark.usefixtures("config", "mock_packages")
class TestPackage(object):
def test_load_package(self):
@@ -190,7 +184,8 @@ def test_url_for_version_with_only_overrides_with_gaps(mock_packages, config):
)
def test_fetcher_url(spec_str, expected_type, expected_url):
"""Ensure that top-level git attribute can be used as a default."""
fetcher = spack.fetch_strategy.for_package_version(pkg_factory(spec_str), "1.0")
s = Spec(spec_str).concretized()
fetcher = spack.fetch_strategy.for_package_version(s.package, "1.0")
assert isinstance(fetcher, expected_type)
assert fetcher.url == expected_url
@@ -209,7 +204,8 @@ def test_fetcher_url(spec_str, expected_type, expected_url):
def test_fetcher_errors(spec_str, version_str, exception_type):
"""Verify that we can't extrapolate versions for non-URL packages."""
with pytest.raises(exception_type):
spack.fetch_strategy.for_package_version(pkg_factory(spec_str), version_str)
s = Spec(spec_str).concretized()
spack.fetch_strategy.for_package_version(s.package, version_str)
@pytest.mark.usefixtures("mock_packages", "config")
@@ -224,12 +220,11 @@ def test_fetcher_errors(spec_str, version_str, exception_type):
)
def test_git_url_top_level_url_versions(version_str, expected_url, digest):
"""Test URL fetch strategy inference when url is specified with git."""
s = Spec("git-url-top-level").concretized()
# leading 62 zeros of sha256 hash
leading_zeros = "0" * 62
fetcher = spack.fetch_strategy.for_package_version(
pkg_factory("git-url-top-level"), version_str
)
fetcher = spack.fetch_strategy.for_package_version(s.package, version_str)
assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
assert fetcher.url == expected_url
assert fetcher.digest == leading_zeros + digest
@@ -250,9 +245,9 @@ def test_git_url_top_level_url_versions(version_str, expected_url, digest):
)
def test_git_url_top_level_git_versions(version_str, tag, commit, branch):
"""Test git fetch strategy inference when url is specified with git."""
fetcher = spack.fetch_strategy.for_package_version(
pkg_factory("git-url-top-level"), version_str
)
s = Spec("git-url-top-level").concretized()
fetcher = spack.fetch_strategy.for_package_version(s.package, version_str)
assert isinstance(fetcher, spack.fetch_strategy.GitFetchStrategy)
assert fetcher.url == "https://example.com/some/git/repo"
assert fetcher.tag == tag
@@ -264,8 +259,9 @@ def test_git_url_top_level_git_versions(version_str, tag, commit, branch):
@pytest.mark.parametrize("version_str", ["1.0", "1.1", "1.2", "1.3"])
def test_git_url_top_level_conflicts(version_str):
"""Test git fetch strategy inference when url is specified with git."""
s = Spec("git-url-top-level").concretized()
with pytest.raises(spack.fetch_strategy.FetcherConflict):
spack.fetch_strategy.for_package_version(pkg_factory("git-url-top-level"), version_str)
spack.fetch_strategy.for_package_version(s.package, version_str)
def test_rpath_args(mutable_database):
@@ -305,8 +301,9 @@ def test_bundle_patch_directive(mock_directive_bundle, clear_directive_functions
)
def test_fetch_options(version_str, digest_end, extra_options):
"""Test fetch options inference."""
s = Spec("fetch-options").concretized()
leading_zeros = "000000000000000000000000000000"
fetcher = spack.fetch_strategy.for_package_version(pkg_factory("fetch-options"), version_str)
fetcher = spack.fetch_strategy.for_package_version(s.package, version_str)
assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
assert fetcher.digest == leading_zeros + digest_end
assert fetcher.extra_options == extra_options

View File

@@ -20,6 +20,7 @@
from spack.spec import Spec
from spack.stage import Stage
from spack.util.executable import Executable
from spack.util.path import is_windows
# various sha256 sums (using variables for legibility)
# many file based shas will differ between Windows and other platforms
@@ -28,22 +29,22 @@
# files with contents 'foo', 'bar', and 'baz'
foo_sha256 = (
"b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c"
if sys.platform != "win32"
if not is_windows
else "bf874c7dd3a83cf370fdc17e496e341de06cd596b5c66dbf3c9bb7f6c139e3ee"
)
bar_sha256 = (
"7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730"
if sys.platform != "win32"
if not is_windows
else "556ddc69a75d0be0ecafc82cd4657666c8063f13d762282059c39ff5dbf18116"
)
baz_sha256 = (
"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c"
if sys.platform != "win32"
if not is_windows
else "d30392e66c636a063769cbb1db08cd3455a424650d4494db6379d73ea799582b"
)
biz_sha256 = (
"a69b288d7393261e613c276c6d38a01461028291f6e381623acc58139d01f54d"
if sys.platform != "win32"
if not is_windows
else "2f2b087a8f84834fd03d4d1d5b43584011e869e4657504ef3f8b0a672a5c222e"
)
@@ -55,7 +56,7 @@
platform_url_sha = (
"252c0af58be3d90e5dc5e0d16658434c9efa5d20a5df6c10bf72c2d77f780866"
if sys.platform != "win32"
if not is_windows
else "ecf44a8244a486e9ef5f72c6cb622f99718dcd790707ac91af0b8c9a4ab7a2bb"
)
@@ -159,17 +160,17 @@ def test_patch_order(mock_packages, config):
mid2_sha256 = (
"mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234"
if sys.platform != "win32"
if not is_windows
else "mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234"
)
mid1_sha256 = (
"0b62284961dab49887e31319843431ee5b037382ac02c4fe436955abef11f094"
if sys.platform != "win32"
if not is_windows
else "aeb16c4dec1087e39f2330542d59d9b456dd26d791338ae6d80b6ffd10c89dfa"
)
top_sha256 = (
"f7de2947c64cb6435e15fb2bef359d1ed5f6356b2aebb7b20535e3772904e6db"
if sys.platform != "win32"
if not is_windows
else "ff34cb21271d16dbf928374f610bb5dd593d293d311036ddae86c4846ff79070"
)
@@ -218,7 +219,7 @@ def test_patched_dependency(mock_packages, config, install_mockery, mock_fetch):
# make sure the patch makes it into the dependency spec
t_sha = (
"c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8"
if sys.platform != "win32"
if not is_windows
else "3c5b65abcd6a3b2c714dbf7c31ff65fe3748a1adc371f030c283007ca5534f11"
)
assert (t_sha,) == spec["libelf"].variants["patches"].value

File diff suppressed because it is too large Load Diff

View File

@@ -1040,44 +1040,18 @@ def test_compare_abstract_specs():
assert a <= b or b < a
@pytest.mark.parametrize(
"lhs_str,rhs_str,expected",
[
# Git shasum vs generic develop
(
f"develop-branch-version@git.{'a' * 40}=develop",
"develop-branch-version@develop",
(True, True, False),
),
# Two different shasums
(
f"develop-branch-version@git.{'a' * 40}=develop",
f"develop-branch-version@git.{'b' * 40}=develop",
(False, False, False),
),
# Git shasum vs. git tag
(
f"develop-branch-version@git.{'a' * 40}=develop",
"develop-branch-version@git.0.2.15=develop",
(False, False, False),
),
# Git tag vs. generic develop
(
"develop-branch-version@git.0.2.15=develop",
"develop-branch-version@develop",
(True, True, False),
),
],
)
def test_git_ref_spec_equivalences(mock_packages, lhs_str, rhs_str, expected):
lhs = SpecParser(lhs_str).next_spec()
rhs = SpecParser(rhs_str).next_spec()
intersect, lhs_sat_rhs, rhs_sat_lhs = expected
def test_git_ref_spec_equivalences(mock_packages):
spec_hash_fmt = "develop-branch-version@git.{hash}=develop"
s1 = SpecParser(spec_hash_fmt.format(hash="a" * 40)).next_spec()
s2 = SpecParser(spec_hash_fmt.format(hash="b" * 40)).next_spec()
s3 = SpecParser("develop-branch-version@git.0.2.15=develop").next_spec()
s_no_git = SpecParser("develop-branch-version@develop").next_spec()
assert lhs.intersects(rhs) is intersect
assert rhs.intersects(lhs) is intersect
assert lhs.satisfies(rhs) is lhs_sat_rhs
assert rhs.satisfies(lhs) is rhs_sat_lhs
assert s1.satisfies(s_no_git)
assert s2.satisfies(s_no_git)
assert not s_no_git.satisfies(s1)
assert not s2.satisfies(s1)
assert not s3.satisfies(s1)
@pytest.mark.regression("32471")

View File

@@ -11,6 +11,8 @@
import spack.util.environment as envutil
is_windows = sys.platform == "win32"
@pytest.fixture()
def prepare_environment_for_tests():
@@ -21,14 +23,14 @@ def prepare_environment_for_tests():
def test_is_system_path():
sys_path = "C:\\Users" if sys.platform == "win32" else "/usr/bin"
sys_path = "C:\\Users" if is_windows else "/usr/bin"
assert envutil.is_system_path(sys_path)
assert not envutil.is_system_path("/nonsense_path/bin")
assert not envutil.is_system_path("")
assert not envutil.is_system_path(None)
if sys.platform == "win32":
if is_windows:
test_paths = [
"C:\\Users",
"C:\\",
@@ -49,7 +51,7 @@ def test_is_system_path():
def test_filter_system_paths():
nonsense_prefix = "C:\\nonsense_path" if sys.platform == "win32" else "/nonsense_path"
nonsense_prefix = "C:\\nonsense_path" if is_windows else "/nonsense_path"
expected = [p for p in test_paths if p.startswith(nonsense_prefix)]
filtered = envutil.filter_system_paths(test_paths)
assert expected == filtered

View File

@@ -14,11 +14,13 @@
import spack.util.executable as ex
from spack.hooks.sbang import filter_shebangs_in_directory
is_windows = sys.platform == "win32"
def test_read_unicode(tmpdir, working_env):
script_name = "print_unicode.py"
# read the unicode back in and see whether things work
if sys.platform == "win32":
if is_windows:
script = ex.Executable("%s %s" % (sys.executable, script_name))
else:
script = ex.Executable("./%s" % script_name)

View File

@@ -13,6 +13,9 @@
import spack.config
import spack.util.path as sup
is_windows = sys.platform == "win32"
#: Some lines with lots of placeholders
padded_lines = [
"==> [2021-06-23-15:59:05.020387] './configure' '--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_pla/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga", # noqa: E501
@@ -33,7 +36,7 @@ def test_sanitze_file_path(tmpdir):
"""Test filtering illegal characters out of potential file paths"""
# *nix illegal files characters are '/' and none others
illegal_file_path = str(tmpdir) + "//" + "abcdefghi.txt"
if sys.platform == "win32":
if is_windows:
# Windows has a larger set of illegal characters
illegal_file_path = os.path.join(tmpdir, 'a<b>cd?e:f"g|h*i.txt')
real_path = sup.sanitize_file_path(illegal_file_path)
@@ -43,7 +46,7 @@ def test_sanitze_file_path(tmpdir):
# This class pertains to path string padding manipulation specifically
# which is used for binary caching. This functionality is not supported
# on Windows as of yet.
@pytest.mark.skipif(sys.platform == "win32", reason="Padding funtionality unsupported on Windows")
@pytest.mark.skipif(is_windows, reason="Padding funtionality unsupported on Windows")
class TestPathPadding:
@pytest.mark.parametrize("padded,fixed", zip(padded_lines, fixed_lines))
def test_padding_substitution(self, padded, fixed):
@@ -119,7 +122,7 @@ def test_path_debug_padded_filter(debug, monkeypatch):
string = fmt.format(prefix, os.sep, os.sep.join([sup.SPACK_PATH_PADDING_CHARS] * 2), suffix)
expected = (
fmt.format(prefix, os.sep, "[padded-to-{0}-chars]".format(72), suffix)
if debug <= 1 and sys.platform != "win32"
if debug <= 1 and not is_windows
else string
)

View File

@@ -638,11 +638,11 @@ def test_satisfies_and_constrain(self):
b["foobar"] = SingleValuedVariant("foobar", "fee")
b["shared"] = BoolValuedVariant("shared", True)
assert a.intersects(b)
assert b.intersects(a)
assert not a.satisfies(b)
assert not b.satisfies(a)
assert b.satisfies(a)
assert not a.satisfies(b, strict=True)
assert not b.satisfies(a, strict=True)
# foo=bar,baz foobar=fee feebar=foo shared=True
c = VariantMap(None)

View File

@@ -600,7 +600,6 @@ def test_versions_from_git(git, mock_git_version_info, monkeypatch, mock_package
with working_dir(repo_path):
git("checkout", commit)
with open(os.path.join(repo_path, filename), "r") as f:
expected = f.read()
@@ -608,38 +607,30 @@ def test_versions_from_git(git, mock_git_version_info, monkeypatch, mock_package
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.parametrize(
"commit_idx,expected_satisfies,expected_not_satisfies",
[
# Spec based on earliest commit
(-1, ("@:0",), ("@1.0",)),
# Spec based on second commit (same as version 1.0)
(-2, ("@1.0",), ("@1.1:",)),
# Spec based on 4th commit (in timestamp order)
(-4, ("@1.1", "@1.0:1.2"), tuple()),
],
)
def test_git_hash_comparisons(
mock_git_version_info,
install_mockery,
mock_packages,
monkeypatch,
commit_idx,
expected_satisfies,
expected_not_satisfies,
):
def test_git_hash_comparisons(mock_git_version_info, install_mockery, mock_packages, monkeypatch):
"""Check that hashes compare properly to versions"""
repo_path, filename, commits = mock_git_version_info
monkeypatch.setattr(
spack.package_base.PackageBase, "git", "file://%s" % repo_path, raising=False
)
spec = spack.spec.Spec(f"git-test-commit@{commits[commit_idx]}").concretized()
for item in expected_satisfies:
assert spec.satisfies(item)
# Spec based on earliest commit
spec0 = spack.spec.Spec("git-test-commit@%s" % commits[-1])
spec0.concretize()
assert spec0.satisfies("@:0")
assert not spec0.satisfies("@1.0")
for item in expected_not_satisfies:
assert not spec.satisfies(item)
# Spec based on second commit (same as version 1.0)
spec1 = spack.spec.Spec("git-test-commit@%s" % commits[-2])
spec1.concretize()
assert spec1.satisfies("@1.0")
assert not spec1.satisfies("@1.1:")
# Spec based on 4th commit (in timestamp order)
spec4 = spack.spec.Spec("git-test-commit@%s" % commits[-4])
spec4.concretize()
assert spec4.satisfies("@1.1")
assert spec4.satisfies("@1.0:1.2")
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@@ -747,27 +738,3 @@ def test_git_ref_can_be_assigned_a_version(vstring, eq_vstring, is_commit):
assert v.is_ref
assert not v._ref_lookup
assert v_equivalent.version == v.ref_version
@pytest.mark.parametrize(
"lhs_str,rhs_str,expected",
[
# VersionBase
("4.7.3", "4.7.3", (True, True, True)),
("4.7.3", "4.7", (True, True, False)),
("4.7.3", "4", (True, True, False)),
("4.7.3", "4.8", (False, False, False)),
# GitVersion
(f"git.{'a' * 40}=develop", "develop", (True, True, False)),
(f"git.{'a' * 40}=develop", f"git.{'a' * 40}=develop", (True, True, True)),
(f"git.{'a' * 40}=develop", f"git.{'b' * 40}=develop", (False, False, False)),
],
)
def test_version_intersects_satisfies_semantic(lhs_str, rhs_str, expected):
lhs, rhs = ver(lhs_str), ver(rhs_str)
intersect, lhs_sat_rhs, rhs_sat_lhs = expected
assert lhs.intersects(rhs) is intersect
assert lhs.intersects(rhs) is rhs.intersects(lhs)
assert lhs.satisfies(rhs) is lhs_sat_rhs
assert rhs.satisfies(lhs) is rhs_sat_lhs

View File

@@ -28,6 +28,8 @@
ALLOWED_SINGLE_EXT_ARCHIVE_TYPES = PRE_EXTS + EXTS + NOTAR_EXTS
is_windows = sys.platform == "win32"
try:
import bz2 # noqa
@@ -156,7 +158,7 @@ def _unzip(archive_file):
archive_file (str): absolute path of the file to be decompressed
"""
extracted_file = os.path.basename(strip_extension(archive_file, "zip"))
if sys.platform == "win32":
if is_windows:
return _untar(archive_file)
else:
exe = "unzip"
@@ -168,7 +170,7 @@ def _unzip(archive_file):
def _unZ(archive_file):
if sys.platform == "win32":
if is_windows:
result = _7zip(archive_file)
else:
result = _system_gunzip(archive_file)
@@ -187,7 +189,7 @@ def _lzma_decomp(archive_file):
with lzma.open(archive_file) as lar:
shutil.copyfileobj(lar, ar)
else:
if sys.platform == "win32":
if is_windows:
return _7zip(archive_file)
else:
return _xz(archive_file)
@@ -225,7 +227,7 @@ def _xz(archive_file):
"""Decompress lzma compressed .xz files via xz command line
tool. Available only on Unix
"""
if sys.platform == "win32":
if is_windows:
raise RuntimeError("XZ tool unavailable on Windows")
decompressed_file = os.path.basename(strip_extension(archive_file, "xz"))
working_dir = os.getcwd()
@@ -308,7 +310,7 @@ def decompressor_for(path, extension=None):
# Catch tar.xz/tar.Z files here for Windows
# as the tar utility on Windows cannot handle such
# compression types directly
if ("xz" in extension or "Z" in extension) and sys.platform == "win32":
if ("xz" in extension or "Z" in extension) and is_windows:
return _win_compressed_tarball_handler
return _untar

File diff suppressed because it is too large Load Diff

View File

@@ -21,6 +21,8 @@
import spack.error
import spack.paths
is_windows = sys.platform == "win32"
class Lock(llnl.util.lock.Lock):
"""Lock that can be disabled.
@@ -32,7 +34,7 @@ class Lock(llnl.util.lock.Lock):
def __init__(self, *args, **kwargs):
super(Lock, self).__init__(*args, **kwargs)
self._enable = spack.config.get("config:locks", sys.platform != "win32")
self._enable = spack.config.get("config:locks", not is_windows)
def _lock(self, op, timeout=0):
if self._enable:

View File

@@ -178,7 +178,7 @@ def visit_FunctionDef(self, func):
conditions.append(None)
else:
# Check statically whether spec satisfies the condition
conditions.append(self.spec.satisfies(cond_spec))
conditions.append(self.spec.satisfies(cond_spec, strict=True))
except AttributeError:
# In this case the condition for the 'when' decorator is

View File

@@ -22,6 +22,8 @@
import spack.util.spack_yaml as syaml
is_windows = sys.platform == "win32"
__all__ = ["substitute_config_variables", "substitute_path_variables", "canonicalize_path"]
@@ -151,7 +153,7 @@ def sanitize_file_path(pth):
# instances of illegal characters on join
pth_cmpnts = pth.split(os.path.sep)
if sys.platform == "win32":
if is_windows:
drive_match = r"[a-zA-Z]:"
is_abs = bool(re.match(drive_match, pth_cmpnts[0]))
drive = pth_cmpnts[0] + os.path.sep if is_abs else ""
@@ -208,7 +210,7 @@ def path_filter_caller(*args, **kwargs):
def get_system_path_max():
# Choose a conservative default
sys_max_path_length = 256
if sys.platform == "win32":
if is_windows:
sys_max_path_length = 260
else:
try:
@@ -236,7 +238,7 @@ class Path:
unix = 0
windows = 1
platform_path = windows if sys.platform == "win32" else unix
platform_path = windows if is_windows else unix
def format_os_path(path, mode=Path.unix):
@@ -485,7 +487,7 @@ def debug_padded_filter(string, level=1):
Returns (str): filtered string if current debug level does not exceed
level and not windows; otherwise, unfiltered string
"""
if sys.platform == "win32":
if is_windows:
return string
return padding_filter(string) if tty.debug_level() <= level else string

View File

@@ -13,7 +13,8 @@
from llnl.util import tty
if sys.platform == "win32":
is_windows = sys.platform == "win32"
if is_windows:
import winreg
@@ -153,7 +154,7 @@ def __init__(self, key, root_key=HKEY.HKEY_CURRENT_USER):
to get an entrypoint, the HKEY constants are always open, or an already
open key can be used instead.
"""
if sys.platform != "win32":
if not is_windows:
raise RuntimeError(
"Cannot instantiate Windows Registry class on non Windows platforms"
)
@@ -166,7 +167,7 @@ def invalid_reg_ref_error_handler(self):
try:
yield
except FileNotFoundError as e:
if sys.platform == "win32" and e.winerror == 2:
if e.winerror == 2:
tty.debug("Key %s at position %s does not exist" % (self.key, str(self.root)))
else:
raise e
@@ -181,7 +182,7 @@ def _load_key(self):
winreg.OpenKeyEx(self.root.hkey, self.key, access=winreg.KEY_READ),
)
except FileNotFoundError as e:
if sys.platform == "win32" and e.winerror == 2:
if e.winerror == 2:
self._reg = -1
tty.debug("Key %s at position %s does not exist" % (self.key, str(self.root)))
else:

View File

@@ -203,7 +203,8 @@ def implicit_variant_conversion(method):
@functools.wraps(method)
def convert(self, other):
# We don't care if types are different as long as I can convert other to type(self)
# We don't care if types are different as long as I can convert
# other to type(self)
try:
other = type(self)(other.name, other._original_value)
except (error.SpecError, ValueError):
@@ -348,12 +349,7 @@ def satisfies(self, other):
# (`foo=bar` will never satisfy `baz=bar`)
return other.name == self.name
def intersects(self, other):
"""Returns True if there are variant matching both self and other, False otherwise."""
if isinstance(other, (SingleValuedVariant, BoolValuedVariant)):
return other.intersects(self)
return other.name == self.name
@implicit_variant_conversion
def compatible(self, other):
"""Returns True if self and other are compatible, False otherwise.
@@ -368,7 +364,7 @@ def compatible(self, other):
"""
# If names are different then `self` is not compatible with `other`
# (`foo=bar` is incompatible with `baz=bar`)
return self.intersects(other)
return other.name == self.name
@implicit_variant_conversion
def constrain(self, other):
@@ -479,9 +475,6 @@ def satisfies(self, other):
self.value == other.value or other.value == "*" or self.value == "*"
)
def intersects(self, other):
return self.satisfies(other)
def compatible(self, other):
return self.satisfies(other)
@@ -582,11 +575,29 @@ def substitute(self, vspec):
# Set the item
super(VariantMap, self).__setitem__(vspec.name, vspec)
def satisfies(self, other):
return all(k in self and self[k].satisfies(other[k]) for k in other)
def satisfies(self, other, strict=False):
"""Returns True if this VariantMap is more constrained than other,
False otherwise.
def intersects(self, other):
return all(self[k].intersects(other[k]) for k in other if k in self)
Args:
other (VariantMap): VariantMap instance to satisfy
strict (bool): if True return False if a key is in other and
not in self, otherwise discard that key and proceed with
evaluation
Returns:
bool: True or False
"""
to_be_checked = [k for k in other]
strict_or_concrete = strict
if self.spec is not None:
strict_or_concrete |= self.spec._concrete
if not strict_or_concrete:
to_be_checked = filter(lambda x: x in self, to_be_checked)
return all(k in self and self[k].satisfies(other[k]) for k in to_be_checked)
def constrain(self, other):
"""Add all variants in other that aren't in self to self. Also

View File

@@ -133,9 +133,6 @@ def __hash__(self):
def __str__(self):
return self.data
def __repr__(self):
return f"VersionStrComponent('{self.data}')"
def __eq__(self, other):
if isinstance(other, VersionStrComponent):
return self.data == other.data
@@ -245,9 +242,9 @@ def __init__(self, string: str) -> None:
if string and not VALID_VERSION.match(string):
raise ValueError("Bad characters in version string: %s" % string)
self.separators, self.version = self._generate_separators_and_components(string)
self.separators, self.version = self._generate_seperators_and_components(string)
def _generate_separators_and_components(self, string):
def _generate_seperators_and_components(self, string):
segments = SEGMENT_REGEX.findall(string)
components = tuple(int(m[0]) if m[0] else VersionStrComponent(m[1]) for m in segments)
separators = tuple(m[2] for m in segments)
@@ -351,26 +348,11 @@ def isdevelop(self):
return False
@coerced
def intersects(self, other: "VersionBase") -> bool:
"""Return True if self intersects with other, False otherwise.
Two versions intersect if one can be constrained by the other. For instance
@4.7 and @4.7.3 intersect (the intersection being @4.7.3).
Arg:
other: version to be checked for intersection
"""
n = min(len(self.version), len(other.version))
return self.version[:n] == other.version[:n]
@coerced
def satisfies(self, other: "VersionBase") -> bool:
"""Return True if self is at least as specific and share a common prefix with other.
For instance, @4.7.3 satisfies @4.7 but not vice-versa.
Arg:
other: version to be checked for intersection
def satisfies(self, other):
"""A Version 'satisfies' another if it is at least as specific and has
a common prefix. e.g., we want gcc@4.7.3 to satisfy a request for
gcc@4.7 so that when a user asks to build with gcc@4.7, we can find
a suitable compiler.
"""
nself = len(self.version)
nother = len(other.version)
@@ -484,8 +466,9 @@ def is_predecessor(self, other):
def is_successor(self, other):
return other.is_predecessor(self)
@coerced
def overlaps(self, other):
return self.intersects(other)
return self in other or other in self
@coerced
def union(self, other):
@@ -565,7 +548,7 @@ def __init__(self, string):
if "=" in pruned_string:
self.ref, self.ref_version_str = pruned_string.split("=")
_, self.ref_version = self._generate_separators_and_components(self.ref_version_str)
_, self.ref_version = self._generate_seperators_and_components(self.ref_version_str)
self.user_supplied_reference = True
else:
self.ref = pruned_string
@@ -595,9 +578,6 @@ def _cmp(self, other_lookups=None):
if ref_info:
prev_version, distance = ref_info
if prev_version is None:
prev_version = "0"
# Extend previous version by empty component and distance
# If commit is exactly a known version, no distance suffix
prev_tuple = VersionBase(prev_version).version if prev_version else ()
@@ -607,22 +587,14 @@ def _cmp(self, other_lookups=None):
return self.version
@coerced
def intersects(self, other):
# If they are both references, they must match exactly
if self.is_ref and other.is_ref:
return self.version == other.version
# Otherwise the ref_version of the reference must intersect with the version of the other
v1 = self.ref_version if self.is_ref else self.version
v2 = other.ref_version if other.is_ref else other.version
n = min(len(v1), len(v2))
return v1[:n] == v2[:n]
@coerced
def satisfies(self, other):
# In the case of two GitVersions we require the ref_versions
# to satisfy one another and the versions to be an exact match.
"""A Version 'satisfies' another if it is at least as specific and has
a common prefix. e.g., we want gcc@4.7.3 to satisfy a request for
gcc@4.7 so that when a user asks to build with gcc@4.7, we can find
a suitable compiler. In the case of two GitVersions we require the ref_versions
to satisfy one another and the versions to be an exact match.
"""
self_cmp = self._cmp(other.ref_lookup)
other_cmp = other._cmp(self.ref_lookup)
@@ -759,7 +731,7 @@ def __init__(self, start, end):
# means the range [1.2.3, 1.3), which is non-empty.
min_len = min(len(start), len(end))
if end.up_to(min_len) < start.up_to(min_len):
raise ValueError(f"Invalid Version range: {self}")
raise ValueError("Invalid Version range: %s" % self)
def lowest(self):
return self.start
@@ -833,32 +805,26 @@ def __contains__(self, other):
)
return in_upper
def intersects(self, other) -> bool:
"""Return two if two version ranges overlap with each other, False otherwise.
@coerced
def satisfies(self, other):
"""
x.satisfies(y) in general means that x and y have a
non-zero intersection. For VersionRange this means they overlap.
This is a commutative operation.
`satisfies` is a commutative binary operator, meaning that
x.satisfies(y) if and only if y.satisfies(x).
Examples:
Note: in some cases we have the keyword x.satisfies(y, strict=True)
to mean strict set inclusion, which is not commutative. However, this
lacks in VersionRange for unknown reasons.
Examples
- 1:3 satisfies 2:4, as their intersection is 2:3.
- 1:2 does not satisfy 3:4, as their intersection is empty.
- 4.5:4.7 satisfies 4.7.2:4.8, as their intersection is 4.7.2:4.7
Args:
other: version range to be checked for intersection
"""
return self.overlaps(other)
@coerced
def satisfies(self, other):
"""A version range satisfies another if it is a subset of the other.
Examples:
- 1:2 does not satisfy 3:4, as their intersection is empty.
- 1:3 does not satisfy 2:4, as they overlap but neither is a subset of the other
- 1:3 satisfies 1:4.
"""
return self.intersection(other) == self
@coerced
def overlaps(self, other):
return (
@@ -916,33 +882,34 @@ def union(self, other):
@coerced
def intersection(self, other):
if not self.overlaps(other):
if self.overlaps(other):
if self.start is None:
start = other.start
else:
start = self.start
if other.start is not None:
if other.start > start or other.start in start:
start = other.start
if self.end is None:
end = other.end
else:
end = self.end
# TODO: does this make sense?
# This is tricky:
# 1.6.5 in 1.6 = True (1.6.5 is more specific)
# 1.6 < 1.6.5 = True (lexicographic)
# Should 1.6 NOT be less than 1.6.5? Hmm.
# Here we test (not end in other.end) first to avoid paradox.
if other.end is not None and end not in other.end:
if other.end < end or other.end in end:
end = other.end
return VersionRange(start, end)
else:
return VersionList()
if self.start is None:
start = other.start
else:
start = self.start
if other.start is not None:
if other.start > start or other.start in start:
start = other.start
if self.end is None:
end = other.end
else:
end = self.end
# TODO: does this make sense?
# This is tricky:
# 1.6.5 in 1.6 = True (1.6.5 is more specific)
# 1.6 < 1.6.5 = True (lexicographic)
# Should 1.6 NOT be less than 1.6.5? Hmm.
# Here we test (not end in other.end) first to avoid paradox.
if other.end is not None and end not in other.end:
if other.end < end or other.end in end:
end = other.end
return VersionRange(start, end)
def __hash__(self):
return hash((self.start, self.end))
@@ -1055,9 +1022,6 @@ def overlaps(self, other):
o += 1
return False
def intersects(self, other):
return self.overlaps(other)
def to_dict(self):
"""Generate human-readable dict for YAML."""
if self.concrete:
@@ -1076,10 +1040,31 @@ def from_dict(dictionary):
raise ValueError("Dict must have 'version' or 'versions' in it.")
@coerced
def satisfies(self, other) -> bool:
# This exploits the fact that version lists are "reduced" and normalized, so we can
# never have a list like [1:3, 2:4] since that would be normalized to [1:4]
return all(any(lhs.satisfies(rhs) for rhs in other) for lhs in self)
def satisfies(self, other, strict=False):
"""A VersionList satisfies another if some version in the list
would satisfy some version in the other list. This uses
essentially the same algorithm as overlaps() does for
VersionList, but it calls satisfies() on member Versions
and VersionRanges.
If strict is specified, this version list must lie entirely
*within* the other in order to satisfy it.
"""
if not other or not self:
return False
if strict:
return self in other
s = o = 0
while s < len(self) and o < len(other):
if self[s].satisfies(other[o]):
return True
elif self[s] < other[o]:
s += 1
else:
o += 1
return False
@coerced
def update(self, other):
@@ -1351,6 +1336,10 @@ def lookup_ref(self, ref):
# won't properly update the local rev-list)
self.fetcher.git("fetch", "--tags", output=os.devnull, error=os.devnull)
# We need to do an attempt at fetching the commit in order to
# be sure to get it in case it comes from a PR in a fork.
self.fetcher.git("fetch", "origin", "%s" % ref, output=os.devnull, error=os.devnull)
# Ensure ref is a commit object known to git
# Note the brackets are literals, the ref replaces the format string
try:

View File

@@ -218,7 +218,7 @@ spack:
- target:
- 'target=aarch64'
- 'target=neoverse_n1'
- 'target=graviton2'
specs:

View File

@@ -125,7 +125,7 @@ spack:
- target:
- 'target=aarch64'
- 'target=neoverse_n1'
- 'target=graviton2'
specs:

View File

@@ -240,8 +240,6 @@ spack:
- llvm
- llvm-amdgpu
- rocblas
- paraview
- visit
runner-attributes:
tags: [ "spack", "huge", "ppc64le" ]
variables:

View File

@@ -43,7 +43,7 @@ spack:
- target:
- 'target=aarch64'
- 'target=neoverse_n1'
- 'target=graviton2'
specs:

View File

@@ -8,6 +8,8 @@
from spack.package import *
is_windows = sys.platform == "win32"
def check(condition, msg):
"""Raise an install error if condition is False."""
@@ -57,7 +59,7 @@ def install(self, spec, prefix):
os.environ["for_install"] == "for_install",
"Couldn't read env var set in compile envieonmnt",
)
cmake_exe_ext = ".exe" if sys.platform == "win32" else ""
cmake_exe_ext = ".exe" if is_windows else ""
cmake_exe = join_path(prefix.bin, "cmake{}".format(cmake_exe_ext))
touch(cmake_exe)
set_executable(cmake_exe)

View File

@@ -1,18 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class V1Consumer(Package):
"""Mimic the real netlib-lapack, that may be built on top of an
optimized blas.
"""
homepage = "https://dev.null"
version("1.0")
depends_on("v2")
depends_on("v1")

View File

@@ -16,7 +16,6 @@ class Ace(MakefilePackage):
homepage = "https://www.dre.vanderbilt.edu/~schmidt/ACE.html"
url = "https://download.dre.vanderbilt.edu/previous_versions/ACE-6.5.1.tar.gz"
version("7.1.0", sha256="d78d9f3f2dee6ccb46a8c296367369349054fd475dff3c5b36e2dff3dee0bf8f")
version("6.5.12", "de96c68a6262d6b9ba76b5057c02c7e6964c070b1328a63bf70259e9530a7996")
version("6.5.6", "7717cad84d4a9c3d6b2c47963eb555d96de0be657870bcab6fcef4c0423af0de")
version("6.5.1", "1f318adadb19da23c9be570a9c600a330056b18950fe0bf0eb1cf5cac8b72a32")

View File

@@ -16,7 +16,6 @@ class Alglib(MakefilePackage):
homepage = "https://www.alglib.net/"
url = "https://www.alglib.net/translator/re/alglib-3.11.0.cpp.gpl.tgz"
version("3.20.0", sha256="e7357f0f894313ff1b640ec9cb5e8b63f06d2d3411c2143a374aa0e9740da8a9")
version("3.11.0", sha256="34e391594aac89fb354bdaf58c42849489cd1199197398ba98bb69961f42bdb0")
build_directory = "src"

View File

@@ -14,7 +14,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
mesh refinement (AMR) applications."""
homepage = "https://amrex-codes.github.io/amrex/"
url = "https://github.com/AMReX-Codes/amrex/releases/download/23.03/amrex-23.03.tar.gz"
url = "https://github.com/AMReX-Codes/amrex/releases/download/23.02/amrex-23.02.tar.gz"
git = "https://github.com/AMReX-Codes/amrex.git"
test_requires_compiler = True
@@ -24,7 +24,6 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
maintainers("WeiqunZhang", "asalmgren", "etpalmer63")
version("develop", branch="development")
version("23.03", sha256="e17c721b1aba4f66e467723f61b59e56c02cf1b72cab5a2680b13ff6e79ef903")
version("23.02", sha256="f443c5eb4b89f4a74bf0e1b8a5943da18ab81cdc76aff12e8282ca43ffd06412")
version("23.01", sha256="3b1770653a7c6d3e6167bc3cce98cbf838962102c510d1f872ab08f1115933b7")
version("22.12", sha256="7b11e547e70bdd6f4b36682708a755d173eaecd8738536306d4217df4dd1be3d")
@@ -124,13 +123,11 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
depends_on("python@2.7:", type="build", when="@:20.04")
depends_on("cmake@3.5:", type="build", when="@:18.10")
depends_on("cmake@3.13:", type="build", when="@18.11:19.03")
depends_on("cmake@3.14:", type="build", when="@19.04:22.05")
depends_on("cmake@3.17:", type="build", when="@22.06:23.01")
depends_on("cmake@3.18:", type="build", when="@23.02:")
depends_on("cmake@3.14:", type="build", when="@19.04:")
# cmake @3.17: is necessary to handle cuda @11: correctly
depends_on("cmake@3.17:", type="build", when="^cuda @11:")
depends_on("cmake@3.17:", type="build", when="@22.06:")
depends_on("cmake@3.20:", type="build", when="+rocm")
depends_on("cmake@3.22:", type="build", when="+sycl")
depends_on("hdf5@1.10.4: +mpi", when="+hdf5")
depends_on("rocrand", type="build", when="+rocm")
depends_on("rocprim", type="build", when="@21.05: +rocm")
@@ -138,6 +135,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
depends_on("hypre@2.19.0:", type="link", when="@21.03: ~cuda +hypre")
depends_on("hypre@2.20.0:", type="link", when="@21.03: +cuda +hypre")
depends_on("petsc", type="link", when="+petsc")
depends_on("cmake@3.22:", type="build", when="+sycl")
depends_on("intel-oneapi-compilers@2023.0.0:", type="build", when="@23.01: +sycl")
depends_on("intel-oneapi-mkl", type=("build", "link"), when="+sycl")

View File

@@ -15,7 +15,6 @@ class Appres(AutotoolsPackage, XorgPackage):
homepage = "https://cgit.freedesktop.org/xorg/app/appres"
xorg_mirror_path = "app/appres-1.0.4.tar.gz"
version("1.0.6", sha256="848f383ff429612fb9df840d79e97dc193dc72dbbf53d3217a8d1e90a5aa1e26")
version("1.0.4", sha256="22cb6f639c891ffdbb5371bc50a88278185789eae6907d05e9e0bd1086a80803")
depends_on("libx11")

View File

@@ -12,7 +12,6 @@ class AprUtil(AutotoolsPackage):
homepage = "https://apr.apache.org/"
url = "https://archive.apache.org/dist/apr/apr-util-1.6.1.tar.gz"
version("1.6.3", sha256="2b74d8932703826862ca305b094eef2983c27b39d5c9414442e9976a9acf1983")
version("1.6.1", sha256="b65e40713da57d004123b6319828be7f1273fbc6490e145874ee1177e112c459")
version("1.6.0", sha256="483ef4d59e6ac9a36c7d3fd87ad7b9db7ad8ae29c06b9dd8ff22dda1cc416389")
version("1.5.4", sha256="976a12a59bc286d634a21d7be0841cc74289ea9077aa1af46be19d1a6e844c19")

View File

@@ -12,7 +12,6 @@ class Apr(AutotoolsPackage):
homepage = "https://apr.apache.org/"
url = "https://archive.apache.org/dist/apr/apr-1.7.0.tar.gz"
version("1.7.2", sha256="3d8999b216f7b6235343a4e3d456ce9379aa9a380ffb308512f133f0c5eb2db9")
version("1.7.0", sha256="48e9dbf45ae3fdc7b491259ffb6ccf7d63049ffacbc1c0977cced095e4c2d5a2")
version("1.6.2", sha256="4fc24506c968c5faf57614f5d0aebe0e9d0b90afa47a883e1a1ca94f15f4a42e")
version("1.5.2", sha256="1af06e1720a58851d90694a984af18355b65bb0d047be03ec7d659c746d6dbdb")

View File

@@ -27,12 +27,11 @@ class Armcomputelibrary(SConsPackage):
immediate support for new Arm® technologies e.g. SVE2."""
homepage = "https://arm-software.github.io/ComputeLibrary/latest/"
url = "https://github.com/ARM-software/ComputeLibrary/archive/refs/tags/v23.02.zip"
url = "https://github.com/ARM-software/ComputeLibrary/archive/refs/tags/v22.11.zip"
git = "https://github.com/ARM-software/ComputeLibrary.git"
maintainers = ["annop-w"]
version("23.02", sha256="bed1b24047ce00155e552204bc3983e86f46775414c554a34a7ece931d67ec62")
version("22.11", sha256="2f70f54d84390625222503ea38650c00c49d4b70bc86a6b9aeeebee9d243865f")
version("22.08", sha256="5d76d07406b105f0bdf74ef80263236cb03baf0ade882f2bf8446bbc239e0079")
version("22.05", sha256="8ff308448874c6b72c1ce8d9f28af41d8b47c8e5c43b8ccc069da744e3c0a421")
@@ -42,7 +41,11 @@ class Armcomputelibrary(SConsPackage):
phases = ["build"]
variant("build_type", default="release", values=("release", "debug"))
variant(
"build_type",
default="release",
values=("release", "debug"),
)
variant(
"threads",
default="cppthreads",
@@ -62,9 +65,21 @@ class Armcomputelibrary(SConsPackage):
description="Target Architecture. The x86_32 and x86_64 targets can only be"
" used with neon=0 and opencl=1.",
)
variant("sve", default=False, description="Build for SVE.")
variant("sve2", default=False, description="Build for SVE2.")
variant("neon", default=True, description="Enable Arm® Neon™ support")
variant(
"sve",
default=False,
description="Build for SVE.",
)
variant(
"sve2",
default=False,
description="Build for SVE2.",
)
variant(
"neon",
default=True,
description="Enable Arm® Neon™ support",
)
variant(
"experimental_dynamic_fusion",
default=False,
@@ -75,10 +90,26 @@ class Armcomputelibrary(SConsPackage):
default=False,
description="Enable fixed format kernels for GEMM.",
)
variant("benchmark_examples", default=False, description="Build benchmark examples programs.")
variant("validate_examples", default=False, description="Build validate examples programs.")
variant("validation_tests", default=False, description="Build validation test programs.")
variant("benchmark_tests", default=False, description="Build benchmark test programs.")
variant(
"benchmark_examples",
default=False,
description="Build benchmark examples programs.",
)
variant(
"validate_examples",
default=False,
description="Build validate examples programs.",
)
variant(
"validation_tests",
default=False,
description="Build validation test programs.",
)
variant(
"benchmark_tests",
default=False,
description="Build benchmark test programs.",
)
def build_args(self, spec, prefix):
args = ["-j{0}".format(make_jobs)]

View File

@@ -52,9 +52,7 @@ class Ascent(CMakePackage, CudaPackage):
version("develop", branch="develop", submodules=True)
version("0.9.0", tag="v0.9.0", submodules=True, preferred=True)
version("0.8.0", tag="v0.8.0", submodules=True)
version("0.8.0", tag="v0.8.0", submodules=True, preferred=True)
version("0.7.1", tag="v0.7.1", submodules=True)
@@ -82,16 +80,11 @@ class Ascent(CMakePackage, CudaPackage):
variant("vtkh", default=True, description="Build VTK-h filter and rendering support")
variant("openmp", default=(sys.platform != "darwin"), description="build openmp support")
variant("raja", default=True, description="Build with RAJA support")
variant("umpire", default=True, description="Build with Umpire support")
variant("mfem", default=False, description="Build MFEM filter support")
variant("dray", default=False, description="Build with Devil Ray support")
variant("adios2", default=False, description="Build Adios2 filter support")
variant("fides", default=False, description="Build Fides filter support")
# caliper
variant("caliper", default=False, description="Build Caliper support")
# variants for dev-tools (docs, etc)
variant("doc", default=False, description="Build Ascent's documentation")
@@ -126,7 +119,6 @@ class Ascent(CMakePackage, CudaPackage):
#######################
depends_on("conduit@:0.7.2", when="@:0.7.1")
depends_on("conduit@0.8.2:", when="@0.8:")
depends_on("conduit@0.8.6:", when="@0.9:")
depends_on("conduit+python", when="+python")
depends_on("conduit~python", when="~python")
depends_on("conduit+mpi", when="+mpi")
@@ -149,56 +141,30 @@ class Ascent(CMakePackage, CudaPackage):
depends_on("mpi", when="+mpi")
depends_on("py-mpi4py", when="+mpi+python")
#############################
# TPLs for Runtime Features
#############################
#######################
# RAJA and Umpire
#######################
depends_on("raja", when="+raja")
depends_on("umpire", when="+umpire")
#######################
# BabelFlow
#######################
depends_on("babelflow", when="+babelflow+mpi")
depends_on("parallelmergetree", when="+babelflow+mpi")
#######################
# VTK-m
#######################
#############################
# TPLs for Runtime Features
#############################
depends_on("vtk-m@1.9:", when="@0.9.0:")
depends_on("vtk-m~tbb", when="@0.9.0: +vtkh")
depends_on("vtk-m+openmp", when="@0.9.0: +vtkh+openmp")
depends_on("vtk-m~openmp", when="@0.9.0: +vtkh~openmp")
depends_on("vtk-m+openmp", when="@0.9.0: +vtkh+openmp")
depends_on("vtk-m~openmp", when="@0.9.0: +vtkh~openmp")
depends_on("vtk-m~cuda", when="@0.9.0: +vtkh~cuda")
depends_on("vtk-m+cuda", when="@0.9.0: +vtkh+cuda")
depends_on("vtk-m+fpic", when="@0.8.0: +vtkh")
depends_on("vtk-m~shared+fpic", when="@0.8.0: +vtkh~shared")
#######################
# VTK-h
#######################
# Ascent 0.9.0 includes VTK-h, prior to 0.9.0
# VTK-h was developed externally
depends_on("vtk-h", when="+vtkh")
depends_on("vtk-h@:0.7", when="@:0.7 +vtkh")
depends_on("vtk-h@0.8.1:", when="@0.8: +vtkh")
# propagate relevent variants to vtk-h
depends_on("vtk-h+openmp", when="@:0.8.0 +vtkh+openmp")
depends_on("vtk-h~openmp", when="@:0.8.0 +vtkh~openmp")
depends_on("vtk-h+cuda", when="@:0.8.0 +vtkh+cuda")
depends_on("vtk-h~cuda", when="@:0.8.0 +vtkh~cuda")
propagate_cuda_arch("vtk-h", "@:0.8.0 +vtkh")
depends_on("vtk-h+shared", when="@:0.8.0 +vtkh+shared")
depends_on("vtk-h~shared", when="@:0.8.0 +vtkh~shared")
depends_on("vtk-h+openmp", when="+vtkh+openmp")
depends_on("vtk-h~openmp", when="+vtkh~openmp")
depends_on("vtk-h+cuda", when="+vtkh+cuda")
depends_on("vtk-h~cuda", when="+vtkh~cuda")
propagate_cuda_arch("vtk-h", "+vtkh")
depends_on("vtk-h+shared", when="+vtkh+shared")
depends_on("vtk-h~shared", when="+vtkh~shared")
# When using VTK-h ascent also needs VTK-m
depends_on("vtk-m@:1.7", when="@:0.8.0 +vtkh")
depends_on("vtk-m+testlib", when="@:0.8.0 +vtkh+test^vtk-m")
depends_on("vtk-m@:1.7", when="+vtkh")
depends_on("vtk-m+testlib", when="+vtkh+test^vtk-m")
# mfem
depends_on("mfem~threadsafe~openmp+conduit", when="+mfem")
@@ -211,25 +177,20 @@ class Ascent(CMakePackage, CudaPackage):
# fides
depends_on("fides", when="+fides")
#######################
# Devil Ray
#######################
# Ascent 0.9.0 includes Devil Ray, prior to 0.9.0
# Devil Ray was developed externally
# devil ray variants with mpi
# we have to specify both because mfem makes us
depends_on("dray~test~utils", when="@:0.8.0 +dray")
depends_on("dray@0.1.8:", when="@:0.8.0 +dray")
depends_on("dray~test~utils", when="+dray")
depends_on("dray@0.1.8:", when="@0.8: +dray")
# propagate relevent variants to dray
depends_on("dray+cuda", when="@:0.8.0 +dray+cuda")
depends_on("dray~cuda", when="@:0.8.0 +dray~cuda")
propagate_cuda_arch("dray", "@:0.8.0 +dray")
depends_on("dray+mpi", when="@:0.8.0 +dray+mpi")
depends_on("dray~mpi", when="@:0.8.0 +dray~mpi")
depends_on("dray+shared", when="@:0.8.0 +dray+shared")
depends_on("dray~shared", when="@:0.8.0 +dray~shared")
depends_on("dray+openmp", when="@:0.8.0 +dray+openmp")
depends_on("dray~openmp", when="@:0.8.0 +dray~openmp")
depends_on("dray+cuda", when="+dray+cuda")
depends_on("dray~cuda", when="+dray~cuda")
propagate_cuda_arch("dray", "+dray")
depends_on("dray+mpi", when="+dray+mpi")
depends_on("dray~mpi", when="+dray~mpi")
depends_on("dray+shared", when="+dray+shared")
depends_on("dray~shared", when="+dray~shared")
depends_on("dray+openmp", when="+dray+openmp")
depends_on("dray~openmp", when="+dray~openmp")
# Adios2
depends_on("adios2", when="+adios2")
@@ -239,11 +200,6 @@ class Ascent(CMakePackage, CudaPackage):
depends_on("adios2+shared", when="+adios2+shared")
depends_on("adios2~shared", when="+adios2~shared")
#######################
# Caliper
#######################
depends_on("caliper", when="+caliper")
#######################
# Documentation related
#######################
@@ -546,18 +502,16 @@ def hostconfig(self):
#######################
# VTK-h (and deps)
#######################
cfg.write("# vtk-h support \n")
if "+vtkh" in spec:
cfg.write("# vtk-h\n")
if self.spec.satisfies("@0.8.1:"):
cfg.write(cmake_cache_entry("ENABLE_VTKH", "ON"))
else:
cfg.write(cmake_cache_entry("VTKH_DIR", spec["vtk-h"].prefix))
cfg.write("# vtk-m from spack\n")
cfg.write(cmake_cache_entry("VTKM_DIR", spec["vtk-m"].prefix))
cfg.write("# vtk-h from spack\n")
cfg.write(cmake_cache_entry("VTKH_DIR", spec["vtk-h"].prefix))
if "+cuda" in spec:
cfg.write(cmake_cache_entry("VTKm_ENABLE_CUDA", "ON"))
cfg.write(cmake_cache_entry("CMAKE_CUDA_HOST_COMPILER", env["SPACK_CXX"]))
@@ -565,38 +519,7 @@ def hostconfig(self):
cfg.write(cmake_cache_entry("VTKm_ENABLE_CUDA", "OFF"))
else:
if self.spec.satisfies("@0.8.1:"):
cfg.write("# vtk-h\n")
cfg.write(cmake_cache_entry("ENABLE_VTKH", "OFF"))
else:
cfg.write("# vtk-h not build by spack\n")
#######################
# RAJA
#######################
if "+raja" in spec:
cfg.write("# RAJA from spack \n")
cfg.write(cmake_cache_entry("RAJA_DIR", spec["raja"].prefix))
else:
cfg.write("# RAJA not built by spack \n")
#######################
# Umpire
#######################
if "+umpire" in spec:
cfg.write("# umpire from spack \n")
cfg.write(cmake_cache_entry("UMPIRE_DIR", spec["umpire"].prefix))
else:
cfg.write("# umpire not built by spack \n")
#######################
# Camp
#######################
if "+umpire" in spec or "+raja" in spec:
cfg.write("# camp from spack \n")
cfg.write(cmake_cache_entry("CAMP_DIR", spec["camp"].prefix))
else:
cfg.write("# camp not built by spack \n")
cfg.write("# vtk-h not built by spack \n")
#######################
# MFEM
@@ -611,20 +534,10 @@ def hostconfig(self):
# Devil Ray
#######################
if "+dray" in spec:
cfg.write("# devil ray\n")
if self.spec.satisfies("@0.8.1:"):
cfg.write(cmake_cache_entry("ENABLE_DRAY", "ON"))
cfg.write(cmake_cache_entry("ENABLE_APCOMP", "ON"))
else:
cfg.write("# devil ray from spack \n")
cfg.write(cmake_cache_entry("DRAY_DIR", spec["dray"].prefix))
cfg.write("# devil ray from spack \n")
cfg.write(cmake_cache_entry("DRAY_DIR", spec["dray"].prefix))
else:
if self.spec.satisfies("@0.8.1:"):
cfg.write("# devil ray\n")
cfg.write(cmake_cache_entry("ENABLE_DRAY", "OFF"))
cfg.write(cmake_cache_entry("ENABLE_APCOMP", "OFF"))
else:
cfg.write("# devil ray not build by spack\n")
cfg.write("# devil ray not built by spack \n")
#######################
# Adios2
@@ -646,16 +559,6 @@ def hostconfig(self):
else:
cfg.write("# fides not built by spack \n")
#######################
# Caliper
#######################
cfg.write("# caliper from spack \n")
if "+caliper" in spec:
cfg.write(cmake_cache_entry("CALIPER_DIR", spec["caliper"].prefix))
cfg.write(cmake_cache_entry("ADIAK_DIR", spec["adiak"].prefix))
else:
cfg.write("# caliper not built by spack \n")
#######################
# Finish host-config
#######################

View File

@@ -16,7 +16,6 @@ class Aspell(AutotoolsPackage, GNUMirrorPackage):
extendable = True # support activating dictionaries
version("0.60.8", sha256="f9b77e515334a751b2e60daab5db23499e26c9209f5e7b7443b05235ad0226f2")
version("0.60.6.1", sha256="f52583a83a63633701c5f71db3dc40aab87b7f76b29723aeb27941eff42df6e1")
patch("fix_cpp.patch")

View File

@@ -17,7 +17,6 @@ class Atk(Package):
list_url = "https://ftp.gnome.org/pub/gnome/sources/atk"
list_depth = 1
version("2.38.0", sha256="ac4de2a4ef4bd5665052952fe169657e65e895c5057dffb3c2a810f6191a0c36")
version("2.36.0", sha256="fb76247e369402be23f1f5c65d38a9639c1164d934e40f6a9cf3c9e96b652788")
version("2.30.0", sha256="dd4d90d4217f2a0c1fee708a555596c2c19d26fef0952e1ead1938ab632c027b")
version("2.28.1", sha256="cd3a1ea6ecc268a2497f0cd018e970860de24a6d42086919d6bf6c8e8d53f4fc")

View File

@@ -13,7 +13,6 @@ class Awscli(PythonPackage):
pypi = "awscli/awscli-1.16.308.tar.gz"
version("1.27.84", sha256="a27a7d1f3efa9dd2acf9d8bd72b939337d53da4ac71721cde6d5dff94aa446f7")
version("1.27.56", sha256="58fd7122547db71646c053c914bd4f9b673356dd8c9520ae6d35560a8aec208b")
version("1.16.308", sha256="3632fb1db2538128509a7b5e89f2a2c4ea3426bec139944247bddc4d79bf7603")
version("1.16.179", sha256="6a87114d1325358d000abe22b2103baae7b91f053ff245b9fde33cb0affb5e4f")
@@ -22,8 +21,7 @@ class Awscli(PythonPackage):
depends_on("python@3.7:", when="@1.25:", type=("build", "run"))
# py-botocore is pinned to the patch version number
depends_on("py-botocore@1.29.84", when="@1.27.84", type=("build", "run"))
depends_on("py-botocore@1.29.56", when="@1.27.56", type=("build", "run"))
depends_on("py-botocore@1.29.56", when="@1.27", type=("build", "run"))
depends_on("py-botocore@1.13.44", when="@1.16.308", type=("build", "run"))
depends_on("py-botocore@1.12.169", when="@1.16.179", type=("build", "run"))

View File

@@ -13,12 +13,9 @@ class Bacio(CMakePackage):
homepage = "https://noaa-emc.github.io/NCEPLIBS-bacio"
url = "https://github.com/NOAA-EMC/NCEPLIBS-bacio/archive/refs/tags/v2.4.1.tar.gz"
git = "https://github.com/NOAA-EMC/NCEPLIBS-bacio"
maintainers("t-brown", "edwardhartnett", "AlexanderRichert-NOAA", "Hang-Lei-NOAA")
version("develop", branch="develop")
version("2.6.0", sha256="03fef581e1bd3710fb8d2f2659a6c3e01a0437c1350ba53958d2ff1ffef47bcb")
version("2.5.0", sha256="540a0ed73941d70dbf5d7b21d5d0a441e76fad2bfe37dfdfea0db3e98fc0fbfb")
# Prefer version 2.4.1 because the library and include directory
@@ -32,11 +29,9 @@ class Bacio(CMakePackage):
)
variant("pic", default=True, description="Build with position-independent-code")
variant("shared", default=False, description="Build shared library", when="@2.6.0:")
def cmake_args(self):
args = [self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic")]
args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared"))
return args

View File

@@ -14,7 +14,6 @@ class Bc(AutotoolsPackage, GNUMirrorPackage):
homepage = "https://www.gnu.org/software/bc"
gnu_mirror_path = "bc/bc-1.07.tar.gz"
version("1.07.1", sha256="62adfca89b0a1c0164c2cdca59ca210c1d44c3ffc46daf9931cf4942664cb02a")
version("1.07", sha256="55cf1fc33a728d7c3d386cc7b0cb556eb5bacf8e0cb5a3fcca7f109fc61205ad")
depends_on("ed", type="build")

View File

@@ -17,7 +17,6 @@ class Bdftopcf(AutotoolsPackage, XorgPackage):
homepage = "https://cgit.freedesktop.org/xorg/app/bdftopcf"
xorg_mirror_path = "app/bdftopcf-1.0.5.tar.gz"
version("1.1", sha256="699d1a62012035b1461c7f8e3f05a51c8bd6f28f348983249fb89bbff7309b47")
version("1.0.5", sha256="78a5ec945de1d33e6812167b1383554fda36e38576849e74a9039dc7364ff2c3")
depends_on("libxfont")

View File

@@ -15,7 +15,6 @@ class Beforelight(AutotoolsPackage, XorgPackage):
homepage = "https://cgit.freedesktop.org/xorg/app/beforelight"
xorg_mirror_path = "app/beforelight-1.0.5.tar.gz"
version("1.0.6", sha256="735579a7671a9f9de16b7211cf0ba39027183bdc3e82a937fbccfdd893e64a2e")
version("1.0.5", sha256="93bb3c457d6d5e8def3180fdee07bc84d1b7f0e5378a95812e2193cd51455cdc")
depends_on("libx11")

View File

@@ -12,7 +12,6 @@ class Bitmap(AutotoolsPackage, XorgPackage):
homepage = "https://cgit.freedesktop.org/xorg/app/bitmap"
xorg_mirror_path = "app/bitmap-1.0.8.tar.gz"
version("1.1.0", sha256="60ca941e8e38e1f8f9c61d3e86c098878113fd11eac4e07177c111f0bf00779e")
version("1.0.8", sha256="1a2fbd10a2ca5cd93f7b77bbb0555b86d8b35e0fc18d036b1607c761755006fc")
depends_on("libx11")

View File

@@ -18,7 +18,6 @@ class Blktrace(MakefilePackage):
homepage = "https://brick.kernel.dk"
url = "https://brick.kernel.dk/snaps/blktrace-1.2.0.tar.gz"
version("1.3.0", sha256="88c25b3bb3254ab029d4c62df5a9ab863a5c70918a604040da8fe39873c6bacb")
version("1.2.0", sha256="d14029bc096026dacb206bf115c912dcdb795320b5aba6dff3e46d7f94c5242d")
version("1.1.0", sha256="dc1e5da64b8fef454ec24aa4fcc760112b4ea7c973e2485961aa5668b3a8ce1d")
version("1.0.5", sha256="783b4c8743498de74b3492725815d31f3842828baf8710c53bc4e7e82cee387c")

Some files were not shown because too many files have changed in this diff Show More