Compare commits

..

14 Commits

Author SHA1 Message Date
Todd Gamblin
03cba85029 bugfix: don't include root in general depth rule 2023-03-11 16:21:53 -08:00
Todd Gamblin
2c7a2fa3e0 Don't require priority attribute on models. 2023-02-22 10:53:16 -08:00
Todd Gamblin
7e8fb4b8d0 WIP 2023-01-30 11:48:03 -08:00
Todd Gamblin
54087cf274 WIP 2023-01-24 17:19:21 -08:00
Todd Gamblin
b0e67d3411 WIP 2023-01-24 17:19:21 -08:00
Todd Gamblin
d92b653f0b WIP 2023-01-24 17:19:21 -08:00
Todd Gamblin
3435806007 WIP 2023-01-24 17:19:21 -08:00
Todd Gamblin
06d5abe895 WIP 2023-01-24 17:19:19 -08:00
Todd Gamblin
dc40e121a3 WIP 2023-01-24 17:18:50 -08:00
Todd Gamblin
03acf14d86 tests: test consistency of solver and spec_clauses
Previously we didn't check whether all facts on a spec were represented in the models
returned by the solver. This is an important check as it ensures that we can have
conditions (dependencies, conflicts, etc.) on any property modeled by spec_clauses.

This caught the issue reported in #32497 (https://github.com/spack/spack/issues/32497),
specifically that we don't currently model patch facts. We can fix this in a follow-on
but it is marked XFAIL for now.
2023-01-24 17:18:50 -08:00
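The check this commit describes amounts to a set-difference assertion between the facts generated for a spec and the facts recovered from the solver's answer set. A minimal, illustrative sketch (not the actual Spack test; the two helper names in the comment are hypothetical):

.. code-block:: python

    # Illustrative sketch of the consistency check described in this commit.
    # `facts_from_spec_clauses` and `facts_from_model` are hypothetical helpers
    # standing in for spec_clauses() output and the parsed clingo answer set.
    def assert_facts_covered(spec_facts: set, model_facts: set) -> None:
        missing = spec_facts - model_facts
        assert not missing, f"facts missing from solver model: {sorted(missing)}"

    # assert_facts_covered(facts_from_spec_clauses(spec), facts_from_model(result))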
Todd Gamblin
9ffb642b95 refactor: use AspFunction consistently before and after solve
Currently we create `AspFunction` objects as inputs to solves, but we don't use them
when extracting symbols from clingo solves. Use them more consistently in both
scenarios, and simplify the code.
2023-01-24 17:18:47 -08:00
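As a rough illustration of the ``AspFunction`` round-trip this commit aims for, the sketch below uses a stand-in class (not Spack's implementation) with the ``shift()`` transformation that also appears later in this compare:

.. code-block:: python

    # Stand-in for spack.solver.asp.AspFunction, for illustration only.
    class AspFunction:
        def __init__(self, name, args=()):
            self.name, self.args = name, tuple(args)

        def shift(self):
            """Transform ``attr("foo", "bar")`` into ``foo("bar")``."""
            first, *rest = self.args
            return AspFunction(first, rest)

        def __str__(self):
            return f"{self.name}({', '.join(repr(a) for a in self.args)})"

    # Facts are built as attr(...) on the way into the solver and shifted back
    # into plain functions when symbols are extracted from the model:
    print(AspFunction("attr", ["version", "hdf5", "1.12.1"]).shift())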
Todd Gamblin
20e698c6b0 concretizer: add depth calculation 2023-01-24 17:12:10 -08:00
Todd Gamblin
c410f3f392 rename optimization criteria to be clearer about roots/non-roots 2023-01-24 17:12:10 -08:00
Todd Gamblin
b37fab40b5 solver: rework optimization criteria with larger constants
To allow room for DAG-ordered optimization, rework the way we write optimization
criteria in `concretize.lp`, and convert build offsets to use constants.
2023-01-24 17:12:10 -08:00
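A hedged illustration of what the larger constants buy: criteria for packages that must be built get a fixed offset so they always outrank the corresponding reuse criteria. The offset below is taken from the priority table added later in this compare, not transcribed from ``concretize.lp``:

.. code-block:: python

    # Hypothetical Python rendering of the priority layout; the real logic
    # lives in ASP (concretize.lp), this only shows the arithmetic.
    BUILD_OFFSET = 100_000  # criteria for built packages sit above this value

    def effective_priority(base_priority: int, is_build: bool) -> int:
        return base_priority + (BUILD_OFFSET if is_build else 0)

    print(effective_priority(5, is_build=True))   # 100005: built-package criterion
    print(effective_priority(5, is_build=False))  # 5: reuse criterion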
1740 changed files with 4722 additions and 10802 deletions

View File

@@ -89,7 +89,7 @@ jobs:
uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@f03ac48505955848960e80bbb68046aa35c7b9e7 # @v1
uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # @v1
- name: Log in to GitHub Container Registry
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
@@ -106,7 +106,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
uses: docker/build-push-action@37abcedcc1da61a57767b7588cb9d03eb57e28b3 # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}

View File

@@ -44,7 +44,7 @@ jobs:
cache: 'pip'
- name: Install Python packages
run: |
python3 -m pip install --upgrade pip six setuptools types-six black==22.12.0 mypy isort clingo flake8
python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.

View File

@@ -83,16 +83,6 @@ if defined _sp_flags (
exit /B 0
)
)
if not defined _sp_subcommand (
if not defined _sp_args (
if not defined _sp_flags (
python "%spack%" --help
exit /B 0
)
)
)
:: pass parsed variables outside of local scope. Need to do
:: this because delayedexpansion can only be set by setlocal
echo %_sp_flags%>flags
@@ -102,24 +92,24 @@ endlocal
set /p _sp_subcommand=<subcmd
set /p _sp_flags=<flags
set /p _sp_args=<args
if "%_sp_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
if "%_sp_subcommand%"=="ECHO is on." (set "_sp_subcommand=")
if "%_sp_flags%"=="ECHO is off." (set "_sp_flags=")
if "%_sp_flags%"=="ECHO is on." (set "_sp_flags=")
if "%_sp_args%"=="ECHO is off." (set "_sp_args=")
if "%_sp_args%"=="ECHO is on." (set "_sp_args=")
set str_subcommand=%_sp_subcommand:"='%
set str_flags=%_sp_flags:"='%
set str_args=%_sp_args:"='%
if "%str_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
if "%str_flags%"=="ECHO is off." (set "_sp_flags=")
if "%str_args%"=="ECHO is off." (set "_sp_args=")
del subcmd
del flags
del args
:: Filter out some commands. For any others, just run the command.
if %_sp_subcommand% == "cd" (
if "%_sp_subcommand%" == "cd" (
goto :case_cd
) else if %_sp_subcommand% == "env" (
) else if "%_sp_subcommand%" == "env" (
goto :case_env
) else if %_sp_subcommand% == "load" (
) else if "%_sp_subcommand%" == "load" (
goto :case_load
) else if %_sp_subcommand% == "unload" (
) else if "%_sp_subcommand%" == "unload" (
goto :case_load
) else (
goto :default_case
@@ -153,21 +143,19 @@ goto :end_switch
:: If no args or args contain --bat or -h/--help: just execute.
if NOT defined _sp_args (
goto :default_case
)
set args_no_quote=%_sp_args:"=%
if NOT "%args_no_quote%"=="%args_no_quote:--help=%" (
)else if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%args_no_quote%"=="%args_no_quote: -h=%" (
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
goto :default_case
) else if NOT "%args_no_quote%"=="%args_no_quote:--bat=%" (
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
) else if NOT "%args_no_quote%"=="%args_no_quote:deactivate=%" (
) else if NOT "%_sp_args%"=="%_sp_args:deactivate=%" (
for /f "tokens=* USEBACKQ" %%I in (
`call python %spack% %_sp_flags% env deactivate --bat %args_no_quote:deactivate=%`
`call python "%spack%" %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
) do %%I
) else if NOT "%args_no_quote%"=="%args_no_quote:activate=%" (
) else if NOT "%_sp_args%"=="%_sp_args:activate=%" (
for /f "tokens=* USEBACKQ" %%I in (
`python %spack% %_sp_flags% env activate --bat %args_no_quote:activate=%`
`call python "%spack%" %_sp_flags% env activate --bat %_sp_args:activate=%`
) do %%I
) else (
goto :default_case
@@ -232,4 +220,4 @@ for %%I in (%~2) do (
:pathadd "%~1" "%%I\%%Z"
)
)
exit /B %ERRORLEVEL%
exit /B %ERRORLEVEL%

View File

@@ -346,7 +346,7 @@ the Environment and then install the concretized specs.
(see :ref:`build-jobs`). To speed up environment builds further, independent
packages can be installed in parallel by launching more Spack instances. For
example, the following will build at most four packages in parallel using
three background jobs:
three background jobs:
.. code-block:: console
@@ -394,7 +394,7 @@ version (and other constraints) passed as the spec argument to the
For packages with ``git`` attributes, git branches, tags, and commits can
also be used as valid concrete versions (see :ref:`version-specifier`).
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
the ``main`` branch of the package, and ``spack install`` will install from
that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by reinstalling the environment,
@@ -630,35 +630,6 @@ The following two Environment manifests are identical:
Spec matrices can be used to install swaths of software across various
toolchains.
Note that ordering of matrices is important. For example, the
following environments are identical:
.. code-block:: yaml
spack:
specs:
- matrix:
- [hdf5@1.10.2+mpi]
- [^mpich, ^openmpi]
- ['%gcc']
- matrix:
- [hdf5@1.12.1+mpi]
- ['%gcc']
- [^mpich, ^openmpi]
spack:
specs:
- hdf5@1.10.2+mpi ^mpich%gcc
- hdf5@1.10.2+mpi ^openmpi%gcc
- hdf5@1.12.1+mpi %gcc ^mpich
- hdf5@1.12.1+mpi %gcc ^openmpi
Notice how the first matrix applies the compiler constraints to the
mpi dependencies, whereas the second matrix applies the compiler
constraints directly to the root hdf5 node. This gives users the full
breadth of expressiveness of the spec syntax through the matrix
interface.
^^^^^^^^^^^^^^^^^^^^
Spec List References
^^^^^^^^^^^^^^^^^^^^
@@ -1149,19 +1120,19 @@ index once every package is pushed. Note how this target uses the generated
SPACK ?= spack
BUILDCACHE_DIR = $(CURDIR)/tarballs
.PHONY: all
all: push
include env.mk
example/push/%: example/install/%
@mkdir -p $(dir $@)
$(info About to push $(SPEC) to a buildcache)
$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
@touch $@
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
$(info Updating the buildcache index)
$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)

View File

@@ -116,7 +116,7 @@ creates a simple python file:
# FIXME: Add a list of GitHub accounts to
# notify when the package is updated.
# maintainers("github_user1", "github_user2")
# maintainers = ["github_user1", "github_user2"]
version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d")

View File

@@ -268,7 +268,7 @@ generates a boilerplate template for your package, and opens up the new
# FIXME: Add a list of GitHub accounts to
# notify when the package is updated.
# maintainers("github_user1", "github_user2")
# maintainers = ["github_user1", "github_user2"]
version("6.2.1", sha256="eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c")
@@ -319,8 +319,14 @@ The rest of the tasks you need to do are as follows:
#. Add a comma-separated list of maintainers.
Add a list of Github accounts of people who want to be notified
any time the package is modified. See :ref:`package_maintainers`.
The ``maintainers`` field is a list of GitHub accounts of people
who want to be notified any time the package is modified. When a
pull request is submitted that updates the package, these people
will be requested to review the PR. This is useful for developers
who maintain a Spack package for their own software, as well as
users who rely on a piece of software and want to ensure that the
package doesn't break. It also gives users a list of people to
contact for help when someone reports a build error with the package.
#. Add ``depends_on()`` calls for the package's dependencies.
@@ -491,31 +497,6 @@ some examples:
In general, you won't have to remember this naming convention because
:ref:`cmd-spack-create` and :ref:`cmd-spack-edit` handle the details for you.
.. _package_maintainers:
-----------
Maintainers
-----------
Each package in Spack may have one or more maintainers, i.e. one or more
GitHub accounts of people who want to be notified any time the package is
modified.
When a pull request is submitted that updates the package, these people will
be requested to review the PR. This is useful for developers who maintain a
Spack package for their own software, as well as users who rely on a piece of
software and want to ensure that the package doesn't break. It also gives users
a list of people to contact for help when someone reports a build error with
the package.
To add maintainers to a package, simply declare them with the ``maintainers`` directive:
.. code-block:: python
maintainers("user1", "user2")
The list of maintainers is additive, and includes all the accounts eventually declared in base classes.
-----------------
Trusted Downloads
-----------------

View File

@@ -23,7 +23,7 @@
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, symlink
from spack.util.executable import Executable, which
from spack.util.executable import CommandNotFoundError, Executable, which
from spack.util.path import path_to_os_path, system_path_filter
is_windows = _platform == "win32"
@@ -117,7 +117,13 @@ def path_contains_subdirectory(path, root):
@memoized
def file_command(*args):
"""Creates entry point to `file` system command with provided arguments"""
file_cmd = which("file", required=True)
try:
file_cmd = which("file", required=True)
except CommandNotFoundError as e:
if is_windows:
raise CommandNotFoundError("`file` utility is not available on Windows")
else:
raise e
for arg in args:
file_cmd.add_default_arg(arg)
return file_cmd
@@ -128,11 +134,7 @@ def _get_mime_type():
"""Generate method to call `file` system command to aquire mime type
for a specified path
"""
if is_windows:
# -h option (no-dereference) does not exist in Windows
return file_command("-b", "--mime-type")
else:
return file_command("-b", "-h", "--mime-type")
return file_command("-b", "-h", "--mime-type")
@memoized

View File

@@ -47,7 +47,7 @@
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
from spack.relocate_text import utf8_paths_to_single_binary_regex
from spack.relocate import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
from spack.util.executable import which
@@ -294,12 +294,10 @@ def update_spec(self, spec, found_list):
cur_entry["spec"] = new_entry["spec"]
break
else:
current_list.append(
{
"mirror_url": new_entry["mirror_url"],
"spec": new_entry["spec"],
}
)
current_list.append = {
"mirror_url": new_entry["mirror_url"],
"spec": new_entry["spec"],
}
def update(self, with_cooldown=False):
"""Make sure local cache of buildcache index files is up to date.
@@ -1732,16 +1730,16 @@ def is_backup_file(file):
# For all buildcaches
# relocate the install prefixes in text files including dependencies
relocate.relocate_text(text_names, prefix_to_prefix_text)
relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
# relocate the install prefixes in binary files including dependencies
relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
relocate.unsafe_relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
# If we are installing back to the same location
# relocate the sbang location if the spack directory changed
else:
if old_spack_prefix != new_spack_prefix:
relocate.relocate_text(text_names, prefix_to_prefix_text)
relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):

View File

@@ -171,7 +171,7 @@ def mypy_root_spec():
def black_root_spec():
"""Return the root spec used to bootstrap black"""
return _root_spec("py-black@:22.12.0")
return _root_spec("py-black")
def flake8_root_spec():

View File

@@ -8,7 +8,7 @@
import spack.directives
import spack.package_base
from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests
from ._checks import BaseBuilder, apply_macos_rpath_fixups
class Package(spack.package_base.PackageBase):
@@ -38,16 +38,7 @@ class GenericBuilder(BaseBuilder):
legacy_methods: Tuple[str, ...] = ()
#: Names associated with package attributes in the old build-system format
legacy_attributes: Tuple[str, ...] = (
"archive_files",
"install_time_test_callbacks",
)
#: Callback names for post-install phase tests
install_time_test_callbacks = []
legacy_attributes: Tuple[str, ...] = ("archive_files",)
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
# unconditionally perform any post-install phase tests
spack.builder.run_after("install")(execute_install_time_tests)

View File

@@ -120,7 +120,6 @@ def std_meson_args(self):
of package writers.
"""
# standard Meson arguments
std_meson_args = MesonBuilder.std_args(self.pkg)
std_meson_args += getattr(self, "meson_flag_args", [])
return std_meson_args
@@ -183,10 +182,7 @@ def meson_args(self):
def meson(self, pkg, spec, prefix):
"""Run ``meson`` in the build directory"""
options = []
if self.spec["meson"].satisfies("@0.64:"):
options.append("setup")
options.append(os.path.abspath(self.root_mesonlists_dir))
options = [os.path.abspath(self.root_mesonlists_dir)]
options += self.std_meson_args
options += self.meson_args()
with fs.working_dir(self.build_directory, create=True):

View File

@@ -77,7 +77,7 @@ def toolchain_version(self):
Override this method to select a specific version of the toolchain or change
selection heuristics.
Default is whatever version of msvc has been selected by concretization"""
return "v" + self.pkg.compiler.platform_toolset_ver
return self.compiler.msvc_version
@property
def std_msbuild_args(self):

View File

@@ -92,7 +92,7 @@ def makefile_root(self):
This path is relative to the root of the extracted tarball,
not to the ``build_directory``. Defaults to the current directory.
"""
return self.stage.source_path
return self.stage.source_dir
@property
def nmakefile_name(self):

View File

@@ -267,7 +267,7 @@ def update_external_dependencies(self, extendee_spec=None):
python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
self.spec.add_dependency_edge(python, ("build", "link", "run"))
def get_external_python_for_prefix(self):
"""

View File

@@ -138,7 +138,7 @@ class ROCmPackage(PackageBase):
depends_on("llvm-amdgpu", when="+rocm")
depends_on("hsa-rocr-dev", when="+rocm")
depends_on("hip +rocm", when="+rocm")
depends_on("hip", when="+rocm")
conflicts("^blt@:0.3.6", when="+rocm")

View File

@@ -530,9 +530,10 @@ def ci_rebuild(args):
if not verify_binaries:
install_args.append("--no-check-signature")
cdash_args = []
if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
install_args.extend(cdash_handler.args())
cdash_args.extend(cdash_handler.args())
slash_hash = "/{}".format(job_spec.dag_hash())
deps_install_args = install_args

View File

@@ -6,7 +6,6 @@
import argparse
import os.path
import textwrap
from llnl.util.lang import stable_partition
@@ -416,40 +415,6 @@ def add_cdash_args(subparser, add_help):
cdash_subgroup.add_argument("--cdash-buildstamp", default=None, help=cdash_help["buildstamp"])
def print_cdash_help():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=textwrap.dedent(
"""\
environment variables:
SPACK_CDASH_AUTH_TOKEN
authentication token to present to CDash
"""
),
)
add_cdash_args(parser, True)
parser.print_help()
def sanitize_reporter_options(namespace: argparse.Namespace):
"""Sanitize options that affect generation and configuration of reports, like
CDash or JUnit.
Args:
namespace: options parsed from cli
"""
has_any_cdash_option = (
namespace.cdash_upload_url or namespace.cdash_build or namespace.cdash_site
)
if namespace.log_format == "junit" and has_any_cdash_option:
raise argparse.ArgumentTypeError("cannot pass any cdash option when --log-format=junit")
# If any CDash option is passed, assume --log-format=cdash is implied
if namespace.log_format is None and has_any_cdash_option:
namespace.log_format = "cdash"
namespace.reporter = _cdash_reporter(namespace)
class ConfigSetAction(argparse.Action):
"""Generic action for setting spack config options from CLI.

View File

@@ -70,7 +70,7 @@ class {class_name}({base_class_name}):
# FIXME: Add a list of GitHub accounts to
# notify when the package is updated.
# maintainers("github_user1", "github_user2")
# maintainers = ["github_user1", "github_user2"]
{versions}

View File

@@ -46,14 +46,6 @@ def setup_parser(subparser):
)
def shift(asp_function):
"""Transforms ``attr("foo", "bar")`` into ``foo("bar")``."""
if not asp_function.args:
raise ValueError(f"Can't shift ASP function with no arguments: {str(asp_function)}")
first, *rest = asp_function.args
return asp.AspFunction(first, rest)
def compare_specs(a, b, to_string=False, color=None):
"""
Generate a comparison, including diffs (for each side) and an intersection.
@@ -79,7 +71,7 @@ def compare_specs(a, b, to_string=False, color=None):
# get facts for specs, making sure to include build dependencies of concrete
# specs and to descend into dependency hashes so we include all facts.
a_facts = set(
shift(func)
func.shift()
for func in setup.spec_clauses(
a,
body=True,
@@ -89,7 +81,7 @@ def compare_specs(a, b, to_string=False, color=None):
if func.name == "attr"
)
b_facts = set(
shift(func)
func.shift()
for func in setup.spec_clauses(
b,
body=True,

View File

@@ -39,19 +39,12 @@
compiler flags:
@g{cflags="flags"} cppflags, cflags, cxxflags,
fflags, ldflags, ldlibs
@g{cflags=="flags"} propagate flags to package dependencies
cppflags, cflags, cxxflags, fflags,
ldflags, ldlibs
variants:
@B{+variant} enable <variant>
@B{++variant} propagate enable <variant>
@r{-variant} or @r{~variant} disable <variant>
@r{--variant} or @r{~~variant} propagate disable <variant>
@B{variant=value} set non-boolean <variant> to <value>
@B{variant==value} propagate non-boolean <variant> to <value>
@B{variant=value1,value2,value3} set multi-value <variant> values
@B{variant==value1,value2,value3} propagate multi-value <variant> values
architecture variants:
@m{platform=platform} linux, darwin, cray, etc.
@@ -75,8 +68,6 @@
hdf5 @c{@1.8:} @g{%gcc} hdf5 1.8 or higher built with gcc
hdf5 @B{+mpi} hdf5 with mpi enabled
hdf5 @r{~mpi} hdf5 with mpi disabled
hdf5 @B{++mpi} hdf5 with mpi enabled and propagates
hdf5 @r{~~mpi} hdf5 with mpi disabled and propagates
hdf5 @B{+mpi} ^mpich hdf5 with mpi, using mpich
hdf5 @B{+mpi} ^openmpi@c{@1.7} hdf5 with mpi, using openmpi 1.7
boxlib @B{dim=2} boxlib built for 2 dimensions

View File

@@ -7,6 +7,7 @@
import os
import shutil
import sys
import textwrap
from typing import List
import llnl.util.filesystem as fs
@@ -259,7 +260,7 @@ def default_log_file(spec):
def report_filename(args: argparse.Namespace, specs: List[spack.spec.Spec]) -> str:
"""Return the filename to be used for reporting to JUnit or CDash format."""
result = args.log_file or default_log_file(specs[0])
result = args.log_file or args.cdash_upload_url or default_log_file(specs[0])
return result
@@ -347,6 +348,21 @@ def install_specs_outside_environment(specs, install_kwargs):
builder.install()
def print_cdash_help():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=textwrap.dedent(
"""\
environment variables:
SPACK_CDASH_AUTH_TOKEN
authentication token to present to CDash
"""
),
)
arguments.add_cdash_args(parser, True)
parser.print_help()
def install_all_specs_from_active_environment(
install_kwargs, only_concrete, cli_test_arg, reporter_factory
):
@@ -480,7 +496,7 @@ def install(parser, args):
tty.set_verbose(args.verbose or args.install_verbose)
if args.help_cdash:
spack.cmd.common.arguments.print_cdash_help()
print_cdash_help()
return
if args.no_checksum:
@@ -489,8 +505,6 @@ def install(parser, args):
if args.deprecated:
spack.config.set("config:deprecated", True, scope="command_line")
spack.cmd.common.arguments.sanitize_reporter_options(args)
def reporter_factory(specs):
if args.log_format is None:
return None

View File

@@ -97,28 +97,41 @@ def setup_parser(subparser):
def _process_result(result, show, required_format, kwargs):
result.raise_if_unsat()
opt, _, _ = min(result.answers)
opt, *_ = min(result.answers)
# dump the solutions as concretized specs
if ("opt" in show) and (not required_format):
tty.msg("Best of %d considered solutions." % result.nmodels)
tty.msg("Optimization Criteria:")
maxlen = max(len(s[2]) for s in result.criteria)
color.cprint("@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " "))
maxlen = max(len(name) for name in result.criteria)
max_depth = max(len(v) for v in result.criteria.values() if isinstance(v, list))
fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
color.cprint(
fmt
% (
i,
name,
"-" if build_cost is None else installed_cost,
installed_cost if build_cost is None else build_cost,
header = "@*{"
header += "".join(f"{depth:<4}" for depth in range(max_depth))
header += "Criterion}"
color.cprint(header)
# make non-zero numbers red
def highlight(n, c):
return color.colorize(f"@{c}{{{n:<4}}}" if n > 0 else f"{n:<4}")
for i, (name, cost) in enumerate(result.criteria.items(), 1):
colored_name = name.replace("build:", "@c{build:}")
colored_name = colored_name.replace("reuse:", "@B{reuse:}")
colored_name = colored_name.replace("fixed:", "@G{fixed:}")
colored_name = color.colorize(colored_name)
if isinstance(cost, int):
print(highlight(cost, "G") + " " * (max_depth - 1) + colored_name)
else:
print(
"".join(highlight(c, "c" if "build:" in name else "B") for c in cost)
+ colored_name
)
)
print()
# dump the solutions as concretized specs
if "solutions" in show:
for spec in result.specs:
# With -y, just print YAML to output.

View File

@@ -11,6 +11,7 @@
import re
import shutil
import sys
import textwrap
from llnl.util import lang, tty
from llnl.util.tty import colify
@@ -170,11 +171,20 @@ def test_run(args):
# cdash help option
if args.help_cdash:
arguments.print_cdash_help()
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=textwrap.dedent(
"""\
environment variables:
SPACK_CDASH_AUTH_TOKEN
authentication token to present to CDash
"""
),
)
arguments.add_cdash_args(parser, True)
parser.print_help()
return
arguments.sanitize_reporter_options(args)
# set config option for fail-fast
if args.fail_fast:
spack.config.set("config:fail_fast", True, scope="command_line")
@@ -227,22 +237,22 @@ def test_run(args):
)
def report_filename(args, test_suite):
if args.log_file:
if os.path.isabs(args.log_file):
return args.log_file
else:
log_dir = os.getcwd()
return os.path.join(log_dir, args.log_file)
else:
return os.path.join(os.getcwd(), "test-%s" % test_suite.name)
def create_reporter(args, specs_to_test, test_suite):
if args.log_format is None:
return None
filename = report_filename(args, test_suite)
filename = args.cdash_upload_url
if not filename:
if args.log_file:
if os.path.isabs(args.log_file):
log_file = args.log_file
else:
log_dir = os.getcwd()
log_file = os.path.join(log_dir, args.log_file)
else:
log_file = os.path.join(os.getcwd(), "test-%s" % test_suite.name)
filename = log_file
context_manager = spack.report.test_context_manager(
reporter=args.reporter(),
filename=filename,

View File

@@ -63,7 +63,7 @@ def tutorial(parser, args):
if not tty.get_yes_or_no("Are you sure you want to proceed?"):
tty.die("Aborted")
rm_cmds = [f"rm -f {f}" for f in rm_configs]
rm_cmds = ["rm -f %s" % f for f in rm_configs]
tty.msg("Reverting compiler and repository configuration", *rm_cmds)
for path in rm_configs:
if os.path.exists(path):
@@ -71,19 +71,19 @@ def tutorial(parser, args):
tty.msg(
"Ensuring that the tutorial binary mirror is configured:",
f"spack mirror add tutorial {tutorial_mirror}",
"spack mirror add tutorial %s" % tutorial_mirror,
)
mirror_config = syaml_dict()
mirror_config["tutorial"] = tutorial_mirror
spack.config.set("mirrors", mirror_config, scope="user")
tty.msg("Ensuring that we trust tutorial binaries", f"spack gpg trust {tutorial_key}")
tty.msg("Ensuring that we trust tutorial binaries", "spack gpg trust %s" % tutorial_key)
spack.util.gpg.trust(tutorial_key)
# Note that checkout MUST be last. It changes Spack under our feet.
# If you don't put this last, you'll get import errors for the code
# that follows (exacerbated by the various lazy singletons we use)
tty.msg(f"Ensuring we're on the {tutorial_branch} branch")
tty.msg("Ensuring we're on the releases/v{0}.{1} branch".format(*spack.spack_version_info[:2]))
git = spack.util.git.git(required=True)
with working_dir(spack.paths.prefix):
git("checkout", tutorial_branch)

View File

@@ -89,11 +89,6 @@ def cxx14_flag(self):
return "-std=c++14"
return "-h std=c++14"
@property
def cxx17_flag(self):
if self.is_clang_based:
return "-std=c++17"
@property
def c99_flag(self):
if self.is_clang_based:

View File

@@ -103,22 +103,11 @@ def short_msvc_version(self):
"""
This is the shorthand VCToolset version of form
MSVC<short-ver> *NOT* the full version, for that see
Msvc.msvc_version or MSVC.platform_toolset_ver for the
raw platform toolset version
Msvc.msvc_version
"""
ver = self.platform_toolset_ver
ver = self.msvc_version[:2].joined.string[:3]
return "MSVC" + ver
@property
def platform_toolset_ver(self):
"""
This is the platform toolset version of current MSVC compiler
i.e. 142.
This is different from the VC toolset version as established
by `short_msvc_version`
"""
return self.msvc_version[:2].joined.string[:3]
@property
def cl_version(self):
"""Cl toolset version"""

View File

@@ -162,7 +162,7 @@ def entries_to_specs(entries):
continue
parent_spec = spec_dict[entry["hash"]]
dep_spec = spec_dict[dep_hash]
parent_spec._add_dependency(dep_spec, deptypes=deptypes)
parent_spec._add_dependency(dep_spec, deptypes)
return spec_dict

View File

@@ -107,14 +107,6 @@
]
def reader(version):
reader_cls = {
Version("5"): spack.spec.SpecfileV1,
Version("6"): spack.spec.SpecfileV3,
}
return reader_cls[version]
def _now():
"""Returns the time since the epoch"""
return time.time()
@@ -682,7 +674,7 @@ def _write_to_file(self, stream):
except (TypeError, ValueError) as e:
raise sjson.SpackJSONError("error writing JSON database:", str(e))
def _read_spec_from_dict(self, spec_reader, hash_key, installs, hash=ht.dag_hash):
def _read_spec_from_dict(self, hash_key, installs, hash=ht.dag_hash):
"""Recursively construct a spec from a hash in a YAML database.
Does not do any locking.
@@ -700,7 +692,7 @@ def _read_spec_from_dict(self, spec_reader, hash_key, installs, hash=ht.dag_hash
spec_dict[hash.name] = hash_key
# Build spec from dict first.
spec = spec_reader.from_node_dict(spec_dict)
spec = spack.spec.Spec.from_node_dict(spec_dict)
return spec
def db_for_spec_hash(self, hash_key):
@@ -740,7 +732,7 @@ def query_local_by_spec_hash(self, hash_key):
with self.read_transaction():
return self._data.get(hash_key, None)
def _assign_dependencies(self, spec_reader, hash_key, installs, data):
def _assign_dependencies(self, hash_key, installs, data):
# Add dependencies from other records in the install DB to
# form a full spec.
spec = data[hash_key].spec
@@ -750,7 +742,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
spec_node_dict = spec_node_dict[spec.name]
if "dependencies" in spec_node_dict:
yaml_deps = spec_node_dict["dependencies"]
for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
for dname, dhash, dtypes, _ in spack.spec.Spec.read_yaml_dep_specs(yaml_deps):
# It is important that we always check upstream installations
# in the same order, and that we always check the local
# installation first: if a downstream Spack installs a package
@@ -773,7 +765,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
tty.warn(msg)
continue
spec._add_dependency(child, deptypes=dtypes)
spec._add_dependency(child, dtypes)
def _read_from_file(self, filename):
"""Fill database from file, do not maintain old data.
@@ -805,7 +797,6 @@ def check(cond, msg):
# TODO: better version checking semantics.
version = Version(db["version"])
spec_reader = reader(version)
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:
@@ -841,7 +832,7 @@ def invalid_record(hash_key, error):
for hash_key, rec in installs.items():
try:
# This constructs a spec DAG from the list of all installs
spec = self._read_spec_from_dict(spec_reader, hash_key, installs)
spec = self._read_spec_from_dict(hash_key, installs)
# Insert the brand new spec in the database. Each
# spec has its own copies of its dependency specs.
@@ -857,7 +848,7 @@ def invalid_record(hash_key, error):
# Pass 2: Assign dependencies once all specs are created.
for hash_key in data:
try:
self._assign_dependencies(spec_reader, hash_key, installs, data)
self._assign_dependencies(hash_key, installs, data)
except MissingDependenciesError:
raise
except Exception as e:
@@ -1176,7 +1167,7 @@ def _add(
for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
new_spec._add_dependency(record.spec, dep.deptypes)
if not upstream:
record.ref_count += 1

View File

@@ -54,7 +54,6 @@ class OpenMpi(Package):
"conflicts",
"depends_on",
"extends",
"maintainers",
"provides",
"patch",
"variant",
@@ -768,22 +767,6 @@ def build_system(*values, **kwargs):
)
@directive(dicts=())
def maintainers(*names: str):
"""Add a new maintainer directive, to specify maintainers in a declarative way.
Args:
names: GitHub username for the maintainer
"""
def _execute_maintainer(pkg):
maintainers_from_base = getattr(pkg, "maintainers", [])
# Here it is essential to copy, otherwise we might add to an empty list in the parent
pkg.maintainers = list(sorted(set(maintainers_from_base + list(names))))
return _execute_maintainer
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""

View File

@@ -104,15 +104,6 @@ def default_manifest_yaml():
#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 4
READER_CLS = {
1: spack.spec.SpecfileV1,
2: spack.spec.SpecfileV1,
3: spack.spec.SpecfileV2,
4: spack.spec.SpecfileV3,
}
# Magic names
# The name of the standalone spec list in the manifest yaml
user_speclist_name = "specs"
@@ -1445,7 +1436,7 @@ def _concretize_separately(self, tests=False):
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
test_dependency.copy(), deptypes="test"
test_dependency.copy(), deptype="test"
)
results = [
@@ -1951,7 +1942,7 @@ def _to_lockfile_dict(self):
"_meta": {
"file-type": "spack-lockfile",
"lockfile-version": lockfile_format_version,
"specfile-version": spack.spec.SPECFILE_FORMAT_VERSION,
"specfile-version": spack.spec.specfile_format_version,
},
# users specs + hashes are the 'roots' of the environment
"roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
@@ -1984,19 +1975,10 @@ def _read_lockfile_dict(self, d):
# Track specs by their DAG hash, allows handling DAG hash collisions
first_seen = {}
current_lockfile_format = d["_meta"]["lockfile-version"]
try:
reader = READER_CLS[current_lockfile_format]
except KeyError:
msg = (
f"Spack {spack.__version__} cannot read environment lockfiles using the "
f"v{current_lockfile_format} format"
)
raise RuntimeError(msg)
# First pass: Put each spec in the map ignoring dependencies
for lockfile_key, node_dict in json_specs_by_hash.items():
spec = reader.from_node_dict(node_dict)
spec = Spec.from_node_dict(node_dict)
if not spec._hash:
# in v1 lockfiles, the hash only occurs as a key
spec._hash = lockfile_key
@@ -2005,11 +1987,8 @@ def _read_lockfile_dict(self, d):
# Second pass: For each spec, get its dependencies from the node dict
# and add them to the spec
for lockfile_key, node_dict in json_specs_by_hash.items():
name, data = reader.name_and_data(node_dict)
for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
specs_by_hash[lockfile_key]._add_dependency(
specs_by_hash[dep_hash], deptypes=deptypes
)
for _, dep_hash, deptypes, _ in Spec.dependencies_from_node_dict(node_dict):
specs_by_hash[lockfile_key]._add_dependency(specs_by_hash[dep_hash], deptypes)
# Traverse the root specs one at a time in the order they appear.
# The first time we see each DAG hash, that's the one we want to
@@ -2265,12 +2244,15 @@ def _concretize_from_constraints(spec_constraints, tests=False):
m += "concretization target. all specs must have a single name "
m += "constraint for concretization."
raise InvalidSpecConstraintError(m)
spec_constraints.remove(root_spec[0])
invalid_constraints = []
while True:
# Combine constraints into a single spec
s = Spec(" ".join([str(c) for c in spec_constraints if c not in invalid_constraints]))
# Attach all anonymous constraints to one named spec
s = root_spec[0].copy()
for c in spec_constraints:
if c not in invalid_constraints:
s.constrain(c)
try:
return s.concretized(tests=tests)
except spack.spec.InvalidDependencyError as e:

View File

@@ -95,22 +95,6 @@ def _ensure_one_stage_entry(stage_path):
return os.path.join(stage_path, stage_entries[0])
def _filesummary(path, print_bytes=16):
try:
n = print_bytes
with open(path, "rb") as f:
size = os.fstat(f.fileno()).st_size
if size <= 2 * n:
short_contents = f.read(2 * n)
else:
short_contents = f.read(n)
f.seek(-n, 2)
short_contents += b"..." + f.read(n)
return size, short_contents
except OSError:
return 0, b""
def fetcher(cls):
"""Decorator used to register fetch strategies."""
all_strategies.append(cls)
@@ -516,14 +500,9 @@ def check(self):
checker = crypto.Checker(self.digest)
if not checker.check(self.archive_file):
# On failure, provide some information about the file size and
# contents, so that we can quickly see what the issue is (redirect
# was not followed, empty file, text instead of binary, ...)
size, contents = _filesummary(self.archive_file)
raise ChecksumError(
f"{checker.hash_name} checksum failed for {self.archive_file}",
f"Expected {self.digest} but got {checker.sum}. "
f"File size = {size} bytes. Contents = {contents!r}",
"%s checksum failed for %s" % (checker.hash_name, self.archive_file),
"Expected %s but got %s" % (self.digest, checker.sum),
)
@_needs_stage

View File

@@ -90,11 +90,11 @@ def view_copy(src, dst, view, spec=None):
prefix_to_projection[dep.prefix] = view.get_projection_for_spec(dep)
if spack.relocate.is_binary(dst):
spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
spack.relocate.unsafe_relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
else:
prefix_to_projection[spack.store.layout.root] = view._root
prefix_to_projection[orig_sbang] = new_sbang
spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
spack.relocate.unsafe_relocate_text(files=[dst], prefixes=prefix_to_projection)
try:
stat = os.stat(src)
os.chown(dst, stat.st_uid, stat.st_gid)

View File

@@ -283,7 +283,7 @@ def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spe
if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
root_spec._add_dependency(dependency, deptypes=())
root_spec._add_dependency(dependency, ())
else:
break

View File

@@ -292,8 +292,8 @@ def from_json(stream, repository):
index.providers = _transform(
providers,
lambda vpkg, plist: (
spack.spec.SpecfileV3.from_node_dict(vpkg),
set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
spack.spec.Spec.from_node_dict(vpkg),
set(spack.spec.Spec.from_node_dict(p) for p in plist),
),
)
return index

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import itertools
import multiprocessing.pool
import os
import re
import shutil
@@ -26,8 +27,6 @@
import spack.util.elf as elf
import spack.util.executable as executable
from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer
is_macos = str(spack.platforms.real_host()) == "darwin"
@@ -47,6 +46,49 @@ def __init__(self, file_path, root_path):
)
class BinaryStringReplacementError(spack.error.SpackError):
def __init__(self, file_path, old_len, new_len):
"""The size of the file changed after binary path substitution
Args:
file_path (str): file with changing size
old_len (str): original length of the file
new_len (str): length of the file after substitution
"""
super(BinaryStringReplacementError, self).__init__(
"Doing a binary string replacement in %s failed.\n"
"The size of the file changed from %s to %s\n"
"when it should have remanined the same." % (file_path, old_len, new_len)
)
class BinaryTextReplaceError(spack.error.SpackError):
def __init__(self, msg):
msg += (
" To fix this, compile with more padding "
"(config:install_tree:padded_length), or install to a shorter prefix."
)
super(BinaryTextReplaceError, self).__init__(msg)
class CannotGrowString(BinaryTextReplaceError):
def __init__(self, old, new):
msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new)
super(CannotGrowString, self).__init__(msg)
class CannotShrinkCString(BinaryTextReplaceError):
def __init__(self, old, new, full_old_string):
# Just interpolate binary string to not risk issues with invalid
# unicode, which would be really bad user experience: error in error.
# We have no clue if we actually deal with a real C-string nor what
# encoding it has.
msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
old, new, full_old_string
)
super(CannotShrinkCString, self).__init__(msg)
@memoized
def _patchelf():
"""Return the full path to the patchelf binary, if available, else None."""
@@ -408,6 +450,108 @@ def needs_text_relocation(m_type, m_subtype):
return m_type == "text"
def apply_binary_replacements(f, prefix_to_prefix, suffix_safety_size=7):
"""
Given a file opened in rb+ mode, apply the string replacements as
specified by an ordered dictionary of prefix to prefix mappings. This
method takes special care of null-terminated C-strings. C-string constants
are problematic because compilers and linkers optimize readonly strings for
space by aliasing those that share a common suffix (only suffix since all
of them are null terminated). See https://github.com/spack/spack/pull/31739
and https://github.com/spack/spack/pull/32253 for details. Our logic matches
the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes.
If no null terminator is found, we simply pad with leading /, assuming that
it's a long C-string; the full C-string after replacement has a large suffix
in common with its original value.
If there *is* a null terminator we can do the same as long as the replacement
has a sufficiently long common suffix with the original prefix.
As a last resort when the replacement does not have a long enough common suffix,
we can try to shorten the string, but this only works if the new length is
sufficiently short (typically the case when going from large padding -> normal path)
If the replacement string is longer, or all of the above fails, we error out.
Arguments:
f: file opened in rb+ mode
prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
bytes representing the old prefixes and the values are the new
suffix_safety_size (int): in case of null terminated strings, what size
of the suffix should remain to avoid aliasing issues?
"""
assert suffix_safety_size >= 0
assert f.tell() == 0
# Look for exact matches of our paths, and also look if there's a null terminator
# soon after (this covers the case where we search for /abc but match /abc/ with
# a trailing dir seperator).
regex = re.compile(
b"("
+ b"|".join(re.escape(p) for p in prefix_to_prefix.keys())
+ b")([^\0]{0,%d}\0)?" % suffix_safety_size
)
# We *could* read binary data in chunks to avoid loading all in memory,
# but it's nasty to deal with matches across boundaries, so let's stick to
# something simple.
for match in regex.finditer(f.read()):
# The matching prefix (old) and its replacement (new)
old = match.group(1)
new = prefix_to_prefix[old]
# Did we find a trailing null within a N + 1 bytes window after the prefix?
null_terminated = match.end(0) > match.end(1)
# Suffix string length, excluding the null byte
# Only makes sense if null_terminated
suffix_strlen = match.end(0) - match.end(1) - 1
# How many bytes are we shrinking our string?
bytes_shorter = len(old) - len(new)
# We can't make strings larger.
if bytes_shorter < 0:
raise CannotGrowString(old, new)
# If we don't know whether this is a null terminated C-string (we're looking
# only N + 1 bytes ahead), or if it is and we have a common suffix, we can
# simply pad with leading dir separators.
elif (
not null_terminated
or suffix_strlen >= suffix_safety_size # == is enough, but let's be defensive
or old[-suffix_safety_size + suffix_strlen :]
== new[-suffix_safety_size + suffix_strlen :]
):
replacement = b"/" * bytes_shorter + new
# If it *was* null terminated, all that matters is that we can leave N bytes
# of old suffix in place. Note that > is required since we also insert an
# additional null terminator.
elif bytes_shorter > suffix_safety_size:
replacement = new + match.group(2) # includes the trailing null
# Otherwise... we can't :(
else:
raise CannotShrinkCString(old, new, match.group()[:-1])
f.seek(match.start())
f.write(replacement)
def _replace_prefix_bin(filename, prefix_to_prefix):
"""Replace all the occurrences of the old prefix with a new prefix in binary
files. See :func:`~spack.relocate.apply_binary_replacements` for details.
Args:
filename (str): target binary file
byte_prefixes (OrderedDict): ordered dictionary where the keys are
bytes representing the old prefixes and the values are the new
prefixes (all bytes utf-8 encoded)
"""
with open(filename, "rb+") as f:
apply_binary_replacements(f, prefix_to_prefix)
def relocate_macho_binaries(
path_names,
old_layout_root,
@@ -656,32 +800,120 @@ def relocate_links(links, prefix_to_prefix):
symlink(new_target, link)
def relocate_text(files, prefixes):
def utf8_path_to_binary_regex(prefix):
"""Create a (binary) regex that matches the input path in utf8"""
prefix_bytes = re.escape(prefix).encode("utf-8")
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)
def byte_strings_to_single_binary_regex(prefixes):
all_prefixes = b"|".join(re.escape(p) for p in prefixes)
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)
def utf8_paths_to_single_binary_regex(prefixes):
"""Create a (binary) regex that matches any input path in utf8"""
return byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)
def _replace_prefix_text_file(file, regex, prefix_to_prefix):
"""Given a text file opened in rb+, substitute all old with new prefixes and write
in-place (file size may grow or shrink)."""
def replacement(match):
return match.group(1) + prefix_to_prefix[match.group(2)] + match.group(3)
data = file.read()
file.seek(0)
file.write(re.sub(regex, replacement, data))
file.truncate()
def _replace_prefix_text(filename, regex, prefix_to_prefix):
with open(filename, "rb+") as f:
_replace_prefix_text_file(f, regex, prefix_to_prefix)
def unsafe_relocate_text(files, prefixes, concurrency=32):
"""Relocate text file from the original installation prefix to the
new prefix.
Relocation also affects the the path in Spack's sbang script.
Note: unsafe when files contains duplicates, such as repeated paths,
symlinks, hardlinks.
Args:
files (list): Text files to be relocated
prefixes (OrderedDict): String prefixes which need to be changed
concurrency (int): Preferred degree of parallelism
"""
TextFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(files)
# This now needs to be handled by the caller in all cases
# orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
# new_sbang = '#!/bin/bash {0}/bin/sbang'.format(new_spack)
# Transform to binary string
prefix_to_prefix = OrderedDict(
(k.encode("utf-8"), v.encode("utf-8")) for (k, v) in prefixes.items()
)
# Create a regex of the form (pre check)(prefix 1|prefix 2|prefix 3)(post check).
regex = byte_strings_to_single_binary_regex(prefix_to_prefix.keys())
args = [(filename, regex, prefix_to_prefix) for filename in files]
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
tp.map(llnl.util.lang.star(_replace_prefix_text), args)
finally:
tp.terminate()
tp.join()
def relocate_text_bin(binaries, prefixes):
"""Replace null terminated path strings hard-coded into binaries.
def unsafe_relocate_text_bin(binaries, prefixes, concurrency=32):
"""Replace null terminated path strings hard coded into binaries.
The new install prefix must be shorter than the original one.
Note: unsafe when files contains duplicates, such as repeated paths,
symlinks, hardlinks.
Args:
binaries (list): binaries to be relocated
prefixes (OrderedDict): String prefixes which need to be changed.
concurrency (int): Desired degree of parallelism.
Raises:
spack.relocate_text.BinaryTextReplaceError: when the new path is longer than the old path
BinaryTextReplaceError: when the new path is longer than the old path
"""
BinaryFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(binaries)
byte_prefixes = collections.OrderedDict({})
for orig_prefix, new_prefix in prefixes.items():
if orig_prefix != new_prefix:
if isinstance(orig_prefix, bytes):
orig_bytes = orig_prefix
else:
orig_bytes = orig_prefix.encode("utf-8")
if isinstance(new_prefix, bytes):
new_bytes = new_prefix
else:
new_bytes = new_prefix.encode("utf-8")
byte_prefixes[orig_bytes] = new_bytes
# Do relocations on text in binaries that refers to the install tree
# multiprocesing.ThreadPool.map requires single argument
args = []
for binary in binaries:
args.append((binary, byte_prefixes))
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
tp.map(llnl.util.lang.star(_replace_prefix_bin), args)
finally:
tp.terminate()
tp.join()
def is_relocatable(spec):
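The padding trick used by ``apply_binary_replacements`` above can be shown in isolation: when the replacement prefix is shorter, it is left-padded with ``/`` so the overall byte length (and any shared C-string suffix) is preserved. A standalone sketch under that assumption (not Spack's code; the paths are made up):

.. code-block:: python

    import re

    def pad_replace(data: bytes, old: bytes, new: bytes) -> bytes:
        """Replace `old` with `new`, left-padded with '/' so the string keeps
        its original byte length (new must not be longer than old)."""
        assert len(new) <= len(old), "cannot grow strings in place"
        padded = b"/" * (len(old) - len(new)) + new
        return re.sub(re.escape(old), padded, data)

    # The null terminator and everything after the prefix stay where they were:
    print(pad_replace(b"/very/long/padded/prefix/bin\x00",
                      b"/very/long/padded/prefix", b"/short/prefix"))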

View File

@@ -1,288 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains pure-Python classes and functions for replacing
paths inside text files and binaries."""
import re
from collections import OrderedDict
from typing import Dict, Union
import spack.error
Prefix = Union[str, bytes]
def encode_path(p: Prefix) -> bytes:
return p if isinstance(p, bytes) else p.encode("utf-8")
def _prefix_to_prefix_as_bytes(prefix_to_prefix) -> Dict[bytes, bytes]:
return OrderedDict((encode_path(k), encode_path(v)) for (k, v) in prefix_to_prefix.items())
def utf8_path_to_binary_regex(prefix: str):
"""Create a binary regex that matches the input path in utf8"""
prefix_bytes = re.escape(prefix).encode("utf-8")
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)
def _byte_strings_to_single_binary_regex(prefixes):
all_prefixes = b"|".join(re.escape(p) for p in prefixes)
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)
def utf8_paths_to_single_binary_regex(prefixes):
"""Create a (binary) regex that matches any input path in utf8"""
return _byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)
def filter_identity_mappings(prefix_to_prefix):
"""Drop mappings that are not changed."""
# NOTE: we don't guard against the following case:
# [/abc/def -> /abc/def, /abc -> /x] *will* be simplified to
# [/abc -> /x], meaning that after this simplification /abc/def will be
# mapped to /x/def instead of /abc/def. This should not be a problem.
return OrderedDict((k, v) for (k, v) in prefix_to_prefix.items() if k != v)
class PrefixReplacer:
"""Base class for applying a prefix to prefix map
to a list of binaries or text files.
Child classes implement _apply_to_file to do the
actual work, which is different when it comes to
binaries and text files."""
def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
"""
Arguments:
prefix_to_prefix (OrderedDict):
A ordered mapping from prefix to prefix. The order is
relevant to support substring fallbacks, for example
[("/first/sub", "/x"), ("/first", "/y")] will ensure
/first/sub is matched and replaced before /first.
"""
self.prefix_to_prefix = filter_identity_mappings(prefix_to_prefix)
@property
def is_noop(self) -> bool:
"""Returns true when the prefix to prefix map
is mapping everything to the same location (identity)
or there are no prefixes to replace."""
return not bool(self.prefix_to_prefix)
def apply(self, filenames: list):
if self.is_noop:
return
for filename in filenames:
self.apply_to_filename(filename)
def apply_to_filename(self, filename):
if self.is_noop:
return
with open(filename, "rb+") as f:
self.apply_to_file(f)
def apply_to_file(self, f):
if self.is_noop:
return
self._apply_to_file(f)
class TextFilePrefixReplacer(PrefixReplacer):
"""This class applies prefix to prefix mappings for relocation
on text files.
Note that UTF-8 encoding is assumed."""
def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
"""
prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
bytes representing the old prefixes and the values are the new.
"""
super().__init__(prefix_to_prefix)
# Single regex for all paths.
self.regex = _byte_strings_to_single_binary_regex(self.prefix_to_prefix.keys())
@classmethod
def from_strings_or_bytes(
cls, prefix_to_prefix: Dict[Prefix, Prefix]
) -> "TextFilePrefixReplacer":
"""Create a TextFilePrefixReplacer from an ordered prefix to prefix map."""
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix))
def _apply_to_file(self, f):
"""Text replacement implementation simply reads the entire file
in memory and applies the combined regex."""
replacement = lambda m: m.group(1) + self.prefix_to_prefix[m.group(2)] + m.group(3)
data = f.read()
new_data = re.sub(self.regex, replacement, data)
if id(data) == id(new_data):
return
f.seek(0)
f.write(new_data)
f.truncate()
class BinaryFilePrefixReplacer(PrefixReplacer):
def __init__(self, prefix_to_prefix, suffix_safety_size=7):
"""
prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
bytes representing the old prefixes and the values are the new
suffix_safety_size (int): in case of null terminated strings, what size
of the suffix should remain to avoid aliasing issues?
"""
assert suffix_safety_size >= 0
super().__init__(prefix_to_prefix)
self.suffix_safety_size = suffix_safety_size
self.regex = self.binary_text_regex(self.prefix_to_prefix.keys(), suffix_safety_size)
@classmethod
def binary_text_regex(cls, binary_prefixes, suffix_safety_size=7):
"""
Create a regex that looks for exact matches of prefixes, and also tries to
match a C-string type null terminator in a small lookahead window.
Arguments:
binary_prefixes (list): List of byte strings of prefixes to match
suffix_safety_size (int): Sizeof the lookahed for null-terminated string.
Returns: compiled regex
"""
return re.compile(
b"("
+ b"|".join(re.escape(p) for p in binary_prefixes)
+ b")([^\0]{0,%d}\0)?" % suffix_safety_size
)
@classmethod
def from_strings_or_bytes(
cls, prefix_to_prefix: Dict[Prefix, Prefix], suffix_safety_size: int = 7
) -> "BinaryFilePrefixReplacer":
"""Create a BinaryFilePrefixReplacer from an ordered prefix to prefix map.
Arguments:
prefix_to_prefix (OrderedDict): Ordered mapping of prefix to prefix.
suffix_safety_size (int): Number of bytes to retain at the end of a C-string
to avoid binary string-aliasing issues.
"""
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix), suffix_safety_size)
def _apply_to_file(self, f):
"""
Given a file opened in rb+ mode, apply the string replacements as
specified by an ordered dictionary of prefix to prefix mappings. This
method takes special care of null-terminated C-strings. C-string constants
are problematic because compilers and linkers optimize readonly strings for
space by aliasing those that share a common suffix (only suffix since all
of them are null terminated). See https://github.com/spack/spack/pull/31739
and https://github.com/spack/spack/pull/32253 for details. Our logic matches
the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes.
If no null terminator is found, we simply pad with leading /, assuming that
it's a long C-string; the full C-string after replacement has a large suffix
in common with its original value.
If there *is* a null terminator we can do the same as long as the replacement
has a sufficiently long common suffix with the original prefix.
As a last resort when the replacement does not have a long enough common suffix,
we can try to shorten the string, but this only works if the new length is
sufficiently short (typically the case when going from large padding -> normal path)
If the replacement string is longer, or all of the above fails, we error out.
Arguments:
f: file opened in rb+ mode
"""
assert f.tell() == 0
# We *could* read binary data in chunks to avoid loading all in memory,
# but it's nasty to deal with matches across boundaries, so let's stick to
# something simple.
for match in self.regex.finditer(f.read()):
# The matching prefix (old) and its replacement (new)
old = match.group(1)
new = self.prefix_to_prefix[old]
# Did we find a trailing null within a N + 1 bytes window after the prefix?
null_terminated = match.end(0) > match.end(1)
# Suffix string length, excluding the null byte
# Only makes sense if null_terminated
suffix_strlen = match.end(0) - match.end(1) - 1
# How many bytes are we shrinking our string?
bytes_shorter = len(old) - len(new)
# We can't make strings larger.
if bytes_shorter < 0:
raise CannotGrowString(old, new)
# If we don't know whether this is a null terminated C-string (we're looking
# only N + 1 bytes ahead), or if it is and we have a common suffix, we can
# simply pad with leading dir separators.
elif (
not null_terminated
or suffix_strlen >= self.suffix_safety_size # == is enough, but let's be defensive
or old[-self.suffix_safety_size + suffix_strlen :]
== new[-self.suffix_safety_size + suffix_strlen :]
):
replacement = b"/" * bytes_shorter + new
# If it *was* null terminated, all that matters is that we can leave N bytes
# of old suffix in place. Note that > is required since we also insert an
# additional null terminator.
elif bytes_shorter > self.suffix_safety_size:
replacement = new + match.group(2) # includes the trailing null
# Otherwise... we can't :(
else:
raise CannotShrinkCString(old, new, match.group()[:-1])
f.seek(match.start())
f.write(replacement)
class BinaryStringReplacementError(spack.error.SpackError):
def __init__(self, file_path, old_len, new_len):
"""The size of the file changed after binary path substitution
Args:
file_path (str): file with changing size
old_len (int): original length of the file
new_len (int): length of the file after substitution
"""
super(BinaryStringReplacementError, self).__init__(
"Doing a binary string replacement in %s failed.\n"
"The size of the file changed from %s to %s\n"
"when it should have remanined the same." % (file_path, old_len, new_len)
)
class BinaryTextReplaceError(spack.error.SpackError):
def __init__(self, msg):
msg += (
" To fix this, compile with more padding "
"(config:install_tree:padded_length), or install to a shorter prefix."
)
super(BinaryTextReplaceError, self).__init__(msg)
class CannotGrowString(BinaryTextReplaceError):
def __init__(self, old, new):
msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new)
super(CannotGrowString, self).__init__(msg)
class CannotShrinkCString(BinaryTextReplaceError):
def __init__(self, old, new, full_old_string):
# Just interpolate binary string to not risk issues with invalid
# unicode, which would be really bad user experience: error in error.
# We have no clue if we actually deal with a real C-string nor what
# encoding it has.
msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
old, new, full_old_string
)
super(CannotShrinkCString, self).__init__(msg)
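A minimal sketch of the padding strategy described in ``_apply_to_file`` above, assuming a shorter replacement prefix and no null terminator within the lookahead window (the names, paths, and pattern shape below are illustrative, not part of the module):

import re

old, new = b"/long/old/prefix", b"/short"
# Same shape as the regex built above: the prefix, optionally followed by up
# to 7 non-null bytes and a null terminator.
pattern = re.compile(re.escape(old) + b"([^\0]{0,7}\0)?")

data = bytearray(b"\0" + old + b"/lib/libz.so\0rest")
m = pattern.search(data)

# No null terminator within the lookahead, so treat it as a long C-string and
# pad with leading "/" to keep the file size and the shared suffix unchanged.
padded = b"/" * (len(old) - len(new)) + new
data[m.start() : m.start() + len(old)] = padded
assert len(padded) == len(old) and data.count(b"\0") == 2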


@@ -70,7 +70,7 @@ def rewire_node(spec, explicit):
for rel_path in manifest.get("text_to_relocate", [])
]
if text_to_relocate:
relocate.relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)
relocate.unsafe_relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)
bins_to_relocate = [
os.path.join(tempdir, spec.dag_hash(), rel_path)
@@ -97,7 +97,7 @@ def rewire_node(spec, explicit):
spec.build_spec.prefix,
spec.prefix,
)
relocate.relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
relocate.unsafe_relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
# Copy package into place, except for spec.json (because spec.json
# describes the old spec and not the new spliced spec).
shutil.copytree(


@@ -13,6 +13,7 @@
import re
import types
import warnings
from typing import Dict, List, Tuple, Union
import archspec.cpu
@@ -124,84 +125,87 @@ def getter(node):
# The space of possible priorities for optimization targets
# is partitioned in the following ranges:
#
# [0-100) Optimization criteria for software being reused
# [100-200) Fixed criteria that are higher priority than reuse, but lower than build
# [200-300) Optimization criteria for software being built
# [300-1000) High-priority fixed criteria
# [1000-inf) Error conditions
# +=============================================================+
# | Priority | Description |
# +=============================================================+
# | 10,000,000+ | Error conditions |
# +-------------+-----------------------------------------------+
# | 9,999,999 | |
# | ... | High-priority criteria |
# | 1,000,000 | |
# +-------------+-----------------------------------------------+
# | 999,999 | |
# | ... | Standard criteria for built packages |
# | 100,001 | |
# +-------------+-----------------------------------------------+
# | 100,000 | Number of packages being built |
# +-------------+-----------------------------------------------+
# | 99,999 | |
# | ... | Standard criteria for reused packages |
# | 0 | |
# +-------------+-----------------------------------------------+
#
# Each optimization target is a minimization with optimal value 0.
#
#: High fixed priority offset for criteria that supersede all build criteria
high_fixed_priority_offset = 300
high_fixed_priority_offset = 10_000_000
#: Priority offset for "build" criteria (regular criteria shifted to
#: higher priority for specs we have to build)
build_priority_offset = 200
build_priority_offset = 100_000
#: Priority offset of "fixed" criteria (those w/o build criteria)
fixed_priority_offset = 100
#: max priority for an error
max_error_priority = 3
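As a hedged illustration of how these offsets combine (ignoring the per-depth leveling handled in concretize.lp, and using a made-up helper name):

def bucket(priority, built):
    # A criterion at priority 50 (e.g. "number of non-default variants") lands
    # at 100_050 when the package must be built, and stays at 50 when reused.
    return priority + (build_priority_offset if built else 0)

assert bucket(50, built=True) == 100_050
assert bucket(50, built=False) == 50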
def build_criteria_names(costs, arg_tuples):
def build_criteria_names(
costs: List[int], opt_criteria: List["AspFunction"], max_depth: int
) -> Dict[str, Union[int, List[Tuple[int, int]]]]:
"""Construct an ordered mapping from criteria names to costs."""
# pull optimization criteria names out of the solution
priorities_names = []
num_fixed = 0
num_high_fixed = 0
for args in arg_tuples:
priority, name = args[:2]
priority = int(priority)
# ensure names of all criteria are unique
names = {criterion.args[0] for criterion in opt_criteria}
assert len(names) == len(opt_criteria), "names of optimization criteria must be unique"
# add the priority of this opt criterion and its name
priorities_names.append((priority, name))
# split opt criteria into two lists
fixed_criteria = [oc for oc in opt_criteria if oc.args[1] == "fixed"]
leveled_criteria = [oc for oc in opt_criteria if oc.args[1] == "leveled"]
# if the priority is less than fixed_priority_offset, then it
# has an associated build priority -- the same criterion but for
# nodes that we have to build.
if priority < fixed_priority_offset:
build_priority = priority + build_priority_offset
priorities_names.append((build_priority, name))
elif priority >= high_fixed_priority_offset:
num_high_fixed += 1
else:
num_fixed += 1
# first non-error criterion
solve_index = max_error_priority + 1
# sort the criteria by priority
priorities_names = sorted(priorities_names, reverse=True)
# compute without needing max_depth from solve
max_leveled_costs = (len(costs) - max_error_priority - 3) / 2
assert max_leveled_costs * 2 == len(costs) - max_error_priority - 3
assert max_leveled_costs % len(leveled_criteria) == 0
max_leveled_costs = int(max_leveled_costs)
# We only have opt-criterion values for non-error types
# error type criteria are excluded (they come first)
error_criteria = len(costs) - len(priorities_names)
costs = costs[error_criteria:]
n_leveled_costs = len(leveled_criteria) * (max_depth + 1)
# split list into three parts: build criteria, fixed criteria, non-build criteria
num_criteria = len(priorities_names)
num_build = (num_criteria - num_fixed - num_high_fixed) // 2
build_index = solve_index + 1 + max_leveled_costs
fixed_costs = [costs[solve_index], costs[build_index]]
build_start_idx = num_high_fixed
fixed_start_idx = num_high_fixed + num_build
installed_start_idx = num_high_fixed + num_build + num_fixed
build_costs = costs[solve_index + 1 : solve_index + 1 + n_leveled_costs]
reuse_costs = costs[build_index + 1 : build_index + 1 + n_leveled_costs]
assert len(build_costs) == len(reuse_costs) == n_leveled_costs
high_fixed = priorities_names[:build_start_idx]
build = priorities_names[build_start_idx:fixed_start_idx]
fixed = priorities_names[fixed_start_idx:installed_start_idx]
installed = priorities_names[installed_start_idx:]
criteria = {}
# mapping from priority to index in cost list
indices = dict((p, i) for i, (p, n) in enumerate(priorities_names))
def add_fixed(criterion_idx, cost_idx):
name = fixed_criteria[criterion_idx].args[2]
criteria["fixed: " + name] = costs[cost_idx]
# make a list that has each name with its build and non-build costs
criteria = [(cost, None, name) for cost, (p, name) in zip(costs[:build_start_idx], high_fixed)]
criteria += [
(cost, None, name)
for cost, (p, name) in zip(costs[fixed_start_idx:installed_start_idx], fixed)
]
add_fixed(0, solve_index)
for (i, name), (b, _) in zip(installed, build):
criteria.append((costs[indices[i]], costs[indices[b]], name))
for i, fn in enumerate(leveled_criteria):
name = fn.args[2]
criteria["build: " + name] = build_costs[i :: len(leveled_criteria)]
add_fixed(1, build_index)
for i, fn in enumerate(leveled_criteria):
name = fn.args[2]
criteria["reuse: " + name] = reuse_costs[i :: len(leveled_criteria)]
return criteria
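For orientation, the returned mapping has roughly this shape (values are illustrative): fixed criteria map to a single cost, while leveled criteria map to one cost per depth level in the DAG, separately for built and reused packages.

criteria = {
    "fixed: number of input specs not concretized": 0,
    "fixed: number of packages to build (vs. reuse)": 3,
    # one entry per depth level (max_depth + 1 entries)
    "build: version badness": [0, 2, 1, 0, 0],
    "reuse: version badness": [0, 0, 0, 0, 0],
}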
@@ -251,7 +255,11 @@ def _id(thing):
class AspFunction(AspObject):
def __init__(self, name, args=None):
self.name = name
self.args = () if args is None else tuple(args)
def simplify(arg):
return arg if isinstance(arg, (str, bool, int)) else str(arg)
self.args = () if args is None else tuple(simplify(arg) for arg in args)
def _cmp_key(self):
return (self.name, self.args)
@@ -286,10 +294,29 @@ def argify(arg):
elif isinstance(arg, int):
return clingo.Number(arg)
else:
return clingo.String(str(arg))
return clingo.String(arg)
return clingo.Function(self.name, [argify(arg) for arg in self.args], positive=positive)
@staticmethod
def from_symbol(symbol):
def deargify(arg):
if arg.type is clingo.SymbolType.Number:
return arg.number
elif arg.type is clingo.SymbolType.String and arg.string in ("True", "False"):
return arg.string == "True"
else:
return arg.string
return AspFunction(symbol.name, [deargify(arg) for arg in symbol.arguments])
def shift(self):
"""Transforms ``attr("foo", "bar")`` into ``foo("bar")``."""
if not self.args:
raise ValueError(f"Can't shift ASP function with no arguments: {str(self)}")
first, *rest = self.args
return AspFunction(first, rest)
def __str__(self):
return "%s(%s)" % (self.name, ", ".join(str(_id(arg)) for arg in self.args))
@@ -494,7 +521,8 @@ def _compute_specs_from_answer_set(self):
self._concrete_specs, self._unsolved_specs = [], []
self._concrete_specs_by_input = {}
best = min(self.answers)
opt, _, answer = best
opt, _, answer, _ = best
for input_spec in self.abstract_specs:
key = input_spec.name
if input_spec.virtual:
@@ -576,13 +604,13 @@ def stringify(sym):
return sym.string or str(sym)
def extract_args(model, predicate_name):
"""Extract the arguments to predicates with the provided name from a model.
def extract_functions(model, function_name):
"""Extract ASP functions with the given name from a model.
Pull out all the predicates with name ``predicate_name`` from the model, and return
their stringified arguments as tuples.
Pull out all the functions with name ``function_name`` from the model, and return them as
``AspFunction`` objects.
"""
return [stringify(sym.arguments) for sym in model if sym.name == predicate_name]
return [AspFunction.from_symbol(sym) for sym in model if sym.name == function_name]
class PyclingoDriver(object):
@@ -681,11 +709,10 @@ def solve(self, setup, specs, reuse=None, output=None, control=None):
self.control = control or default_clingo_control()
# set up the problem -- this generates facts and rules
self.assumptions = []
timer.start("setup")
with self.control.backend() as backend:
self.backend = backend
setup.setup(self, specs, reuse=reuse)
timer.stop("setup")
with timer.measure("setup"):
with self.control.backend() as backend:
self.backend = backend
setup.setup(self, specs, reuse=reuse)
timer.start("load")
# read in the main ASP program and display logic -- these are
@@ -731,7 +758,8 @@ def visit(node):
cores = [] # unsatisfiable cores if they do not
def on_model(model):
models.append((model.cost, model.symbols(shown=True, terms=True)))
priorities = getattr(model, "priority", None)
models.append((model.cost, priorities, model.symbols(shown=True, terms=True)))
solve_kwargs = {
"assumptions": self.assumptions,
@@ -752,24 +780,28 @@ def on_model(model):
if result.satisfiable:
# get the best model
builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
min_cost, best_model = min(models)
min_cost, priorities, best_model = min(models)
# first check for errors
error_args = extract_args(best_model, "error")
error_args = [fn.args for fn in extract_functions(best_model, "error")]
errors = sorted((int(priority), msg, args) for priority, msg, *args in error_args)
for _, msg, args in errors:
self.handle_error(msg, *args)
# build specs from spec attributes in the model
spec_attrs = [(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")]
answers = builder.build_specs(spec_attrs)
spec_attrs = extract_functions(best_model, "attr")
with timer.measure("build"):
answers = builder.build_specs(spec_attrs)
# add best spec to the results
result.answers.append((list(min_cost), 0, answers))
result.answers.append((list(min_cost), 0, answers, spec_attrs))
# get optimization criteria
criteria_args = extract_args(best_model, "opt_criterion")
result.criteria = build_criteria_names(min_cost, criteria_args)
criteria = extract_functions(best_model, "opt_criterion")
depths = extract_functions(best_model, "depth")
max_depth = max(d.args[1] for d in depths)
result.criteria = build_criteria_names(min_cost, criteria, max_depth)
# record the number of models the solver considered
result.nmodels = len(models)
@@ -779,7 +811,7 @@ def on_model(model):
# print any unknown functions in the model
for sym in best_model:
if sym.name not in ("attr", "error", "opt_criterion"):
if sym.name not in ("attr", "error", "opt_criterion", "depth", "const_max_depth"):
tty.debug(
"UNKNOWN SYMBOL: %s(%s)" % (sym.name, ", ".join(stringify(sym.arguments)))
)
@@ -1409,23 +1441,23 @@ def spec_clauses(self, *args, **kwargs):
def _spec_clauses(
self,
spec,
body=False,
transitive=True,
expand_hashes=False,
concrete_build_deps=False,
body: bool = False,
transitive: bool = True,
expand_hashes: bool = False,
concrete_build_deps: bool = False,
deptype: Union[str, Tuple[str, ...]] = "all",
):
"""Return a list of clauses for a spec mandates are true.
Arguments:
spec (spack.spec.Spec): the spec to analyze
body (bool): if True, generate clauses to be used in rule bodies
spec: the spec to analyze
body: if True, generate clauses to be used in rule bodies
(final values) instead of rule heads (setters).
transitive (bool): if False, don't generate clauses from
dependencies (default True)
expand_hashes (bool): if True, descend into hashes of concrete specs
(default False)
concrete_build_deps (bool): if False, do not include pure build deps
transitive: if False, don't generate clauses from dependencies.
expand_hashes: If transitive and True, descend into hashes of concrete specs.
concrete_build_deps: if False, do not include pure build deps
of concrete specs (as they have no effect on runtime constraints)
deptype: dependency types to follow when transitive (default "all").
Normally, if called with ``transitive=True``, ``spec_clauses()`` just generates
hashes for the dependency requirements of concrete specs. If ``expand_hashes``
@@ -1553,7 +1585,7 @@ class Body(object):
# add all clauses from dependencies
if transitive:
# TODO: Eventually distinguish 2 deps on the same pkg (build and link)
for dspec in spec.edges_to_dependencies():
for dspec in spec.edges_to_dependencies(deptype=deptype):
dep = dspec.spec
if spec.concrete:
@@ -1585,6 +1617,7 @@ class Body(object):
body=body,
expand_hashes=expand_hashes,
concrete_build_deps=concrete_build_deps,
deptype=deptype,
)
)
@@ -2259,7 +2292,7 @@ def depends_on(self, pkg, dep, type):
assert len(dependencies) < 2, msg
if not dependencies:
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
self._specs[pkg].add_dependency_edge(self._specs[dep], (type,))
else:
# TODO: This assumes that each solve unifies dependencies
dependencies[0].add_type(type)
@@ -2316,7 +2349,7 @@ def deprecated(self, pkg, version):
tty.warn(msg.format(pkg, version))
@staticmethod
def sort_fn(function_tuple):
def sort_fn(function):
"""Ensure attributes are evaluated in the correct order.
hash attributes are handled first, since they imply entire concrete specs
@@ -2326,7 +2359,7 @@ def sort_fn(function_tuple):
the concrete specs on which they depend because all nodes are fully constructed before we
consider which ones are external.
"""
name = function_tuple[0]
name = function.args[0]
if name == "hash":
return (-5, 0)
elif name == "node":
@@ -2340,23 +2373,24 @@ def sort_fn(function_tuple):
else:
return (-1, 0)
def build_specs(self, function_tuples):
def build_specs(self, functions):
# Functions don't seem to be in particular order in output. Sort
# them here so that directives that build objects (like node and
# node_compiler) are called in the right order.
self.function_tuples = sorted(set(function_tuples), key=self.sort_fn)
self.functions = sorted(set(functions), key=self.sort_fn)
self._specs = {}
for name, args in self.function_tuples:
if SpecBuilder.ignored_attributes.match(name):
for attr in self.functions:
fn = attr.shift() # attr("foo", "bar") -> foo("bar")
if SpecBuilder.ignored_attributes.match(fn.name):
continue
action = getattr(self, name, None)
action = getattr(self, fn.name, None)
# print out unknown actions so we can display them for debugging
if not action:
msg = 'UNKNOWN SYMBOL: attr("%s", %s)' % (name, ", ".join(str(a) for a in args))
tty.debug(msg)
tty.debug(f"UNKNOWN SYMBOL: {attr}")
continue
msg = (
@@ -2368,8 +2402,8 @@ def build_specs(self, function_tuples):
# ignore predicates on virtual packages, as they're used for
# solving but don't construct anything. Do not ignore error
# predicates on virtual packages.
if name != "error":
pkg = args[0]
if fn.name != "error":
pkg = fn.args[0]
if spack.repo.path.is_virtual(pkg):
continue
@@ -2379,7 +2413,7 @@ def build_specs(self, function_tuples):
if spec and spec.concrete:
continue
action(*args)
action(*fn.args)
# namespace assignment is done after the fact, as it is not
# currently part of the solve


@@ -23,9 +23,17 @@ literal_not_solved(ID) :- not literal_solved(ID), literal(ID).
% in better reporting for users. See #30669 for details.
1 { literal_solved(ID) : literal(ID) }.
opt_criterion(300, "number of input specs not concretized").
#minimize{ 0@300: #true }.
#minimize { 1@300,ID : literal_not_solved(ID) }.
% priority ranges for optimization criteria
% note that clingo's weight_t is int32_t, so the max priority we can use is 2,147,483,647
#const max_error_priority = 3.
#const error_prio = 10000000.
#const solve_prio = 1000000.
#const build_prio = 100000. % n_nodes x depth_offset x max levels needs to be less than this
#const depth_offset = 100. % depth_offset-1 is the max id for leveled criteria
opt_criterion(solve_prio, "fixed", "number of input specs not concretized").
#minimize{ 0@solve_prio: #true }.
#minimize{ 1@solve_prio,ID : literal_not_solved(ID) }.
% Map constraint on the literal ID to the correct PSID
attr(Name, A1) :- literal(LiteralID, Name, A1), literal_solved(LiteralID).
@@ -65,7 +73,8 @@ version_declared(Package, Version, Weight) :- version_declared(Package, Version,
version_declared(Package, Version) :- version_declared(Package, Version, _).
% a spec with a git hash version is equivalent to one with the same matched version
version_satisfies(Package, Constraint, HashVersion) :- version_satisfies(Package, Constraint, EquivalentVersion),
version_satisfies(Package, Constraint, HashVersion) :-
version_satisfies(Package, Constraint, EquivalentVersion),
version_equivalent(Package, HashVersion, EquivalentVersion).
#defined version_equivalent/3.
@@ -146,7 +155,10 @@ possible_version_weight(Package, Weight)
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
error(1, "No valid version for '{0}' satisfies '@{1}'", Package, Constraint)
:- attr("node_version_satisfies", Package, Constraint),
C = #count{ Version : attr("version", Package, Version), version_satisfies(Package, Constraint, Version)},
C = #count{
Version
: attr("version", Package, Version), version_satisfies(Package, Constraint, Version)
},
C < 1.
attr("node_version_satisfies", Package, Constraint)
@@ -1077,11 +1089,11 @@ build(Package) :- not attr("hash", Package, _), attr("node", Package).
% 200+ Shifted priorities for build nodes; correspond to priorities 0 - 99.
% 100 - 199 Unshifted priorities. Currently only includes minimizing #builds.
% 0 - 99 Priorities for non-built nodes.
build_priority(Package, 200) :- build(Package), attr("node", Package), optimize_for_reuse().
build_priority(Package, 0) :- not build(Package), attr("node", Package), optimize_for_reuse().
build_priority(Package, build_prio) :- build(Package), attr("node", Package), optimize_for_reuse().
build_priority(Package, 0) :- not build(Package), attr("node", Package), optimize_for_reuse().
% don't adjust build priorities if reuse is not enabled
build_priority(Package, 0) :- attr("node", Package), not optimize_for_reuse().
build_priority(Package, build_prio) :- attr("node", Package), not optimize_for_reuse().
% don't assign versions from installed packages unless reuse is enabled
% NOTE: that "installed" means the declared version was only included because
@@ -1101,22 +1113,56 @@ build_priority(Package, 0) :- attr("node", Package), not optimize_for_reuse().
#defined installed_hash/2.
%-----------------------------------------------------------------------------
% Calculate min depth of nodes in the DAG
% We use this to optimize nodes closer to roots with higher precedence.
%-----------------------------------------------------------------------------
#const max_depth = 4.
% roots have depth 0.
depth(Package, 0) :- attr("root", Package).
%depth(Package, D+1) :- depth(Dependent, D), depends_on(Dependent, Package), D < max_depth.
%parent_depth(Package, D) :-
% depends_on(Dependent, Package),
% depth(Dependent, D),
% D < max_depth - 1.
%depth(Package, M+1) :-
% M = #min{ D: parent_depth(Package, D); max_depth - 1 },
% attr("node", Package).
% other nodes' depth is the minimum depth of any dependent plus one.
depth(Package, N + 1) :-
N = #min{
D: depends_on(Dependent, Package),
depth(Dependent, D),
D < max_depth;
max_depth - 1
},
N = 0..max_depth - 1,
not attr("root", Package),
attr("node", Package).
%-----------------------------------------------------------------
% Optimization to avoid errors
%-----------------------------------------------------------------
% Some errors are handled as rules instead of constraints because
% it allows us to explain why something failed. Here we optimize
% HEAVILY against the facts generated by those rules.
#minimize{ 0@1000: #true}.
#minimize{ 0@1001: #true}.
#minimize{ 0@1002: #true}.
#minimize{ 1000@1000+Priority,Msg: error(Priority, Msg) }.
#minimize{ 1000@1000+Priority,Msg,Arg1: error(Priority, Msg, Arg1) }.
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2: error(Priority, Msg, Arg1, Arg2) }.
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3: error(Priority, Msg, Arg1, Arg2, Arg3) }.
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4) }.
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4,Arg5: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4, Arg5) }.
% ensure that error costs are always in the solution.
#minimize{ 0@error_prio + (0..max_error_priority): #true}.
% TODO: why 1000 and not just 1? 1000 seems unnecessary since priorities are lexicographic.
#minimize{ 1000@error_prio+Priority,Msg: error(Priority, Msg) }.
#minimize{ 1000@error_prio+Priority,Msg,Arg1: error(Priority, Msg, Arg1) }.
#minimize{ 1000@error_prio+Priority,Msg,Arg1,Arg2: error(Priority, Msg, Arg1, Arg2) }.
#minimize{ 1000@error_prio+Priority,Msg,Arg1,Arg2,Arg3: error(Priority, Msg, Arg1, Arg2, Arg3) }.
#minimize{ 1000@error_prio+Priority,Msg,Arg1,Arg2,Arg3,Arg4: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4) }.
#minimize{ 1000@error_prio+Priority,Msg,Arg1,Arg2,Arg3,Arg4,Arg5: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4, Arg5) }.
%-----------------------------------------------------------------------------
% How to optimize the spec (high to low priority)
@@ -1126,199 +1172,157 @@ build_priority(Package, 0) :- attr("node", Package), not optimize_for_reuse().
% 2. a `#minimize{ 0@2 : #true }.` statement that ensures the criterion
% is displayed (clingo doesn't display sums over empty sets by default)
% Ensure that values are returned by clingo for every distinct optimization criterion.
% Some criteria are "fixed" and have only one bucket. Others are summed into multiple
% buckets -- per build priority and per depth in the graph.
% If we don't do this, it's very hard to read the sums back. We use `0@...` because
% it doesn't affect the sums -- it just ensures that clingo returns them.
% "fixed" criteria have one bucket -- their priority.
#minimize{ 0@N: opt_criterion(N, "fixed", _) }.
% "leveled" criteria sum into a bucket per depth in the graph, per build priority
#minimize{
0@(((max_depth - D - 1) * depth_offset) + N + build_prio)
: opt_criterion(N, "leveled", _), depth(_, D)
}.
#minimize{
0@(((max_depth - D - 1) * depth_offset) + N)
: opt_criterion(N, "leveled", _), depth(_, D)
}.
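To see where a single leveled cost lands, here is a worked example (hedged; it just plugs the constants defined at the top of this file into the bucket formula above):

max_depth, depth_offset, build_prio = 4, 100, 100_000

def leveled_bucket(n, depth, built):
    # "version badness" (N = 55) at depth 1, for a package that must be built:
    # 55 + (4 - 1 - 1) * 100 + 100_000 = 100_255
    return n + (max_depth - depth - 1) * depth_offset + (build_prio if built else 0)

assert leveled_bucket(55, 1, built=True) == 100_255
assert leveled_bucket(55, 1, built=False) == 255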
% Try hard to reuse installed packages (i.e., minimize the number built)
opt_criterion(100, "number of packages to build (vs. reuse)").
#minimize { 0@100: #true }.
#minimize { 1@100,Package : build(Package), optimize_for_reuse() }.
opt_criterion(build_prio, "fixed", "number of packages to build (vs. reuse)").
#minimize { 1@build_prio,Package : build(Package), optimize_for_reuse() }.
#defined optimize_for_reuse/0.
% A condition group specifies one or more specs that must be satisfied.
% Specs declared first are preferred, so we assign increasing weights and
% minimize the weights.
opt_criterion(75, "requirement weight").
#minimize{ 0@275: #true }.
#minimize{ 0@75: #true }.
#minimize {
Weight@75+Priority
opt_criterion(65, "leveled", "requirement weight").
#minimize{
Weight@(65 + ((max_depth - D - 1) * depth_offset) + Priority), Package
: requirement_weight(Package, Weight),
build_priority(Package, Priority)
build_priority(Package, Priority),
depth(Package, D)
}.
% Minimize the number of deprecated versions being used
opt_criterion(73, "deprecated versions used").
#minimize{ 0@273: #true }.
#minimize{ 0@73: #true }.
opt_criterion(60, "leveled", "deprecated versions used").
#minimize{
1@73+Priority,Package
1@(60 + ((max_depth - D - 1) * depth_offset) + Priority), Package
: attr("deprecated", Package, _),
build_priority(Package, Priority)
build_priority(Package, Priority),
depth(Package, D)
}.
% Minimize the:
% 1. Version weight
% 2. Number of variants with a non default value, if not set
% for the root package.
opt_criterion(70, "version weight").
#minimize{ 0@270: #true }.
#minimize{ 0@70: #true }.
#minimize {
Weight@70+Priority
: attr("root", Package), version_weight(Package, Weight),
build_priority(Package, Priority)
}.
opt_criterion(65, "number of non-default variants (roots)").
#minimize{ 0@265: #true }.
#minimize{ 0@65: #true }.
#minimize {
1@65+Priority,Package,Variant,Value
: variant_not_default(Package, Variant, Value),
attr("root", Package),
build_priority(Package, Priority)
}.
opt_criterion(60, "preferred providers for roots").
#minimize{ 0@260: #true }.
#minimize{ 0@60: #true }.
opt_criterion(55, "leveled", "version badness").
#minimize{
Weight@60+Priority,Provider,Virtual
: provider_weight(Provider, Virtual, Weight),
attr("root", Provider),
build_priority(Provider, Priority)
}.
opt_criterion(55, "default values of variants not being used (roots)").
#minimize{ 0@255: #true }.
#minimize{ 0@55: #true }.
#minimize{
1@55+Priority,Package,Variant,Value
: variant_default_not_used(Package, Variant, Value),
attr("root", Package),
build_priority(Package, Priority)
}.
% Try to use default variants or variants that have been set
opt_criterion(50, "number of non-default variants (non-roots)").
#minimize{ 0@250: #true }.
#minimize{ 0@50: #true }.
#minimize {
1@50+Priority,Package,Variant,Value
: variant_not_default(Package, Variant, Value),
not attr("root", Package),
build_priority(Package, Priority)
}.
% Minimize the weights of the providers, i.e. use as much as
% possible the most preferred providers
opt_criterion(45, "preferred providers (non-roots)").
#minimize{ 0@245: #true }.
#minimize{ 0@45: #true }.
#minimize{
Weight@45+Priority,Provider,Virtual
: provider_weight(Provider, Virtual, Weight), not attr("root", Provider),
build_priority(Provider, Priority)
}.
% Try to minimize the number of compiler mismatches in the DAG.
opt_criterion(40, "compiler mismatches that are not from CLI").
#minimize{ 0@240: #true }.
#minimize{ 0@40: #true }.
#minimize{
1@40+Priority,Package,Dependency
: compiler_mismatch(Package, Dependency),
build_priority(Package, Priority)
}.
opt_criterion(39, "compiler mismatches that are not from CLI").
#minimize{ 0@239: #true }.
#minimize{ 0@39: #true }.
#minimize{
1@39+Priority,Package,Dependency
: compiler_mismatch_required(Package, Dependency),
build_priority(Package, Priority)
}.
% Try to minimize the number of compiler mismatches in the DAG.
opt_criterion(35, "OS mismatches").
#minimize{ 0@235: #true }.
#minimize{ 0@35: #true }.
#minimize{
1@35+Priority,Package,Dependency
: node_os_mismatch(Package, Dependency),
build_priority(Package, Priority)
}.
opt_criterion(30, "non-preferred OS's").
#minimize{ 0@230: #true }.
#minimize{ 0@30: #true }.
#minimize{
Weight@30+Priority,Package
: node_os_weight(Package, Weight),
build_priority(Package, Priority)
}.
% Choose more recent versions for nodes
opt_criterion(25, "version badness").
#minimize{ 0@225: #true }.
#minimize{ 0@25: #true }.
#minimize{
Weight@25+Priority,Package
Weight@(55 + ((max_depth - D - 1) * depth_offset) + Priority), Package
: version_weight(Package, Weight),
build_priority(Package, Priority)
build_priority(Package, Priority),
depth(Package, D)
}.
% Try to use all the default values of variants
opt_criterion(20, "default values of variants not being used (non-roots)").
#minimize{ 0@220: #true }.
#minimize{ 0@20: #true }.
opt_criterion(50, "leveled", "number of non-default variants").
#minimize{
1@20+Priority,Package,Variant,Value
1@(50 + ((max_depth - D - 1) * depth_offset) + Priority), Package, Variant, Value
: variant_not_default(Package, Variant, Value),
build_priority(Package, Priority),
depth(Package, D)
}.
opt_criterion(45, "leveled", "preferred providers").
#minimize{
Weight@(45 + ((max_depth - D - 1) * depth_offset) + Priority), Provider, Virtual
: provider_weight(Provider, Virtual, Weight),
build_priority(Provider, Priority),
depth(Package, D)
}.
opt_criterion(40, "leveled", "default values of variants not being used").
#minimize{
1@(40 + ((max_depth - D - 1) * depth_offset) + Priority), Package, Variant, Value
: variant_default_not_used(Package, Variant, Value),
not attr("root", Package),
build_priority(Package, Priority)
build_priority(Package, Priority),
depth(Package, D)
}.
% Try to use preferred compilers
opt_criterion(15, "non-preferred compilers").
#minimize{ 0@215: #true }.
#minimize{ 0@15: #true }.
opt_criterion(35, "leveled", "compiler mismatches (not from CLI)").
#minimize{
Weight@15+Priority,Package
1@(35 + ((max_depth - D - 1) * depth_offset) + Priority), Dependent, Package
: compiler_mismatch(Dependent, Package),
build_priority(Package, Priority),
depth(Package, D)
}.
opt_criterion(30, "leveled", "compiler mismatches (from CLI)").
#minimize{
1@(30 + ((max_depth - D - 1) * depth_offset) + Priority), Dependent, Package
: compiler_mismatch_required(Dependent, Package),
build_priority(Package, Priority),
depth(Package, D)
}.
opt_criterion(25, "leveled", "OS mismatches").
#minimize{
1@(25 + ((max_depth - D - 1) * depth_offset) + Priority), Dependent, Package
: node_os_mismatch(Dependent, Package),
build_priority(Package, Priority),
depth(Package, D)
}.
opt_criterion(20, "leveled", "non-preferred compilers").
#minimize{
Weight@(20 + ((max_depth - D - 1) * depth_offset) + Priority), Package
: compiler_weight(Package, Weight),
build_priority(Package, Priority)
build_priority(Package, Priority),
depth(Package, D)
}.
opt_criterion(15, "leveled", "non-preferred OS's").
#minimize{
Weight@(15 + ((max_depth - D - 1) * depth_offset) + Priority), Package
: node_os_weight(Package, Weight),
build_priority(Package, Priority),
depth(Package, D)
}.
% Minimize the number of mismatches for targets in the DAG, try
% to select the preferred target.
opt_criterion(10, "target mismatches").
#minimize{ 0@210: #true }.
#minimize{ 0@10: #true }.
opt_criterion(10, "leveled", "target mismatches").
#minimize{
1@10+Priority,Package,Dependency
: node_target_mismatch(Package, Dependency),
build_priority(Package, Priority)
1@(10 + ((max_depth - D - 1) * depth_offset) + Priority), Dependent, Package
: node_target_mismatch(Dependent, Package),
build_priority(Package, Priority),
depth(Package, D)
}.
opt_criterion(5, "non-preferred targets").
#minimize{ 0@205: #true }.
#minimize{ 0@5: #true }.
opt_criterion(5, "leveled", "non-preferred targets").
#minimize{
Weight@5+Priority,Package
Weight@(5 + ((max_depth - D - 1) * depth_offset) + Priority), Package
: node_target_weight(Package, Weight),
build_priority(Package, Priority)
build_priority(Package, Priority),
depth(Package, D)
}.
%-----------------
% Domain heuristic
%-----------------
#heuristic attr("version", Package, Version) : version_declared(Package, Version, 0), attr("node", Package). [10, true]
#heuristic version_weight(Package, 0) : version_declared(Package, Version, 0), attr("node", Package). [10, true]
#heuristic attr("node_target", Package, Target) : package_target_weight(Target, Package, 0), attr("node", Package). [10, true]
#heuristic node_target_weight(Package, 0) : attr("node", Package). [10, true]
#heuristic attr("variant_value", Package, Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", Package). [10, true]
#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
#heuristic attr("node", Package) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
#heuristic attr("node_os", Package, OS) : buildable_os(OS). [10, true]
%#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
%#heuristic attr("node", Package) : possible_provider_weight(Package, Virtual, 0, _), attr("virtual_node", Virtual). [10, true]
%#heuristic attr("node_os", Package, OS) : buildable_os(OS). [10, true]
%#heuristic attr("node_target", Dependency, Target): depends_on(Package, Dependency), attr("node_target", Package, Target). [20, true]
%#heuristic attr("node_os", Dependency, OS): depends_on(Package, Dependency), attr("node_os", Package, OS). [20, true]
%-----------
% Notes


@@ -15,7 +15,7 @@
#show attr/4.
% names of optimization criteria
#show opt_criterion/2.
#show opt_criterion/3.
% error types
#show error/2.
@@ -25,4 +25,16 @@
#show error/6.
#show error/7.
% depths
#show depth/2.
%#show parent_depth/2.
% debug
%#show depends_on/2.
%node(Package) :- attr("node", Package).
%#show node/1.
%version(Package, Version) :- attr("version", Package, Version).
%#show version/2.


@@ -168,7 +168,7 @@
)
#: specfile format version. Must increase monotonically
SPECFILE_FORMAT_VERSION = 3
specfile_format_version = 3
def colorize_spec(spec):
@@ -1529,14 +1529,14 @@ def _set_compiler(self, compiler):
)
self.compiler = compiler
def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
def _add_dependency(self, spec, deptypes):
"""Called by the parser to add another spec as a dependency."""
if spec.name not in self._dependencies:
self.add_dependency_edge(spec, deptypes=deptypes)
self.add_dependency_edge(spec, deptypes)
return
# Keep the intersection of constraints when a dependency is added
# multiple times. Currently, we only allow identical edge types.
# multiple times. Currently we only allow identical edge types.
orig = self._dependencies[spec.name]
try:
dspec = next(dspec for dspec in orig if deptypes == dspec.deptypes)
@@ -1550,39 +1550,34 @@ def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
"Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
)
def add_dependency_edge(
self,
dependency_spec: "Spec",
*,
deptypes: dp.DependencyArgument,
):
def add_dependency_edge(self, dependency_spec, deptype):
"""Add a dependency edge to this spec.
Args:
dependency_spec: spec of the dependency
deptypes: dependency types for this edge
dependency_spec (Spec): spec of the dependency
deptype (str or tuple): dependency types
"""
deptypes = dp.canonical_deptype(deptypes)
deptype = dp.canonical_deptype(deptype)
# Check if we need to update edges that are already present
selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected:
if any(d in edge.deptypes for d in deptypes):
if any(d in edge.deptypes for d in deptype):
msg = (
'cannot add a dependency on "{0.spec}" of {1} type '
'when the "{0.parent}" has the edge {0!s} already'
)
raise spack.error.SpecError(msg.format(edge, deptypes))
raise spack.error.SpecError(msg.format(edge, deptype))
for edge in selected:
if id(dependency_spec) == id(edge.spec):
# If we are here, it means the edge object was previously added to
# both the parent and the child. When we update this object they'll
# both see the deptype modification.
edge.add_type(deptypes)
edge.add_type(deptype)
return
edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
edge = DependencySpec(self, dependency_spec, deptypes=deptype)
self._dependencies.add(edge)
dependency_spec._dependents.add(edge)
@@ -2032,7 +2027,7 @@ def to_dict(self, hash=ht.dag_hash):
node_list.append(node)
hash_set.add(node_hash)
meta_dict = syaml.syaml_dict([("version", SPECFILE_FORMAT_VERSION)])
meta_dict = syaml.syaml_dict([("version", specfile_format_version)])
inner_dict = syaml.syaml_dict([("_meta", meta_dict), ("nodes", node_list)])
spec_dict = syaml.syaml_dict([("spec", inner_dict)])
return spec_dict
@@ -2068,13 +2063,137 @@ def to_json(self, stream=None, hash=ht.dag_hash):
@staticmethod
def from_specfile(path):
"""Construct a spec from a JSON or YAML spec file path"""
"""Construct a spec from aJSON or YAML spec file path"""
with open(path, "r") as fd:
file_content = fd.read()
if path.endswith(".json"):
return Spec.from_json(file_content)
return Spec.from_yaml(file_content)
@staticmethod
def from_node_dict(node):
spec = Spec()
if "name" in node.keys():
# New format
name = node["name"]
else:
# Old format
name = next(iter(node))
node = node[name]
for h in ht.hashes:
setattr(spec, h.attr, node.get(h.name, None))
spec.name = name
spec.namespace = node.get("namespace", None)
if "version" in node or "versions" in node:
spec.versions = vn.VersionList.from_dict(node)
if "arch" in node:
spec.architecture = ArchSpec.from_dict(node)
if "compiler" in node:
spec.compiler = CompilerSpec.from_dict(node)
else:
spec.compiler = None
if "parameters" in node:
for name, values in node["parameters"].items():
if name in _valid_compiler_flags:
spec.compiler_flags[name] = []
for val in values:
spec.compiler_flags.add_flag(name, val, False)
else:
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
elif "variants" in node:
for name, value in node["variants"].items():
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value)
for name in FlagMap.valid_compiler_flags():
spec.compiler_flags[name] = []
spec.external_path = None
spec.external_modules = None
if "external" in node:
# This conditional is needed because sometimes this function is
# called with a node already constructed that contains a 'versions'
# and 'external' field. Related to virtual packages provider
# indexes.
if node["external"]:
spec.external_path = node["external"]["path"]
spec.external_modules = node["external"]["module"]
if spec.external_modules is False:
spec.external_modules = None
spec.extra_attributes = node["external"].get(
"extra_attributes", syaml.syaml_dict()
)
# specs read in are concrete unless marked abstract
spec._concrete = node.get("concrete", True)
if "patches" in node:
patches = node["patches"]
if len(patches) > 0:
mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
mvar.value = patches
# FIXME: Monkey patches mvar to store patches order
mvar._patches_in_order_of_appearance = patches
# Don't read dependencies here; from_dict() is used by
# from_yaml() and from_json() to read the root *and* each dependency
# spec.
return spec
@staticmethod
def build_spec_from_node_dict(node, hash_type=ht.dag_hash.name):
build_spec_dict = node["build_spec"]
return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
@staticmethod
def dependencies_from_node_dict(node):
if "name" in node.keys():
# New format
name = node["name"]
else:
name = next(iter(node))
node = node[name]
if "dependencies" not in node:
return
for t in Spec.read_yaml_dep_specs(node["dependencies"]):
yield t
@staticmethod
def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
"""Read the DependencySpec portion of a YAML-formatted Spec.
This needs to be backward-compatible with older spack spec
formats so that reindex will work on old specs/databases.
"""
dep_iter = deps.items() if isinstance(deps, dict) else deps
for dep in dep_iter:
if isinstance(dep, tuple):
dep_name, elt = dep
else:
elt = dep
dep_name = dep["name"]
if isinstance(elt, str):
# original format, elt is just the dependency hash.
dep_hash, deptypes = elt, ["build", "link"]
elif isinstance(elt, tuple):
# original deptypes format: (used tuples, not future-proof)
dep_hash, deptypes = elt
elif isinstance(elt, dict):
# new format: elements of dependency spec are keyed.
for h in ht.hashes:
if h.name in elt:
dep_hash, deptypes = elt[h.name], elt["type"]
hash_type = h.name
break
else: # We never determined a hash type...
raise spack.error.SpecError("Couldn't parse dependency spec.")
else:
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
yield dep_name, dep_hash, list(deptypes), hash_type
@staticmethod
def override(init_spec, change_spec):
# TODO: this doesn't account for the case where the changed spec
@@ -2248,7 +2367,7 @@ def spec_and_dependency_types(s):
dag_node, dependency_types = spec_and_dependency_types(s)
dependency_spec = spec_builder({dag_node: s_dependencies})
spec._add_dependency(dependency_spec, deptypes=dependency_types)
spec._add_dependency(dependency_spec, dependency_types)
return spec
@@ -2261,14 +2380,54 @@ def from_dict(data):
Args:
data: a nested dict/list data structure read from YAML or JSON.
"""
# Legacy specfile format
if isinstance(data["spec"], list):
return SpecfileV1.load(data)
if isinstance(data["spec"], list): # Legacy specfile format
return _spec_from_old_dict(data)
specfile_version = int(data["spec"]["_meta"]["version"])
if specfile_version == 2:
return SpecfileV2.load(data)
return SpecfileV3.load(data)
# Current specfile format
nodes = data["spec"]["nodes"]
hash_type = None
any_deps = False
# Pass 0: Determine hash type
for node in nodes:
if "dependencies" in node.keys():
any_deps = True
for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node):
if dhash_type:
hash_type = dhash_type
break
if not any_deps: # If we never see a dependency...
hash_type = ht.dag_hash.name
elif not hash_type: # Seen a dependency, still don't know hash_type
raise spack.error.SpecError(
"Spec dictionary contains malformed " "dependencies. Old format?"
)
hash_dict = {}
root_spec_hash = None
# Pass 1: Create a single lookup dictionary by hash
for i, node in enumerate(nodes):
node_hash = node[hash_type]
node_spec = Spec.from_node_dict(node)
hash_dict[node_hash] = node
hash_dict[node_hash]["node_spec"] = node_spec
if i == 0:
root_spec_hash = node_hash
if not root_spec_hash:
raise spack.error.SpecError("Spec dictionary contains no nodes.")
# Pass 2: Finish construction of all DAG edges (including build specs)
for node_hash, node in hash_dict.items():
node_spec = node["node_spec"]
for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes)
if "build_spec" in node.keys():
_, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type)
node_spec._build_spec = hash_dict[bhash]["node_spec"]
return hash_dict[root_spec_hash]["node_spec"]
@staticmethod
def from_yaml(stream):
@@ -2424,7 +2583,7 @@ def _replace_with(self, concrete):
# add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent._dependencies:
dependent._add_dependency(concrete, deptypes=deptypes)
dependent._add_dependency(concrete, deptypes)
def _expand_virtual_packages(self, concretizer):
"""Find virtual packages in this spec, replace them with providers,
@@ -2769,7 +2928,7 @@ def _new_concretize(self, tests=False):
result.raise_if_unsat()
# take the best answer
opt, i, answer = min(result.answers)
opt, i, answer, _ = min(result.answers)
name = self.name
# TODO: Consolidate this code with similar code in solve.py
if self.virtual:
@@ -3095,7 +3254,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
# Add merged spec to my deps and recurse
spec_dependency = spec_deps[dep.name]
if dep.name not in self._dependencies:
self._add_dependency(spec_dependency, deptypes=dependency.type)
self._add_dependency(spec_dependency, dependency.type)
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
return changed
@@ -3426,7 +3585,7 @@ def _constrain_dependencies(self, other):
dep_spec_copy = other._get_dependency(name)
dep_copy = dep_spec_copy.spec
deptypes = dep_spec_copy.deptypes
self._add_dependency(dep_copy.copy(), deptypes=deptypes)
self._add_dependency(dep_copy.copy(), deptypes)
changed = True
return changed
@@ -3739,7 +3898,7 @@ def spid(spec):
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
new_specs[spid(edge.parent)].add_dependency_edge(
new_specs[spid(edge.spec)], deptypes=edge.deptypes
new_specs[spid(edge.spec)], edge.deptypes
)
def copy(self, deps=True, **kwargs):
@@ -4654,12 +4813,12 @@ def from_self(name, transitive):
if name in self_nodes:
for edge in self[name].edges_to_dependencies():
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
nodes[name].add_dependency_edge(nodes[dep_name], edge.deptypes)
if any(dep not in self_nodes for dep in self[name]._dependencies):
nodes[name].build_spec = self[name].build_spec
else:
for edge in other[name].edges_to_dependencies():
nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
nodes[name].add_dependency_edge(nodes[edge.spec.name], edge.deptypes)
if any(dep not in other_nodes for dep in other[name]._dependencies):
nodes[name].build_spec = other[name].build_spec
@@ -4732,252 +4891,40 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
# Update with additional constraints from other spec
for name in current_spec_constraint.direct_dep_difference(merged_spec):
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
merged_spec._add_dependency(edge.spec.copy(), edge.deptypes)
return merged_spec
class SpecfileReaderBase:
@classmethod
def from_node_dict(cls, node):
spec = Spec()
def _spec_from_old_dict(data):
"""Construct a spec from JSON/YAML using the format version 1.
Note: Version 1 format has no notion of a build_spec, and names are
guaranteed to be unique.
name, node = cls.name_and_data(node)
for h in ht.hashes:
setattr(spec, h.attr, node.get(h.name, None))
Parameters:
data -- a nested dict/list data structure read from YAML or JSON.
"""
nodes = data["spec"]
spec.name = name
spec.namespace = node.get("namespace", None)
# Read nodes out of list. Root spec is the first element;
# dependencies are the following elements.
dep_list = [Spec.from_node_dict(node) for node in nodes]
if not dep_list:
raise spack.error.SpecError("YAML spec contains no nodes.")
deps = dict((spec.name, spec) for spec in dep_list)
spec = dep_list[0]
if "version" in node or "versions" in node:
spec.versions = vn.VersionList.from_dict(node)
if "arch" in node:
spec.architecture = ArchSpec.from_dict(node)
if "compiler" in node:
spec.compiler = CompilerSpec.from_dict(node)
else:
spec.compiler = None
for name, values in node.get("parameters", {}).items():
if name in _valid_compiler_flags:
spec.compiler_flags[name] = []
for val in values:
spec.compiler_flags.add_flag(name, val, False)
else:
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
spec.external_path = None
spec.external_modules = None
if "external" in node:
# This conditional is needed because sometimes this function is
# called with a node already constructed that contains a 'versions'
# and 'external' field. Related to virtual packages provider
# indexes.
if node["external"]:
spec.external_path = node["external"]["path"]
spec.external_modules = node["external"]["module"]
if spec.external_modules is False:
spec.external_modules = None
spec.extra_attributes = node["external"].get(
"extra_attributes", syaml.syaml_dict()
)
# specs read in are concrete unless marked abstract
spec._concrete = node.get("concrete", True)
if "patches" in node:
patches = node["patches"]
if len(patches) > 0:
mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
mvar.value = patches
# FIXME: Monkey patches mvar to store patches order
mvar._patches_in_order_of_appearance = patches
# Don't read dependencies here; from_dict() is used by
# from_yaml() and from_json() to read the root *and* each dependency
# spec.
return spec
@classmethod
def _load(cls, data):
"""Construct a spec from JSON/YAML using the format version 2.
This format is used in Spack v0.17, was introduced in
https://github.com/spack/spack/pull/22845
Args:
data: a nested dict/list data structure read from YAML or JSON.
"""
# Current specfile format
nodes = data["spec"]["nodes"]
hash_type = None
any_deps = False
# Pass 0: Determine hash type
for node in nodes:
for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
any_deps = True
if dhash_type:
hash_type = dhash_type
break
if not any_deps: # If we never see a dependency...
hash_type = ht.dag_hash.name
elif not hash_type: # Seen a dependency, still don't know hash_type
raise spack.error.SpecError(
"Spec dictionary contains malformed dependencies. Old format?"
)
hash_dict = {}
root_spec_hash = None
# Pass 1: Create a single lookup dictionary by hash
for i, node in enumerate(nodes):
node_hash = node[hash_type]
node_spec = cls.from_node_dict(node)
hash_dict[node_hash] = node
hash_dict[node_hash]["node_spec"] = node_spec
if i == 0:
root_spec_hash = node_hash
if not root_spec_hash:
raise spack.error.SpecError("Spec dictionary contains no nodes.")
# Pass 2: Finish construction of all DAG edges (including build specs)
for node_hash, node in hash_dict.items():
node_spec = node["node_spec"]
for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
if "build_spec" in node.keys():
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
node_spec._build_spec = hash_dict[bhash]["node_spec"]
return hash_dict[root_spec_hash]["node_spec"]
class SpecfileV1(SpecfileReaderBase):
@classmethod
def load(cls, data):
"""Construct a spec from JSON/YAML using the format version 1.
Note: Version 1 format has no notion of a build_spec, and names are
guaranteed to be unique. This function is guaranteed to read specs as
old as v0.10 - while it was not checked for older formats.
Args:
data: a nested dict/list data structure read from YAML or JSON.
"""
nodes = data["spec"]
# Read nodes out of list. Root spec is the first element;
# dependencies are the following elements.
dep_list = [cls.from_node_dict(node) for node in nodes]
if not dep_list:
raise spack.error.SpecError("specfile contains no nodes.")
deps = {spec.name: spec for spec in dep_list}
result = dep_list[0]
for node in nodes:
# get dependency dict from the node.
name, data = cls.name_and_data(node)
for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
deps[name]._add_dependency(deps[dname], deptypes=dtypes)
return result
@classmethod
def name_and_data(cls, node):
for node in nodes:
# get dependency dict from the node.
name = next(iter(node))
node = node[name]
return name, node
@classmethod
def dependencies_from_node_dict(cls, node):
if "dependencies" not in node:
return []
if "dependencies" not in node[name]:
continue
for t in cls.read_specfile_dep_specs(node["dependencies"]):
yield t
for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
deps[name]._add_dependency(deps[dname], dtypes)
@classmethod
def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
"""Read the DependencySpec portion of a YAML-formatted Spec.
This needs to be backward-compatible with older spack spec
formats so that reindex will work on old specs/databases.
"""
for dep_name, elt in deps.items():
if isinstance(elt, dict):
for h in ht.hashes:
if h.name in elt:
dep_hash, deptypes = elt[h.name], elt["type"]
hash_type = h.name
break
else: # We never determined a hash type...
raise spack.error.SpecError("Couldn't parse dependency spec.")
else:
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
yield dep_name, dep_hash, list(deptypes), hash_type
class SpecfileV2(SpecfileReaderBase):
@classmethod
def load(cls, data):
result = cls._load(data)
return result
@classmethod
def name_and_data(cls, node):
return node["name"], node
@classmethod
def dependencies_from_node_dict(cls, node):
return cls.read_specfile_dep_specs(node.get("dependencies", []))
@classmethod
def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
"""Read the DependencySpec portion of a YAML-formatted Spec.
This needs to be backward-compatible with older spack spec
formats so that reindex will work on old specs/databases.
"""
if not isinstance(deps, list):
raise spack.error.SpecError("Spec dictionary contains malformed dependencies")
result = []
for dep in deps:
elt = dep
dep_name = dep["name"]
if isinstance(elt, dict):
# new format: elements of dependency spec are keyed.
for h in ht.hashes:
if h.name in elt:
dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
break
else: # We never determined a hash type...
raise spack.error.SpecError("Couldn't parse dependency spec.")
else:
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
result.append((dep_name, dep_hash, list(deptypes), hash_type))
return result
@classmethod
def extract_info_from_dep(cls, elt, hash):
dep_hash, deptypes = elt[hash.name], elt["type"]
hash_type = hash.name
virtuals = []
return dep_hash, deptypes, hash_type, virtuals
@classmethod
def build_spec_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
build_spec_dict = node["build_spec"]
return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
class SpecfileV3(SpecfileV2):
pass
return spec
class LazySpecCache(collections.defaultdict):


@@ -51,9 +51,7 @@ def specs_as_constraints(self):
constraints = []
for item in self.specs_as_yaml_list:
if isinstance(item, dict): # matrix of specs
expanded = _expand_matrix_constraints(item)
for e in expanded:
constraints.append([Spec(x) for x in e])
constraints.extend(_expand_matrix_constraints(item))
else: # individual spec
constraints.append([Spec(item)])
self._constraints = constraints
@@ -64,11 +62,13 @@ def specs_as_constraints(self):
def specs(self):
if self._specs is None:
specs = []
for item in self.specs_as_yaml_list:
if isinstance(item, dict): # matrix of specs
specs.extend([Spec(" ".join(x)) for x in _expand_matrix_constraints(item)])
else: # individual spec
specs.append(Spec(item))
# This could be slightly faster done directly from yaml_list,
# but this way is easier to maintain.
for constraint_list in self.specs_as_constraints:
spec = constraint_list[0].copy()
for const in constraint_list[1:]:
spec.constrain(const)
specs.append(spec)
self._specs = specs
return self._specs
@@ -193,7 +193,11 @@ def _expand_matrix_constraints(matrix_config):
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
test_spec = Spec(" ".join(flat_combo))
flat_combo = [Spec(x) for x in flat_combo]
test_spec = flat_combo[0].copy()
for constraint in flat_combo[1:]:
test_spec.constrain(constraint)
# Abstract variants don't have normal satisfaction semantics
# Convert all variants to concrete types.
@@ -209,7 +213,7 @@ def _expand_matrix_constraints(matrix_config):
continue
if sigil:
flat_combo[0] = sigil + flat_combo[0]
flat_combo[0] = Spec(sigil + str(flat_combo[0]))
# Add to list of constraints
results.append(flat_combo)
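For reference, a matrix such as the following hypothetical one expands to the cross product of its rows, each combination becoming one constraint list of Spec objects:

matrix_config = {"matrix": [["zlib", "libpng"], ["%gcc", "%clang"]]}
# _expand_matrix_constraints(matrix_config) yields four combinations, e.g.
#   [Spec("zlib"), Spec("%gcc")], [Spec("zlib"), Spec("%clang")],
#   [Spec("libpng"), Spec("%gcc")], [Spec("libpng"), Spec("%clang")]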


@@ -2,9 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import itertools as it
import os
import subprocess
import sys
import pytest
@@ -14,7 +14,7 @@
import spack.ci as ci
import spack.ci_needs_workaround as cinw
import spack.ci_optimization as ci_opt
import spack.config
import spack.config as cfg
import spack.environment as ev
import spack.error
import spack.paths as spack_paths
@@ -23,16 +23,12 @@
import spack.util.spack_yaml as syaml
@pytest.fixture
def repro_dir(tmp_path):
result = tmp_path / "repro_dir"
result.mkdir()
with fs.working_dir(str(tmp_path)):
yield result
def test_urlencode_string():
assert ci._url_encode_string("Spack Test Project") == "Spack+Test+Project"
s = "Spack Test Project"
s_enc = ci._url_encode_string(s)
assert s_enc == "Spack+Test+Project"
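For reference, the "+"-for-space encoding asserted here can be reproduced with the standard library alone (this only illustrates the expected output, not how ci._url_encode_string is implemented):
from urllib.parse import quote_plus

assert quote_plus("Spack Test Project") == "Spack+Test+Project"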
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@@ -58,16 +54,16 @@ def assert_present(config):
"install_missing_compilers" in config and config["install_missing_compilers"] is True
)
original_config = spack.config.get("config")
original_config = cfg.get("config")
assert_missing(original_config)
ci.configure_compilers("FIND_ANY", scope="site")
second_config = spack.config.get("config")
second_config = cfg.get("config")
assert_missing(second_config)
ci.configure_compilers("INSTALL_MISSING")
last_config = spack.config.get("config")
last_config = cfg.get("config")
assert_present(last_config)
@@ -384,7 +380,7 @@ def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
use_artifact_buildcache=use_ab, optimize=False, use_dependencies=False
)
for opt, deps in itertools.product(*(((False, True),) * 2)):
for opt, deps in it.product(*(((False, True),) * 2)):
# neither optimizing nor converting needs->dependencies
if not (opt or deps):
# therefore, nothing to test
@@ -457,24 +453,33 @@ def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_pack
@pytest.mark.skipif(
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_process_command(repro_dir):
result = ci.process_command("help", commands=[], repro_dir=str(repro_dir))
help_sh = repro_dir / "help.sh"
assert help_sh.exists() and not result
def test_ci_process_command(tmpdir):
repro_dir = tmpdir.join("repro_dir").strpath
os.makedirs(repro_dir)
result = ci.process_command("help", [], repro_dir)
assert os.path.exists(fs.join_path(repro_dir, "help.sh"))
assert not result
@pytest.mark.skipif(
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_process_command_fail(repro_dir, monkeypatch):
msg = "subprocess wait exception"
def test_ci_process_command_fail(tmpdir, monkeypatch):
import subprocess
err = "subprocess wait exception"
def _fail(self, args):
raise RuntimeError(msg)
raise RuntimeError(err)
monkeypatch.setattr(subprocess.Popen, "__init__", _fail)
with pytest.raises(RuntimeError, match=msg):
ci.process_command("help", [], str(repro_dir))
repro_dir = tmpdir.join("repro_dir").strpath
os.makedirs(repro_dir)
with pytest.raises(RuntimeError, match=err):
ci.process_command("help", [], repro_dir)
def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
@@ -508,15 +513,16 @@ def test_ci_run_standalone_tests_missing_requirements(
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_run_standalone_tests_not_installed_junit(
tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
tmpdir, working_env, default_mock_concretization, mock_test_stage, capfd
):
log_file = tmp_path / "junit.xml"
log_file = tmpdir.join("junit.xml").strpath
args = {
"log_file": str(log_file),
"log_file": log_file,
"job_spec": default_mock_concretization("printing-package"),
"repro_dir": str(repro_dir),
"repro_dir": tmpdir.join("repro_dir").strpath,
"fail_fast": True,
}
os.makedirs(args["repro_dir"])
ci.run_standalone_tests(**args)
err = capfd.readouterr()[1]
@@ -528,15 +534,16 @@ def test_ci_run_standalone_tests_not_installed_junit(
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_run_standalone_tests_not_installed_cdash(
tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
tmpdir, working_env, default_mock_concretization, mock_test_stage, capfd
):
"""Test run_standalone_tests with cdash and related options."""
log_file = tmp_path / "junit.xml"
log_file = tmpdir.join("junit.xml").strpath
args = {
"log_file": str(log_file),
"log_file": log_file,
"job_spec": default_mock_concretization("printing-package"),
"repro_dir": str(repro_dir),
"repro_dir": tmpdir.join("repro_dir").strpath,
}
os.makedirs(args["repro_dir"])
# Cover when CDash handler provided (with the log file as well)
ci_cdash = {
@@ -557,9 +564,9 @@ def test_ci_run_standalone_tests_not_installed_cdash(
assert "0 passed of 0" in out
# copy test results (though none)
artifacts_dir = tmp_path / "artifacts"
artifacts_dir.mkdir()
handler.copy_test_results(str(tmp_path), str(artifacts_dir))
artifacts_dir = tmpdir.join("artifacts")
fs.mkdirp(artifacts_dir.strpath)
handler.copy_test_results(tmpdir.strpath, artifacts_dir.strpath)
err = capfd.readouterr()[1]
assert "Unable to copy files" in err
assert "No such file or directory" in err

View File

@@ -1187,16 +1187,3 @@ def test_padded_install_runtests_root(install_mockery_mutable_config, mock_fetch
spack.config.set("config:install_tree:padded_length", 255)
output = install("--test=root", "--no-cache", "test-build-callbacks", fail_on_error=False)
assert output.count("method not implemented") == 1
@pytest.mark.regression("35337")
def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
"""Test that the temporary file used to write the XML for CDash is not the upload URL"""
parser = argparse.ArgumentParser()
spack.cmd.install.setup_parser(parser)
args = parser.parse_args(
["--cdash-upload-url", "https://blahblah/submit.php?project=debugging", "a"]
)
_, specs = spack.cmd.install.specs_from_cli(args, {})
filename = spack.cmd.install.report_filename(args, specs)
assert filename != "https://blahblah/submit.php?project=debugging"

View File

@@ -14,8 +14,6 @@
maintainers = spack.main.SpackCommand("maintainers")
MAINTAINED_PACKAGES = ["maintainers-1", "maintainers-2", "maintainers-3", "py-extension1"]
def split(output):
"""Split command line output into an array."""
@@ -25,12 +23,14 @@ def split(output):
def test_maintained(mock_packages):
out = split(maintainers("--maintained"))
assert out == MAINTAINED_PACKAGES
assert out == ["maintainers-1", "maintainers-2"]
def test_unmaintained(mock_packages):
out = split(maintainers("--unmaintained"))
assert out == sorted(set(spack.repo.all_package_names()) - set(MAINTAINED_PACKAGES))
assert out == sorted(
set(spack.repo.all_package_names()) - set(["maintainers-1", "maintainers-2"])
)
def test_all(mock_packages, capfd):
@@ -43,16 +43,6 @@ def test_all(mock_packages, capfd):
"maintainers-2:",
"user2,",
"user3",
"maintainers-3:",
"user0,",
"user1,",
"user2,",
"user3",
"py-extension1:",
"adamjstewart,",
"pradyunsg,",
"user1,",
"user2",
]
with capfd.disabled():
@@ -68,38 +58,23 @@ def test_all_by_user(mock_packages, capfd):
with capfd.disabled():
out = split(maintainers("--all", "--by-user"))
assert out == [
"adamjstewart:",
"py-extension1",
"pradyunsg:",
"py-extension1",
"user0:",
"maintainers-3",
"user1:",
"maintainers-1,",
"maintainers-3,",
"py-extension1",
"maintainers-1",
"user2:",
"maintainers-1,",
"maintainers-2,",
"maintainers-3,",
"py-extension1",
"maintainers-2",
"user3:",
"maintainers-2,",
"maintainers-3",
"maintainers-2",
]
with capfd.disabled():
out = split(maintainers("--all", "--by-user", "user1", "user2"))
assert out == [
"user1:",
"maintainers-1,",
"maintainers-3,",
"py-extension1",
"maintainers-1",
"user2:",
"maintainers-1,",
"maintainers-2,",
"maintainers-3,",
"py-extension1",
"maintainers-2",
]
@@ -141,16 +116,16 @@ def test_maintainers_list_fails(mock_packages, capfd):
def test_maintainers_list_by_user(mock_packages, capfd):
with capfd.disabled():
out = split(maintainers("--by-user", "user1"))
assert out == ["maintainers-1", "maintainers-3", "py-extension1"]
assert out == ["maintainers-1"]
with capfd.disabled():
out = split(maintainers("--by-user", "user1", "user2"))
assert out == ["maintainers-1", "maintainers-2", "maintainers-3", "py-extension1"]
assert out == ["maintainers-1", "maintainers-2"]
with capfd.disabled():
out = split(maintainers("--by-user", "user2"))
assert out == ["maintainers-1", "maintainers-2", "maintainers-3", "py-extension1"]
assert out == ["maintainers-1", "maintainers-2"]
with capfd.disabled():
out = split(maintainers("--by-user", "user3"))
assert out == ["maintainers-2", "maintainers-3"]
assert out == ["maintainers-2"]

View File

@@ -258,7 +258,7 @@ def test_has_test_method_fails(capsys):
assert "is not a class" in captured
def test_read_old_results(mock_packages, mock_test_stage):
def test_read_old_results(mock_test_stage):
"""Take test data generated before the switch to full hash everywhere
and make sure we can still read it in"""
# Test data was generated with:
@@ -319,26 +319,3 @@ def test_test_results_status(mock_packages, mock_test_stage, status, expected):
else:
assert status in results
assert expected in results
@pytest.mark.regression("35337")
def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
"""Test that the temporary file used to write Testing.xml for CDash is not the upload URL"""
name = "trivial"
spec = spack.spec.Spec("trivial-smoke-test").concretized()
suite = spack.install_test.TestSuite([spec], name)
suite.ensure_stage()
parser = argparse.ArgumentParser()
spack.cmd.test.setup_parser(parser)
args = parser.parse_args(
[
"run",
"--cdash-upload-url=https://blahblah/submit.php?project=debugging",
"trivial-smoke-test",
]
)
spack.cmd.common.arguments.sanitize_reporter_options(args)
filename = spack.cmd.test.report_filename(args, suite)
assert filename != "https://blahblah/submit.php?project=debugging"

View File

@@ -1779,8 +1779,8 @@ def test_version_weight_and_provenance(self):
num_specs = len(list(result_spec.traverse()))
criteria = [
(num_specs - 1, None, "number of packages to build (vs. reuse)"),
(2, 0, "version badness"),
(None, num_specs - 1, "number of packages to build (vs. reuse)"),
(2, 0, "NON-ROOTS: version badness"),
]
for criterion in criteria:

View File

@@ -17,7 +17,6 @@
from llnl.util.filesystem import getuid, join_path, mkdirp, touch, touchp
import spack.config
import spack.directory_layout
import spack.environment as ev
import spack.main
import spack.package_base
@@ -29,7 +28,6 @@
import spack.schema.mirrors
import spack.schema.packages
import spack.schema.repos
import spack.store
import spack.util.path as spack_path
import spack.util.spack_yaml as syaml
@@ -453,9 +451,9 @@ def test_substitute_date(mock_low_high_config):
assert date.today().strftime("%Y-%m-%d") in new_path
PAD_STRING = spack_path.SPACK_PATH_PADDING_CHARS
MAX_PATH_LEN = spack_path.get_system_path_max()
MAX_PADDED_LEN = MAX_PATH_LEN - spack_path.SPACK_MAX_INSTALL_PATH_LENGTH
PAD_STRING = spack.util.path.SPACK_PATH_PADDING_CHARS
MAX_PATH_LEN = spack.util.path.get_system_path_max()
MAX_PADDED_LEN = MAX_PATH_LEN - spack.util.path.SPACK_MAX_INSTALL_PATH_LENGTH
reps = [PAD_STRING for _ in range((MAX_PADDED_LEN // len(PAD_STRING) + 1) + 2)]
full_padded_string = os.path.join(os.sep + "path", os.sep.join(reps))[:MAX_PADDED_LEN]

View File

@@ -68,19 +68,3 @@ def test_error_on_anonymous_dependency(config, mock_packages):
pkg = spack.repo.path.get_pkg_class("a")
with pytest.raises(spack.directives.DependencyError):
spack.directives._depends_on(pkg, "@4.5")
@pytest.mark.regression("34879")
@pytest.mark.parametrize(
"package_name,expected_maintainers",
[
("maintainers-1", ["user1", "user2"]),
# Reset from PythonPackage
("py-extension1", ["adamjstewart", "pradyunsg", "user1", "user2"]),
# Extends maintainers-1
("maintainers-3", ["user0", "user1", "user2", "user3"]),
],
)
def test_maintainer_directive(config, mock_packages, package_name, expected_maintainers):
pkg_cls = spack.repo.path.get_pkg_class(package_name)
assert pkg_cls.maintainers == expected_maintainers

View File

@@ -75,8 +75,8 @@ def test_env_change_spec(tmpdir, mock_packages, config):
- desired_specs: ["mpileaks@2.1"]
specs:
- matrix:
- [$desired_specs]
- [$compilers]
- [$desired_specs]
"""

View File

@@ -5,7 +5,7 @@
import pytest
from spack import fetch_strategy
from spack.fetch_strategy import from_url_scheme
def test_fetchstrategy_bad_url_scheme():
@@ -13,14 +13,4 @@ def test_fetchstrategy_bad_url_scheme():
unsupported scheme fails as expected."""
with pytest.raises(ValueError):
fetcher = fetch_strategy.from_url_scheme("bogus-scheme://example.com/a/b/c") # noqa: F841
def test_filesummary(tmpdir):
p = str(tmpdir.join("xyz"))
with open(p, "wb") as f:
f.write(b"abcdefghijklmnopqrstuvwxyz")
assert fetch_strategy._filesummary(p, print_bytes=8) == (26, b"abcdefgh...stuvwxyz")
assert fetch_strategy._filesummary(p, print_bytes=13) == (26, b"abcdefghijklmnopqrstuvwxyz")
assert fetch_strategy._filesummary(p, print_bytes=100) == (26, b"abcdefghijklmnopqrstuvwxyz")
fetcher = from_url_scheme("bogus-scheme://example.com/a/b/c") # noqa: F841

View File

@@ -36,7 +36,7 @@
needs_binary_relocation,
needs_text_relocation,
relocate_links,
relocate_text,
unsafe_relocate_text,
)
from spack.spec import Spec
@@ -190,7 +190,7 @@ def test_buildcache(mock_archive, tmpdir):
@pytest.mark.usefixtures("install_mockery")
def test_relocate_text(tmpdir):
def test_unsafe_relocate_text(tmpdir):
spec = Spec("trivial-install-test-package")
spec.concretize()
with tmpdir.as_cwd():
@@ -203,7 +203,7 @@ def test_relocate_text(tmpdir):
filenames = [filename]
new_dir = "/opt/rh/devtoolset/"
# Singleton dict doesn't matter if Ordered
relocate_text(filenames, {old_dir: new_dir})
unsafe_relocate_text(filenames, {old_dir: new_dir})
with open(filename, "r") as script:
for line in script:
assert new_dir in line

View File

@@ -2,11 +2,13 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import os
import os.path
import re
import shutil
import sys
from collections import OrderedDict
import pytest
@@ -16,11 +18,11 @@
import spack.paths
import spack.platforms
import spack.relocate
import spack.relocate_text as relocate_text
import spack.spec
import spack.store
import spack.tengine
import spack.util.executable
from spack.relocate import utf8_path_to_binary_regex, utf8_paths_to_single_binary_regex
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Tests fail on Windows")
@@ -267,7 +269,7 @@ def test_set_elf_rpaths_warning(mock_patchelf):
@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_relocate_text_bin(binary_with_rpaths, prefix_like):
def test_replace_prefix_bin(binary_with_rpaths, prefix_like):
prefix = "/usr/" + prefix_like
prefix_bytes = prefix.encode("utf-8")
new_prefix = "/foo/" + prefix_like
@@ -276,7 +278,7 @@ def test_relocate_text_bin(binary_with_rpaths, prefix_like):
executable = binary_with_rpaths(rpaths=[prefix + "/lib", prefix + "/lib64"])
# Relocate the RPATHs
spack.relocate.relocate_text_bin([str(executable)], {prefix_bytes: new_prefix_bytes})
spack.relocate._replace_prefix_bin(str(executable), {prefix_bytes: new_prefix_bytes})
# Some compilers add rpaths so ensure changes included in final result
assert "%s/lib:%s/lib64" % (new_prefix, new_prefix) in rpaths_for(executable)
@@ -347,7 +349,7 @@ def test_make_elf_binaries_relative(binary_with_rpaths, copy_binary, prefix_tmpd
@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_relocate_text_bin_with_message(binary_with_rpaths, copy_binary, prefix_tmpdir):
def test_relocate_text_bin(binary_with_rpaths, copy_binary, prefix_tmpdir):
orig_binary = binary_with_rpaths(
rpaths=[
str(prefix_tmpdir.mkdir("lib")),
@@ -366,7 +368,7 @@ def test_relocate_text_bin_with_message(binary_with_rpaths, copy_binary, prefix_
orig_path_bytes = str(orig_binary.dirpath()).encode("utf-8")
new_path_bytes = str(new_binary.dirpath()).encode("utf-8")
spack.relocate.relocate_text_bin([str(new_binary)], {orig_path_bytes: new_path_bytes})
spack.relocate.unsafe_relocate_text_bin([str(new_binary)], {orig_path_bytes: new_path_bytes})
# Check original directory is not there anymore and it was
# substituted with the new one
@@ -380,8 +382,8 @@ def test_relocate_text_bin_raise_if_new_prefix_is_longer(tmpdir):
fpath = str(tmpdir.join("fakebin"))
with open(fpath, "w") as f:
f.write("/short")
with pytest.raises(relocate_text.BinaryTextReplaceError):
spack.relocate.relocate_text_bin([fpath], {short_prefix: long_prefix})
with pytest.raises(spack.relocate.BinaryTextReplaceError):
spack.relocate.unsafe_relocate_text_bin([fpath], {short_prefix: long_prefix})
@pytest.mark.requires_executables("install_name_tool", "file", "cc")
@@ -436,3 +438,227 @@ def test_fixup_macos_rpaths(make_dylib, make_object_file):
# (this is a corner case for GCC installation)
(root, filename) = make_object_file()
assert not fixup_rpath(root, filename)
def test_text_relocation_regex_is_safe():
# Test whether prefix regex is properly escaped
string = b"This does not match /a/, but this does: /[a-z]/."
assert utf8_path_to_binary_regex("/[a-z]/").search(string).group(0) == b"/[a-z]/"
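A minimal sketch of the escaping behavior this test checks, using only re.escape (an illustration, not the actual utf8_path_to_binary_regex implementation):
import re

def path_to_literal_regex(path):
    # hypothetical helper: escape regex metacharacters so the path matches literally
    return re.compile(re.escape(path.encode("utf-8")))

assert path_to_literal_regex("/[a-z]/").search(b"see /[a-z]/ here").group(0) == b"/[a-z]/"
assert path_to_literal_regex("/[a-z]/").search(b"plain /a/ does not match") is None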
def test_utf8_paths_to_single_binary_regex():
regex = utf8_paths_to_single_binary_regex(["/first/path", "/second/path", "/safe/[a-z]"])
# Match nothing
assert not regex.search(b"text /neither/first/path text /the/second/path text")
# Match first
string = b"contains both /first/path/subdir and /second/path/sub"
assert regex.search(string).group(0) == b"/first/path/subdir"
# Match second
string = b"contains both /not/first/path/subdir but /second/path/subdir"
assert regex.search(string).group(0) == b"/second/path/subdir"
# Match "unsafe" dir name
string = b"don't match /safe/a/path but do match /safe/[a-z]/file"
assert regex.search(string).group(0) == b"/safe/[a-z]/file"
def test_ordered_replacement():
# This tests whether binary text replacement respects order, so that
# a long package prefix is replaced before a shorter sub-prefix like
# the root of the spack store (as a fallback).
def replace_and_expect(prefix_map, before, after=None, suffix_safety_size=7):
f = io.BytesIO(before)
spack.relocate.apply_binary_replacements(f, OrderedDict(prefix_map), suffix_safety_size)
f.seek(0)
assert f.read() == after
# The case of having a non-null terminated common suffix.
replace_and_expect(
[
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
(b"/old-spack/opt", b"/sec/spack/opt"),
],
b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
b"Binary with /////////first/specific-package and /sec/spack/opt",
suffix_safety_size=7,
)
# The case of having a direct null terminated common suffix.
replace_and_expect(
[
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
(b"/old-spack/opt", b"/sec/spack/opt"),
],
b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
b"Binary with /////////first/specific-package\0 and /sec/spack/opt\0",
suffix_safety_size=7,
)
# Testing the order of operations (not null terminated, long enough common suffix)
replace_and_expect(
[
(b"/old-spack/opt", b"/s/spack/opt"),
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
],
b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
b"Binary with ///s/spack/opt/specific-package and ///s/spack/opt",
suffix_safety_size=7,
)
# Testing the order of operations (null terminated, long enough common suffix)
replace_and_expect(
[
(b"/old-spack/opt", b"/s/spack/opt"),
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
],
b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
b"Binary with ///s/spack/opt/specific-package\0 and ///s/spack/opt\0",
suffix_safety_size=7,
)
# Null terminated within the lookahead window, common suffix long enough
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXage")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
b"Binary with ///////////opt/specific-XXXXage/sub\0 data",
suffix_safety_size=7,
)
# Null terminated within the lookahead window, common suffix too short, but
# shortening is enough to spare more than 7 bytes of old suffix.
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXXge")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
b"Binary with /opt/specific-XXXXXge/sub\0ckage/sub\0 data", # ckage/sub = 9 bytes
suffix_safety_size=7,
)
# Null terminated within the lookahead window, common suffix too short,
# shortening leaves exactly 7 suffix bytes untouched, amazing!
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/spack/specific-XXXXXge")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
b"Binary with /spack/specific-XXXXXge/sub\0age/sub\0 data", # age/sub = 7 bytes
suffix_safety_size=7,
)
# Null terminated within the lookahead window, common suffix too short,
# shortening doesn't leave space for 7 bytes, sad!
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
b"/old-spack/opt/specific-package",
b"/snacks/specific-XXXXXge",
b"/old-spack/opt/specific-package/sub",
)
with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXge")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
# expect failure!
suffix_safety_size=7,
)
# Check that it works when changing suffix_safety_size.
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXXe")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
b"Binary with /snacks/specific-XXXXXXe/sub\0ge/sub\0 data",
suffix_safety_size=6,
)
# Finally check the case of no shortening but a long enough common suffix.
replace_and_expect(
[(b"pkg-gwixwaalgczp6", b"pkg-zkesfralgczp6")],
b"Binary with pkg-gwixwaalgczp6/config\0 data",
b"Binary with pkg-zkesfralgczp6/config\0 data",
suffix_safety_size=7,
)
# Too short matching suffix, identical string length
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
b"pkg-gwixwaxlgczp6",
b"pkg-zkesfrzlgczp6",
b"pkg-gwixwaxlgczp6",
)
with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
replace_and_expect(
[(b"pkg-gwixwaxlgczp6", b"pkg-zkesfrzlgczp6")],
b"Binary with pkg-gwixwaxlgczp6\0 data",
# expect failure
suffix_safety_size=7,
)
# Finally, make sure that the regex is not greedily finding the LAST null byte;
# it should find the first null byte in the window. In this test we put one null
# at a distance where we can't keep a long enough suffix, and one where we can,
# so we should expect failure when the first null is used.
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
b"pkg-abcdef",
b"pkg-xyzabc",
b"pkg-abcdef",
)
with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
replace_and_expect(
[(b"pkg-abcdef", b"pkg-xyzabc")],
b"Binary with pkg-abcdef\0/xx\0", # def\0/xx is 7 bytes.
# expect failure
suffix_safety_size=7,
)
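To make the ordering concern concrete, a tiny sketch with naive str.replace (not the padded, null-aware replacement these tests exercise): applying the longer prefix first preserves it, while applying the shorter prefix first clobbers the longer mapping.
text = "/old-spack/opt/specific-package/bin"

# longest prefix first: the package-specific mapping applies cleanly
assert text.replace("/old-spack/opt/specific-package", "/first/specific-package") == "/first/specific-package/bin"

# shortest prefix first: the longer mapping can no longer match afterwards
assert text.replace("/old-spack/opt", "/sec/spack/opt").startswith("/sec/spack/opt/specific-package")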
def test_inplace_text_replacement():
def replace_and_expect(prefix_to_prefix, before: bytes, after: bytes):
f = io.BytesIO(before)
prefix_to_prefix = OrderedDict(prefix_to_prefix)
regex = spack.relocate.byte_strings_to_single_binary_regex(prefix_to_prefix.keys())
spack.relocate._replace_prefix_text_file(f, regex, prefix_to_prefix)
f.seek(0)
assert f.read() == after
replace_and_expect(
[
(b"/first/prefix", b"/first-replacement/prefix"),
(b"/second/prefix", b"/second-replacement/prefix"),
],
b"Example: /first/prefix/subdir and /second/prefix/subdir",
b"Example: /first-replacement/prefix/subdir and /second-replacement/prefix/subdir",
)
replace_and_expect(
[
(b"/replace/in/order", b"/first"),
(b"/replace/in", b"/second"),
(b"/replace", b"/third"),
],
b"/replace/in/order/x /replace/in/y /replace/z",
b"/first/x /second/y /third/z",
)
replace_and_expect(
[
(b"/replace", b"/third"),
(b"/replace/in", b"/second"),
(b"/replace/in/order", b"/first"),
],
b"/replace/in/order/x /replace/in/y /replace/z",
b"/third/in/order/x /third/in/y /third/z",
)
replace_and_expect(
[(b"/my/prefix", b"/replacement")],
b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
)
replace_and_expect(
[(b"/my/prefix", b"/replacement")],
b"Install path: /my/prefix.",
b"Install path: /replacement.",
)
replace_and_expect(
[(b"/my/prefix", b"/replacement")],
b"#!/my/prefix",
b"#!/replacement",
)

View File

@@ -1,247 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
from collections import OrderedDict
import pytest
import spack.relocate_text as relocate_text
def test_text_relocation_regex_is_safe():
# Test whether prefix regex is properly escaped
string = b"This does not match /a/, but this does: /[a-z]/."
assert relocate_text.utf8_path_to_binary_regex("/[a-z]/").search(string).group(0) == b"/[a-z]/"
def test_utf8_paths_to_single_binary_regex():
regex = relocate_text.utf8_paths_to_single_binary_regex(
["/first/path", "/second/path", "/safe/[a-z]"]
)
# Match nothing
assert not regex.search(b"text /neither/first/path text /the/second/path text")
# Match first
string = b"contains both /first/path/subdir and /second/path/sub"
assert regex.search(string).group(0) == b"/first/path/subdir"
# Match second
string = b"contains both /not/first/path/subdir but /second/path/subdir"
assert regex.search(string).group(0) == b"/second/path/subdir"
# Match "unsafe" dir name
string = b"don't match /safe/a/path but do match /safe/[a-z]/file"
assert regex.search(string).group(0) == b"/safe/[a-z]/file"
def test_ordered_replacement():
# This tests whether binary text replacement respects order, so that
# a long package prefix is replaced before a shorter sub-prefix like
# the root of the spack store (as a fallback).
def replace_and_expect(prefix_map, before, after=None, suffix_safety_size=7):
f = io.BytesIO(before)
relocater = relocate_text.BinaryFilePrefixReplacer(
OrderedDict(prefix_map), suffix_safety_size
)
relocater.apply_to_file(f)
f.seek(0)
assert f.read() == after
# The case of having a non-null terminated common suffix.
replace_and_expect(
[
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
(b"/old-spack/opt", b"/sec/spack/opt"),
],
b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
b"Binary with /////////first/specific-package and /sec/spack/opt",
suffix_safety_size=7,
)
# The case of having a direct null terminated common suffix.
replace_and_expect(
[
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
(b"/old-spack/opt", b"/sec/spack/opt"),
],
b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
b"Binary with /////////first/specific-package\0 and /sec/spack/opt\0",
suffix_safety_size=7,
)
# Testing the order of operations (not null terminated, long enough common suffix)
replace_and_expect(
[
(b"/old-spack/opt", b"/s/spack/opt"),
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
],
b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
b"Binary with ///s/spack/opt/specific-package and ///s/spack/opt",
suffix_safety_size=7,
)
# Testing the order of operations (null terminated, long enough common suffix)
replace_and_expect(
[
(b"/old-spack/opt", b"/s/spack/opt"),
(b"/old-spack/opt/specific-package", b"/first/specific-package"),
],
b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
b"Binary with ///s/spack/opt/specific-package\0 and ///s/spack/opt\0",
suffix_safety_size=7,
)
# Null terminated within the lookahead window, common suffix long enough
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXage")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
b"Binary with ///////////opt/specific-XXXXage/sub\0 data",
suffix_safety_size=7,
)
# Null terminated within the lookahead window, common suffix too short, but
# shortening is enough to spare more than 7 bytes of old suffix.
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXXge")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
b"Binary with /opt/specific-XXXXXge/sub\0ckage/sub\0 data", # ckage/sub = 9 bytes
suffix_safety_size=7,
)
# Null terminated within the lookahead window, common suffix too short,
# shortening leaves exactly 7 suffix bytes untouched, amazing!
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/spack/specific-XXXXXge")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
b"Binary with /spack/specific-XXXXXge/sub\0age/sub\0 data", # age/sub = 7 bytes
suffix_safety_size=7,
)
# Null terminated within the lookahead window, common suffix too short,
# shortening doesn't leave space for 7 bytes, sad!
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
b"/old-spack/opt/specific-package",
b"/snacks/specific-XXXXXge",
b"/old-spack/opt/specific-package/sub",
)
with pytest.raises(relocate_text.CannotShrinkCString, match=error_msg):
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXge")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
# expect failure!
suffix_safety_size=7,
)
# Check that it works when changing suffix_safety_size.
replace_and_expect(
[(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXXe")],
b"Binary with /old-spack/opt/specific-package/sub\0 data",
b"Binary with /snacks/specific-XXXXXXe/sub\0ge/sub\0 data",
suffix_safety_size=6,
)
# Finally check the case of no shortening but a long enough common suffix.
replace_and_expect(
[(b"pkg-gwixwaalgczp6", b"pkg-zkesfralgczp6")],
b"Binary with pkg-gwixwaalgczp6/config\0 data",
b"Binary with pkg-zkesfralgczp6/config\0 data",
suffix_safety_size=7,
)
# Too short matching suffix, identical string length
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
b"pkg-gwixwaxlgczp6",
b"pkg-zkesfrzlgczp6",
b"pkg-gwixwaxlgczp6",
)
with pytest.raises(relocate_text.CannotShrinkCString, match=error_msg):
replace_and_expect(
[(b"pkg-gwixwaxlgczp6", b"pkg-zkesfrzlgczp6")],
b"Binary with pkg-gwixwaxlgczp6\0 data",
# expect failure
suffix_safety_size=7,
)
# Finally, make sure that the regex is not greedily finding the LAST null byte;
# it should find the first null byte in the window. In this test we put one null
# at a distance where we can't keep a long enough suffix, and one where we can,
# so we should expect failure when the first null is used.
error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
b"pkg-abcdef",
b"pkg-xyzabc",
b"pkg-abcdef",
)
with pytest.raises(relocate_text.CannotShrinkCString, match=error_msg):
replace_and_expect(
[(b"pkg-abcdef", b"pkg-xyzabc")],
b"Binary with pkg-abcdef\0/xx\0", # def\0/xx is 7 bytes.
# expect failure
suffix_safety_size=7,
)
def test_inplace_text_replacement():
def replace_and_expect(prefix_to_prefix, before: bytes, after: bytes):
f = io.BytesIO(before)
replacer = relocate_text.TextFilePrefixReplacer(OrderedDict(prefix_to_prefix))
replacer.apply_to_file(f)
f.seek(0)
assert f.read() == after
replace_and_expect(
[
(b"/first/prefix", b"/first-replacement/prefix"),
(b"/second/prefix", b"/second-replacement/prefix"),
],
b"Example: /first/prefix/subdir and /second/prefix/subdir",
b"Example: /first-replacement/prefix/subdir and /second-replacement/prefix/subdir",
)
replace_and_expect(
[
(b"/replace/in/order", b"/first"),
(b"/replace/in", b"/second"),
(b"/replace", b"/third"),
],
b"/replace/in/order/x /replace/in/y /replace/z",
b"/first/x /second/y /third/z",
)
replace_and_expect(
[
(b"/replace", b"/third"),
(b"/replace/in", b"/second"),
(b"/replace/in/order", b"/first"),
],
b"/replace/in/order/x /replace/in/y /replace/z",
b"/third/in/order/x /third/in/y /third/z",
)
replace_and_expect(
[(b"/my/prefix", b"/replacement")],
b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
)
replace_and_expect(
[(b"/my/prefix", b"/replacement")],
b"Install path: /my/prefix.",
b"Install path: /replacement.",
)
replace_and_expect(
[(b"/my/prefix", b"/replacement")],
b"#!/my/prefix",
b"#!/replacement",
)
def test_relocate_text_filters_redundant_entries():
# Test that we're filtering identical old / new paths, since that's a waste.
mapping = OrderedDict([("/hello", "/hello"), ("/world", "/world")])
replacer_1 = relocate_text.BinaryFilePrefixReplacer.from_strings_or_bytes(mapping)
replacer_2 = relocate_text.TextFilePrefixReplacer.from_strings_or_bytes(mapping)
assert not replacer_1.prefix_to_prefix
assert not replacer_2.prefix_to_prefix

View File

@@ -0,0 +1,72 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import pytest
import spack.spec
import spack.solver.asp as asp
import spack.store
pytestmark = [
pytest.mark.skipif(
spack.config.get("config:concretizer") == "original", reason="requires new concretizer"
),
pytest.mark.usefixtures("mutable_config", "mock_packages"),
]
@pytest.fixture
def reusable_specs(mock_packages):
reusable_specs = []
for spec in ["mpich", "openmpi", "zmpi"]:
reusable_specs.extend(s for s in spack.spec.Spec(spec).concretized().traverse(root=True))
return list(sorted(set(reusable_specs)))
@pytest.mark.parametrize(
"root,reuse",
itertools.product(
("mpileaks ^mpich", "mpileaks ^openmpi", "mpileaks ^zmpi", "patch"),
(True, False),
),
)
def test_all_facts_in_solve(database, root, reuse, reusable_specs):
reusable_specs = reusable_specs if reuse else []
solver = spack.solver.asp.Solver()
setup = spack.solver.asp.SpackSolverSetup()
result, _, _ = solver.driver.solve(setup, [spack.spec.Spec(root)], reuse=reusable_specs)
*_, result_attrs = result.answers[0]
result_attrs = set(result_attrs)
def remove_hashes(attrs):
return []
for spec in result.specs:
# check only link and run deps if reusing.
deptype = ("link", "run") if reuse else "all"
# get all facts about the spec and filter out just the "attr" ones.
attrs = setup.spec_clauses(spec, deptype=deptype, body=True, expand_hashes=True)
# only consider attr() functions, not other displayed atoms
# don't consider any DAG/package hashes, as they are added after solving
attrs = set(attr for attr in attrs if attr.name == "attr" and "hash" not in attr.args[0])
# make sure all facts from the solver are in the actual solution.
diff = attrs - result_attrs
# this is a current bug in the solver: we don't manage dependency patches
# properly, and with reuse it can grab something w/o the right patch.
# See https://github.com/spack/spack/issues/32497
# TODO: Remove this XFAIL when #32497 is fixed.
patches = [a for a in diff if a.args[0] == "variant_value" and a.args[2] == "patches"]
if diff and not (diff - set(patches)):
pytest.xfail("Bug in new concretizer with patch constraints. See #32497.")
assert not diff

View File

@@ -125,7 +125,7 @@ def _mock_installed(self):
# use the installed C. It should *not* force A to use the installed D
# *if* we're doing a fresh installation.
a_spec = Spec(a)
a_spec._add_dependency(c_spec, deptypes=("build", "link"))
a_spec._add_dependency(c_spec, ("build", "link"))
a_spec.concretize()
assert spack.version.Version("2") == a_spec[c][d].version
assert spack.version.Version("2") == a_spec[e].version
@@ -148,7 +148,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))
a_spec = Spec("a")
a_spec._add_dependency(b_spec, deptypes=("build", "link"))
a_spec._add_dependency(b_spec, ("build", "link"))
a_spec.concretize()
assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@@ -992,9 +992,9 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
link_run_spec = Spec("c@1.0").concretized()
build_spec = Spec("c@2.0").concretized()
root.add_dependency_edge(link_run_spec, deptypes="link")
root.add_dependency_edge(link_run_spec, deptypes="run")
root.add_dependency_edge(build_spec, deptypes="build")
root.add_dependency_edge(link_run_spec, deptype="link")
root.add_dependency_edge(link_run_spec, deptype="run")
root.add_dependency_edge(build_spec, deptype="build")
# Check dependencies from the perspective of root
assert len(root.dependencies()) == 2
@@ -1020,7 +1020,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
root = Spec("b@2.0").concretized()
bootstrap = Spec("b@1.0").concretized()
root.add_dependency_edge(bootstrap, deptypes="build")
root.add_dependency_edge(bootstrap, deptype="build")
assert len(root.dependencies()) == 1
assert root.dependencies()[0].name == "b"
@@ -1039,7 +1039,7 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
bootstrap = Spec("b@1.0").concretized()
for current_deptype in ("build", "link", "run"):
root.add_dependency_edge(bootstrap, deptypes=current_deptype)
root.add_dependency_edge(bootstrap, deptype=current_deptype)
# Check edges in dependencies
assert len(root.edges_to_dependencies()) == 1
@@ -1066,9 +1066,9 @@ def test_adding_same_deptype_with_the_same_name_raises(
c1 = Spec("b@1.0").concretized()
c2 = Spec("b@2.0").concretized()
p.add_dependency_edge(c1, deptypes=c1_deptypes)
p.add_dependency_edge(c1, deptype=c1_deptypes)
with pytest.raises(spack.error.SpackError):
p.add_dependency_edge(c2, deptypes=c2_deptypes)
p.add_dependency_edge(c2, deptype=c2_deptypes)
@pytest.mark.regression("33499")
@@ -1087,16 +1087,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
z3_flavor_1 = Spec("z3 +through_a1")
z3_flavor_2 = Spec("z3 +through_z1")
root.add_dependency_edge(a1, deptypes=("build", "run", "test"))
root.add_dependency_edge(a1, deptype=("build", "run", "test"))
# unique package as a dep of a build/run/test type dep.
a1.add_dependency_edge(a2, deptypes="all")
a1.add_dependency_edge(z3_flavor_1, deptypes="all")
a1.add_dependency_edge(a2, deptype="all")
a1.add_dependency_edge(z3_flavor_1, deptype="all")
# chain of link type deps root -> z1 -> z2 -> z3
root.add_dependency_edge(z1, deptypes="link")
z1.add_dependency_edge(z2, deptypes="link")
z2.add_dependency_edge(z3_flavor_2, deptypes="link")
root.add_dependency_edge(z1, deptype="link")
z1.add_dependency_edge(z2, deptype="link")
z2.add_dependency_edge(z3_flavor_2, deptype="link")
# Indexing should prefer the link-type dep.
assert "through_z1" in root["z3"].variants

View File

@@ -61,25 +61,25 @@ def test_spec_list_expansions(self):
@pytest.mark.parametrize(
"specs,expected",
[
# Constraints are ordered carefully to apply to appropriate node
# Constraints are ordered randomly
(
[
{
"matrix": [
["hypre", "libelf"],
["~shared"],
["^foo"],
["^zmpi"],
["%gcc@4.5.0"],
["hypre", "libelf"],
["~shared"],
["cflags=-O3", 'cflags="-g -O0"'],
["^foo"],
]
}
],
[
"hypre ~shared ^foo ^zmpi cflags=-O3 %gcc@4.5.0",
'hypre ~shared ^foo ^zmpi cflags="-g -O0" %gcc@4.5.0',
"libelf ~shared ^foo ^zmpi cflags=-O3 %gcc@4.5.0",
'libelf ~shared ^foo ^zmpi cflags="-g -O0" %gcc@4.5.0',
"hypre cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi",
'hypre cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
"libelf cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi",
'libelf cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
],
),
# A constraint affects both the root and a dependency

View File

@@ -1108,7 +1108,7 @@ def test_error_message_unknown_variant(self):
def test_satisfies_dependencies_ordered(self):
d = Spec("zmpi ^fake")
s = Spec("mpileaks")
s._add_dependency(d, deptypes=())
s._add_dependency(d, ())
assert s.satisfies("mpileaks ^zmpi ^fake", strict=True)
@pytest.mark.parametrize("transitive", [True, False])
@@ -1156,9 +1156,7 @@ def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):
def test_malformed_spec_dict():
with pytest.raises(SpecError, match="malformed"):
Spec.from_dict(
{"spec": {"_meta": {"version": 2}, "nodes": [{"dependencies": {"name": "foo"}}]}}
)
Spec.from_dict({"spec": {"nodes": [{"dependencies": {"name": "foo"}}]}})
def test_spec_dict_hashless_dep():
@@ -1166,10 +1164,9 @@ def test_spec_dict_hashless_dep():
Spec.from_dict(
{
"spec": {
"_meta": {"version": 2},
"nodes": [
{"name": "foo", "hash": "thehash", "dependencies": [{"name": "bar"}]}
],
]
}
}
)
@@ -1255,7 +1252,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
# add it to an abstract spec as a dependency
top = Spec("dt-diamond")
top.add_dependency_edge(bottom, deptypes=())
top.add_dependency_edge(bottom, ())
# concretize with the already-concrete dependency
top.concretize()

View File

@@ -13,9 +13,7 @@
import ast
import collections
import collections.abc
import gzip
import inspect
import json
import os
import pytest
@@ -509,33 +507,3 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
("version", "1.2.11"),
]
)
@pytest.mark.parametrize(
"specfile,expected_hash,reader_cls",
[
# First version supporting JSON format for specs
("specfiles/hdf5.v013.json.gz", "vglgw4reavn65vx5d4dlqn6rjywnq76d", spack.spec.SpecfileV1),
# Introduces full hash in the format, still has 3 hashes
("specfiles/hdf5.v016.json.gz", "stp45yvzte43xdauknaj3auxlxb4xvzs", spack.spec.SpecfileV1),
# Introduces "build_specs", see https://github.com/spack/spack/pull/22845
("specfiles/hdf5.v017.json.gz", "xqh5iyjjtrp2jw632cchacn3l7vqzf3m", spack.spec.SpecfileV2),
# Use "full hash" everywhere, see https://github.com/spack/spack/pull/28504
("specfiles/hdf5.v019.json.gz", "iulacrbz7o5v5sbj7njbkyank3juh6d3", spack.spec.SpecfileV3),
],
)
def test_load_json_specfiles(specfile, expected_hash, reader_cls):
fullpath = os.path.join(spack.paths.test_path, "data", specfile)
with gzip.open(fullpath, "rt", encoding="utf-8") as f:
data = json.load(f)
s1 = Spec.from_dict(data)
s2 = reader_cls.load(data)
assert s2.dag_hash() == expected_hash
assert s1.dag_hash() == s2.dag_hash()
assert s1 == s2
assert Spec.from_json(s2.to_json()).dag_hash() == s2.dag_hash()
openmpi_edges = s2.edges_to_dependencies(name="openmpi")
assert len(openmpi_edges) == 1

View File

@@ -18,8 +18,8 @@ def create_dag(nodes, edges):
dict: mapping from package name to abstract Spec with proper deps.
"""
specs = {name: Spec(name) for name in nodes}
for parent, child, deptypes in edges:
specs[parent].add_dependency_edge(specs[child], deptypes=deptypes)
for parent, child, deptype in edges:
specs[parent].add_dependency_edge(specs[child], deptype)
return specs
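A brief usage sketch of this helper (the package names are placeholders for abstract specs; nothing here is concretized):
specs = create_dag(
    nodes=["parent", "child"],
    edges=[("parent", "child", ("build", "link"))],
)
assert specs["parent"].dependencies()[0].name == "child"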

View File

@@ -238,7 +238,7 @@ class HasManyMetadataAttributes:
url = "https://example.com/foo.tar.gz"
git = "https://example.com/foo/bar.git"
maintainers("alice", "bob")
maintainers = ["alice", "bob"]
tags = ["foo", "bar", "baz"]
depends_on("foo")

View File

@@ -912,7 +912,7 @@ def inspect_path(root, inspections, exclude=None):
env = EnvironmentModifications()
# Inspect the prefix to check for the existence of common directories
for relative_path, variables in inspections.items():
expected = os.path.join(root, os.path.normpath(relative_path))
expected = os.path.join(root, relative_path)
if os.path.isdir(expected) and not exclude(expected):
for variable in variables:
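The two versions above differ only in whether the relative path is normalized before joining; a quick standard-library illustration of the difference:
import os

os.path.join("/prefix", "./lib")                    # '/prefix/./lib'
os.path.join("/prefix", os.path.normpath("./lib"))  # '/prefix/lib'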

View File

@@ -7,22 +7,10 @@ default:
# Job templates
########################################
.aws-pr-creds:
variables:
AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
.aws-protected-creds:
variables:
AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
.pr-refs:
.pr:
only:
- /^pr[\d]+_.*$/
.pr:
extends: [ ".pr-refs" ]
- /^github\/pr[\d]+_.*$/
variables:
SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
SPACK_PIPELINE_TYPE: "spack_pull_request"
@@ -33,6 +21,7 @@ default:
- /^develop$/
- /^releases\/v.*/
- /^v.*/
- /^github\/develop$/
.protected:
extends: [ ".protected-refs" ]
@@ -41,7 +30,7 @@ default:
SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}"
SPACK_PIPELINE_TYPE: "spack_protected_branch"
.generate-base:
.generate:
stage: generate
script:
- uname -a || true
@@ -60,6 +49,7 @@ default:
artifacts:
paths:
- "${CI_PROJECT_DIR}/jobs_scratch_dir"
tags: ["spack", "aws", "public", "medium", "x86_64"]
variables:
KUBERNETES_CPU_REQUEST: 4000m
KUBERNETES_MEMORY_REQUEST: 16G
@@ -68,15 +58,12 @@ default:
retry:
max: 2
when:
- always
.generate:
extends: [ ".generate-base" ]
tags: ["spack", "public", "medium", "x86_64"]
- runner_system_failure
- stuck_or_timeout_failure
.generate-aarch64:
extends: [ ".generate" ]
tags: ["spack", "public", "medium", "aarch64"]
tags: ["spack", "aws", "public", "medium", "aarch64"]
.pr-generate:
extends: [ ".pr", ".generate" ]
@@ -94,14 +81,20 @@ default:
stage: build
.pr-build:
extends: [ ".pr", ".build", ".aws-pr-creds" ]
extends: [ ".pr", ".build" ]
variables:
AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
.protected-build:
extends: [ ".protected", ".build", ".aws-protected-creds" ]
extends: [ ".protected", ".build" ]
variables:
AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
protected-publish:
stage: publish
extends: [ ".protected", ".aws-protected-creds" ]
extends: [ ".protected" ]
image: "ghcr.io/spack/python-aws-bash:0.0.1"
tags: ["spack", "public", "medium", "aws", "x86_64"]
retry:
@@ -206,6 +199,7 @@ protected-publish:
# variables:
# AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
# AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
# .mac-protected-build:
# extends: [ ".mac-protected", ".build" ]
# variables:
@@ -323,43 +317,6 @@ e4s-protected-build:
- artifacts: True
job: e4s-protected-generate
########################################
# GPU Testing Pipeline
########################################
.gpu-tests:
variables:
SPACK_CI_STACK_NAME: gpu-tests
gpu-tests-pr-generate:
extends: [ ".gpu-tests", ".pr-generate"]
image: ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01
gpu-tests-protected-generate:
extends: [ ".gpu-tests", ".protected-generate"]
image: ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01
gpu-tests-pr-build:
extends: [ ".gpu-tests", ".pr-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: gpu-tests-pr-generate
strategy: depend
needs:
- artifacts: True
job: gpu-tests-pr-generate
gpu-tests-protected-build:
extends: [ ".gpu-tests", ".protected-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: gpu-tests-protected-generate
strategy: depend
needs:
- artifacts: True
job: gpu-tests-protected-generate
########################################
# E4S OneAPI Pipeline
########################################
@@ -369,11 +326,11 @@ gpu-tests-protected-build:
e4s-oneapi-pr-generate:
extends: [ ".e4s-oneapi", ".pr-generate"]
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01
e4s-oneapi-protected-generate:
extends: [ ".e4s-oneapi", ".protected-generate"]
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01
e4s-oneapi-pr-build:
extends: [ ".e4s-oneapi", ".pr-build" ]
@@ -400,41 +357,41 @@ e4s-oneapi-protected-build:
########################################
# E4S on Power
########################################
.e4s-power-generate-tags-and-image:
image: { "name": "ecpe4s/ubuntu20.04-runner-ppc64le:2023-01-01", "entrypoint": [""] }
tags: ["spack", "public", "large", "ppc64le"]
# .power-e4s-generate-tags-and-image:
# image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu20.04-runner-ppc64le:2021-07-01", "entrypoint": [""] }
# tags: ["spack", "public", "medium", "ppc64le"]
.e4s-power:
variables:
SPACK_CI_STACK_NAME: e4s-power
# .e4s-on-power:
# variables:
# SPACK_CI_STACK_NAME: e4s-on-power
e4s-power-pr-generate:
extends: [ ".e4s-power", ".pr-generate", ".e4s-power-generate-tags-and-image"]
# e4s-on-power-pr-generate:
# extends: [ ".e4s-on-power", ".pr-generate", ".power-e4s-generate-tags-and-image"]
e4s-power-protected-generate:
extends: [ ".e4s-power", ".protected-generate", ".e4s-power-generate-tags-and-image"]
# e4s-on-power-protected-generate:
# extends: [ ".e4s-on-power", ".protected-generate", ".power-e4s-generate-tags-and-image"]
e4s-power-pr-build:
extends: [ ".e4s-power", ".pr-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: e4s-power-pr-generate
strategy: depend
needs:
- artifacts: True
job: e4s-power-pr-generate
# e4s-on-power-pr-build:
# extends: [ ".e4s-on-power", ".pr-build" ]
# trigger:
# include:
# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
# job: e4s-on-power-pr-generate
# strategy: depend
# needs:
# - artifacts: True
# job: e4s-on-power-pr-generate
e4s-power-protected-build:
extends: [ ".e4s-power", ".protected-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: e4s-power-protected-generate
strategy: depend
needs:
- artifacts: True
job: e4s-power-protected-generate
# e4s-on-power-protected-build:
# extends: [ ".e4s-on-power", ".protected-build" ]
# trigger:
# include:
# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
# job: e4s-on-power-protected-generate
# strategy: depend
# needs:
# - artifacts: True
# job: e4s-on-power-protected-generate
#########################################
# Build tests for different build-systems

View File

@@ -0,0 +1,269 @@
spack:
view: false
concretizer:
reuse: false
unify: false
config:
concretizer: clingo
install_tree:
root: /home/software/spack
padded_length: 512
projections:
all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'
packages:
all:
compiler:
- gcc@9.3.0
providers:
blas:
- openblas
mpi:
- mpich
target:
- ppc64le
variants: +mpi
binutils:
variants: +ld +gold +headers +libiberty ~nls +plugins
version:
- 2.36.1
doxygen:
version:
- 1.8.20
elfutils:
variants: +bzip2 ~nls +xz
hdf5:
variants: +fortran +hl +shared api=v18
version:
- 1.12.0
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
mesa:
variants: ~llvm
mesa18:
variants: ~llvm
mpich:
variants: ~wrapperrpath
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
definitions:
- cuda_specs:
- amrex +cuda cuda_arch=70
- caliper +cuda cuda_arch=70
- chai ~benchmarks ~tests +cuda cuda_arch=70 ^umpire ~shared
- ginkgo +cuda cuda_arch=70
- heffte +cuda cuda_arch=70
- hpx +cuda cuda_arch=70
- hypre +cuda cuda_arch=70
- kokkos +wrapper +cuda cuda_arch=70
- kokkos-kernels +cuda cuda_arch=70 ^kokkos +wrapper +cuda cuda_arch=70
- magma +cuda cuda_arch=70
- mfem +cuda cuda_arch=70
- parsec +cuda cuda_arch=70
- petsc +cuda cuda_arch=70
- raja +cuda cuda_arch=70
- slate +cuda cuda_arch=70
- slepc +cuda cuda_arch=70
- strumpack ~slate +cuda cuda_arch=70
- sundials +cuda cuda_arch=70
- superlu-dist +cuda cuda_arch=70
- tasmanian +cuda cuda_arch=70
- trilinos@13.2.0 +cuda cuda_arch=70
- umpire ~shared +cuda cuda_arch=70
- vtk-m +cuda cuda_arch=70
- zfp +cuda cuda_arch=70
#- ascent ~shared +cuda cuda_arch=70
#- axom +cuda cuda_arch=70 ^umpire ~shared
#- dealii +cuda cuda_arch=70 # gmsh
#- flecsi +cuda cuda_arch=70
#- paraview +cuda cuda_arch=70
- default_specs:
- adios
- adios2
- aml
- amrex
- arborx
- archer
- argobots
- ascent
- axom ^umpire@4.1.2
- bolt
- cabana
- caliper
- chai ~benchmarks ~tests ^umpire@4.1.2
- charliecloud
- conduit
- darshan-runtime
- darshan-util
- datatransferkit
- dyninst
- faodel ~tcmalloc
- flecsi
- flit
- flux-core
- fortrilinos
- gasnet
- ginkgo
- globalarrays
- gmp
- gotcha
- gptune
- hdf5
- heffte +fftw
- hpctoolkit
- hpx
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- legion
- libnrm
- libquo
- libunwind
- llvm targets=amdgpu,nvptx +clang +compiler-rt +libcxx +lld +lldb +llvm_dylib +flang ~cuda
- loki
- mercury
- metall
- mfem
- mpark-variant
- mpifileutils ~xattr
- netlib-scalapack
- ninja
- nrm
- nvhpc
- omega-h
- openmpi
- openpmd-api ^hdf5@1.12.0 +fortran +shared +hl api=default
- papi
- papyrus@1.0.1
- parallel-netcdf
- paraview
- parsec ~cuda
- pdt
- petsc
- plasma
- precice
- pumi
- py-jupyterhub
- py-libensemble
- py-petsc4py
- py-warpx ^warpx dims=2 ^hdf5@1.12.0 +fortran +shared +hl api=default
- py-warpx ^warpx dims=3 ^hdf5@1.12.0 +fortran +shared +hl api=default
- py-warpx ^warpx dims=rz ^hdf5@1.12.0 +fortran +shared +hl api=default
- qthreads scheduler=distrib
- raja
- rempi
- scr
- slate ~cuda
- slepc
- stc
- strumpack ~slate
- sundials
- superlu
- superlu-dist
- swig
- swig@4.0.2-fortran
- sz
- tasmanian
- tau +mpi +python
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
- unifyfs@0.9.1
- upcxx
- variorum
- veloc
- vtk-m
- zfp
#- dealii
#- geopm
#- phist
#- qt
#- qwt
#- stat
#- umpire
- arch:
- '%gcc target=ppc64le'
specs:
- matrix:
- - $default_specs
- - $arch
- matrix:
- - $cuda_specs
- - $arch
mirrors: { "mirror": "s3://spack-binaries/e4s" }
gitlab-ci:
script:
- uname -a || true
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
- nproc
- curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.powerpc64le-linux-gnu.tar.gz' -o gmake.tar.gz
- printf '8096d202fe0a0c400b8c0573c4b9e009f2f10d2fa850a3f495340f16e9c42454 gmake.tar.gz' | sha256sum --check --strict --quiet
- tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
- . "./share/spack/setup-env.sh"
- spack --version
- spack arch
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- spack --color=always --backtrace ci rebuild
after_script:
- cat /proc/loadavg || true
match_behavior: first
mappings:
- match:
- cuda
- dyninst
- hpx
- llvm
- llvm-amdgpu
- precice
- rocblas
- rocsolver
- strumpack
- sundials
- trilinos
- vtk-h
- vtk-m
- warpx
runner-attributes:
image: { "name": "ghcr.io/spack/e4s-ubuntu-20.04:v2021-10-18", "entrypoint": [""] }
tags: ["spack", "public", "xlarge", "ppc64le"]
- match: ['os=ubuntu20.04']
runner-attributes:
image: { "name": "ghcr.io/spack/e4s-ubuntu-20.04:v2021-10-18", "entrypoint": [""] }
tags: ["spack", "public", "large", "ppc64le"]
broken-specs-url: "s3://spack-binaries-develop/broken-specs"
service-job-attributes:
before_script:
- . "./share/spack/setup-env.sh"
- spack --version
image: { "name": "ghcr.io/spack/e4s-ubuntu-20.04:v2021-10-18", "entrypoint": [""] }
tags: ["spack", "public", "medium", "ppc64le"]
cdash:
build-group: New PR testing workflow
url: https://cdash.spack.io
project: Spack Testing
site: Cloud Gitlab Infrastructure

View File

@@ -3,7 +3,7 @@ spack:
concretizer:
reuse: false
unify: false
unify: when_possible
config:
build_jobs: 32
@@ -16,37 +16,37 @@ spack:
compilers:
- compiler:
spec: dpcpp@2023.0.0
spec: dpcpp@2022.1.0
paths:
cc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/icx
cxx: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/dpcpp
f77: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
fc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
cc: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/icx
cxx: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/dpcpp
f77: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/ifx
fc: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/ifx
flags: {}
operating_system: ubuntu20.04
target: x86_64
modules: [compiler]
environment:
prepend_path:
LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2023.0.0/linux/compiler/lib/intel64_lin
LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2022.1.0/linux/compiler/lib/intel64_lin
extra_rpaths: []
- compiler:
spec: oneapi@2023.0.0
spec: oneapi@2022.1.0
paths:
cc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/icx
cxx: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/icpx
f77: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
fc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
cc: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/icx
cxx: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/icpx
f77: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/ifx
fc: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/ifx
flags: {}
operating_system: ubuntu20.04
target: x86_64
modules: [compiler]
environment:
prepend_path:
LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2023.0.0/linux/compiler/lib/intel64_lin
LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2022.1.0/linux/compiler/lib/intel64_lin
extra_rpaths: []
- compiler:
spec: gcc@11.1.0
spec: gcc@9.4.0
paths:
cc: /usr/bin/gcc
cxx: /usr/bin/g++
@@ -60,13 +60,20 @@ spack:
extra_rpaths: []
packages:
adios2:
require: "%gcc"
all:
require: '%oneapi'
require: "%oneapi"
providers:
blas: [openblas]
mpi: [mpich]
target: [x86_64]
variants: +mpi
binutils:
require: "%gcc"
variants: +ld +gold +headers +libiberty ~nls
cuda:
version: [11.4.2]
elfutils:
variants: +bzip2 ~nls +xz
hdf5:
@@ -75,6 +82,8 @@ spack:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
llvm:
require: "%gcc" # undefined reference to `_intel_fast_memset' becauase of -nodefaultlibs
mpich:
variants: ~wrapperrpath
ncurses:
@@ -83,55 +92,35 @@ spack:
variants: threads=openmp
python:
version: [3.8.13]
ruby:
require: "%gcc" # https://github.com/spack/spack/issues/31954
rust:
require: "%gcc" # undefined reference becauase of -nodefaultlibs
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
vtk-m:
require: ~openmp
require: "~openmp"
xz:
variants: +pic
mesa:
version: [21.3.8]
binutils:
require: '%gcc'
variants: +ld +gold +headers +libiberty ~nls
bison:
require: '%gcc'
krb5:
require: '%gcc'
llvm:
require: '%gcc'
m4:
require: '%gcc'
openssh:
require: '%gcc'
papi:
require: '%gcc'
py-scipy:
require: '%gcc'
ruby:
require: '%gcc'
rust:
require: '%gcc'
unzip:
require: '%gcc'
specs:
# CPU
- adios
- adios2
- alquimia
- aml
- amrex
- arborx
- archer
- argobots
- ascent
- amrex
- axom
- bolt
- bricks
- butterflypack
- cabana
- caliper
@@ -140,15 +129,20 @@ spack:
- darshan-runtime
- darshan-util
- datatransferkit
- exaworks
- faodel
- flit
- flux-core
- fortrilinos
- gasnet
- ginkgo
- globalarrays
- gmp
- gotcha
- gptune
- hdf5 +fortran +hl +shared
- heffte +fftw
- hpx max_cpu_count=512 networking=mpi
- hypre
- kokkos-kernels +openmp
- kokkos +openmp
@@ -166,6 +160,7 @@ spack:
- nccmp
- nco
- netlib-scalapack
- nrm
- omega-h
- openmpi
- openpmd-api
@@ -179,11 +174,17 @@ spack:
- plumed
- precice
- pumi
- py-cinemasci
- py-jupyterhub
- py-libensemble
- py-petsc4py
- py-warpx ^warpx dims=2
- py-warpx ^warpx dims=3
- py-warpx ^warpx dims=rz
- qthreads scheduler=distrib
- raja
- rempi
- scr
- slate ~cuda
- slepc
- stc
@@ -192,12 +193,14 @@ spack:
- superlu-dist
- superlu
- swig
- swig@4.0.2-fortran
- sz
- tasmanian
- tau +mpi +python
- trilinos@13.0.1 +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
- umpire
@@ -210,72 +213,54 @@ spack:
# GPU
- aml +ze
- amrex +sycl
- arborx +sycl ^kokkos +sycl +openmp std=17 +tests +examples
- cabana +sycl ^kokkos +sycl +openmp std=17 +tests +examples
- cabana +sycl ^kokkos+sycl +openmp std=17 +tests +examples
- kokkos +sycl +openmp std=17 +tests +examples %oneapi
- kokkos-kernels build_type=Release ^kokkos +sycl +openmp std=17 +tests +examples %oneapi
- kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp std=17 +tests +examples %oneapi
# CPU FAILURES
# - bricks # bricks
# - charliecloud # charliecloud
# - dyninst # old intel-tbb
# - exaworks # py-setuptools-scm
# - flux-core # py-setuptools-scm
# - geopm # geopm
# - ginkgo # ginkgo
# - gptune # py-scipy@1.3.3
# - h5bench # h5bench
# - hpctoolkit # dyninst
# - hpx max_cpu_count=512 networking=mpi # boost cxxstd=17
# - nrm # py-scipy
# - paraview +qt # qt
# - phist # phist
# - pruners-ninja # pruners-ninja
# - py-cinemasci # py-scipy@1.3.3, py-setuptools-scm
# - py-jupyterhub # py-setuptools-scm
# - py-warpx ^warpx dims=2 # py-scipy@1.5.4
# - py-warpx ^warpx dims=3 # py-scipy@1.5.4
# - py-warpx ^warpx dims=rz # py-scipy@1.5.4
# - scr # libyogrt
# - swig@4.0.2-fortran # swig
# - tau +mpi +python # tau
# - variorum # variorum
# --
# amrex: /opt/intel/oneapi/compiler/2023.0.0/linux/bin-llvm/../include/sycl/detail/defines_elementary.hpp:52:40: note: expanded from macro '__SYCL2020_DEPRECATED'
# amrex: /opt/intel/oneapi/compiler/2023.0.0/linux/bin-llvm/../include/sycl/detail/defines_elementary.hpp:52:40: note: expanded from macro '__SYCL2020_DEPRECATED'
# CPU BUILD FAILURES
#- adios2@2.8.0 # adios2
#- charliecloud@0.26 # charliecloud
#- dyninst@12.1.0 # old intel-tbb
#- geopm@1.1.0 # geopm
#- h5bench@1.2 # h5bench
#- hpctoolkit # dyninst
#- phist@1.9.5 # phist
#- paraview +qt # qt
#- pruners-ninja@1.0.1 # pruners-ninja
#- variorum@0.4.1 # variorum
# CPU BUILD FAILURES - NOTES
# adios2: /usr/bin/ld: ../../lib/libadios2_fortran.so.2.8.2: version node not found for symbol adios2_adios_init_mod@adios2_adios_init_serial_smod._; /usr/bin/ld: failed to set dynamic section sizes: bad value
# binutils: gold/powerpc.cc:3590: undefined reference to `gold::Sized_symbol<64>::Value_type gold::Symbol_table::compute_final_value<64>(gold::Sized_symbol<64> const*, gold::Symbol_table::Compute_final_value_status*) const'
# boost cxxstd=17: ./boost/mpl/aux_/integral_wrapper.hpp:73:31: error: integer value -1 is outside the valid range of values [0, 3] for this enumeration type [-Wenum-constexpr-conversion]
# bricks: cc1plus: error: bad value ('OFF') for '-mtune=' switch
# charliecloud: autoreconf phase: RuntimeError: configure script not found in ...
# flux-sched: include/yaml-cpp/emitter.h:164:9: error: comparison with NaN always evaluates to false in fast floating point modes [-Werror,-Wtautological-constant-compare]
# flux-sched: include/yaml-cpp/emitter.h:171:24: error: comparison with infinity always evaluates to false in fast floating point modes [-Werror,-Wtautological-constant-compare]
# ginkgo: icpx: error: clang frontend command failed with exit code 139
# h5bench: commons/h5bench_util.h:196: multiple definition of `has_vol_async';
# intel-tbb: clang++clang++clang++clang++clang++clang++clang++: : : : : : : clang++error: : unknown argument: '-flifetime-dse=1'
# libyogrt: configure: error: slurm is not in specified location!
# phist: fortran_bindings/test/kernels.F90(63): error #8284: If the actual argument is scalar, the dummy argument shall be scalar unless the actual argument is of type character or is an element of an array that is not assumed shape, pointer, or polymorphic. [ARGV]
# pruners-ninja: test/ninja_test_util.c:34: multiple definition of `a';
# py-cryptography: ??
# py-scipy@1.3.3: gcc: error: unrecognized command-line option '-fp-model=strict'
# py-scipy@1.5.4: gcc: error: unrecognized command-line option '-fp-model=strict'
# py-setuptools-scm: ??
# ruby: limits.c:415:34: error: invalid suffix 'D' on floating constant
# rust: /usr/bin/ld: /opt/intel/oneapi/compiler/2022.1.0/linux/bin-llvm/../compiler/lib/intel64_lin/libimf.a(libm_feature_flag.o): in function `__libm_feature_flag_init': libm_feature_flag.c:(.text+0x25): undefined reference to `__intel_cpu_feature_indicator_x'
# swig@4.0.2-fortran: /spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-11.1.0/m4-1.4.19-p3otmjixpi6zibdsyoqib5dpzfshq3nj/bin/m4:/spack/opt/spack/linux-ubuntu20.04-x86_64/oneapi-2023.0.0/bison-3.8.2-xca2sot4jhd72hvj2m2b3ajchagczvau/share/bison/skeletons/yacc.c:420: undefined macro `b4_symbol(103, tag)'
# tau: Error: Unable to identify ifort lib directory
# variorum: ld: Intel/CMakeFiles/variorum_intel.dir/msr_core.c.o:(.bss+0x0): multiple definition of `g_platform'; CMakeFiles/variorum.dir/config_architecture.c.o:(.bss+0x0): first defined here
# vtk-m +openmp: clang++: error: clang frontend command failed with exit code 139 (use -v to see invocation)
# GPU FAILURES
# - amrex +sycl # amrex
# - ginkgo +oneapi # ginkgo
# - hpctoolkit +level_zero # dyninst
# - sundials +sycl cxxstd=17 # sundials
# - tau +mpi +opencl +level_zero ~pdt # tau
# --
# GPU BUILD FAILURES
#- ginkgo@1.4.0 +oneapi %dpcpp ^cmake%oneapi # ginkgo
#- hpctoolkit@2022.04.15 +level_zero # dyninst
#- sundials@6.2.0 +sycl cxxstd=17 # sundials
#- tau@2.31.1 +mpi +opencl +level_zero ~pdt %oneapi ^binutils%gcc@9.4.0 ^papi%gcc@9.4.0 # tau
# GPU BUILD FAILURES - NOTES
# berkeley-db %dpcpp: dpcpp: dpcpperror: : no such file or directory: '/tmp/conftest-9d8d34.o'
# ginkgo %dpcpp: CMakeTestCXXCompiler.cmake:62: /usr/bin/ld: warning: libsvml.so, needed by /opt/intel/oneapi/compiler/2022.1.0/linux/bin-llvm/../lib/libsycl.so, not found (try using -rpath or -rpath-link) ...
# ncurses %dpcpp: If you have ncurses 4.2 applications, you should read the INSTALL document, and install the terminfo without the -x optiontic: error while loading shared libraries: libsvml.so: cannot open shared object file: No such file or directory
# sundials: include/sunmemory/sunmemory_sycl.h:20:10: fatal error: 'CL/sycl.hpp' file not found
# tau: requires libdrm-dev
# SKIPPED
# - flecsi # dependency pfunit marks oneapi as an unsupported compiler
#- flecsi@1.4.2 # dependency pfunit marks oneapi as an unsupported compiler
mirrors: { "mirror": "s3://spack-binaries/develop/e4s-oneapi" }
@@ -301,14 +286,14 @@ spack:
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- export PATH=/bootstrap/runner/view/bin:${PATH}
- . /bootstrap/runner/install/linux-ubuntu20.04-x86_64/gcc-11.1.0/lmod-8.7.2-b7eq7rjeckn3m4o4lglsakilibkznjom/lmod/8.7.2/init/bash
- . /bootstrap/runner/install/linux-ubuntu20.04-x86_64/gcc-9.4.0/lmod-8.7.2-ri26z7qy6ixtgpsqinswx3w6tuggluv5/lmod/8.7.2/init/bash
- module use /opt/intel/oneapi/modulefiles
- module load compiler
- spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
after_script:
- cat /proc/loadavg || true
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01
match_behavior: first
mappings:
@@ -493,7 +478,7 @@ spack:
before_script:
- . "./share/spack/setup-env.sh"
- spack --version
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01
tags: ["spack", "public", "x86_64"]
signing-job-attributes:


@@ -1,423 +0,0 @@
spack:
view: false
concretizer:
reuse: false
unify: false
config:
concretizer: clingo
install_tree:
root: /home/software/spack
padded_length: 512
projections:
all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'
packages:
all:
compiler: [gcc@11.1.0]
providers:
blas: [openblas]
mpi: [mpich]
target: [ppc64le]
variants: +mpi cuda_arch=70
tbb:
require: intel-tbb
binutils:
variants: +ld +gold +headers +libiberty ~nls
cuda:
version: [11.7.0]
elfutils:
variants: +bzip2 ~nls +xz
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
mpich:
variants: ~wrapperrpath
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
paraview:
require: '@5.11 ~qt+osmesa'
python:
version: [3.7.15]
trilinos:
require: +amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack
+intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro
+phalanx +rol +rythmos +sacado +stk +shards +shylu +stratimikos +teko +tempus
+tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]
faodel:
require: ~tcmalloc # needed for ppc64le
specs:
# CPU
- adios
- alquimia
- aml
- amrex
- arborx
- argobots
- axom
- bolt
- butterflypack
- cabana
- caliper
- chai ~benchmarks ~tests
- charliecloud
- conduit
- datatransferkit
- dyninst
- ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp # +paraview fails: FAILED: VTK/Filters/Statistics/CMakeFiles/FiltersStatistics-objects.dir/vtkPCAStatistics.cxx.o: /tmp/ccgvkIk5.s: Assembler messages: /tmp/ccgvkIk5.s:260012: Error: invalid machine `power10'
- exaworks
- flecsi
- flit
- flux-core
- fortrilinos
- gasnet
- ginkgo
- globalarrays
- gmp
- gotcha
- gptune
- h5bench
- hdf5-vol-async
- heffte +fftw
- hpctoolkit
- hpx max_cpu_count=512 networking=mpi
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- lammps
- legion
- libnrm
- libquo
- libunwind
- mercury
- metall
- mfem
- mpark-variant
- mpifileutils ~xattr
- nccmp
- nco
- netlib-scalapack
- nrm
- nvhpc
- omega-h
- openmpi
- openpmd-api
- papi
- papyrus
- parsec ~cuda
- pdt
- petsc
- phist
- plasma
- plumed
- pumi
- py-h5py
- py-jupyterhub
- py-libensemble +mpi +nlopt
- py-petsc4py
- py-warpx ^warpx dims=2
- py-warpx ^warpx dims=3
- py-warpx ^warpx dims=rz
- qthreads scheduler=distrib
- quantum-espresso
- raja
- rempi
- scr
- slate ~cuda
- slepc
- stc
- strumpack ~slate
- sundials
- superlu
- superlu-dist
- swig
- swig@4.0.2-fortran
- tasmanian
- tau +mpi +python
- trilinos@13.0.1 +belos +ifpack2 +stokhos
- turbine
- umap
- umpire
- upcxx
- wannier90
# CUDA
- amrex +cuda
- arborx +cuda ^kokkos +wrapper
- cabana +cuda ^kokkos +wrapper +cuda_lambda +cuda
- caliper +cuda
- chai ~benchmarks ~tests +cuda ^umpire ~shared
- ecp-data-vis-sdk +cuda cuda_arch=70 +adios2 +hdf5 ~paraview +vtkm +zfp # +paraview fails: FAILED: VTK/Filters/Statistics/CMakeFiles/FiltersStatistics-objects.dir/vtkPCAStatistics.cxx.o; /tmp/ccjmJhb6.s: Assembler messages: /tmp/ccjmJhb6.s:260012: Error: invalid machine `power10'
- flecsi +cuda
- flux-core +cuda
- ginkgo +cuda
- heffte +cuda
- hpctoolkit +cuda
- hpx max_cpu_count=512 +cuda
- hypre +cuda
- kokkos +wrapper +cuda
- kokkos-kernels +cuda ^kokkos +wrapper +cuda
- magma +cuda
- mfem +cuda
- omega-h +cuda
- papi +cuda
- petsc +cuda
- py-torch +cuda
- raja +cuda
- slate +cuda
- slepc +cuda
- strumpack ~slate +cuda
- sundials +cuda
- superlu-dist +cuda
- tasmanian +cuda
- tau +mpi +cuda
- trilinos@13.4.0 +belos +ifpack2 +stokhos +cuda
- umpire ~shared +cuda
- parsec +cuda
# CPU FAILURES
# - archer # llvm@8
# - bricks # bricks
# - geopm # geopm
# - loki # loki
# - precice # precice
# - pruners-ninja # pruners-ninja
# - variorum # Intel/variorum_cpuid.c:11:5: error: impossible constraint in 'asm'
# --
# bricks: VSBrick-7pt.py-Scalar-8x8x8-1:30:3: error: 'vfloat512' was not declared in this scope
# fltk: /usr/bin/ld: ../lib/libfltk_png.a(pngrutil.o): in function `png_read_filter_row': pngrutil.c:(.text.png_read_filter_row+0x90): undefined reference to `png_init_filter_functions_vsx'
# geopm: libtool.m4: error: problem compiling CXX test program
# llvm@8: clang/lib/Lex/Lexer.cpp:2547:34: error: ISO C++ forbids declaration of 'type name' with no type [-fpermissive]
# loki: include/loki/SmallObj.h:462:57: error: ISO C++17 does not allow dynamic exception specifications
# precice: /tmp/ccYNMwgE.s: Assembler messages: /tmp/ccYNMwgE.s:278115: Error: invalid machine `power10'
# pruners-ninja: test/ninja_test_util.c:34: multiple definition of `a';
# CUDA FAILURES
# - bricks +cuda # bricks
# - dealii +cuda # fltk
# --
# bricks: VSBrick-7pt.py-Scalar-8x8x8-1:30:3: error: 'vfloat512' was not declared in this scope
mirrors: { "mirror": "s3://spack-binaries/develop/e4s-power" }
gitlab-ci:
script:
- uname -a || true
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
- nproc
- . "./share/spack/setup-env.sh"
- spack --version
- spack arch
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
- if [[ -r /mnt/key/e4s.gpg ]]; then spack gpg trust /mnt/key/e4s.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
after_script:
- cat /proc/loadavg || true
image: ecpe4s/ubuntu20.04-runner-ppc64le:2023-01-01
match_behavior: first
mappings:
- match:
- hipblas
- llvm
- llvm-amdgpu
- rocblas
runner-attributes:
tags: [ "spack", "huge", "ppc64le" ]
variables:
CI_JOB_SIZE: huge
- match:
- cuda
- dyninst
- ginkgo
- hpx
- kokkos-kernels
- kokkos-nvcc-wrapper
- magma
- mfem
- mpich
- openturns
- precice
- raja
- rust
- slate
- trilinos
- vtk-m
- warpx
runner-attributes:
tags: [ "spack", "large", "ppc64le" ]
variables:
CI_JOB_SIZE: large
- match:
- adios2
- amrex
- archer
- ascent
- axom
- binutils
- blaspp
- boost
- butterflypack
- cabana
- caliper
- camp
- chai
- conduit
- datatransferkit
- faodel
- ffmpeg
- fftw
- fortrilinos
- gperftools
- gptune
- hdf5
- heffte
- hpctoolkit
- hwloc
- hypre
- kokkos
- lammps
- lapackpp
- legion
- libzmq
- llvm-openmp-ompt
- mbedtls
- netlib-scalapack
- omega-h
- openmpi
- openpmd-api
- pagmo2
- papyrus
- parsec
- pdt
- petsc
- pumi
- py-ipython-genutils
- py-petsc4py
- py-scipy
- py-statsmodels
- py-warlock
- py-warpx
- pygmo
- slepc
- slurm
- strumpack
- sundials
- superlu-dist
- tasmanian
- tau
- upcxx
- vtk-h
- zfp
runner-attributes:
tags: [ "spack", "medium", "ppc64le" ]
variables:
CI_JOB_SIZE: "medium"
- match:
- alsa-lib
- ant
- antlr
- argobots
- automake
- berkeley-db
- bison
- blt
- cmake
- curl
- darshan-util
- diffutils
- exmcutils
- expat
- flit
- freetype
- gdbm
- gotcha
- hpcviewer
- jansson
- json-c
- libbsd
- libevent
- libjpeg-turbo
- libnrm
- libpng
- libunistring
- lua-luaposix
- m4
- mpfr
- ncurses
- openblas
- openjdk
- papi
- parallel-netcdf
- pcre2
- perl-data-dumper
- pkgconf
- py-alembic
- py-idna
- py-testpath
- qhull
- snappy
- swig
- tar
- tcl
- texinfo
- unzip
- util-linux-uuid
- util-macros
- yaml-cpp
- zlib
- zstd
runner-attributes:
tags: [ "spack", "small", "ppc64le" ]
variables:
CI_JOB_SIZE: "small"
- match: ['os=ubuntu20.04']
runner-attributes:
tags: ["spack", "ppc64le"]
variables:
CI_JOB_SIZE: "default"
broken-specs-url: "s3://spack-binaries/broken-specs"
service-job-attributes:
before_script:
- . "./share/spack/setup-env.sh"
- spack --version
image: ecpe4s/ubuntu20.04-runner-ppc64le:2023-01-01
tags: ["spack", "public", "ppc64le"]
signing-job-attributes:
image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
tags: ["spack", "aws"]
script:
- aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
- /sign.sh
- aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
cdash:
build-group: E4S Power
url: https://cdash.spack.io
project: Spack Testing
site: Cloud Gitlab Infrastructure


@@ -26,8 +26,6 @@ spack:
require: "intel-tbb"
binutils:
variants: +ld +gold +headers +libiberty ~nls
boost:
variants: +python +filesystem +iostreams +system
cuda:
version: [11.7.0]
elfutils:
@@ -72,7 +70,6 @@ spack:
- bolt
- bricks
- butterflypack
- boost +python +filesystem +iostreams +system
- cabana
- caliper
- chai ~benchmarks ~tests
@@ -270,7 +267,6 @@ spack:
match_behavior: first
mappings:
- match:
- hipblas
- llvm


@@ -1,325 +0,0 @@
spack:
view: false
concretizer:
reuse: false
unify: false
config:
build_jobs: 32
concretizer: clingo
install_tree:
root: /home/software/spack
padded_length: 512
projections:
all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'
packages:
all:
compiler: [gcc@11.1.0]
providers:
blas: [openblas]
mpi: [mpich]
target: [x86_64]
variants: +mpi amdgpu_target=gfx90a cuda_arch=80
tbb:
require: "intel-tbb"
binutils:
variants: +ld +gold +headers +libiberty ~nls
boost:
variants: +python +filesystem +iostreams +system
elfutils:
variants: +bzip2 ~nls +xz
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
mpich:
variants: ~wrapperrpath
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
paraview:
# Don't build GUI support or GLX rendering for HPC/container deployments
require: "@5.11 ~qt+osmesa"
python:
version: [3.8.13]
trilinos:
require: +amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext
+ifpack +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]
specs:
- kokkos +rocm amdgpu_target=gfx90a
- raja +cuda cuda_arch=80 ^cuda@12.0.0
# FAILURES
# - kokkos +wrapper +cuda cuda_arch=80 ^cuda@12.0.0 # https://github.com/spack/spack/issues/35378
mirrors: { "mirror": "s3://spack-binaries/develop/gpu-tests" }
gitlab-ci:
script:
- uname -a || true
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
- nproc
- nvidia-smi || true
- curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz
- printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet
- tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
- . "./share/spack/setup-env.sh"
- spack --version
- spack arch
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
# AWS runners mount E4S public key (verification), UO runners mount public/private (signing/verification)
- if [[ -r /mnt/key/e4s.gpg ]]; then spack gpg trust /mnt/key/e4s.gpg; fi
# UO runners mount intermediate ci public key (verification), AWS runners mount public/private (signing/verification)
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
after_script:
- cat /proc/loadavg || true
image: ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01
broken-tests-packages:
- gptune
match_behavior: first
mappings:
- match:
- kokkos +rocm amdgpu_target=gfx90a
runner-attributes:
tags: [ "rocm-5.4.0", "mi210" ]
variables:
CI_JOB_SIZE: large
- match:
- kokkos +cuda cuda_arch=80 ^cuda@12.0.0
- raja +cuda cuda_arch=80 ^cuda@12.0.0
runner-attributes:
tags: [ "nvidia-525.85.12", "cuda-12.0", "a100" ]
variables:
CI_JOB_SIZE: large
- match:
- hipblas
- llvm
- llvm-amdgpu
- rocblas
- paraview
- py-torch
runner-attributes:
tags: [ "spack", "huge", "x86_64" ]
variables:
CI_JOB_SIZE: huge
KUBERNETES_CPU_REQUEST: 11000m
KUBERNETES_MEMORY_REQUEST: 42G
- match:
- cuda
- dealii
- dray
- dyninst
- ginkgo
- hpx
- kokkos-kernels
- kokkos-nvcc-wrapper
- magma
- mfem
- mpich
- nvhpc
- oce
- openturns
- plumed
- precice
- py-tensorflow
- qt
- raja
- rocfft
- rocsolver
- rocsparse
- rust
- slate
- trilinos
- visit
- vtk
- vtk-m
- warpx
runner-attributes:
tags: [ "spack", "large", "x86_64" ]
variables:
CI_JOB_SIZE: large
KUBERNETES_CPU_REQUEST: 8000m
KUBERNETES_MEMORY_REQUEST: 12G
- match:
- adios2
- amrex
- archer
- ascent
- axom
- binutils
- blaspp
- boost
- butterflypack
- cabana
- caliper
- camp
- chai
- conduit
- datatransferkit
- faodel
- ffmpeg
- fftw
- fortrilinos
- gperftools
- gptune
- hdf5
- heffte
- hpctoolkit
- hwloc
- hypre
- kokkos
- lammps
- lapackpp
- legion
- libzmq
- llvm-openmp-ompt
- mbedtls
- netlib-scalapack
- omega-h
- openmpi
- openpmd-api
- pagmo2
- papyrus
- parsec
- pdt
- petsc
- pumi
- py-ipython-genutils
- py-petsc4py
- py-scipy
- py-statsmodels
- py-warlock
- py-warpx
- pygmo
- slepc
- slurm
- strumpack
- sundials
- superlu-dist
- tasmanian
- tau
- upcxx
- vtk-h
- zfp
runner-attributes:
tags: [ "spack", "medium", "x86_64" ]
variables:
CI_JOB_SIZE: "medium"
KUBERNETES_CPU_REQUEST: "2000m"
KUBERNETES_MEMORY_REQUEST: "4G"
- match:
- alsa-lib
- ant
- antlr
- argobots
- automake
- berkeley-db
- bison
- blt
- cmake
- curl
- darshan-util
- diffutils
- exmcutils
- expat
- flit
- freetype
- gdbm
- gotcha
- hpcviewer
- jansson
- json-c
- libbsd
- libevent
- libjpeg-turbo
- libnrm
- libpng
- libunistring
- lua-luaposix
- m4
- mpfr
- ncurses
- openblas
- openjdk
- papi
- parallel-netcdf
- pcre2
- perl-data-dumper
- pkgconf
- py-alembic
- py-idna
- py-testpath
- qhull
- snappy
- swig
- tar
- tcl
- texinfo
- unzip
- util-linux-uuid
- util-macros
- yaml-cpp
- zlib
- zstd
runner-attributes:
tags: [ "spack", "small", "x86_64" ]
variables:
CI_JOB_SIZE: "small"
KUBERNETES_CPU_REQUEST: "500m"
KUBERNETES_MEMORY_REQUEST: "500M"
- match: ['os=ubuntu20.04']
runner-attributes:
tags: ["spack", "x86_64"]
variables:
CI_JOB_SIZE: "default"
broken-specs-url: "s3://spack-binaries/broken-specs"
service-job-attributes:
before_script:
- . "./share/spack/setup-env.sh"
- spack --version
image: ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01
tags: ["spack", "public", "x86_64"]
signing-job-attributes:
image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
tags: ["spack", "aws"]
script:
- aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
- /sign.sh
- aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
cdash:
build-group: GPU Testing
url: https://cdash.spack.io
project: Spack Testing
site: Cloud Gitlab Infrastructure
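Note: the specs entries in the environments above (for example "raja +cuda cuda_arch=80 ^cuda@12.0.0") use Spack's spec syntax, where +/~ toggle boolean variants, name=value sets multi-valued ones, and ^ constrains a dependency. A rough sketch of inspecting such a string programmatically, assuming a Spack checkout is importable and noting that the exact Spec API differs between Spack versions:

from spack.spec import Spec

# Parse an abstract (unconcretized) spec string.
s = Spec("raja +cuda cuda_arch=80 ^cuda@12.0.0")

print(s.name)                    # "raja"
print(s.variants["cuda_arch"])   # the multi-valued variant as written
print(s.satisfies("raja +cuda")) # True: the +cuda constraint is already implied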


@@ -12,6 +12,6 @@ class Maintainers1(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/maintainers-1.0.tar.gz"
maintainers("user1", "user2")
maintainers = ["user1", "user2"]
version("1.0", "0123456789abcdef0123456789abcdef")


@@ -12,6 +12,6 @@ class Maintainers2(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/maintainers2-1.0.tar.gz"
maintainers("user2", "user3")
maintainers = ["user2", "user3"]
version("1.0", "0123456789abcdef0123456789abcdef")


@@ -1,17 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
from spack.pkg.builtin.mock.maintainers_1 import Maintainers1
class Maintainers3(Maintainers1):
"""A second package with a maintainers field."""
homepage = "http://www.example.com"
url = "http://www.example.com/maintainers2-1.0.tar.gz"
maintainers("user0", "user3")
version("1.0", "0123456789abcdef0123456789abcdef")


@@ -12,4 +12,6 @@ class Openmpi(Package):
variant("internal-hwloc", default=False)
variant("fabrics", values=any_combination_of("psm", "mxm"))
provides("mpi")
depends_on("hwloc", when="~internal-hwloc")


@@ -13,7 +13,8 @@ class PyExtension1(PythonPackage):
homepage = "http://www.example.com"
url = "http://www.example.com/extension1-1.0.tar.gz"
maintainers("user1", "user2")
# Override settings in base class
maintainers = []
version("1.0", "00000000000000000000000000000110")
version("2.0", "00000000000000000000000000000120")


@@ -15,7 +15,7 @@ class Abacus(MakefilePackage):
for large-scale electronic-structure simulations
from first principles"""
maintainers("bitllion")
maintainers = ["bitllion"]
homepage = "http://abacus.ustc.edu.cn/"
git = "https://github.com/abacusmodeling/abacus-develop.git"


@@ -12,7 +12,7 @@ class AbseilCpp(CMakePackage):
homepage = "https://abseil.io/"
url = "https://github.com/abseil/abseil-cpp/archive/refs/tags/20211102.0.tar.gz"
maintainers("jcftang")
maintainers = ["jcftang"]
tags = ["windows"]
version(


@@ -126,7 +126,7 @@ class Acfl(Package):
homepage = "https://developer.arm.com/tools-and-software/server-and-hpc/arm-allinea-studio"
url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/22-1/arm-compiler-for-linux_22.1_Ubuntu-20.04_aarch64.tar"
maintainers("annop-w")
maintainers = ["annop-w"]
# Build Versions: establish OS for URL
acfl_os = get_os()


@@ -12,7 +12,7 @@ class ActsDd4hep(CMakePackage):
homepage = "https://github.com/acts-project/acts-dd4hep"
url = "https://github.com/acts-project/acts-dd4hep/archive/refs/tags/v1.0.0.tar.gz"
maintainers("HadrienG2", "wdconinc")
maintainers = ["HadrienG2", "wdconinc"]
version("1.0.1", sha256="e40f34ebc30b3c33a6802c9d94136e65072d8dcee0b7db57a645f08a64ea5334")
version("1.0.0", sha256="991f996944c88efa837880f919239e50d12c5c9361e220bc9422438dd608308c")


@@ -32,17 +32,14 @@ class Acts(CMakePackage, CudaPackage):
homepage = "https://acts.web.cern.ch/ACTS/"
git = "https://github.com/acts-project/acts.git"
list_url = "https://github.com/acts-project/acts/releases/"
maintainers("HadrienG2")
maintainers = ["HadrienG2"]
tags = ["hep"]
# Supported Acts versions
version("main", branch="main")
version("master", branch="main", deprecated=True) # For compatibility
version("23.0.0", commit="5af1b1b5feb8ca8f4c2c69106a1b9ef612c70d9c", submodules=True)
version("22.0.1", commit="a4ac99dd72828c5eb3fac06e146f3391958fca8c", submodules=True)
version("22.0.0", commit="0fb6f8d2ace65338915451201e9ceb6cee11fb5e", submodules=True)
version("21.1.1", commit="8ae825de246e8e574d05d9eaf05ba4a937c69aa9", submodules=True)
version("21.1.0", commit="3b4b5c741c8541491d496a36b917b00b344d52d1", submodules=True)
version("21.0.0", commit="d8cb0fac3a44e1d44595a481f977df9bd70195fb", submodules=True)
version("20.3.0", commit="b1859b322744cb033328fd57d9e74fb5326aa56b", submodules=True)
@@ -281,7 +278,7 @@ class Acts(CMakePackage, CudaPackage):
depends_on("python", when="+python")
depends_on("python@3.8:", when="+python @19.11:19")
depends_on("python@3.8:", when="+python @21:")
depends_on("py-onnxruntime", when="+onnx")
depends_on("py-onnx-runtime", when="+onnx")
depends_on("py-pybind11 @2.6.2:", when="+python @18:")
depends_on("py-pytest", when="+python +unit_tests")
depends_on("root @6.10: cxxstd=14", when="+tgeo @:0.8.0")
@@ -339,7 +336,6 @@ def plugin_cmake_variant(plugin_name, spack_variant):
cmake_variant("FATRAS", "fatras"),
cmake_variant("FATRAS_GEANT4", "fatras_geant4"),
example_cmake_variant("GEANT4", "geant4"),
plugin_cmake_variant("GEANT4", "geant4"),
example_cmake_variant("HEPMC3", "hepmc3"),
plugin_cmake_variant("IDENTIFICATION", "identification"),
cmake_variant(integration_tests_label, "integration_tests"),
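plugin_cmake_variant() and cmake_variant() in the hunk above are small helpers the acts recipe uses to turn variant names into CMake cache options. A minimal sketch of the same pattern in a hypothetical CMake-based package (DemoCmakePkg and the DEMO_* option names are invented for illustration):

from spack.package import *


class DemoCmakePkg(CMakePackage):
    """Hypothetical package that derives CMake options from variants."""

    homepage = "http://www.example.com"
    url = "http://www.example.com/demo-cmake-pkg-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    variant("geant4", default=False, description="Build the Geant4 plugin")
    variant("fatras", default=True, description="Build the fast track simulation")

    def cmake_args(self):
        # Map a variant onto -DDEMO_PLUGIN_<NAME>:BOOL=ON/OFF.
        def plugin_cmake_variant(plugin_name, spack_variant):
            return self.define_from_variant("DEMO_PLUGIN_" + plugin_name, spack_variant)

        # Same idea for non-plugin build toggles.
        def cmake_variant(cmake_label, spack_variant):
            return self.define_from_variant("DEMO_BUILD_" + cmake_label, spack_variant)

        return [
            plugin_cmake_variant("GEANT4", "geant4"),
            cmake_variant("FATRAS", "fatras"),
        ]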


@@ -16,7 +16,7 @@ class Actsvg(CMakePackage):
list_url = "https://github.com/acts-project/actsvg/releases"
git = "https://github.com/acts-project/actsvg.git"
maintainers("HadrienG2", "wdconinc")
maintainers = ["HadrienG2", "wdconinc"]
version("0.4.26", sha256="a1dfad15b616cac8191a355c1a87544571c36349400e3de56b9e5be6fa73714c")


@@ -14,7 +14,7 @@ class Adiak(CMakePackage):
url = "https://github.com/LLNL/Adiak/releases/download/v0.1/adiak-v0.1.1.tar.gz"
git = "https://github.com/LLNL/Adiak"
maintainers("daboehme", "mplegendre")
maintainers = ["daboehme", "mplegendre"]
variant("mpi", default=True, description="Build with MPI support")
variant("shared", default=True, description="Build dynamic libraries")


@@ -17,7 +17,7 @@ class Adios(AutotoolsPackage):
url = "https://github.com/ornladios/ADIOS/archive/v1.12.0.tar.gz"
git = "https://github.com/ornladios/ADIOS.git"
maintainers("ax3l")
maintainers = ["ax3l"]
version("develop", branch="master")
version("1.13.1", sha256="b1c6949918f5e69f701cabfe5987c0b286793f1057d4690f04747852544e157b")


@@ -16,7 +16,7 @@ class Adios2(CMakePackage, CudaPackage):
url = "https://github.com/ornladios/ADIOS2/archive/v2.8.0.tar.gz"
git = "https://github.com/ornladios/ADIOS2.git"
maintainers("ax3l", "chuckatkins", "vicentebolea", "williamfgc")
maintainers = ["ax3l", "chuckatkins", "vicentebolea", "williamfgc"]
tags = ["e4s"]
@@ -54,6 +54,7 @@ class Adios2(CMakePackage, CudaPackage):
# change how we're supporting different library types in the package at any time if
# spack decides on a standardized way of doing it across packages
variant("shared", default=True, when="+pic", description="Build shared libraries")
variant("pic", default=True, description="Build pic-enabled static libraries")
# Features
variant("mpi", default=True, description="Enable MPI")
@@ -94,24 +95,12 @@ class Adios2(CMakePackage, CudaPackage):
conflicts("%oneapi@:2022.1.0", when="+fortran")
depends_on("cmake@3.12.0:", type="build")
depends_on("pkgconfig", type="build")
for _platform in ["linux", "darwin", "cray"]:
depends_on("pkgconfig", type="build", when="platform=%s" % _platform)
variant(
"pic",
default=False,
description="Build pic-enabled static libraries",
when="platform=%s" % _platform,
)
# libffi and libfabric are not currently supported on Windows
# see Paraview's superbuild handling of libfabric at
# https://gitlab.kitware.com/paraview/paraview-superbuild/-/blob/master/projects/adios2.cmake#L3
depends_on("libffi", when="+sst platform=%s" % _platform) # optional in DILL
depends_on(
"libfabric@1.6.0:", when="+sst platform=%s" % _platform
) # optional in EVPath and SST
# depends_on('bison', when='+sst') # optional in FFS, broken package
# depends_on('flex', when='+sst') # optional in FFS, depends on BISON
depends_on("libffi", when="+sst") # optional in DILL
depends_on("libfabric@1.6.0:", when="+sst") # optional in EVPath and SST
# depends_on('bison', when='+sst') # optional in FFS, broken package
# depends_on('flex', when='+sst') # optional in FFS, depends on BISON
depends_on("mpi", when="+mpi")
depends_on("libzmq", when="+dataman")


@@ -16,7 +16,7 @@ class Adms(AutotoolsPackage):
url = "https://github.com/Qucs/ADMS/releases/download/release-2.3.7/adms-2.3.7.tar.gz"
git = "https://github.com/Qucs/ADMS.git"
maintainers("cessenat")
maintainers = ["cessenat"]
version("master", branch="master")
version("2.3.7", sha256="3a78e1283ecdc3f356410474b3ff44c4dcc82cb89772087fd3bbde8a1038ce08")


@@ -14,7 +14,7 @@ class AdolC(AutotoolsPackage):
homepage = "https://github.com/coin-or/ADOL-C"
url = "https://github.com/coin-or/ADOL-C/archive/releases/2.7.2.tar.gz"
git = "https://github.com/coin-or/ADOL-C.git"
maintainers("jppelteret")
maintainers = ["jppelteret"]
version("master", branch="master")
version("2.7.2", sha256="701e0856baae91b98397960d5e0a87a549988de9d4002d0e9a56fa08f5455f6e")


@@ -15,7 +15,7 @@ class Agile(AutotoolsPackage):
tags = ["hep"]
maintainers("vvolkl")
maintainers = ["vvolkl"]
version("1.5.1", sha256="e38536300060e4b845ccaaed824c7495944f9117a0d7e4ee74a18bf278e2012f")


@@ -19,7 +19,7 @@ class Akantu(CMakePackage):
url = "https://gitlab.com/akantu/akantu/-/archive/v3.0.0/akantu-v3.0.0.tar.gz"
git = "https://gitlab.com/akantu/akantu.git"
maintainers("nrichart")
maintainers = ["nrichart"]
version("master", branch="master")
version("3.0.0", sha256="7e8f64e25956eba44def1b2d891f6db8ba824e4a82ff0d51d6b585b60ab465db")


@@ -17,7 +17,7 @@ class Albany(CMakePackage):
homepage = "http://gahansen.github.io/Albany"
git = "https://github.com/gahansen/Albany.git"
maintainers("gahansen")
maintainers = ["gahansen"]
version("develop", branch="master")


@@ -14,7 +14,7 @@ class Alpaka(CMakePackage, CudaPackage):
url = "https://github.com/alpaka-group/alpaka/archive/refs/tags/0.6.0.tar.gz"
git = "https://github.com/alpaka-group/alpaka.git"
maintainers("vvolkl")
maintainers = ["vvolkl"]
version("develop", branch="develop")
version("0.8.0", sha256="e01bc377a7657d9a3e0c5f8d3f83dffbd7d0b830283c59efcbc1fb98cf88de43")


@@ -16,7 +16,7 @@ class Alpgen(CMakePackage, MakefilePackage):
homepage = "http://mlm.home.cern.ch/mlm/alpgen/"
url = "http://mlm.home.cern.ch/mlm/alpgen/V2.1/v214.tgz"
maintainers("iarspider")
maintainers = ["iarspider"]
tags = ["hep"]
version("2.1.4", sha256="2f43f7f526793fe5f81a3a3e1adeffe21b653a7f5851efc599ed69ea13985c5e")


@@ -13,7 +13,7 @@ class Alquimia(CMakePackage):
homepage = "https://github.com/LBL-EESA/alquimia-dev"
git = "https://github.com/LBL-EESA/alquimia-dev.git"
maintainers("smolins", "balay")
maintainers = ["smolins", "balay"]
version("develop")
version("1.0.10", commit="b2c11b6cde321f4a495ef9fcf267cb4c7a9858a0") # tag v.1.0.10


@@ -22,7 +22,7 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/LLNL/Aluminum.git"
tags = ["ecp", "radiuss"]
maintainers("bvanessen")
maintainers = ["bvanessen"]
version("master", branch="master")
version("1.0.0-lbann", tag="v1.0.0-lbann")


@@ -26,7 +26,7 @@ class Amber(Package, CudaPackage):
url = "file://{0}/Amber18.tar.bz2".format(os.getcwd())
manual_download = True
maintainers("hseara")
maintainers = ["hseara"]
version("20", sha256="a4c53639441c8cc85adee397933d07856cc4a723c82c6bea585cd76c197ead75")
version("18", sha256="2060897c0b11576082d523fb63a51ba701bc7519ff7be3d299d5ec56e8e6e277")


@@ -22,7 +22,7 @@ class AmdAocl(BundlePackage):
homepage = "https://developer.amd.com/amd-aocl/"
maintainers("amd-toolchain-support")
maintainers = ["amd-toolchain-support"]
version("4.0")
version("3.2")


@@ -30,7 +30,7 @@ class Amdblis(BlisBase):
url = "https://github.com/amd/blis/archive/3.0.tar.gz"
git = "https://github.com/amd/blis.git"
maintainers("amd-toolchain-support")
maintainers = ["amd-toolchain-support"]
version("4.0", sha256="cddd31176834a932753ac0fc4c76332868feab3e9ac607fa197d8b44c1e74a41")
version("3.2", sha256="5a400ee4fc324e224e12f73cc37b915a00f92b400443b15ce3350278ad46fff6")


@@ -35,7 +35,7 @@ class Amdfftw(FftwBase):
url = "https://github.com/amd/amd-fftw/archive/3.0.tar.gz"
git = "https://github.com/amd/amd-fftw.git"
maintainers("amd-toolchain-support")
maintainers = ["amd-toolchain-support"]
version("4.0", sha256="5f02cb05f224bd86bd88ec6272b294c26dba3b1d22c7fb298745fd7b9d2271c0")
version("3.2", sha256="31cab17a93e03b5b606e88dd6116a1055b8f49542d7d0890dbfcca057087b8d0")


@@ -42,7 +42,7 @@ class Amdlibflame(LibflameBase):
url = "https://github.com/amd/libflame/archive/3.0.tar.gz"
git = "https://github.com/amd/libflame.git"
maintainers("amd-toolchain-support")
maintainers = ["amd-toolchain-support"]
version("4.0", sha256="bcb05763aa1df1e88f0da5e43ff86d956826cbea1d9c5ff591d78a3e091c66a4")
version("3.2", sha256="6b5337fb668b82d0ed0a4ab4b5af4e2f72e4cedbeeb4a8b6eb9a3ef057fb749a")
@@ -63,7 +63,6 @@ class Amdlibflame(LibflameBase):
provides("flame@5.2", when="@2:")
depends_on("python+pythoncmd", type="build")
depends_on("gmake@4:", when="@3.0.1,3.1:", type="build")
@property
def lapack_libs(self):

Some files were not shown because too many files have changed in this diff.