Compare commits

1 commit

packages/t ... fix-rpath-

| Author | SHA1 | Date |
|---|---|---|
|  | 0a385414e5 |  |

2  .github/workflows/build-containers.yml (vendored)
@@ -113,7 +113,7 @@ jobs:
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-       uses: docker/build-push-action@32945a339266b759abcbdc89316275140b0fc960
+       uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}

55  .github/workflows/ci.yaml (vendored)
@@ -15,6 +15,18 @@ concurrency:
   cancel-in-progress: true

 jobs:
+  prechecks:
+    needs: [ changes ]
+    uses: ./.github/workflows/valid-style.yml
+    secrets: inherit
+    with:
+      with_coverage: ${{ needs.changes.outputs.core }}
+  all-prechecks:
+    needs: [ prechecks ]
+    runs-on: ubuntu-latest
+    steps:
+    - name: Success
+      run: "true"
   # Check which files have been updated by the PR
   changes:
     runs-on: ubuntu-latest
@@ -67,34 +79,35 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/bootstrap.yml
     secrets: inherit

   unit-tests:
     if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit

-  prechecks:
-    needs: [ changes ]
-    uses: ./.github/workflows/valid-style.yml
-    secrets: inherit
-    with:
-      with_coverage: ${{ needs.changes.outputs.core }}
-
-  all-prechecks:
-    needs: [ prechecks ]
-    runs-on: ubuntu-latest
-    steps:
-    - name: Success
-      run: "true"
-
-  coverage:
+  upload-coverage:
     needs: [ unit-tests, prechecks ]
-    uses: ./.github/workflows/coverage.yml
-    secrets: inherit
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938
+        with:
+          fetch-depth: 0
+      - name: Download coverage files
+        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
+        with:
+          pattern: coverage-*
+          path: coverage
+          merge-multiple: true
+      - run: pip install --upgrade coverage
+      - run: ls -la coverage
+      - run: coverage combine -a coverage/.coverage*
+      - run: coverage xml
+      - name: "Upload coverage"
+        uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
   all:
-    needs: [ coverage, bootstrap ]
+    needs: [ upload-coverage, bootstrap ]
     runs-on: ubuntu-latest
     steps:
     - name: Success

34  .github/workflows/coverage.yml (vendored)
@@ -1,34 +0,0 @@
-name: coverage
-
-on:
-  workflow_call:
-
-jobs:
-  # Upload coverage reports to codecov once as a single bundle
-  upload:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
-        with:
-          python-version: '3.11'
-          cache: 'pip'
-
-      - name: Install python dependencies
-        run: pip install -r .github/workflows/requirements/coverage/requirements.txt
-
-      - name: Download coverage artifact files
-        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
-        with:
-          pattern: coverage-*
-          path: coverage
-          merge-multiple: true
-
-      - run: ls -la coverage
-      - run: coverage combine -a coverage/.coverage*
-      - run: coverage xml
-
-      - name: "Upload coverage report to CodeCov"
-        uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
-        with:
-          verbose: true

@@ -1 +0,0 @@
-coverage==7.6.1

@@ -316,215 +316,6 @@ documentation tests to make sure there are no errors. Documentation changes can
in some obfuscated warning messages. If you don't understand what they mean, feel free
to ask when you submit your PR.

.. _spack-builders-and-pipelines:

^^^^^^^^^
GitLab CI
^^^^^^^^^

""""""""""""""""""
|
||||
Build Cache Stacks
|
||||
""""""""""""""""""
|
||||
|
||||
Spack welcomes the contribution of software stacks of interest to the community. These
|
||||
stacks are used to test package recipes and generate publicly available build caches.
|
||||
Spack uses GitLab CI for managing the orchestration of build jobs.
|
||||
|
||||
GitLab Entry Point
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Add stack entrypoint to the ``share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml``. There
|
||||
are two stages required for each new stack, the generation stage and the build stage.
|
||||
|
||||
The generate stage is defined using the job template ``.generate`` configured with
|
||||
environment variables defining the name of the stack in ``SPACK_CI_STACK_NAME`` and the
|
||||
platform (``SPACK_TARGET_PLATFORM``) and architecture (``SPACK_TARGET_ARCH``) configuration,
|
||||
and the tags associated with the class of runners to build on.
|
||||
|
||||
.. note::

   ``SPACK_CI_STACK_NAME`` must match the name of the directory containing the
   stack's ``spack.yaml``.


.. note::

   The platform and architecture variables are specified in order to select the
   correct configurations from the generic configurations used in Spack CI. The
   configurations currently available are:

   * ``.cray_rhel_zen4``
   * ``.cray_sles_zen4``
   * ``.darwin_aarch64``
   * ``.darwin_x86_64``
   * ``.linux_aarch64``
   * ``.linux_icelake``
   * ``.linux_neoverse_n1``
   * ``.linux_neoverse_v1``
   * ``.linux_neoverse_v2``
   * ``.linux_power``
   * ``.linux_skylake``
   * ``.linux_x86_64``
   * ``.linux_x86_64_v4``

   New configurations can be added to accommodate new platforms and architectures.

The build stage is defined as a trigger job that consumes the GitLab CI pipeline generated in
the generate stage for this stack. Build stage jobs use the ``.build`` job template, which
handles the basic configuration.

An example entry point for a new stack called ``my-super-cool-stack``:

.. code-block:: yaml

   .my-super-cool-stack:
     extends: [ ".linux_x86_64_v3" ]
     variables:
       SPACK_CI_STACK_NAME: my-super-cool-stack
     tags: [ "all", "tags", "your", "job", "needs" ]

   my-super-cool-stack-generate:
     extends: [ ".generate", ".my-super-cool-stack" ]
     image: my-super-cool-stack-image:0.0.1

   my-super-cool-stack-build:
     extends: [ ".build", ".my-super-cool-stack" ]
     trigger:
       include:
         - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
           job: my-super-cool-stack-generate
       strategy: depend
     needs:
       - artifacts: True
         job: my-super-cool-stack-generate

Stack Configuration
~~~~~~~~~~~~~~~~~~~

The stack configuration is a Spack environment file with two additional sections added.
Stack configurations should be located in ``share/spack/gitlab/cloud_pipelines/stacks/<stack_name>/spack.yaml``.

The ``ci`` section is generally used to define stack-specific mappings such as image or tags.
For more information on what can go into the ``ci`` section, refer to the docs on pipelines.

The ``cdash`` section is used for defining where to upload the results of builds. Spack configures
most of the details for posting pipeline results to
`cdash.spack.io <https://cdash.spack.io/index.php?project=Spack+Testing>`_. The only
requirement in the stack configuration is to define a unique ``build-group``; this is usually
the long name of the stack.

An example stack that builds ``zlib``:

.. code-block:: yaml

   spack:
     view: false
     packages:
       all:
         require: ["%gcc", "target=x86_64_v3"]
     specs:
     - zlib

     ci:
       pipeline-gen:
       - build-job:
           image: my-super-cool-stack-image:0.0.1

     cdash:
       build-group: My Super Cool Stack

.. note::

   The ``image`` used in the ``*-generate`` job must exactly match the ``image`` used in the
   corresponding ``build-job``. When the images do not match, the build job may fail.

"""""""""""""""""""
|
||||
Registering Runners
|
||||
"""""""""""""""""""
|
||||
|
||||
Contributing computational resources to Spack's CI build farm is one way to help expand the
|
||||
capabilities and offerings of the public Spack build caches. Currently, Spack utilizes linux runners
|
||||
from AWS, Google, and the University of Oregon (UO).
|
||||
|
||||
Runners require three key peices:
|
||||
* Runner Registration Token
|
||||
* Accurate tags
|
||||
* OIDC Authentication script
|
||||
* GPG keys
|
||||
|
||||
|
||||
Minimum GitLab Runner Version: ``16.1.0``
|
||||
`Intallation instructions <https://docs.gitlab.com/runner/install/>`_
|
||||
|
||||
Registration Token
~~~~~~~~~~~~~~~~~~

The first step to contribute new runners is to open an issue in the `spack infrastructure <https://github.com/spack/spack-infrastructure/issues/new?assignees=&labels=runner-registration&projects=&template=runner_registration.yml>`_
project. This will notify the Spack infrastructure team, who will guide users through the process
of registering new runners for Spack CI.

The information needed to register a runner is the motivation for the new resources, a semi-detailed
description of the runner, and finally the point of contact for maintaining the software on the runner.

The point of contact will then work with the infrastructure team to obtain runner registration token(s)
for interacting with Spack's GitLab instance. Once the runner is active, this point of contact will also
be responsible for updating the GitLab runner software to keep pace with Spack's GitLab.

Tagging
~~~~~~~

In the initial stages of runner registration it is important to **exclude** the special tag ``spack``. This will prevent
the new runner(s) from being picked up for production CI jobs while they are configured and evaluated. Once it is determined
that a runner is ready for production use, the ``spack`` tag will be added.

Because GitLab has no concept of tag exclusion, runners that provide specialized resources also require specialized tags.
For example, a basic CPU-only x86_64 runner may have the tag ``x86_64`` associated with it, while a runner containing a
CUDA-capable GPU may have the tag ``x86_64-cuda`` to denote that it should only be used for packages that will benefit from
a CUDA-capable resource. A minimal registration sketch follows.
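
The following is a minimal sketch only; it assumes the legacy ``gitlab-runner register`` flags,
and the URL, token, description, and executor shown here are placeholder values that the
infrastructure team will supply or adjust:

.. code-block:: console

   $ gitlab-runner register \
       --non-interactive \
       --url "https://gitlab.spack.io" \
       --registration-token "$RUNNER_REGISTRATION_TOKEN" \
       --description "x86_64 CUDA runner (evaluation)" \
       --executor "docker" \
       --docker-image "ubuntu:22.04" \
       --tag-list "x86_64-cuda" \
       --run-untagged=false

Note that ``spack`` is deliberately absent from ``--tag-list``; that tag is added only after the
runner is approved for production jobs.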

OIDC
~~~~

Spack runners use OIDC authentication for connecting to the appropriate AWS bucket,
which is used for coordinating the communication of binaries between build jobs. In
order to configure OIDC authentication, Spack CI runners use a Python script with minimal
dependencies. The script can be configured for a runner through the ``pre_build_script``,
as in the following example.

.. code-block:: toml

   [[runners]]
   pre_build_script = """
   echo 'Executing Spack pre-build setup script'

   for cmd in "${PY3:-}" python3 python; do
     if command -v > /dev/null "$cmd"; then
       export PY3="$(command -v "$cmd")"
       break
     fi
   done

   if [ -z "${PY3:-}" ]; then
     echo "Unable to find python3 executable"
     exit 1
   fi

   $PY3 -c "import urllib.request;urllib.request.urlretrieve('https://raw.githubusercontent.com/spack/spack-infrastructure/main/scripts/gitlab_runner_pre_build/pre_build.py', 'pre_build.py')"
   $PY3 pre_build.py > envvars

   . ./envvars
   rm -f envvars
   unset GITLAB_OIDC_TOKEN
   """

GPG Keys
~~~~~~~~

Runners that may be utilized for ``protected`` CI require the registration of an intermediate signing key that
can be used to sign packages. For more information on package signing, read :ref:`key_architecture`. A short
sketch of creating such a key follows.
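
As a rough sketch under stated assumptions (the key name and email below are placeholders, and
actually registering the key with Spack's infrastructure happens out of band with the
infrastructure team), an intermediate signing key can be created and exported with Spack's
built-in GPG tooling:

.. code-block:: console

   $ # Create an intermediate signing key for this runner
   $ spack gpg create "Example Runner Intermediate Key" runner-maintainer@example.com
   $ # Export the public part so downstream consumers can verify signed packages
   $ spack gpg export runner-signing-key.pub
   $ # Confirm the key is known to Spack
   $ spack gpg list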

--------
Coverage
--------

59  lib/spack/env/cc (vendored)
@@ -482,26 +482,6 @@ if [ "$mode" = vcheck ]; then
    execute
fi

-# Darwin's linker has a -r argument that merges object files together.
-# It doesn't work with -rpath.
-# This variable controls whether they are added.
-add_rpaths=true
-if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
-    if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
-        for arg in "$@"; do
-            if [ "$arg" = "-r" ]; then
-                if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
-                    add_rpaths=false
-                    break
-                fi
-            elif [ "$arg" = "-Wl,-r" ] && [ "$mode" = ccld ]; then
-                add_rpaths=false
-                break
-            fi
-        done
-    fi
-fi
-
# Save original command for debug logging
input_command="$*"

@@ -861,13 +841,11 @@ if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
fi

if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
-    if [ "$add_rpaths" != "false" ]; then
-        # Append RPATH directories. Note that in the case of the
-        # top-level package these directories may not exist yet. For dependencies
-        # it is assumed that paths have already been confirmed.
-        extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
-        extend rpath_dirs_list SPACK_RPATH_DIRS
-    fi
+    # Append RPATH directories. Note that in the case of the
+    # top-level package these directories may not exist yet. For dependencies
+    # it is assumed that paths have already been confirmed.
+    extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
+    extend rpath_dirs_list SPACK_RPATH_DIRS
fi

if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
@@ -882,14 +860,10 @@ case "$mode" in
    ld|ccld)
        # Set extra RPATHs
        extend lib_dirs_list SPACK_COMPILER_EXTRA_RPATHS
-        if [ "$add_rpaths" != "false" ]; then
-            extend rpath_dirs_list SPACK_COMPILER_EXTRA_RPATHS
-        fi
+        extend rpath_dirs_list SPACK_COMPILER_EXTRA_RPATHS

        # Set implicit RPATHs
-        if [ "$add_rpaths" != "false" ]; then
-            extend rpath_dirs_list SPACK_COMPILER_IMPLICIT_RPATHS
-        fi
+        extend rpath_dirs_list SPACK_COMPILER_IMPLICIT_RPATHS

        # Add SPACK_LDLIBS to args
        for lib in $SPACK_LDLIBS; do
@@ -945,7 +919,8 @@ extend args_list lib_dirs_list "-L"
extend args_list system_spack_flags_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"

-# RPATHs arguments
+# RPATH arguments
+
rpath_prefix=""
case "$mode" in
    ccld)
@@ -962,8 +937,20 @@ case "$mode" in
        ;;
esac

-# if mode is ccld or ld, extend RPATH lists with the prefix determined above
-if [ -n "$rpath_prefix" ]; then
+# Darwin's linker has a -r argument that merges object files together.
+# It doesn't work with -rpath. add_rpaths controls whether RPATHs are added.
+add_rpaths=true
+if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
+    if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
+        args="$@"
+        if contains args "-r" || contains args "-Wl,-r"; then
+            add_rpaths=false
+        fi
+    fi
+fi
+
+# if mode is ccld or ld, extend RPATH lists, adding the prefix determined above
+if [ "$add_rpaths" = "true" ] && [ -n "$rpath_prefix" ]; then
    extend_unique args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
    extend_unique args_list spack_store_rpath_dirs_list "$rpath_prefix"

@@ -43,31 +43,6 @@
ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)


-def compatible(flag1: DepFlag, flag2: DepFlag) -> bool:
-    """Returns True if two depflags can be dependencies from a Spec to deps of the same name.
-
-    The only allowable separated dependencies are a build-only dependency, combined with a
-    non-build dependency. This separates our two process spaces, build time and run time.
-
-    These dependency combinations are allowed:
-    single dep on name: [b], [l], [r], [bl], [br], [blr]
-    two deps on name: [b, l], [b, r], [b, lr]
-
-    but none of these make any sense:
-    two build deps: [b, b], [b, br], [b, bl], [b, blr]
-    any two deps that both have an l or an r, i.e. [l, l], [r, r], [l, r], [bl, l], [bl, r]"""
-    # Cannot have overlapping build types to two different dependencies
-    if flag1 & flag2:
-        return False
-
-    # Cannot have two different link/run dependencies for the same name
-    link_run = LINK | RUN
-    if flag1 & link_run and flag2 & link_run:
-        return False
-
-    return True
-
-
def flag_from_string(s: str) -> DepFlag:
    if s == "build":
        return BUILD

@@ -99,7 +99,6 @@
    install_dependency_symlinks,
    on_package_attributes,
)
-from spack.package_completions import *
from spack.spec import InvalidSpecDetected, Spec
from spack.util.executable import *
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type

@@ -1,48 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-from pathlib import Path
-from typing import Union
-
-"""Functions relating to shell autocompletion scripts for packages."""
-
-
-def bash_completion_path(root: Union[str, Path]) -> Path:
-    """
-    Return standard path for bash completion scripts under root.
-
-    Args:
-        root: The prefix root under which to generate the path.
-
-    Returns:
-        Standard path for bash completion scripts under root.
-    """
-    return Path(root) / "share" / "bash-completion" / "completions"
-
-
-def zsh_completion_path(root: Union[str, Path]) -> Path:
-    """
-    Return standard path for zsh completion scripts under root.
-
-    Args:
-        root: The prefix root under which to generate the path.
-
-    Returns:
-        Standard path for zsh completion scripts under root.
-    """
-    return Path(root) / "share" / "zsh" / "site-functions"
-
-
-def fish_completion_path(root: Union[str, Path]) -> Path:
-    """
-    Return standard path for fish completion scripts under root.
-
-    Args:
-        root: The prefix root under which to generate the path.
-
-    Returns:
-        Standard path for fish completion scripts under root.
-    """
-    return Path(root) / "share" / "fish" / "vendor_completions.d"
@@ -1739,28 +1739,19 @@ def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[
            self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
            return

-        # Keep the intersection of constraints when a dependency is added multiple times with
-        # the same deptype. Add a new dependency if it is added with a compatible deptype
-        # (for example, a build-only dependency is compatible with a link-only dependency).
-        # The only restrictions, currently, are that we cannot add edges with overlapping
-        # dependency types and we cannot add multiple edges that have link/run dependency types.
-        # See ``spack.deptypes.compatible``.
+        # Keep the intersection of constraints when a dependency is added multiple times.
+        # The only restriction, currently, is keeping the same dependency type
        orig = self._dependencies[spec.name]
        try:
            dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
        except StopIteration:
-            # Error if we have overlapping or incompatible deptypes
-            if any(not dt.compatible(dspec.depflag, depflag) for dspec in orig):
-                edge_attrs = f"deptypes={dt.flag_to_chars(depflag).strip()}"
-                required_dep_str = f"^[{edge_attrs}] {str(spec)}"
+            edge_attrs = f"deptypes={dt.flag_to_chars(depflag).strip()}"
+            required_dep_str = f"^[{edge_attrs}] {str(spec)}"

-                raise DuplicateDependencyError(
-                    f"{spec.name} is a duplicate dependency, with conflicting dependency types\n"
-                    f"\t'{str(self)}' cannot depend on '{required_dep_str}'"
-                )
-
-            self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
-            return
+            raise DuplicateDependencyError(
+                f"{spec.name} is a duplicate dependency, with conflicting dependency types\n"
+                f"\t'{str(self)}' cannot depend on '{required_dep_str}'"
+            )

        try:
            dspec.spec.constrain(spec)
@@ -1785,10 +1776,7 @@ def add_dependency_edge(
        for edge in selected:
            has_errors, details = False, []
            msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"

-            # If the dependency is to an existing spec, we can update dependency
-            # types. If it is to a new object, check deptype compatibility.
-            if id(edge.spec) != id(dependency_spec) and not dt.compatible(edge.depflag, depflag):
+            if edge.depflag & depflag:
                has_errors = True
                details.append(
                    (
@@ -1797,13 +1785,14 @@
                    )
                )

-            if any(v in edge.virtuals for v in virtuals):
-                details.append(
-                    (
-                        f"{edge.parent.name} has already an edge matching any"
-                        f" of these virtuals {virtuals}"
-                    )
-                )
+            if any(v in edge.virtuals for v in virtuals):
+                has_errors = True
+                details.append(
+                    (
+                        f"{edge.parent.name} has already an edge matching any"
+                        f" of these virtuals {virtuals}"
+                    )
+                )

            if has_errors:
                raise spack.error.SpecError(msg, "\n".join(details))
@@ -4027,12 +4016,8 @@ def format_path(
        return str(path_ctor(*output_path_components))

    def __str__(self):
-        if self._concrete:
-            return self.format("{name}{@version}{/hash:7}")
-
-        if not self._dependencies:
-            return self.format()
-
        root_str = [self.format()]
        sorted_dependencies = sorted(
            self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
@@ -4179,144 +4164,6 @@ def trim(self, dep_name):
                new_dependencies.add(edge)
        spec._dependencies = new_dependencies

-    def _virtuals_provided(self, root):
-        """Return set of virtuals provided by self in the context of root"""
-        if root is self:
-            # Could be using any virtual the package can provide
-            return set(self.package.virtuals_provided)
-
-        hashes = [s.dag_hash() for s in root.traverse()]
-        in_edges = set(
-            [edge for edge in self.edges_from_dependents() if edge.parent.dag_hash() in hashes]
-        )
-        return set().union(*[edge.virtuals for edge in in_edges])
-
-    def _splice_match(self, other, self_root, other_root):
-        """Return True if other is a match for self in a splice of other_root into self_root
-
-        Other is a splice match for self if it shares a name, or if self is a virtual provider
-        and other provides a superset of the virtuals provided by self. Virtuals provided are
-        evaluated in the context of a root spec (self_root for self, other_root for other).
-
-        This is a slight oversimplification. Other could be a match for self in the context of
-        one edge in self_root and not in the context of another edge. This method could be
-        expanded in the future to account for these cases.
-        """
-        if other.name == self.name:
-            return True
-
-        return bool(
-            self._virtuals_provided(self_root)
-            and self._virtuals_provided(self_root) <= other._virtuals_provided(other_root)
-        )
-
-    def _splice_detach_and_add_dependents(self, replacement, context):
-        """Helper method for Spec._splice_helper.
-
-        replacement is a node to splice in, context is the scope of dependents to consider relevant
-        to this splice."""
-        # Update build_spec attributes for all transitive dependents
-        # before we start changing their dependencies
-        ancestors_in_context = [
-            a
-            for a in self.traverse(root=False, direction="parents")
-            if a in context.traverse(deptype=dt.LINK | dt.RUN)
-        ]
-        for ancestor in ancestors_in_context:
-            # Only set it if it hasn't been spliced before
-            ancestor._build_spec = ancestor._build_spec or ancestor.copy()
-            ancestor.clear_cached_hashes(ignore=(ht.package_hash.attr,))
-
-        # For each direct dependent in the link/run graph, replace the dependency on
-        # node with one on replacement
-        # For each build dependent, restrict the edge to build-only
-        for edge in self.edges_from_dependents():
-            if edge.parent not in ancestors_in_context:
-                continue
-            build_dep = edge.depflag & dt.BUILD
-            other_dep = edge.depflag & ~dt.BUILD
-            if build_dep:
-                parent_edge = [e for e in edge.parent._dependencies[self.name] if e.spec is self]
-                assert len(parent_edge) == 1
-
-                edge.depflag = dt.BUILD
-                parent_edge[0].depflag = dt.BUILD
-            else:
-                edge.parent._dependencies.edges[self.name].remove(edge)
-                self._dependents.edges[edge.parent.name].remove(edge)
-
-            if other_dep:
-                edge.parent._add_dependency(replacement, depflag=other_dep, virtuals=edge.virtuals)
-
-    def _splice_helper(self, replacement, self_root, other_root):
-        """Main loop of a transitive splice.
-
-        The while loop around a traversal of self ensures that changes to self from previous
-        iterations are reflected in the traversal. This avoids evaluating irrelevant nodes
-        using topological traversal (all incoming edges traversed before any outgoing edge).
-        If any node will not be in the end result, its parent will be spliced and it will not
-        ever be considered.
-        For each node in self, find any analogous node in replacement and swap it in.
-        We assume all build deps are handled outside of this method
-
-        Arguments:
-            replacement: The node that will replace any equivalent node in self
-            self_root: The root of the spec that self comes from. This provides the context for
-                evaluating whether ``replacement`` is a match for each node of ``self``. See
-                ``Spec._splice_match`` and ``Spec._virtuals_provided`` for details.
-            other_root: The root of the spec that replacement comes from. This provides the context
-                for evaluating whether ``replacement`` is a match for each node of ``self``. See
-                ``Spec._splice_match`` and ``Spec._virtuals_provided`` for details.
-        """
-        ids = set(id(s) for s in replacement.traverse())
-
-        # Sort all possible replacements by name and virtual for easy access later
-        replacements_by_name = collections.defaultdict(list)
-        for node in replacement.traverse():
-            replacements_by_name[node.name].append(node)
-            virtuals = node._virtuals_provided(root=replacement)
-            for virtual in virtuals:
-                # Virtual may be spec or str, get name or return str
-                replacements_by_name[getattr(virtual, "name", virtual)].append(node)
-
-        changed = True
-        while changed:
-            changed = False
-
-            # Intentionally allowing traversal to change on each iteration
-            # using breadth-first traversal to ensure we only reach nodes that will
-            # be in final result
-            for node in self.traverse(root=False, order="topo", deptype=dt.ALL & ~dt.BUILD):
-                # If this node has already been swapped in, don't consider it again
-                if id(node) in ids:
-                    continue
-
-                analogs = replacements_by_name[node.name]
-                if not analogs:
-                    # If we have to check for matching virtuals, then we need to check that it
-                    # matches all virtuals. Use `_splice_match` to validate possible matches
-                    for virtual in node._virtuals_provided(root=self):
-                        analogs += [
-                            r
-                            for r in replacements_by_name[getattr(virtual, "name", virtual)]
-                            if r._splice_match(node, self_root=self_root, other_root=other_root)
-                        ]
-
-                    # No match, keep iterating over self
-                    if not analogs:
-                        continue
-
-                # If there are multiple analogs, this package must satisfy the constraint
-                # that a newer version can always replace a lesser version.
-                analog = max(analogs, key=lambda s: s.version)
-
-                # No splice needed here, keep checking
-                if analog == node:
-                    continue
-                node._splice_detach_and_add_dependents(analog, context=self)
-                changed = True
-                break
-
    def splice(self, other, transitive):
        """Splices dependency "other" into this ("target") Spec, and return the
        result as a concrete Spec.
@@ -4337,70 +4184,134 @@ def splice(self, other, transitive):
        the same way it was built, any such changes are tracked by setting the
        build_spec to point to the corresponding dependency from the original
        Spec.
+        TODO: Extend this for non-concrete Specs.
        """
        assert self.concrete
        assert other.concrete

-        if not any(
-            node._splice_match(other, self_root=self, other_root=other)
-            for node in self.traverse(root=False, deptype=dt.LINK | dt.RUN)
-        ):
-            other_str = other.format("{name}/{hash:7}")
-            self_str = self.format("{name}/{hash:7}")
-            msg = f"Cannot splice {other_str} into {self_str}."
-            msg += f" Either {self_str} cannot depend on {other_str},"
-            msg += f" or {other_str} fails to provide a virtual used in {self_str}"
-            raise SpliceError(msg)
+        virtuals_to_replace = [v.name for v in other.package.virtuals_provided if v in self]
+        if virtuals_to_replace:
+            deps_to_replace = dict((self[v], other) for v in virtuals_to_replace)
+            # deps_to_replace = [self[v] for v in virtuals_to_replace]
+        else:
+            # TODO: sanity check and error raise here for other.name not in self
+            deps_to_replace = {self[other.name]: other}
+            # deps_to_replace = [self[other.name]]

-        # Copies of all non-build deps, build deps will get added at the end
-        spec = self.copy(deps=dt.ALL & ~dt.BUILD)
-        replacement = other.copy(deps=dt.ALL & ~dt.BUILD)
-
-        def make_node_pairs(orig_spec, copied_spec):
-            return list(
-                zip(
-                    orig_spec.traverse(deptype=dt.ALL & ~dt.BUILD),
-                    copied_spec.traverse(deptype=dt.ALL & ~dt.BUILD),
-                )
-            )
+        for d in deps_to_replace:
+            if not all(
+                v in other.package.virtuals_provided or v not in self
+                for v in d.package.virtuals_provided
+            ):
+                # There was something provided by the original that we don't
+                # get from its replacement.
+                raise SpliceError(
+                    ("Splice between {0} and {1} will not provide " "the same virtuals.").format(
+                        self.name, other.name
+                    )
+                )
+            for n in d.traverse(root=False):
+                if not all(
+                    any(
+                        v in other_n.package.virtuals_provided
+                        for other_n in other.traverse(root=False)
+                    )
+                    or v not in self
+                    for v in n.package.virtuals_provided
+                ):
+                    raise SpliceError(
+                        (
+                            "Splice between {0} and {1} will not provide " "the same virtuals."
+                        ).format(self.name, other.name)
+                    )

-        def mask_build_deps(in_spec):
-            for edge in in_spec.traverse_edges(cover="edges"):
-                edge.depflag &= ~dt.BUILD
+        # For now, check that we don't have DAG with multiple specs from the
+        # same package
+        def multiple_specs(root):
+            counter = collections.Counter([node.name for node in root.traverse()])
+            _, max_number = counter.most_common()[0]
+            return max_number > 1
+
+        if multiple_specs(self) or multiple_specs(other):
+            msg = (
+                'Either "{0}" or "{1}" contain multiple specs from the same '
+                "package, which cannot be handled by splicing at the moment"
+            )
+            raise ValueError(msg.format(self, other))
+
+        # Multiple unique specs with the same name will collide, so the
+        # _dependents of these specs should not be trusted.
+        # Variants may also be ignored here for now...
+
+        # Keep all cached hashes because we will invalidate the ones that need
+        # invalidating later, and we don't want to invalidate unnecessarily
+
+        def from_self(name, transitive):
+            if transitive:
+                if name in other:
+                    return False
+                if any(v in other for v in self[name].package.virtuals_provided):
+                    return False
+                return True
+            else:
+                if name == other.name:
+                    return False
+                if any(
+                    v in other.package.virtuals_provided
+                    for v in self[name].package.virtuals_provided
+                ):
+                    return False
+                return True
+
+        self_nodes = dict(
+            (s.name, s.copy(deps=False))
+            for s in self.traverse(root=True)
+            if from_self(s.name, transitive)
+        )

        if transitive:
-            # These pairs will allow us to reattach all direct build deps
-            # We need the list of pairs while the two specs still match
-            node_pairs = make_node_pairs(self, spec)
-
-            # Ignore build deps in the modified spec while doing the splice
-            # They will be added back in at the end
-            mask_build_deps(spec)
-
-            # Transitively splice any relevant nodes from new into base
-            # This handles all shared dependencies between self and other
-            spec._splice_helper(replacement, self_root=self, other_root=other)
+            other_nodes = dict((s.name, s.copy(deps=False)) for s in other.traverse(root=True))
        else:
-            # Do the same thing as the transitive splice, but reversed
-            node_pairs = make_node_pairs(other, replacement)
-            mask_build_deps(replacement)
-            replacement._splice_helper(spec, self_root=other, other_root=self)
+            other_nodes = dict(
+                (s.name, s.copy(deps=False))
+                for s in other.traverse(root=True)
+                if s is other or s.name not in self
+            )

-        # Intransitively splice replacement into spec
-        # This is very simple now that all shared dependencies have been handled
-        for node in spec.traverse(order="topo", deptype=dt.LINK | dt.RUN):
-            if node._splice_match(other, self_root=spec, other_root=other):
-                node._splice_detach_and_add_dependents(replacement, context=spec)
+        nodes = other_nodes.copy()
+        nodes.update(self_nodes)

-        # Set up build dependencies for modified nodes
-        # Also modify build_spec because the existing ones had build deps removed
-        for orig, copy in node_pairs:
-            for edge in orig.edges_to_dependencies(depflag=dt.BUILD):
-                copy._add_dependency(edge.spec, depflag=dt.BUILD, virtuals=edge.virtuals)
-            if copy._build_spec:
-                copy._build_spec = orig.build_spec.copy()
+        for name in nodes:
+            if name in self_nodes:
+                for edge in self[name].edges_to_dependencies():
+                    dep_name = deps_to_replace.get(edge.spec, edge.spec).name
+                    nodes[name].add_dependency_edge(
+                        nodes[dep_name], depflag=edge.depflag, virtuals=edge.virtuals
+                    )
+                if any(dep not in self_nodes for dep in self[name]._dependencies):
+                    nodes[name].build_spec = self[name].build_spec
+            else:
+                for edge in other[name].edges_to_dependencies():
+                    nodes[name].add_dependency_edge(
+                        nodes[edge.spec.name], depflag=edge.depflag, virtuals=edge.virtuals
+                    )
+                if any(dep not in other_nodes for dep in other[name]._dependencies):
+                    nodes[name].build_spec = other[name].build_spec

-        return spec
+        ret = nodes[self.name]
+
+        # Clear cached hashes for all affected nodes
+        # Do not touch unaffected nodes
+        for dep in ret.traverse(root=True, order="post"):
+            opposite = other_nodes if dep.name in self_nodes else self_nodes
+            if any(name in dep for name in opposite.keys()):
+                # package hash cannot be affected by splice
+                dep.clear_cached_hashes(ignore=["package_hash"])
+
+            dep.dag_hash()
+
+        return nodes[self.name]

    def clear_cached_hashes(self, ignore=()):
        """

@@ -676,13 +676,11 @@ def test_build_manifest_visitor(tmpdir):
    assert all(os.path.islink(f) for f in visitor.symlinks)


-def test_text_relocate_if_needed(install_mockery, temporary_store, mock_fetch, monkeypatch, capfd):
-    install_cmd("needs-text-relocation")
-
-    specs = temporary_store.db.query("needs-text-relocation")
-    assert len(specs) == 1
-    manifest = get_buildfile_manifest(specs[0])
+def test_text_relocate_if_needed(install_mockery, mock_fetch, monkeypatch, capfd):
+    spec = Spec("needs-text-relocation").concretized()
+    install_cmd(str(spec))
+
+    manifest = get_buildfile_manifest(spec)
    assert join_path("bin", "exe") in manifest["text_to_relocate"]
    assert join_path("bin", "otherexe") not in manifest["text_to_relocate"]
    assert join_path("bin", "secretexe") not in manifest["text_to_relocate"]
@@ -794,7 +794,6 @@ def test_ld_deps_partial(wrapper_environment):
        + test_library_paths
        + ["-Lxlib"]
-        + ["--disable-new-dtags"]
        + test_rpaths
        + ["-r"]
        + test_args_without_paths,
    )
@@ -933,16 +933,15 @@ def test_push_to_build_cache(
        """
    )
    env_cmd("create", "test", "./spack.yaml")
-    with ev.read("test") as current_env:
-        current_env.concretize()
-        install_cmd("--keep-stage")
-
-        concrete_spec = list(current_env.roots())[0]
+    with ev.read("test"):
+        concrete_spec = Spec("patchelf").concretized()
        spec_json = concrete_spec.to_json(hash=ht.dag_hash)
        json_path = str(tmp_path / "spec.json")
        with open(json_path, "w") as ypfd:
            ypfd.write(spec_json)

+        install_cmd("--add", "--keep-stage", json_path)
+
        for s in concrete_spec.traverse():
            ci.push_to_build_cache(s, mirror_url, True)

@@ -311,7 +311,7 @@ def test_pkg_grep(mock_packages, capfd):
    output, _ = capfd.readouterr()
    assert output.strip() == "\n".join(
        spack.repo.PATH.get_pkg_class(name).module.__file__
-        for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-vt", "splice-z"]
+        for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-z"]
    )

    # ensure that this string isn't found
@@ -16,10 +16,8 @@ def modulefile_content(request):
    """Returns a function that generates the content of a module file as a list of lines."""
    writer_cls = getattr(request.module, "writer_cls")

-    def _impl(spec_like, module_set_name="default", explicit=True):
-        if isinstance(spec_like, str):
-            spec_like = spack.spec.Spec(spec_like)
-        spec = spec_like.concretized()
+    def _impl(spec_str, module_set_name="default", explicit=True):
+        spec = spack.spec.Spec(spec_str).concretized()
        generator = writer_cls(spec, module_set_name, explicit)
        generator.write(overwrite=True)
        written_module = pathlib.Path(generator.layout.filename)

@@ -388,7 +388,7 @@ def test_setup_environment(self, modulefile_content, module_configuration):

        spec = spack.spec.Spec("mpileaks")
        spec.concretize()
-        content = modulefile_content(spec["callpath"])
+        content = modulefile_content(str(spec["callpath"]))

        assert len([x for x in content if "setenv FOOBAR" in x]) == 1
        assert len([x for x in content if "setenv FOOBAR {callpath}" in x]) == 1

@@ -47,7 +47,7 @@ def test_rewire_db(mock_fetch, install_mockery, transitive):
        text_file_path = os.path.join(node.prefix, node.name)
        with open(text_file_path, "r") as f:
            text = f.read()
-        for modded_spec in node.traverse(root=True, deptype=("link", "run")):
+        for modded_spec in node.traverse(root=True):
            assert modded_spec.prefix in text


@@ -59,7 +59,6 @@ def test_rewire_bin(mock_fetch, install_mockery, transitive):
    dep = Spec("garply cflags=-g").concretized()
    PackageInstaller([spec.package, dep.package], explicit=True).install()
    spliced_spec = spec.splice(dep, transitive=transitive)
-
    assert spec.dag_hash() != spliced_spec.dag_hash()

    spack.rewiring.rewire(spliced_spec)

@@ -100,8 +99,6 @@ def test_rewire_writes_new_metadata(mock_fetch, install_mockery):
        )
        assert os.path.exists(manifest_file_path)
        orig_node = spec[node.name]
-        if node == orig_node:
-            continue
        orig_manifest_file_path = os.path.join(
            orig_node.prefix,
            spack.store.STORE.layout.metadata_dir,

@@ -7,7 +7,6 @@

import pytest

import spack.deptypes as dt
import spack.directives
import spack.error
import spack.parser
@@ -16,7 +15,6 @@
import spack.spec
import spack.store
import spack.variant
import spack.version as vn
from spack.error import SpecError, UnsatisfiableSpecError
from spack.spec import (
    ArchSpec,
@@ -34,95 +32,6 @@
)


-@pytest.fixture()
-def setup_complex_splice(monkeypatch):
-    r"""Fixture to set up splicing for two complex specs.
-
-    a_red is a spec in which every node has the variant color=red
-    c_blue is a spec in which every node has the variant color=blue
-
-    a_red structure:
-           a
-          / \ \
-         b   c \
-        /|\ / \ |
-       e | d  g@2
-        \|/
-        g@1
-
-    c_blue structure:
-            c
-           /|\
-          d f \
-         / |\ \
-       g@2 e \ \
-            \ | /
-             g@3
-
-    This is not intended for use in tests that use virtuals, so ``_splice_match`` is monkeypatched
-    to avoid needing package files for each spec.
-    """
-
-    def splice_match(self, other, self_root, other_root):
-        return self.name == other.name
-
-    def virtuals_provided(self, root):
-        return []
-
-    monkeypatch.setattr(Spec, "_splice_match", splice_match)
-    monkeypatch.setattr(Spec, "_virtuals_provided", virtuals_provided)
-
-    g1_red = Spec("g color=red")
-    g1_red.versions = vn.VersionList([vn.Version("1")])
-    g2_red = Spec("g color=red")
-    g2_red.versions = vn.VersionList([vn.Version("2")])
-    g2_blue = Spec("g color=blue")
-    g2_blue.versions = vn.VersionList([vn.Version("2")])
-    g3_blue = Spec("g color=blue")
-    g3_blue.versions = vn.VersionList([vn.Version("3")])
-
-    depflag = dt.LINK | dt.BUILD
-    e_red = Spec("e color=red")
-    e_red._add_dependency(g1_red, depflag=depflag, virtuals=())
-    e_blue = Spec("e color=blue")
-    e_blue._add_dependency(g3_blue, depflag=depflag, virtuals=())
-
-    d_red = Spec("d color=red")
-    d_red._add_dependency(g1_red, depflag=depflag, virtuals=())
-    d_blue = Spec("d color=blue")
-    d_blue._add_dependency(g2_blue, depflag=depflag, virtuals=())
-
-    b_red = Spec("b color=red")
-    b_red._add_dependency(e_red, depflag=depflag, virtuals=())
-    b_red._add_dependency(d_red, depflag=depflag, virtuals=())
-    b_red._add_dependency(g1_red, depflag=depflag, virtuals=())
-
-    f_blue = Spec("f color=blue")
-    f_blue._add_dependency(e_blue, depflag=depflag, virtuals=())
-    f_blue._add_dependency(g3_blue, depflag=depflag, virtuals=())
-
-    c_red = Spec("c color=red")
-    c_red._add_dependency(d_red, depflag=depflag, virtuals=())
-    c_red._add_dependency(g2_red, depflag=depflag, virtuals=())
-    c_blue = Spec("c color=blue")
-    c_blue._add_dependency(d_blue, depflag=depflag, virtuals=())
-    c_blue._add_dependency(f_blue, depflag=depflag, virtuals=())
-    c_blue._add_dependency(g3_blue, depflag=depflag, virtuals=())
-
-    a_red = Spec("a color=red")
-    a_red._add_dependency(b_red, depflag=depflag, virtuals=())
-    a_red._add_dependency(c_red, depflag=depflag, virtuals=())
-    a_red._add_dependency(g2_red, depflag=depflag, virtuals=())
-
-    for spec in [e_red, e_blue, d_red, d_blue, b_red, f_blue, c_red, c_blue, a_red]:
-        spec.versions = vn.VersionList([vn.Version("1")])
-
-    a_red._mark_concrete()
-    c_blue._mark_concrete()
-
-    return a_red, c_blue
-
-
@pytest.mark.usefixtures("config", "mock_packages")
class TestSpecSemantics:
    """Test satisfies(), intersects(), constrain() and other semantic operations on specs."""
@@ -806,6 +715,13 @@ def test_exceptional_paths_for_constructor(self):
    def test_spec_formatting(self, default_mock_concretization):
        spec = default_mock_concretization("multivalue-variant cflags=-O2")

        # Since the default is the full spec see if the string rep of
        # spec is the same as the output of spec.format()
+        # ignoring whitespace (though should we?) and ignoring dependencies
+        spec_string = str(spec)
+        idx = spec_string.index(" ^")
+        assert spec_string[:idx] == spec.format().strip()
+
        # Testing named strings ie {string} and whether we get
        # the correct component
        # Mixed case intentional to test both
@@ -1050,164 +966,6 @@ def test_splice(self, transitive, default_mock_concretization):
        # Finally, the spec should know it's been spliced:
        assert out.spliced

-    def test_splice_intransitive_complex(self, setup_complex_splice):
-        a_red, c_blue = setup_complex_splice
-
-        spliced = a_red.splice(c_blue, transitive=False)
-        assert spliced.satisfies(
-            "a color=red ^b color=red ^c color=blue "
-            "^d color=red ^e color=red ^f color=blue ^g@3 color=blue"
-        )
-        assert set(spliced.dependencies(deptype=dt.BUILD)) == set(
-            a_red.dependencies(deptype=dt.BUILD)
-        )
-        assert spliced.build_spec == a_red
-        # We cannot check spliced["b"].build_spec is spliced["b"] because Spec.__getitem__ creates
-        # a new wrapper object on each invocation. So we select once and check on that object
-        # For the rest of the unchanged specs we will just check the s._build_spec is None.
-        b = spliced["b"]
-        assert b == a_red["b"]
-        assert b.build_spec is b
-        assert set(b.dependents()) == {spliced}
-
-        assert spliced["c"].satisfies(
-            "c color=blue ^d color=red ^e color=red ^f color=blue ^g@3 color=blue"
-        )
-        assert set(spliced["c"].dependencies(deptype=dt.BUILD)) == set(
-            c_blue.dependencies(deptype=dt.BUILD)
-        )
-        assert spliced["c"].build_spec == c_blue
-        assert set(spliced["c"].dependents()) == {spliced}
-
-        assert spliced["d"] == a_red["d"]
-        assert spliced["d"]._build_spec is None
-        # Since D had a parent changed, it has a split edge for link vs build dependent
-        # note: spliced["b"] == b_red, referenced differently to preserve logic
-        assert set(spliced["d"].dependents()) == {spliced["b"], spliced["c"], a_red["c"]}
-        assert set(spliced["d"].dependents(deptype=dt.BUILD)) == {a_red["b"], a_red["c"]}
-
-        assert spliced["e"] == a_red["e"]
-        assert spliced["e"]._build_spec is None
-        # Because a copy of e is used, it does not have dependents in the original specs
-        assert set(spliced["e"].dependents()) == {spliced["b"], spliced["f"]}
-        # Build dependent edge to f because f originally depended on the e this was copied from
-        assert set(spliced["e"].dependents(deptype=dt.BUILD)) == {spliced["b"]}
-
-        assert spliced["f"].satisfies("f color=blue ^e color=red ^g@3 color=blue")
-        assert set(spliced["f"].dependencies(deptype=dt.BUILD)) == set(
-            c_blue["f"].dependencies(deptype=dt.BUILD)
-        )
-        assert spliced["f"].build_spec == c_blue["f"]
-        assert set(spliced["f"].dependents()) == {spliced["c"]}
-
-        # spliced["g"] is g3, but spliced["b"]["g"] is g1
-        assert spliced["g"] == a_red["g"]
-        assert spliced["g"]._build_spec is None
-        assert set(spliced["g"].dependents(deptype=dt.LINK)) == {
-            spliced,
-            spliced["c"],
-            spliced["f"],
-            a_red["c"],
-        }
-        assert set(spliced["g"].dependents(deptype=dt.BUILD)) == {spliced, a_red["c"]}
-
-        assert spliced["b"]["g"] == a_red["b"]["g"]
-        assert spliced["b"]["g"]._build_spec is None
-        assert set(spliced["b"]["g"].dependents()) == {spliced["b"], spliced["d"], spliced["e"]}
-
-        for edge in spliced.traverse_edges(cover="edges", deptype=dt.LINK | dt.RUN):
-            # traverse_edges creates a synthetic edge with no deptypes to the root
-            if edge.depflag:
-                depflag = dt.LINK
-                if (edge.parent.name, edge.spec.name) not in [
-                    ("a", "c"),  # These are the spliced edges
-                    ("c", "d"),
-                    ("f", "e"),
-                    ("c", "g"),
-                    ("f", "g"),
-                    ("c", "f"),  # ancestor to spliced edge
-                ]:
-                    depflag |= dt.BUILD
-                assert edge.depflag == depflag
-
-    def test_splice_transitive_complex(self, setup_complex_splice):
-        a_red, c_blue = setup_complex_splice
-
-        spliced = a_red.splice(c_blue, transitive=True)
-        assert spliced.satisfies(
-            "a color=red ^b color=red"
-            "^c color=blue ^d color=blue ^e color=blue ^f color=blue ^g@3 color=blue"
-        )
-        assert set(spliced.dependencies(deptype=dt.BUILD)) == set(
-            a_red.dependencies(deptype=dt.BUILD)
-        )
-        assert spliced.build_spec == a_red
-
-        assert spliced["b"].satisfies("b color=red ^d color=blue ^e color=blue ^g@2 color=blue")
-        assert set(spliced["b"].dependencies(deptype=dt.BUILD)) == set(
-            a_red["b"].dependencies(deptype=dt.BUILD)
-        )
-        assert spliced["b"].build_spec == a_red["b"]
-        assert set(spliced["b"].dependents()) == {spliced}
-
-        # We cannot check spliced["b"].build_spec is spliced["b"] because Spec.__getitem__ creates
-        # a new wrapper object on each invocation. So we select once and check on that object
-        # For the rest of the unchanged specs we will just check the s._build_spec is None.
-        c = spliced["c"]
-        assert c == c_blue
-        assert c.build_spec is c
-        assert set(c.dependents()) == {spliced}
-
-        assert spliced["d"] == c_blue["d"]
-        assert spliced["d"]._build_spec is None
-        assert set(spliced["d"].dependents()) == {spliced["b"], spliced["c"]}
-
-        assert spliced["e"] == c_blue["e"]
-        assert spliced["e"]._build_spec is None
-        assert set(spliced["e"].dependents()) == {spliced["b"], spliced["f"]}
-
-        assert spliced["f"] == c_blue["f"]
-        assert spliced["f"]._build_spec is None
-        assert set(spliced["f"].dependents()) == {spliced["c"]}
-
-        # spliced["g"] is g3, but spliced["d"]["g"] is g1
-        assert spliced["g"] == c_blue["g"]
-        assert spliced["g"]._build_spec is None
-        assert set(spliced["g"].dependents(deptype=dt.LINK)) == {
-            spliced,
-            spliced["b"],
-            spliced["c"],
-            spliced["e"],
-            spliced["f"],
-        }
-        # Because a copy of g3 is used, it does not have dependents in the original specs
-        # It has build dependents on these spliced specs because it is an unchanged dependency
-        # for them
-        assert set(spliced["g"].dependents(deptype=dt.BUILD)) == {
-            spliced["c"],
-            spliced["e"],
-            spliced["f"],
-        }
-
-        assert spliced["d"]["g"] == c_blue["d"]["g"]
-        assert spliced["d"]["g"]._build_spec is None
-        assert set(spliced["d"]["g"].dependents()) == {spliced["d"]}
-
-        for edge in spliced.traverse_edges(cover="edges", deptype=dt.LINK | dt.RUN):
-            # traverse_edges creates a synthetic edge with no deptypes to the root
-            if edge.depflag:
-                depflag = dt.LINK
-                if (edge.parent.name, edge.spec.name) not in [
-                    ("a", "c"),  # These are the spliced edges
-                    ("a", "g"),
-                    ("b", "d"),
-                    ("b", "e"),
-                    ("b", "g"),
-                    ("a", "b"),  # This edge not spliced, but b was spliced invalidating edge
-                ]:
-                    depflag |= dt.BUILD
-                assert edge.depflag == depflag
-
    @pytest.mark.parametrize("transitive", [True, False])
    def test_splice_with_cached_hashes(self, default_mock_concretization, transitive):
        spec = default_mock_concretization("splice-t")
@@ -1340,7 +1098,7 @@ def test_satisfies_dependencies_ordered(self):

    @pytest.mark.parametrize("transitive", [True, False])
    def test_splice_swap_names(self, default_mock_concretization, transitive):
-        spec = default_mock_concretization("splice-vt")
+        spec = default_mock_concretization("splice-t")
        dep = default_mock_concretization("splice-a+foo")
        out = spec.splice(dep, transitive)
        assert dep.name in out
@@ -1350,7 +1108,7 @@ def test_splice_swap_names_mismatch_virtuals(self, default_mock_concretization, transitive):
        spec = default_mock_concretization("splice-t")
        dep = default_mock_concretization("splice-vh+foo")
-        with pytest.raises(spack.spec.SpliceError, match="virtual"):
+        with pytest.raises(spack.spec.SpliceError, match="will not provide the same virtuals."):
            spec.splice(dep, transitive)

    def test_spec_override(self):

@@ -551,20 +551,6 @@ def _specfile_for(spec_str, filename):
        ],
        "^[deptypes=build,link] zlib",
    ),
-    (
-        "^[deptypes=link] zlib ^[deptypes=build] zlib",
-        [
-            Token(TokenType.START_EDGE_PROPERTIES, value="^["),
-            Token(TokenType.KEY_VALUE_PAIR, value="deptypes=link"),
-            Token(TokenType.END_EDGE_PROPERTIES, value="]"),
-            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
-            Token(TokenType.START_EDGE_PROPERTIES, value="^["),
-            Token(TokenType.KEY_VALUE_PAIR, value="deptypes=build"),
-            Token(TokenType.END_EDGE_PROPERTIES, value="]"),
-            Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="zlib"),
-        ],
-        "^[deptypes=link] zlib ^[deptypes=build] zlib",
-    ),
    (
        "git-test@git.foo/bar",
        [
@@ -1006,8 +992,6 @@ def test_disambiguate_hash_by_spec(spec1, spec2, constraint, mock_packages, monkeypatch):
    ("x target=be platform=test os=be os=fe", "'platform'"),
    # Dependencies
    ("^[@foo] zlib", "edge attributes"),
-    ("x ^[deptypes=link]foo ^[deptypes=run]foo", "conflicting dependency types"),
-    ("x ^[deptypes=build,link]foo ^[deptypes=link]foo", "conflicting dependency types"),
    # TODO: Remove this as soon as use variants are added and we can parse custom attributes
    ("^[foo=bar] zlib", "edge attributes"),
    # Propagating reserved names generates a parse error

@@ -1,24 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-from spack.package import *
-
-
-class SpliceVt(Package):
-    """Simple package with one optional dependency"""
-
-    homepage = "http://www.example.com"
-    url = "http://www.example.com/splice-t-1.0.tar.gz"
-
-    version("1.0", md5="0123456789abcdef0123456789abcdef")
-
-    depends_on("somethingelse")
-    depends_on("splice-z")
-
-    def install(self, spec, prefix):
-        with open(prefix.join("splice-vt"), "w") as f:
-            f.write("splice-vt: {0}".format(prefix))
-            f.write("splice-h: {0}".format(spec["somethingelse"].prefix))
-            f.write("splice-z: {0}".format(spec["splice-z"].prefix))

@@ -143,7 +143,6 @@ class Amber(Package, CudaPackage):
    depends_on("cuda@7.5.18", when="@:16+cuda")

    # conflicts
    conflicts("+x11", when="platform=cray", msg="x11 amber applications not available for cray")
    conflicts("+openmp", when="%clang", msg="OpenMP not available for the clang compiler")
    conflicts(
        "+openmp", when="%apple-clang", msg="OpenMP not available for the Apple clang compiler"

@@ -1,177 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


# Wrapper around depends_on to propagate dependency variants
def dav_sdk_depends_on(spec, when=None, propagate=None):
    # Do the basic depends_on
    depends_on(spec, when=when)

    # Strip spec string to just the base spec name
    # ie. A +c ~b -> A
    spec = Spec(spec).name

    # If the package is in the spec tree then it must be enabled in the SDK.
    if "+" in when:
        _when_variants = when.strip("+").split("+")
        if any(tok in when for tok in ["~", "="]):
            tty.error("Bad token in when clause, only positive boolean tokens allowed")

        for variant in _when_variants:
            conflicts("~" + variant, when="^" + spec)

    # Skip if there is nothing to propagate
    if not propagate:
        return

    # Map the propagated variants to the dependency variant. Some packages may need
    # overrides to propagate a dependency as something else, e.g., {"visit": "libsim"}.
    # Most call-sites will just use a list.
    if not type(propagate) is dict:
        propagate = dict([(v, v) for v in propagate])

    # Determine the base variant
    base_variant = ""
    if when:
        base_variant = when

    def is_boolean(variant):
        return "=" not in variant

    # Propagate variants to dependency
    for v_when, v_then in propagate.items():
        if is_boolean(v_when):
            depends_on(
                "{0} +{1}".format(spec, v_then), when="{0} +{1}".format(base_variant, v_when)
            )
            depends_on(
                "{0} ~{1}".format(spec, v_then), when="{0} ~{1}".format(base_variant, v_when)
            )
        else:
            depends_on("{0} {1}".format(spec, v_then), when="{0} {1}".format(base_variant, v_when))


def exclude_variants(variants, exclude):
    return [variant for variant in variants if variant not in exclude]


class DavSdk(BundlePackage, CudaPackage, ROCmPackage):
    """Data & Vis SDK

    ** This package is a post-ECP port from the ECP Data & Vis SDK **
    """

    homepage = "https://dav-sdk.github.io/"

    tags = ["peso", "oasis", "sdk"]
    maintainers("kwryankrattiger", "vicentebolea")

    version("1.0")

    ############################################################
    # Variants
    ############################################################

    # Data
    variant("adios2", default=False, description="Enable ADIOS2")
    variant("hdf5", default=False, description="Enable HDF5")
    variant("pnetcdf", default=False, description="Enable PNetCDF")
    variant("diy", default=False, description="Enable DIY")

    # Vis
    variant("ascent", default=False, description="Enable Ascent")
    variant("paraview", default=False, description="Enable ParaView")
    variant("visit", default=False, description="Enable VisIt")
    variant("vtkm", default=False, description="Enable VTK-m")
    variant("zfp", default=False, description="Enable ZFP")

    # Language Options
    variant("fortran", default=True, sticky=True, description="Enable fortran language features.")

    ############################################################
    # Dependencies
    ############################################################
    cuda_arch_variants = ["cuda_arch={0}".format(x) for x in CudaPackage.cuda_arch_values]
    amdgpu_target_variants = ["amdgpu_target={0}".format(x) for x in ROCmPackage.amdgpu_targets]

    dav_sdk_depends_on(
        "adios2+shared+mpi+python+sst+dataman",
        when="+adios2",
        propagate=["cuda", "hdf5", "zfp", "fortran"] + cuda_arch_variants,
    )

    # HDF5 1.14 is a soft requirement for the ECP Data and Vis SDK.
    # When building with VisIt and CinemaSci in the same SDK environment there is a conflict
    # in the build dependency of py-setuptools which prevents building a py-h5py that is
    # compatible with 'hdf5@1.14:'. Until there is a version of VisIt with an updated VTK or Spack
    # allows the concretization of multiple versions of the same build only dependency
    # concretization with VisIt and Cinema variants will not allow building VOLs.
    dav_sdk_depends_on("hdf5@1.12: +shared+mpi", when="+hdf5", propagate=["fortran"])

    # HDF5 VOL Adapters require hdf5@1.14:

    # hdf5-vfd-gds needs cuda@11.7.1 or later, only enable when 11.7.1+ available.
    depends_on("hdf5-vfd-gds@1.0.2:", when="+cuda+hdf5 ^cuda@11.7.1: ^hdf5@1.14:")
    for cuda_arch in cuda_arch_variants:
        depends_on(
            "hdf5-vfd-gds@1.0.2: {0}".format(cuda_arch),
            when="+cuda+hdf5 {0} ^cuda@11.7.1: ^hdf5@1.14:".format(cuda_arch),
        )
    conflicts("~cuda", when="^hdf5-vfd-gds@1.0.2:")
    conflicts("~hdf5", when="^hdf5-vfd-gds@1.0.2:")
    conflicts("~hdf5", when="^hdf5-vol-async")
    conflicts("~hdf5", when="^hdf5-vol-cache")
    conflicts("~hdf5", when="^hdf5-vol-log")
    depends_on("hdf5-vol-async", when="+hdf5 ^hdf5@1.14:")
    depends_on("hdf5-vol-cache", when="+hdf5 ^hdf5@1.14:")
    depends_on("hdf5-vol-log", when="+hdf5 ^hdf5@1.14:")

    dav_sdk_depends_on("parallel-netcdf+shared", when="+pnetcdf", propagate=["fortran"])

    # Fortran support with ascent is problematic on some Cray platforms so the
    # SDK is explicitly disabling it until the issues are resolved.
    dav_sdk_depends_on(
        "ascent+mpi~fortran+python+shared+vtkh+dray~test",
        when="+ascent",
        propagate=["adios2", "cuda"] + cuda_arch_variants,
    )
    depends_on("ascent+openmp", when="~rocm+ascent")
    depends_on("ascent~openmp", when="+rocm+ascent")

    # Need to explicitly turn off conduit hdf5_compat in order to build
    # hdf5@1.12 which is required for SDK
    depends_on("conduit ~hdf5_compat", when="+ascent +hdf5")
    # Disable configuring with @develop. This should be removed after ascent
    # releases 0.8 and ascent can build with conduit@0.8: and vtk-m@1.7:
    conflicts("^ascent@develop", when="+ascent")

    dav_sdk_depends_on("diy@3:", when="+diy")

    # ParaView needs @5.11: in order to use CUDA/ROCM, therefore it is the minimum
    # required version since GPU capability is desired for ECP
    dav_sdk_depends_on(
        "paraview@5.11:+mpi+openpmd+python+kits+shared+catalyst+libcatalyst" " use_vtkm=on",
        when="+paraview",
        propagate=["adios2", "cuda", "hdf5", "rocm"] + amdgpu_target_variants + cuda_arch_variants,
    )
    dav_sdk_depends_on("libcatalyst@2:+mpi", when="+paraview")
    conflicts("^paraview@master", when="+paraview")

    dav_sdk_depends_on("visit+mpi+python+silo", when="+visit", propagate=["hdf5", "adios2"])

    dav_sdk_depends_on(
        "vtk-m@1.7:+shared+mpi+rendering",
        when="+vtkm",
        propagate=["cuda", "rocm"] + cuda_arch_variants + amdgpu_target_variants,
    )
    # TODO: When Ascent is updated to use VTK-m >= 1.8 move examples to
    # the main spec.
    conflicts("^vtk-m~examples", when="+vtkm ^vtk-m@1.8:")
    depends_on("vtk-m+openmp", when="~rocm+vtkm")
    depends_on("vtk-m~openmp", when="+rocm+vtkm")

    dav_sdk_depends_on("zfp", when="+zfp", propagate=["cuda"] + cuda_arch_variants)

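Aside: to make the wrapper's behavior concrete, a standalone sketch of the propagate-mapping step with depends_on stubbed out so the generated directives can be printed (the stub is illustrative, not Spack's API):

    # depends_on is stubbed to show what dav_sdk_depends_on would register.
    def depends_on(spec, when=None):
        print(f"depends_on({spec!r}, when={when!r})")

    def propagate_variants(spec_name, when, propagate):
        if not isinstance(propagate, dict):
            propagate = {v: v for v in propagate}
        for v_when, v_then in propagate.items():
            if "=" not in v_when:  # boolean variant: forward both senses
                depends_on(f"{spec_name} +{v_then}", when=f"{when} +{v_when}")
                depends_on(f"{spec_name} ~{v_then}", when=f"{when} ~{v_when}")
            else:  # multi-valued variant (e.g. cuda_arch=80): forward verbatim
                depends_on(f"{spec_name} {v_then}", when=f"{when} {v_when}")

    propagate_variants("visit", "+visit", ["hdf5", "adios2"])
    # depends_on('visit +hdf5', when='+visit +hdf5')
    # depends_on('visit ~hdf5', when='+visit ~hdf5')
    # depends_on('visit +adios2', when='+visit +adios2')
    # depends_on('visit ~adios2', when='+visit ~adios2')
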
@@ -22,7 +22,6 @@ class Dlb(AutotoolsPackage):
    license("LGPL-3.0-or-later")

    version("main", branch="main")
    version("3.4.1", sha256="7c071b75c126f8e77c1a30369348751624d5636edcbd663bf3d41fa04733c894")
    version("3.4", sha256="6091d032c11a094a3ce0bec11c0a164783fdff83cb4ec870c9d8e192410c353a")
    version("3.3.1", sha256="1b245acad80b03eb83e815fd59dcfc598cfddd899de4504cf6a9572fe5359f40")
    version("3.3", sha256="55b87aea14f3954d8878912f3134938db235e6984fae26fdf5134148007eb722")
@@ -32,17 +31,13 @@ class Dlb(AutotoolsPackage):
    version("3.0.1", sha256="04f8a7aa269d02fc8561d0a61d64786aa18850367ce4f95d086ca12ab3eb7d24")
    version("3.0", sha256="e3fc1d51e9ded6d4d40d37f8568da4c4d72d1a8996bdeff2dfbbd86c9b96e36a")

    depends_on("c", type="build")
    depends_on("fortran", type="build")
    depends_on("c", type="build")  # generated
    depends_on("fortran", type="build")  # generated

    variant("debug", default=False, description="Build additional debug libraries")
    variant("mpi", default=True, description="Build MPI libraries")
    variant("hwloc", default=True, description="Enable HWLOC support")
    variant("papi", default=True, description="Enable PAPI support")
    variant("debug", default=False, description="Builds additional debug libraries")
    variant("mpi", default=False, description="Builds MPI libraries")

    depends_on("mpi", when="+mpi")
    depends_on("hwloc", when="+hwloc")
    depends_on("papi", when="@3.4: +papi")
    depends_on("python", type="build")
    depends_on("autoconf", type="build", when="@main")
    depends_on("automake", type="build", when="@main")
@@ -53,8 +48,5 @@ def configure_args(self):
        args.extend(self.enable_or_disable("debug"))
        args.extend(self.enable_or_disable("instrumentation-debug", variant="debug"))
        args.extend(self.with_or_without("mpi"))
        args.extend(self.with_or_without("hwloc"))
        if self.spec.satisfies("@3.4:"):
            args.extend(self.with_or_without("papi"))

        return args

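Aside: for readers unfamiliar with the AutotoolsPackage helpers used here, enable_or_disable and with_or_without map a boolean variant onto the matching configure flag. A simplified, standalone sketch (the real helpers live in Spack's build systems and also handle multi-valued variants):

    # `variants` stands in for the concretized spec's variant values.
    variants = {"debug": False, "mpi": True, "hwloc": True, "papi": True}

    def enable_or_disable(name):
        return [f"--enable-{name}" if variants[name] else f"--disable-{name}"]

    def with_or_without(name):
        return [f"--with-{name}" if variants[name] else f"--without-{name}"]

    args = enable_or_disable("debug") + with_or_without("mpi") + with_or_without("hwloc")
    print(args)  # ['--disable-debug', '--with-mpi', '--with-hwloc']
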
@@ -24,19 +24,3 @@ class Fd(CargoPackage):
    version("7.4.0", sha256="33570ba65e7f8b438746cb92bb9bc4a6030b482a0d50db37c830c4e315877537")

    depends_on("rust@1.64:", type="build", when="@9:")

    @run_after("install")
    def install_completions(self):
        fd = Executable(self.prefix.bin.fd)

        mkdirp(bash_completion_path(self.prefix))
        with open(bash_completion_path(self.prefix) / "fd", "w") as file:
            fd("--gen-completions", "bash", output=file)

        mkdirp(fish_completion_path(self.prefix))
        with open(fish_completion_path(self.prefix) / "fd.fish", "w") as file:
            fd("--gen-completions", "fish", output=file)

        mkdirp(zsh_completion_path(self.prefix))
        with open(zsh_completion_path(self.prefix) / "_fd", "w") as file:
            fd("--gen-completions", "zsh", output=file)

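Aside: fd here, and ripgrep, rclone, and restic further below, all follow the same pattern — run the freshly installed binary once per shell and capture the generated completion script. A standalone sketch of that pattern using only the standard library (the output paths are illustrative, not Spack's bash_completion_path helpers):

    import subprocess
    from pathlib import Path

    def install_completions(tool, prefix):
        # one (filename, shell) pair per supported shell
        for fname, shell in [("fd", "bash"), ("fd.fish", "fish"), ("_fd", "zsh")]:
            outdir = Path(prefix) / "share" / shell / "completions"
            outdir.mkdir(parents=True, exist_ok=True)
            with open(outdir / fname, "w") as f:
                subprocess.run([tool, "--gen-completions", shell], stdout=f, check=True)
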
@@ -51,7 +51,6 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage, CompilerPackage):
    version("12.2.0", sha256="e549cf9cf3594a00e27b6589d4322d70e0720cdd213f39beb4181e06926230ff")
    version("12.1.0", sha256="62fd634889f31c02b64af2c468f064b47ad1ca78411c45abe6ac4b5f8dd19c7b")

    version("11.5.0", sha256="a6e21868ead545cf87f0c01f84276e4b5281d672098591c1c896241f09363478")
    version("11.4.0", sha256="3f2db222b007e8a4a23cd5ba56726ef08e8b1f1eb2055ee72c1402cea73a8dd9")
    version("11.3.0", sha256="b47cf2818691f5b1e21df2bb38c795fac2cfbd640ede2d0a5e1c89e338a3ac39")
    version("11.2.0", sha256="d08edc536b54c372a1010ff6619dd274c0f1603aa49212ba20f7aa2cda36fa8b")

@@ -263,17 +263,7 @@ def install(self, spec, prefix):

    @run_after("install")
    def install_completions(self):
        mkdirp(bash_completion_path(self.prefix))
        install(
            "contrib/completion/git-completion.bash",
            join_path(bash_completion_path(self.prefix), "git"),
        )

        mkdirp(zsh_completion_path(self.prefix))
        install(
            "contrib/completion/git-completion.zsh",
            join_path(zsh_completion_path(self.prefix), "_git"),
        )
        install_tree("contrib/completion", self.prefix.share)

    @run_after("install")
    def install_manpages(self):

@@ -33,7 +33,7 @@ class Gpgme(AutotoolsPackage):

    depends_on("gnupg", type="build")
    depends_on("libgpg-error", type="build")
    depends_on("libassuan@2.4.2:2", type=("build", "link"))
    depends_on("libassuan", type="build")

    @classmethod
    def determine_version(cls, exe):

@@ -36,6 +36,7 @@ class Highway(CMakePackage):
    depends_on("googletest", type="test")

    def cmake_args(self):
        spec = self.spec
        define = self.define
        from_variant = self.define_from_variant

@@ -45,6 +46,9 @@ def cmake_args(self):
            define("HWY_ENABLE_TESTS", self.run_tests),
            define("BUILD_TESTING", self.run_tests),
            define("HWY_SYSTEM_GTEST", self.run_tests),
            define(
                "HWY_CMAKE_ARM7", spec.satisfies("%gcc@:6.1.0") or spec.satisfies("%clang@:16")
            ),
        ]

        return args

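Aside: a simplified stand-in for what CMakePackage.define produces for the cache entries above (the real helper also handles lists and typed cache entries):

    # Simplified sketch of CMakePackage.define; illustrative only.
    def define(name, value):
        if isinstance(value, bool):
            value = "ON" if value else "OFF"
        return f"-D{name}={value}"

    print(define("HWY_ENABLE_TESTS", False))  # -DHWY_ENABLE_TESTS=OFF
    print(define("HWY_CMAKE_ARM7", True))     # -DHWY_CMAKE_ARM7=ON
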
@@ -226,7 +226,7 @@ class Lbann(CachedCMakePackage, CudaPackage, ROCmPackage):
    depends_on("python@3: +shared", type=("build", "run"), when="+pfe")
    extends("python", when="+pfe")
    depends_on("py-setuptools", type="build", when="+pfe")
    depends_on("py-protobuf@3.10.0:4.21.12", type=("build", "run"), when="+pfe")
    depends_on("py-protobuf+cpp@3.10.0:4.21.12", type=("build", "run"), when="+pfe")

    depends_on("protobuf@3.10.0:3.21.12")
    depends_on("zlib-api", when="^protobuf@3.11.0:")

@@ -18,7 +18,6 @@ class Meshtool(MakefilePackage):
    # Version to use with openCARP releases
    # It is possible that different openCARP releases rely on the same
    # meshtool version
    version("oc16.0", commit="867431d6bde35ad41104f611aa57130ef58cfb79")
    version("oc15.0", commit="867431d6bde35ad41104f611aa57130ef58cfb79")
    version("oc13.0", commit="867431d6bde35ad41104f611aa57130ef58cfb79")
    version("oc12.0", commit="867431d6bde35ad41104f611aa57130ef58cfb79")

@@ -69,7 +69,7 @@ class Mvapich(AutotoolsPackage):
        "pmi_version",
        description="Which pmi version to be used. If using pmi2 add it to your CFLAGS",
        default="simple",
        values=("simple", "pmi2", "pmix"),
        values=("simple", "pmi2"),
        multi=False,
    )

@@ -113,7 +113,6 @@ class Mvapich(AutotoolsPackage):
    depends_on("libfabric", when="netmod=ofi")
    depends_on("slurm", when="process_managers=slurm")
    depends_on("ucx", when="netmod=ucx")
    depends_on("pmix", when="pmi_version=pmix")

    with when("process_managers=slurm"):
        conflicts("pmi_version=pmi2")
@@ -266,8 +265,6 @@ def configure_args(self):

        args.extend(self.enable_or_disable("alloca"))
        args.append("--with-pmi=" + spec.variants["pmi_version"].value)
        if "pmi_version=pmix" in spec:
            args.append("--with-pmix={0}".format(spec["pmix"].prefix))

        if "+debug" in self.spec:
            args.extend(

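Aside: the pmi_version variant above drives the configure flags directly. A standalone sketch of that mapping with the spec stubbed as plain data (the prefix path is hypothetical):

    # Stand-ins for the concretized spec's variant value and pmix prefix.
    pmi_version = "pmix"
    pmix_prefix = "/opt/pmix"  # hypothetical install prefix

    args = ["--with-pmi=" + pmi_version]
    if pmi_version == "pmix":
        args.append("--with-pmix={0}".format(pmix_prefix))
    print(args)  # ['--with-pmi=pmix', '--with-pmix=/opt/pmix']
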
@@ -19,15 +19,12 @@ class Opencarp(CMakePackage):
    maintainers("MarieHouillon")

    version(
        "16.0",
        commit="295055b6a3859709730f62fc8d4fe0e87c4e20b9",
        "15.0",
        commit="2271a3cccd7137f1e28c043c10adbd80480f1462",
        submodules=False,
        no_cache=True,
        preferred=True,
    )
    version(
        "15.0", commit="2271a3cccd7137f1e28c043c10adbd80480f1462", submodules=False, no_cache=True
    )
    version(
        "13.0", commit="e1e0deca7eddcfd210835f54430361c85a97a5a4", submodules=False, no_cache=True
    )
@@ -75,7 +72,7 @@ class Opencarp(CMakePackage):
    depends_on("py-carputils", when="+carputils", type=("build", "run"))
    depends_on("meshtool", when="+meshtool", type=("build", "run"))
    # Use specific versions of carputils and meshtool for releases
    for ver in ["16.0", "15.0", "13.0", "12.0", "11.0", "10.0", "9.0", "8.2", "8.1", "7.0"]:
    for ver in ["15.0", "13.0", "12.0", "11.0", "10.0", "9.0", "8.2", "8.1", "7.0"]:
        depends_on("py-carputils@oc" + ver, when="@" + ver + " +carputils")
        depends_on("meshtool@oc" + ver, when="@" + ver + " +meshtool")

@@ -4,28 +4,34 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *
from spack.pkg.builtin.boost import Boost


class Openimageio(CMakePackage):
    """Reading, writing, and processing images in a wide variety of file formats, using
    a format-agnostic API, aimed at VFX applications."""
    """OpenImageIO is a library for reading and writing images, and a bunch of
    related classes, utilities, and applications."""

    homepage = "https://openimageio.readthedocs.io"
    git = "https://github.com/AcademySoftwareFoundation/OpenImageIO"
    url = "https://github.com/AcademySoftwareFoundation/OpenImageIO/archive/refs/tags/v2.5.14.0.tar.gz"
    homepage = "https://www.openimageio.org"
    url = "https://github.com/OpenImageIO/oiio/archive/Release-1.8.15.tar.gz"

    license("Apache-2.0")

    version("2.5.15.0", sha256="7779ef2c3d03c5ed95e13ff292de85c3f8cee301cd46baad0d2dc83c93bfe85c")
    version("2.2.7.0", sha256="857ac83798d6d2bda5d4d11a90618ff19486da2e5a4c4ff022c5976b5746fe8c")
    version("1.8.15", sha256="4d5b4ed3f2daaed69989f53c0f9364dd87c82dc0a09807b5b6e9008e2426e86f")

    depends_on("cxx", type="build")
    depends_on("cxx", type="build")  # generated

    # Core dependencies
    depends_on("cmake@3.2.2:", type="build")
    depends_on("boost+atomic+filesystem+thread+chrono@1.53:")
    depends_on("libtiff@4.0:")
    depends_on("openexr@3.1:")
    depends_on("libpng@1.6:")
    depends_on("boost@1.53:", type=("build", "link"))

    # TODO: replace this with an explicit list of components of Boost,
    # for instance depends_on('boost +filesystem')
    # See https://github.com/spack/spack/pull/22303 for reference
    depends_on(Boost.with_default_variants, type=("build", "link"))
    depends_on("libtiff@4.0:", type=("build", "link"))
    depends_on("openexr@2.3:", type=("build", "link"))
    depends_on("libpng@1.6:", type=("build", "link"))

    # Optional dependencies
    variant("ffmpeg", default=False, description="Support video frames")
@@ -42,10 +48,7 @@ class Openimageio(CMakePackage):
    variant("qt", default=False, description="Build qt viewer")
    depends_on("qt@5.6.0:+opengl", when="+qt")

    def url_for_version(self, version):
        if version >= Version("2"):
            return super().url_for_version(version)
        return f"https://github.com/AcademySoftwareFoundation/OpenImageIO/archive/refs/tags/Release-{version}.tar.gz"
    conflicts("target=aarch64:", when="@:1.8.15")

    def cmake_args(self):
        args = ["-DUSE_FFMPEG={0}".format("ON" if "+ffmpeg" in self.spec else "OFF")]

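Aside: the url_for_version hook above switches URL shapes at the 2.x boundary — v-prefixed tags for 2.x, "Release-" tags for 1.x. A standalone sketch of that selection (string-based, for illustration; Spack passes a Version object):

    def url_for_version(version):
        base = "https://github.com/AcademySoftwareFoundation/OpenImageIO/archive/refs/tags"
        if int(version.split(".")[0]) >= 2:
            return f"{base}/v{version}.tar.gz"
        return f"{base}/Release-{version}.tar.gz"

    print(url_for_version("2.5.15.0"))  # .../tags/v2.5.15.0.tar.gz
    print(url_for_version("1.8.15"))    # .../tags/Release-1.8.15.tar.gz
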
@@ -16,9 +16,6 @@ class Protobuf(CMakePackage):

    license("BSD-3-Clause")

    version("3.28.2", sha256="1b6b6a7a7894f509f099c4469b5d4df525c2f3c9e4009e5b2db5b0f66cb8ee0e")
    version("3.27.5", sha256="a4aa92d0a207298149bf553d9a3192f3562eb91740086f50fa52331e60fa480c")
    version("3.26.1", sha256="f3c0830339eaa5036eba8ff8ce7fca5aa3088f7d616f7c3713d946f611ae92bf")
    version("3.25.3", sha256="da82be8acc5347c7918ef806ebbb621b24988f7e1a19b32cd7fc73bc29b59186")
    version("3.24.3", sha256="2c23dee0bdbc36bd43ee457083f8f5560265d0815cc1c56033de3932843262fe")
    version("3.23.3", sha256="5e4b555f72a7e3f143a7aff7262292500bb02c49b174351684bb70fc7f2a6d33")
@@ -80,8 +77,8 @@ class Protobuf(CMakePackage):
    version("3.1.0", sha256="fb2a314f4be897491bb2446697be693d489af645cb0e165a85e7e64e07eb134d")
    version("3.0.2", sha256="a0a265bcc9d4e98c87416e59c33afc37cede9fb277292523739417e449b18c1e")

    depends_on("c", type="build")
    depends_on("cxx", type="build")
    depends_on("c", type="build")  # generated
    depends_on("cxx", type="build")  # generated

    variant("shared", default=True, description="Enables the build of shared libraries")
    variant(

@@ -19,7 +19,6 @@ class PyCarputils(PythonPackage):

    version("master", branch="master")
    # Version to use with openCARP releases
    version("oc16.0", commit="c40783d884de5ad8ae1b5102b68013b28e14cbe4")
    version("oc15.0", commit="50e2580b3f75711388eb55982a9b43871c3201f3")
    version("oc13.0", commit="216c3802c2ac2d14c739164dcd57f2e59aa2ede3")
    version("oc12.0", commit="4d7a1f0c604a2ad232e70cf9aa3a8daff5ffb195")

@@ -18,4 +18,3 @@ class PyFluidfftBuilder(PythonPackage):

    depends_on("python@3.9:", type=("build", "run"))
    depends_on("py-flit-core@3.2:3", type="build")
    depends_on("py-cython@3.0:", type="run")

@@ -26,6 +26,5 @@ class PyFluidfftFftw(PythonPackage):
    depends_on("py-meson-python")
    depends_on("py-transonic@0.6.4:")
    depends_on("py-fluidfft-builder")
    depends_on("py-cython@3.0:")

    depends_on("py-fluidfft", type="run")

@@ -27,6 +27,5 @@ class PyFluidfftFftwmpi(PythonPackage):
    depends_on("py-meson-python")
    depends_on("py-transonic@0.6.4:")
    depends_on("py-fluidfft-builder")
    depends_on("py-cython@3.0:")

    depends_on("py-fluidfft", type="run")

@@ -27,6 +27,5 @@ class PyFluidfftMpiWithFftw(PythonPackage):
    depends_on("py-meson-python")
    depends_on("py-transonic@0.6.4:")
    depends_on("py-fluidfft-builder")
    depends_on("py-cython@3.0:")

    depends_on("py-fluidfft", type="run")

@@ -28,6 +28,5 @@ class PyFluidfftP3dfft(PythonPackage):
    depends_on("py-meson-python")
    depends_on("py-transonic@0.6.4:")
    depends_on("py-fluidfft-builder")
    depends_on("py-cython@3.0:")

    depends_on("py-fluidfft", type="run")

@@ -28,6 +28,5 @@ class PyFluidfftPfft(PythonPackage):
    depends_on("py-meson-python")
    depends_on("py-transonic@0.6.4:")
    depends_on("py-fluidfft-builder")
    depends_on("py-cython@3.0:")

    depends_on("py-fluidfft", type="run")

@@ -1,27 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyFluidsimCore(PythonPackage):
    """Pure-Python core library for FluidSim framework."""

    pypi = "fluidsim-core/fluidsim_core-0.8.3.tar.gz"

    maintainers("paugier")
    license("CECILL", checked_by="paugier")

    version("0.8.3", sha256="2c829486d640f921e42b690b824fe52ce6fcb678a36535f06d76b872e353d211")
    version("0.8.2", sha256="62a8b43fc7ede8c6efc5cc109ae5caca2c1f54891dff547511c8fe94caf0bd7c")
    version("0.8.1", sha256="3dfb51d5db1a574089738a4b8e1c76e75da32b25dceb349207dcece73d1b1646")
    version("0.8.0", sha256="4b7a23649df9d10cde6510280fb8683550549d4cbbc1ebb0bc6adc6e559915f7")

    depends_on("python@3.9:", type=("build", "run"))
    depends_on("py-flit-core@3.2:3", type="build")

    with default_args(type="run"):
        depends_on("py-fluiddyn")
        depends_on("py-importlib_metadata", when="^python@:3.9")

@@ -1,43 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *
from spack.pkg.builtin.py_fluidsim_core import PyFluidsimCore


class PyFluidsim(PythonPackage):
    """Framework for studying fluid dynamics with simulations."""

    pypi = "fluidsim/fluidsim-0.8.3.tar.gz"

    maintainers("paugier")
    license("CECILL", checked_by="paugier")

    version("0.8.3", sha256="ff3df8c2e8c96a694b5656125e778fc5f6561699bae3b264cbb75e2070b94169")
    version("0.8.2", sha256="eb36c2d7d588fbb088af026683a12bb14aa126bbbc91b999009130d6cb7920f9")
    version("0.8.1", sha256="44c70f388c429856f5df24705cddb2e024d7d1376d2153e113ef111af90b857b")
    version("0.8.0", sha256="01f6d489ce44fe4dc47357506ba227ae0e87b346758d8f067c13f319d0a9a881")

    variant("native", default=False, description="Compile with -march=native and -Ofast.")

    with default_args(type=("build", "run")):
        extends("python@3.9:")
        depends_on("py-transonic@0.6.4:")

    with default_args(type="build"):
        depends_on("py-meson-python")
        depends_on("py-pythran@0.9.7:")

    with default_args(type="run"):
        for _v in PyFluidsimCore.versions:
            depends_on(f"py-fluidsim-core@{_v}", when=f"@{_v}")
        depends_on("py-fluidfft@0.4.0:")
        depends_on("py-xarray")
        depends_on("py-rich")
        depends_on("py-scipy")

    def config_settings(self, spec, prefix):
        settings = {"setup-args": {"-Dnative": spec.variants["native"].value}}
        return settings

@@ -17,8 +17,6 @@ class PyProtobuf(PythonPackage):
    homepage = "https://developers.google.com/protocol-buffers/"
    pypi = "protobuf/protobuf-3.11.0.tar.gz"

    version("5.28.2", sha256="59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0")
    version("5.27.5", sha256="7fa81bc550201144a32f4478659da06e0b2ebe4d5303aacce9a202a1c3d5178d")
    version("5.26.1", sha256="8ca2a1d97c290ec7b16e4e5dff2e5ae150cc1582f55b5ab300d45cb0dfa90e51")
    version("4.25.3", sha256="25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c")
    version("4.24.3", sha256="12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d")
@@ -64,18 +62,38 @@ class PyProtobuf(PythonPackage):
    version("3.3.0", sha256="1cbcee2c45773f57cb6de7ee0eceb97f92b9b69c0178305509b162c0160c1f04")
    version("3.0.0", sha256="ecc40bc30f1183b418fe0ec0c90bc3b53fa1707c4205ee278c6b90479e5b6ff5")

    depends_on("c", type="build")
    depends_on("c", type="build")  # generated

    variant("cpp", default=False, when="@:4.21", description="Enable the cpp implementation")

    depends_on("python", type=("build", "link", "run"))
    depends_on("py-setuptools", type=("build", "run"))
    # in newer pip versions --install-option does not exist
    depends_on("py-pip@:23.0", when="+cpp", type=("build", "run"))
    depends_on("py-six@1.9:", when="@3.0:3.17", type=("build", "run"))

    # Minor version must match protobuf
    for ver in range(26, 29):
        depends_on(f"protobuf@3.{ver}", when=f"@5.{ver}")
    for ver in range(21, 26):
        depends_on(f"protobuf@3.{ver}", when=f"@4.{ver}")
    for ver in range(0, 21):
        depends_on(f"protobuf@3.{ver}", when=f"@3.{ver}")
    # Setup dependencies for protobuf to use the same minor version as py-protobuf
    # Handle mapping the 4.x release to the protobuf 3.x releases
    depends_on("protobuf@3.21", when="+cpp @4.21")
    # Handle the 3.x series releases
    for ver in list(range(0, 21)):
        depends_on(f"protobuf@3.{ver}", when=f"@3.{ver}+cpp")

    conflicts("%gcc@14:", when="@:4.24.3")
    conflicts("+cpp", when="^python@3.11:")
    conflicts("%gcc@14", when="@:4.24.3")

    @property
    def build_directory(self):
        if self.spec.satisfies("@3.1.0"):
            return "python"
        else:
            return "."

    @when("+cpp")
    def setup_build_environment(self, env):
        protobuf_dir = self.spec["protobuf"].libs.directories[0]
        env.prepend_path("LIBRARY_PATH", protobuf_dir)

    @when("+cpp")
    def install_options(self, spec, prefix):
        return ["--cpp_implementation"]

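Aside: the range loops above are a compact way to pin each py-protobuf minor series to the protobuf release with the same minor number. Printing their expansion makes the mapping explicit:

    # What the loops above expand to (each line is one generated directive).
    for ver in range(26, 29):
        print(f'depends_on("protobuf@3.{ver}", when="@5.{ver}")')
    for ver in range(21, 26):
        print(f'depends_on("protobuf@3.{ver}", when="@4.{ver}")')
    # e.g. depends_on("protobuf@3.26", when="@5.26") ... through @4.25
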
@@ -13,19 +13,15 @@ class PySnakemakeExecutorPluginClusterGeneric(PythonPackage):
    homepage = "https://github.com/snakemake/snakemake-executor-plugin-cluster-generic"
    pypi = (
        "snakemake_executor_plugin_cluster_generic/"
        "snakemake_executor_plugin_cluster_generic-1.0.9.tar.gz"
        "snakemake_executor_plugin_cluster_generic-1.0.7.tar.gz"
    )

    license("MIT")

    version("1.0.9", sha256="ad0dc2d8bde7d4f336364bebe11a3b2209653c481ce8fbb0ae8bec81016a9a14")
    version("1.0.7", sha256="093808e63cc48294a9d1eb0b620cdff8cc970806294a2f6ba127a49f8a81d473")

    depends_on("py-snakemake-interface-common@1.13:1", type=("build", "run"))
    depends_on("py-snakemake-interface-executor-plugins@9", type=("build", "run"), when="@1.0.9:")
    depends_on(
        "py-snakemake-interface-executor-plugins@8.1:8", type=("build", "run"), when="@:1.0.8"
    )
    depends_on("py-snakemake-interface-executor-plugins@8.1:8", type=("build", "run"))

    depends_on("python@3.11:3", type=("build", "run"))
    depends_on("py-poetry-core", type="build")

@@ -13,19 +13,15 @@ class PySnakemakeExecutorPluginClusterSync(PythonPackage):
    homepage = "https://github.com/snakemake/snakemake-executor-plugin-cluster-sync"
    pypi = (
        "snakemake_executor_plugin_cluster_sync/"
        "snakemake_executor_plugin_cluster_sync-0.1.4.tar.gz"
        "snakemake_executor_plugin_cluster_sync-0.1.3.tar.gz"
    )

    license("MIT")

    version("0.1.4", sha256="6a6dcb2110d4c2ee74f9a48ea68e0fd7ddd2800672ebef00a01faa4affa835ad")
    version("0.1.3", sha256="c30fca6ccb98a3f7ca52ca8a95414c71360a3d4a835bd4a097a13445d6fce2ac")

    depends_on("py-snakemake-interface-common@1.14:1", type=("build", "run"))
    depends_on("py-snakemake-interface-executor-plugins@9", type=("build", "run"), when="@0.1.4:")
    depends_on(
        "py-snakemake-interface-executor-plugins@8.1:8", type=("build", "run"), when="@:0.1.3"
    )
    depends_on("py-snakemake-interface-executor-plugins@8.1:8", type=("build", "run"))

    depends_on("python@3.11:3", type=("build", "run"))
    depends_on("py-poetry-core", type="build")

@@ -14,21 +14,15 @@ class PySnakemakeExecutorPluginSlurmJobstep(PythonPackage):
    homepage = "https://github.com/snakemake/snakemake-executor-plugin-slurm-jobstep"
    pypi = (
        "snakemake_executor_plugin_slurm_jobstep/"
        "snakemake_executor_plugin_slurm_jobstep-0.2.1.tar.gz"
        "snakemake_executor_plugin_slurm_jobstep-0.1.9.tar.gz"
    )
    maintainers("w8jcik")

    license("MIT")

    version("0.2.1", sha256="58894d52b5998a34fa6f60ec511ff0bfde4a9ec96714bcaa3cd2f46cf8a33859")
    version("0.1.11", sha256="cafdac937796ab0dfc0354c42380167a44a1db00c4edc98ab736a6ace2201a94")
    version("0.1.10", sha256="321b6bdf7883a8fb40ff4aeeb88633502e4db8394e40b6628db41a430c2eae2b")

    depends_on("py-snakemake-interface-common@1.13:1", type=("build", "run"))
    depends_on("py-snakemake-interface-executor-plugins@9", type=("build", "run"), when="@0.1.11:")
    depends_on(
        "py-snakemake-interface-executor-plugins@8.2:8", type=("build", "run"), when="@:0.1.10"
    )
    depends_on("py-snakemake-interface-executor-plugins@8.2:8", type=("build", "run"))

    depends_on("python@3.11:3", type=("build", "run"))
    depends_on("py-poetry-core", type="build")

@@ -11,36 +11,17 @@ class PySnakemakeExecutorPluginSlurm(PythonPackage):
    """A Snakemake executor plugin for submitting jobs to a SLURM cluster."""

    homepage = "https://github.com/snakemake/snakemake-executor-plugin-slurm"
    pypi = "snakemake_executor_plugin_slurm/snakemake_executor_plugin_slurm-0.10.0.tar.gz"
    maintainers("w8jcik")
    pypi = "snakemake_executor_plugin_slurm/snakemake_executor_plugin_slurm-0.3.1.tar.gz"

    license("MIT")

    version("0.10.0", sha256="d970bd08e00f1664adbd3c421c956b2ce926359ff10a4d7650c444c1179bec3f")
    version("0.3.2", sha256="3912f2895eab1270d7a42959a2e221ce53428dfffb847e03ec6bc4eead88e30b")

    depends_on("py-throttler@1.2.2:1", type=("build", "run"))

    depends_on("py-snakemake-interface-common@1.13:1", type=("build", "run"))

    depends_on(
        "py-snakemake-interface-executor-plugins@9.1.1:9", type=("build", "run"), when="@0.4.4:"
    )
    depends_on(
        "py-snakemake-interface-executor-plugins@9", type=("build", "run"), when="@0.4.2:0.4.3"
    )
    depends_on(
        "py-snakemake-interface-executor-plugins@8.2:8", type=("build", "run"), when="@:0.4.1"
    )

    depends_on(
        "py-snakemake-executor-plugin-slurm-jobstep@0.2", type=("build", "run"), when="@0.4.4:"
    )
    depends_on(
        "py-snakemake-executor-plugin-slurm-jobstep@0.1.10:0.1",
        type=("build", "run"),
        when="@:0.4.3",
    )
    depends_on("py-snakemake-interface-executor-plugins@8.2:8", type=("build", "run"))
    depends_on("py-snakemake-executor-plugin-slurm-jobstep@0.1.10:0.1", type=("build", "run"))

    depends_on("python@3.11:3", type=("build", "run"))
    depends_on("py-poetry-core", type="build")

@@ -11,12 +11,10 @@ class PySnakemakeInterfaceCommon(PythonPackage):
    """Common functions and classes for Snakemake and its plugins."""

    homepage = "https://github.com/snakemake/snakemake-interface-common"
    pypi = "snakemake_interface_common/snakemake_interface_common-1.17.3.tar.gz"
    maintainers("w8jcik")
    pypi = "snakemake_interface_common/snakemake_interface_common-1.17.1.tar.gz"

    license("MIT")

    version("1.17.3", sha256="cca6e2c728072a285a8e750f00fdd98d9c50063912184c41f8b89e4cab66c7b0")
    version("1.17.1", sha256="555c8218d9b68ddc1046f94a517e7d0f22e15bdc839d6ce149608d8ec137b9ae")

    depends_on("py-argparse-dataclass@2", type=("build", "run"))

@@ -12,12 +12,10 @@ class PySnakemakeInterfaceExecutorPlugins(PythonPackage):
    executor plugins."""

    homepage = "https://github.com/snakemake/snakemake-interface-executor-plugins"
    pypi = "snakemake_interface_executor_plugins/snakemake_interface_executor_plugins-9.2.0.tar.gz"
    maintainers("w8jcik")
    pypi = "snakemake_interface_executor_plugins/snakemake_interface_executor_plugins-8.2.0.tar.gz"

    license("MIT")

    version("9.2.0", sha256="67feaf438a0b8b041ec5f1a1dd859f729036c70c07c9fdad895135f5b949e40a")
    version("8.2.0", sha256="4c74e3e1751bab6b266baf8688e854b8b4c5c5e10f5e34c581f42d69af4ff13b")

    depends_on("py-argparse-dataclass@2", type=("build", "run"))

@@ -12,12 +12,10 @@ class PySnakemakeInterfaceStoragePlugins(PythonPackage):
    plugins."""

    homepage = "https://github.com/snakemake/snakemake-interface-storage-plugins"
    pypi = "snakemake_interface_storage_plugins/snakemake_interface_storage_plugins-3.3.0.tar.gz"
    maintainers("w8jcik")
    pypi = "snakemake_interface_storage_plugins/snakemake_interface_storage_plugins-3.1.0.tar.gz"

    license("MIT")

    version("3.3.0", sha256="203d8f794dfb37d568ad01a6c375fa8beac36df8e488c0f9b9f75984769c362a")
    version("3.1.0", sha256="26e95be235ef2a9716b890ea96c3a9a2e62061c5d72fbb89c2fad2afada87304")

    depends_on("py-wrapt@1.15:1", type=("build", "run"))

@@ -378,6 +378,8 @@ class PyTensorflow(Package, CudaPackage, ROCmPackage, PythonExtension):
    conflicts("+rocm", when="@:2.7.4-a,2.7.4.0:2.11.0-a,2.11.0.0:2.14-a,2.14-z:2.16.1-a,2.16.1-z:")
    # wheel 0.40 upgrades vendored packaging, trips over tensorflow-io-gcs-filesystem identifier
    conflicts("^py-wheel@0.40:", when="@2.11:2.13")
    # Must be matching versions of py-protobuf and protobuf
    conflicts("^py-protobuf~cpp")

    # https://www.tensorflow.org/install/source#tested_build_configurations
    # https://github.com/tensorflow/tensorflow/issues/70199

@@ -57,15 +57,3 @@ def build(self, spec, prefix):
    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        install("rclone", prefix.bin)

    @run_after("install")
    def install_completions(self):
        rclone = Executable(self.prefix.bin.rclone)

        mkdirp(bash_completion_path(self.prefix))
        mkdirp(fish_completion_path(self.prefix))
        mkdirp(zsh_completion_path(self.prefix))

        rclone("genautocomplete", "bash", str(bash_completion_path(self.prefix) / "rclone"))
        rclone("genautocomplete", "fish", str(fish_completion_path(self.prefix) / "rclone.fish"))
        rclone("genautocomplete", "zsh", str(zsh_completion_path(self.prefix) / "_rclone"))

@@ -44,19 +44,3 @@ def build(self, spec, prefix):
    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        install("restic", prefix.bin)

    @run_after("install")
    def install_completions(self):
        restic = Executable(self.prefix.bin.restic)

        mkdirp(bash_completion_path(self.prefix))
        mkdirp(fish_completion_path(self.prefix))
        mkdirp(zsh_completion_path(self.prefix))

        restic("generate", "--bash-completion", "restic.bash")
        restic("generate", "--fish-completion", "restic.fish")
        restic("generate", "--zsh-completion", "_restic")

        install("restic.bash", bash_completion_path(self.prefix))
        install("restic.fish", fish_completion_path(self.prefix))
        install("_restic", zsh_completion_path(self.prefix))

@@ -26,19 +26,3 @@ class Ripgrep(CargoPackage):
    version("11.0.2", sha256="0983861279936ada8bc7a6d5d663d590ad34eb44a44c75c2d6ccd0ab33490055")

    depends_on("rust@1.72:", type="build", when="@14:")

    @run_after("install")
    def install_completions(self):
        rg = Executable(self.prefix.bin.rg)

        mkdirp(bash_completion_path(self.prefix))
        with open(bash_completion_path(self.prefix) / "rg", "w") as file:
            rg("--generate", "complete-bash", output=file)

        mkdirp(fish_completion_path(self.prefix))
        with open(fish_completion_path(self.prefix) / "rg.fish", "w") as file:
            rg("--generate", "complete-fish", output=file)

        mkdirp(zsh_completion_path(self.prefix))
        with open(zsh_completion_path(self.prefix) / "_rg", "w") as file:
            rg("--generate", "complete-zsh", output=file)

@@ -92,7 +92,7 @@ class Rust(Package):
    depends_on("rust-bootstrap@1.73:1.74", type="build", when="@1.74")
    depends_on("rust-bootstrap@1.74:1.75", type="build", when="@1.75")
    depends_on("rust-bootstrap@1.77:1.78", type="build", when="@1.78")
    depends_on("rust-bootstrap@1.80:1.81", type="build", when="@1.81")
    depends_on("rust-bootstrap@1.80:1.81", type="build", when="@1.80")

    # src/llvm-project/llvm/cmake/modules/CheckCompilerVersion.cmake
    conflicts("%gcc@:7.3", when="@1.73:", msg="Host GCC version must be at least 7.4")

@@ -21,9 +21,6 @@ class SbclBootstrap(Package):

    maintainers("ashermancinelli")

    # sbcl-bootstrap is not available on Windows, but is depended on by sbcl:
    skip_version_audit = ["platform=windows"]

    # NOTE: The sbcl homepage lists
    # while the sourceforge repo lists "Public Domain, MIT License", the
    # COPYING file distributed with the source code contains this message:
@@ -39,45 +36,25 @@ class SbclBootstrap(Package):
    # By checking objdump -T of the sbcl binary in each prebuilt tarball, I
    # found the latest reference to glibc for each version.
    sbcl_releases = {
        "2.4.0": {
            "darwin": {"arm64": "1d01fac2d9748f769c9246a0a11a2c011d7843337f8f06ca144f5a500e10c117"}
        },
        "2.3.11": {
            "linux": {
                "x86_64": "98784b04f68882b887984242eef73dbb092ec5c778dd536b2c60846715e03f3c",
                "min_glibc": "2.34",
            }
            "x86_64": "98784b04f68882b887984242eef73dbb092ec5c778dd536b2c60846715e03f3c",
            "min_glibc": "2.34",
        },
        # TODO(ashermancinelli): I don't have a machine to test this on, but the binaries are
        # available.
        # "2.2.9": {
        #     "darwin": {
        #         "x86_64": "0000000000000000000000000000000000000000000000000000000000000000"
        #     }
        # },
        "2.0.11": {
            "linux": {
                "x86_64": "b7e61bc6b8d238f8878e660bc0635e99c2ea1255bfd6153d702fe9a00f8138fd",
                "min_glibc": "2.28",
            }
            "x86_64": "b7e61bc6b8d238f8878e660bc0635e99c2ea1255bfd6153d702fe9a00f8138fd",
            "min_glibc": "2.28",
        },
        "1.4.16": {
            "linux": {
                "x86_64": "df3d905d37656a7eeeba72d703577afc94a21d756a4dde0949310200f82ce575",
                "min_glibc": "2.14",
            }
            "x86_64": "df3d905d37656a7eeeba72d703577afc94a21d756a4dde0949310200f82ce575",
            "min_glibc": "2.14",
        },
        "1.4.2": {
            "linux": {
                "aarch64": "ddac6499f36c18ecbce9822a53ef3914c0def5276a457446a456c62999b16d36",
                "min_glibc": "2.17",
            }
            "aarch64": "ddac6499f36c18ecbce9822a53ef3914c0def5276a457446a456c62999b16d36",
            "min_glibc": "2.17",
        },
        "1.3.21": {
            "linux": {
                "x86_64": "c1c3e17e1857fb1c22af575941be5cd1d5444b462397b1b3c9f3877aee2e814b",
                "min_glibc": "2.3",
            }
            "x86_64": "c1c3e17e1857fb1c22af575941be5cd1d5444b462397b1b3c9f3877aee2e814b",
            "min_glibc": "2.3",
        },
    }

@@ -85,29 +62,26 @@ class SbclBootstrap(Package):
    target = platform.machine().lower()

    for ver in sbcl_releases:
        if os in sbcl_releases[ver]:
            if target in sbcl_releases[ver][os]:
                version(ver, sha256=sbcl_releases[ver][os][target])
                if "min_glibc" in sbcl_releases[ver][os]:
                    conflicts(
                        "glibc@:{0}".format(sbcl_releases[ver][os]["min_glibc"]),
                        when="@{0}".format(ver),
                    )
        if target in sbcl_releases[ver]:
            version(ver, sha256=sbcl_releases[ver][target])
            if "min_glibc" in sbcl_releases[ver]:
                conflicts(
                    "glibc@:{0}".format(sbcl_releases[ver]["min_glibc"]), when="@{0}".format(ver)
                )

    supported_sysinfo_msg = (
        "Not a supported platform. See https://www.sbcl.org/platform-table.html"
    )
    for sysinfo in ["platform=windows", "target=ppc64le"]:
    supported_sysinfo_msg = "linux x86_64 is the only supported platform"
    for sysinfo in ["platform=darwin", "platform=windows", "target=ppc64le"]:
        conflicts(sysinfo, msg=supported_sysinfo_msg)

    def url_for_version(self, version):
        if self.os != "linux":
            return None
        target = platform.machine().lower()
        os = platform.system().lower()
        sbcl_targets = {"arm64": "arm64", "aarch64": "arm64", "x86_64": "x86-64"}
        sbcl_targets = {"aarch64": "arm64", "x86_64": "x86-64"}
        if target not in sbcl_targets:
            return None
        sbcl_url = "https://sourceforge.net/projects/sbcl/files/sbcl/{version}/sbcl-{version}-{target}-{os}-binary.tar.bz2"
        return sbcl_url.format(version=version, target=sbcl_targets[target], os=os)
        sbcl_url = "https://sourceforge.net/projects/sbcl/files/sbcl/{version}/sbcl-{version}-{target}-linux-binary.tar.bz2"
        return sbcl_url.format(version=version, target=sbcl_targets[target])

    def install(self, spec, prefix):
        sh = which("sh")

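Aside: the restructured release table above is keyed first by OS, then by target, with an optional min_glibc bound. A standalone sketch of the lookup the class-level loop performs (sha truncated for brevity):

    # One entry copied from the table above, sha truncated for the sketch.
    sbcl_releases = {
        "2.3.11": {
            "linux": {"x86_64": "98784b04...", "min_glibc": "2.34"},
        },
    }

    os, target = "linux", "x86_64"
    for ver, per_os in sbcl_releases.items():
        entry = per_os.get(os, {})
        if target in entry:
            # version() + conflicts() would be registered here in the package
            print(ver, entry[target], "requires glibc newer than", entry.get("min_glibc"))
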
@@ -1,82 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *
from spack.util.environment import set_env


class Sbcl(MakefilePackage):
    """Steel Bank Common Lisp (SBCL) is a high performance Common Lisp compiler.
    It is open source / free software, with a permissive license. In addition
    to the compiler and runtime system for ANSI Common Lisp, it provides an
    interactive environment including a debugger, a statistical profiler, a
    code coverage tool, and many other extensions.
    """

    homepage = "https://www.sbcl.org/"
    url = "https://sourceforge.net/projects/sbcl/files/sbcl/2.4.8/sbcl-2.4.8-source.tar.bz2"
    git = "git://git.code.sf.net/p/sbcl/sbcl"

    maintainers("ashermancinelli")

    # NOTE: The sbcl homepage lists
    # while the sourceforge repo lists "Public Domain, MIT License", the
    # COPYING file distributed with the source code contains this message:
    #
    # > Thus, there are no known obstacles to copying, using, and modifying
    # > SBCL freely, as long as copyright notices of MIT, Symbolics, Xerox and
    # > Gerd Moellmann are retained.
    #
    # MIT seems the most appropriate, but if we can add more context to this
    # license message, then we should.
    license("MIT", checked_by="ashermancinelli")

    version("master", branch="master")
    version("2.4.8", sha256="fc6ecdcc538e80a14a998d530ccc384a41790f4f4fc6cd7ffe8cb126a677694c")

    depends_on("c", type="build")
    depends_on("sbcl-bootstrap", type="build")
    depends_on("zstd", when="platform=darwin")

    variant(
        "fancy", default=True, description="Enable extra features like compression and threading."
    )

    # TODO(ashermancinelli): there's nothing on the platform support page that
    # makes me think this shouldn't build, but I can't
    # get the sbcl binary to link with gcc on darwin.
    conflicts(
        "+fancy%gcc",
        when="platform=darwin",
        msg="Cannot build with gcc on darwin because pthreads will fail to link",
    )

    phases = ["build", "install"]

    def build(self, spec, prefix):
        sh = which("sh")

        version_str = str(spec.version)

        # NOTE: add any other git versions here.
        # When installing from git, the build system expects a dummy version
        # to be provided as a lisp expression.
        if version_str in ("master",):
            with open("version.lisp-expr", "w") as f:
                f.write(f'"{version_str}"')

        build_args = []
        build_args.append("--prefix={0}".format(prefix))

        if "+fancy" in self.spec:
            build_args.append("--fancy")

        sbcl_bootstrap_prefix = self.spec["sbcl-bootstrap"].prefix.lib.sbcl
        with set_env(SBCL_HOME=sbcl_bootstrap_prefix):
            sh("make.sh", *build_args)

    def install(self, spec, prefix):
        sh = which("sh")
        sh("install.sh")

@@ -113,11 +113,6 @@ def fix_mksquashfs_path(self):
            "mksquashfs path = {0}".format(squash_path),
            join_path(prefix.etc, self.singularity_name, self.singularity_name + ".conf"),
        )
        filter_file(
            r"^shared loop devices = no",
            "shared loop devices = yes",
            join_path(prefix.etc, self.singularity_name, self.singularity_name + ".conf"),
        )

    #
    # Assemble a script that fixes the ownership and permissions of several

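Aside: filter_file edits a file in place with a regex substitution. A plain re-based sketch of the singularity.conf tweak above (a simplified stand-in; Spack's helper also supports backups and literal strings):

    import re

    def filter_file(pattern, repl, path):
        with open(path) as f:
            text = f.read()
        with open(path, "w") as f:
            f.write(re.sub(pattern, repl, text, flags=re.MULTILINE))

    # e.g. filter_file(r"^shared loop devices = no", "shared loop devices = yes", conf_path)
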
@@ -10,12 +10,11 @@ class Snakemake(PythonPackage):
    """Workflow management system to create reproducible and scalable data analyses."""

    homepage = "https://snakemake.readthedocs.io/en"
    pypi = "snakemake/snakemake-8.18.2.tar.gz"
    maintainers("marcusboden", "w8jcik")
    pypi = "snakemake/snakemake-8.4.12.tar.gz"
    maintainers("marcusboden")

    license("MIT")

    version("8.18.2", sha256="7dc8cdc3c836444c2bc3d67a4a7f4d703557c1bf96a90da18f312f4df9daefc4")
    version("8.5.2", sha256="cc94876263182277e4a429e5d371c867400eeddc791c114dfd090d1bb3158975")
    version("7.32.4", sha256="fdc3f15dd7b06fabb7da30d460e0a3b1fba08e4ea91f9c32c47a83705cdc7b6e")
    version("7.31.1", sha256="6fadcc9a051737aa187dccf437879b3b83ddc917fff9bd7d400e056cf17a1788")
@@ -60,42 +59,20 @@ class Snakemake(PythonPackage):
    depends_on("py-pulp@2.3.1:2.8", type=("build", "run"), when="@8.1.2:")
    depends_on("py-pulp@2:", type=("build", "run"), when="@:8.1.1")
    depends_on("py-pyyaml", type=("build", "run"))

    depends_on("py-requests@2.8.1:2", type=("build", "run"), when="@8.4.12")
    depends_on("py-requests", type=("build", "run"))

    depends_on("py-reretry", type=("build", "run"), when="@7:")

    depends_on("py-smart-open@4:7", type=("build", "run"), when="@8.11:")
    depends_on("py-smart-open@3:7", type=("build", "run"), when="@8.8:8.10")
    depends_on("py-smart-open@3:6", type=("build", "run"), when="@8.4.12:8.7")
    depends_on("py-smart-open@3:6", type=("build", "run"), when="@8.4.12:")
    depends_on("py-smart-open@3:", type=("build", "run"))

    depends_on(
        "py-snakemake-interface-executor-plugins@9.2:9", type=("build", "run"), when="@8.15.0:"
        "py-snakemake-interface-executor-plugins@8.1.3:8", type=("build", "run"), when="@8:"
    )
    depends_on(
        "py-snakemake-interface-executor-plugins@9.1:9", type=("build", "run"), when="@8.10.1:"
    )
    depends_on(
        "py-snakemake-interface-executor-plugins@9.0.2:9", type=("build", "run"), when="@8.10:"
    )
    depends_on("py-snakemake-interface-executor-plugins@9", type=("build", "run"), when="@8.6:")
    depends_on(
        "py-snakemake-interface-executor-plugins@8.1.3:8", type=("build", "run"), when="@8:8.5"
    )

    depends_on("py-snakemake-interface-common@1.17:1", type=("build", "run"), when="@8.4.10:")
    depends_on("py-snakemake-interface-common@1.15:1", type=("build", "run"), when="@8:")

    depends_on(
        "py-snakemake-interface-storage-plugins@3.2.3:3", type=("build", "run"), when="@8.15.1:"
    )
    depends_on(
        "py-snakemake-interface-storage-plugins@3.1:3", type=("build", "run"), when="@8.4.10:"
    )
    depends_on("py-snakemake-interface-storage-plugins@3", type=("build", "run"), when="@8:")

    depends_on("py-snakemake-interface-report-plugins@1", type=("build", "run"), when="@8.5:")
    depends_on("py-stopit", type=("build", "run"))
    depends_on("py-tabulate", type=("build", "run"))

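Aside: the stacked, version-windowed directives above work because every depends_on whose `when` range matches the package version applies simultaneously, so the effective constraint is the intersection of all active ranges. A crude sketch of which windows activate for snakemake 8.18.2 (the range check here is a simplification of Spack's version semantics):

    def version_at_least(version, lower):
        # crude "@X.Y:" lower-bound check on dotted versions; illustrative only
        key = lambda v: [int(p) for p in v.split(".")]
        return key(version) >= key(lower)

    active = [rng for lower, rng in [
        ("8.15.0", "@9.2:9"),
        ("8.10.1", "@9.1:9"),
        ("8.6", "@9"),
    ] if version_at_least("8.18.2", lower)]
    print(active)  # all three apply; their intersection is @9.2:9
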
@@ -1,33 +0,0 @@
diff -Naur spack-src.orig/packages/kokkos/core/src/impl/Kokkos_MemoryPool.cpp spack-src/packages/kokkos/core/src/impl/Kokkos_MemoryPool.cpp
--- spack-src.orig/packages/kokkos/core/src/impl/Kokkos_MemoryPool.cpp 2024-09-02 10:53:10.022724683 +0200
+++ spack-src/packages/kokkos/core/src/impl/Kokkos_MemoryPool.cpp 2024-09-02 10:57:03.312228742 +0200
@@ -44,6 +44,7 @@

 #include <impl/Kokkos_Error.hpp>

+#include <cstdint>
 #include <ostream>
 #include <sstream>

diff -Naur spack-src.orig/packages/teuchos/core/src/Teuchos_BigUIntDecl.hpp spack-src/packages/teuchos/core/src/Teuchos_BigUIntDecl.hpp
--- spack-src.orig/packages/teuchos/core/src/Teuchos_BigUIntDecl.hpp 2024-09-02 10:53:50.098010896 +0200
+++ spack-src/packages/teuchos/core/src/Teuchos_BigUIntDecl.hpp 2024-09-02 10:51:17.777157344 +0200
@@ -42,6 +42,7 @@
 #ifndef TEUCHOS_BIG_UINT_DECL_HPP
 #define TEUCHOS_BIG_UINT_DECL_HPP

+#include <cstdint>
 #include <iosfwd>

 /*! \file Teuchos_BigUIntDecl.hpp
diff -Naur spack-src.orig/packages/teuchos/core/src/Teuchos_PrintDouble.cpp spack-src/packages/teuchos/core/src/Teuchos_PrintDouble.cpp
--- spack-src.orig/packages/teuchos/core/src/Teuchos_PrintDouble.cpp 2024-09-02 10:54:02.240401775 +0200
+++ spack-src/packages/teuchos/core/src/Teuchos_PrintDouble.cpp 2024-09-02 10:51:34.110672927 +0200
@@ -42,6 +42,7 @@
 #include "Teuchos_PrintDouble.hpp"
 #include "Teuchos_BigUInt.hpp"

+#include <cstdint>
 #include <cstring>

 namespace Teuchos {
@@ -516,7 +516,6 @@ class Trilinos(CMakePackage, CudaPackage, ROCmPackage):
        "0001-use-the-gcnArchName-inplace-of-gcnArch-as-gcnArch-is.patch",
        when="@15.0.0 ^hip@6.0 +rocm",
    )
    patch("cstdint_gcc13.patch", when="@13.4.0:13.4.1 %gcc@13.0.0:")

    # Allow building with +teko gotype=long
    patch(

@@ -9,18 +9,13 @@
class Turnserver(AutotoolsPackage):
    """coturn TURN server project."""

    homepage = "https://github.com/coturn/coturn"
    url = "https://github.com/coturn/coturn/archive/4.6.2.tar.gz"
    homepage = "https://coturn.net/turnserver"
    url = "https://coturn.net/turnserver/v4.5.1.3/turnserver-4.5.1.3.tar.gz"

    license("BSD-3-Clause", checked_by="wdconinc")
    license("OpenSSL")

    version("4.6.2", sha256="408bf7fde455d641bb2a23ba2df992ea0ae87b328de74e66e167ef58d8e9713a")
    version(
        "4.5.1.3",
        sha256="408bf7fde455d641bb2a23ba2df992ea0ae87b328de74e66e167ef58d8e9713a",
        url="https://coturn.net/turnserver/v4.5.1.3/turnserver-4.5.1.3.tar.gz",
    )
    version("4.5.1.3", sha256="408bf7fde455d641bb2a23ba2df992ea0ae87b328de74e66e167ef58d8e9713a")

    depends_on("c", type="build")
    depends_on("c", type="build")  # generated

    depends_on("libevent")