Compare commits


16 Commits

Author SHA1 Message Date
Gregory Becker
1b4ff30665 clean up post rebase 2023-06-14 11:18:41 -07:00
Gregory Becker
c10797718a fixup after rebase 2023-06-13 12:11:27 -07:00
becker33
81ea29b007 [@spackbot] updating style on behalf of becker33 2023-06-13 11:54:00 -07:00
Gregory Becker
d8666a7fdf !fixup 511dd5a968c23f3f0c660b0cc5216415e60c6018 2023-06-13 11:54:00 -07:00
Gregory Becker
7c41bba6f8 error causation in separate file loaded for debug only 2023-06-13 11:53:58 -07:00
Gregory Becker
20e9fe3785 solver: nodes directly caused by dependency conditions 2023-06-13 11:51:01 -07:00
Gregory Becker
401218b4f1 solver: reorder code so it's easier to instrument for debugging 2023-06-13 11:50:57 -07:00
Gregory Becker
adfc1c0896 error messages: add causation for conflicting variants error 2023-06-13 11:49:45 -07:00
Gregory Becker
f4402c1cde error messages: print cause tree as part of error 2023-06-13 11:48:02 -07:00
Gregory Becker
c1d6d93388 error-chaining: expand to additional error messages 2023-06-13 11:44:54 -07:00
Gregory Becker
e9012c7781 solver: first prototype of chained error messages for one message type 2023-06-13 11:42:05 -07:00
Gregory Becker
59acfe4f0b solver: treat literals as conditions 2023-06-13 11:28:47 -07:00
Gregory Becker
004ff9d4e2 error messages: compute connections between conditions 2023-06-13 11:28:47 -07:00
Gregory Becker
9d20be5fe5 solver: remove indirection for dependency conditions 2023-06-13 11:28:47 -07:00
Gregory Becker
edc07dab27 solver: remove indirection for external conditions 2023-06-13 11:28:47 -07:00
Gregory Becker
acde8ef104 solver: remove indirection for provider conditions 2023-06-13 11:28:47 -07:00
226 changed files with 1750 additions and 3351 deletions

View File

@@ -5,8 +5,3 @@ updates:
directory: "/"
schedule:
interval: "daily"
# Requirements to build documentation
- package-ecosystem: "pip"
directory: "/lib/spack/docs"
schedule:
interval: "daily"

View File

@@ -49,7 +49,7 @@ jobs:
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
[fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
name: Build ${{ matrix.dockerfile[0] }}

View File

@@ -1,16 +1,10 @@
version: 2
build:
os: "ubuntu-22.04"
apt_packages:
- graphviz
tools:
python: "3.11"
sphinx:
configuration: lib/spack/docs/conf.py
fail_on_warning: true
python:
version: 3.7
install:
- requirements: lib/spack/docs/requirements.txt

View File

@@ -1,16 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.styles.default import DefaultStyle
from pygments.token import Generic
class SpackStyle(DefaultStyle):
styles = DefaultStyle.styles.copy()
background_color = "#f4f4f8"
styles[Generic.Output] = "#355"
styles[Generic.Prompt] = "bold #346ec9"

View File

@@ -149,6 +149,7 @@ def setup(sphinx):
# Get nice vector graphics
graphviz_output_format = "svg"
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
@@ -232,8 +233,30 @@ def setup(sphinx):
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
sys.path.append("./_pygments")
pygments_style = "style.SpackStyle"
# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.style import Style
from pygments.styles.default import DefaultStyle
from pygments.token import Comment, Generic, Text
class SpackStyle(DefaultStyle):
styles = DefaultStyle.styles.copy()
background_color = "#f4f4f8"
styles[Generic.Output] = "#355"
styles[Generic.Prompt] = "bold #346ec9"
import pkg_resources
dist = pkg_resources.Distribution(__file__)
sys.path.append(".") # make 'conf' module findable
ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
dist._ep_map = {"pygments.styles": {"plugin1": ep}}
pkg_resources.working_set.add(dist)
pygments_style = "spack"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@@ -318,15 +341,16 @@ def setup(sphinx):
# Output file base name for HTML help builder.
htmlhelp_basename = "Spackdoc"
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples

View File

@@ -636,7 +636,7 @@ to customize the generation of container recipes:
- No
* - ``os_packages:command``
- Tool used to manage system packages
- ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
- ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
- Only with custom base images
* - ``os_packages:update``
- Whether or not to update the list of available packages

View File

@@ -76,7 +76,6 @@ or refer to the full manual below.
chain
extensions
pipelines
signing
.. toctree::
:maxdepth: 2

View File

@@ -1,8 +1,13 @@
sphinx==6.2.1
sphinxcontrib-programoutput==0.17
sphinx_design==0.4.1
sphinx-rtd-theme==1.2.1
python-levenshtein==0.21.0
docutils==0.18.1
pygments==2.15.1
urllib3==2.0.2
# These dependencies should be installed using pip in order
# to build the documentation.
sphinx>=3.4,!=4.1.2,!=5.1.0
sphinxcontrib-programoutput
sphinx-design
sphinx-rtd-theme
python-levenshtein
# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
# https://stackoverflow.com/questions/67542699
docutils <0.17
pygments <2.13
urllib3 <2

View File

@@ -1,484 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _signing:
=====================
Spack Package Signing
=====================
The goal of package signing in Spack is to provide data integrity
assurances around official packages produced by the automated Spack CI
pipelines. These assurances directly address the security of Spack's
software supply chain by explaining why a security-conscious user can
be reasonably justified in the belief that packages installed via Spack
have an uninterrupted auditable trail back to change management
decisions judged to be appropriate by the Spack maintainers. This is
achieved through cryptographic signing of packages built by Spack CI
pipelines based on code that has been transparently reviewed and
approved on GitHub. This document describes the signing process for
interested users.
.. _risks:
------------------------------
Risks, Impact and Threat Model
------------------------------
This document addresses the approach taken to safeguard Spack's
reputation with regard to the integrity of the package data produced by
Spack's CI pipelines. It does not address issues of data confidentiality
(Spack is intended to be largely open source) or availability (efforts
are described elsewhere). With that said, the main reputational risk can
be broadly categorized as a loss of faith in the data integrity due to a
breach of the private key used to sign packages. Remediation of a
private key breach would require republishing the public key with a
revocation certificate, generating a new signing key, an assessment and
potential rebuild/re-signing of all packages since the key was breached,
and finally direct intervention by every Spack user to update their copy
of Spack's public keys used for local verification.
The primary threat model used in mitigating the risks of these stated
impacts is one of individual error, not malicious intent or insider
threat. The primary objective is to avoid the above impacts by making a
private key breach nearly impossible due to oversight or configuration
error. Obvious and straightforward measures are taken to mitigate issues
of malicious interference in data integrity and insider threats, but
these attack vectors are not systematically addressed. It should be hard
to exfiltrate the private key intentionally, and almost impossible to
leak the key by accident.
.. _overview:
-----------------
Pipeline Overview
-----------------
Spack pipelines build software through progressive stages where packages
in later stages nominally depend on packages built in earlier stages.
For both technical and design reasons, these dependencies are not
implemented through the default GitLab artifacts mechanism; instead,
built packages are uploaded to AWS S3 mirrors (buckets) where they are
retrieved by subsequent stages in the pipeline. Two broad categories of
pipelines exist: Pull Request (PR) pipelines and Develop/Release
pipelines.
- PR pipelines are launched in response to pull requests made by
trusted and untrusted users. Packages built on these pipelines upload
code to quarantined AWS S3 locations which cache the built packages
for the purposes of review and iteration on the changes proposed in
the pull request. Packages built on PR pipelines can come from
untrusted users, so signing of these pipelines is not implemented.
Jobs in these pipelines are executed via normal GitLab runners, both
within the AWS GitLab infrastructure and at affiliated institutions.
- Develop and Release pipelines **sign** the packages they produce and carry
strong integrity assurances that trace back to auditable change management
decisions. These pipelines only run after members from a trusted group of
reviewers verify that the proposed changes in a pull request are appropriate.
Once the PR is merged, or a release is cut, a pipeline is run on protected
GitLab runners which provide access to the required signing keys within the
job. Intermediary keys are used to sign packages in each stage of the
pipeline as they are built and a final job officially signs each package
external to any specific package's build environment. An intermediate key
exists in the AWS infrastructure and for each affiliated institution that
maintains protected runners. The runners that execute these pipelines
exclusively accept jobs from protected branches meaning the intermediate keys
are never exposed to unreviewed code and the official keys are never exposed
to any specific build environment.
.. _key_architecture:
----------------
Key Architecture
----------------
Spack's CI process uses public-key infrastructure (PKI) based on GNU Privacy
Guard (gpg) keypairs to sign public releases of Spack package metadata, also
called specs. Two classes of GPG keys are involved in the process to reduce the
impact of an individual private key compromise; these key classes are the
*Intermediate CI Key* and *Reputational Key*. Each of these keys has signing
sub-keys that are used exclusively for signing packages. This can be confusing,
so for the purpose of this explanation we'll refer to Root and Signing keys.
Each key has a private and a public component as well as one or more identities
and zero or more signatures.
-------------------
Intermediate CI Key
-------------------
The Intermediate key class is used to sign and verify packages between stages
within a develop or release pipeline. An intermediate key exists for the AWS
infrastructure as well as each affiliated institution that maintains protected
runners. These intermediate keys are made available to the GitLab execution
environment building the package so that the package's dependencies may be
verified by the Signing Intermediate CI Public Key and the final package may be
signed by the Signing Intermediate CI Private Key.
+---------------------------------------------------------------------------------------------------------+
| **Intermediate CI Key (GPG)** |
+==================================================+======================================================+
| Root Intermediate CI Private Key (RSA 4096)# | Root Intermediate CI Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Signing Intermediate CI Private Key (RSA 4096) | Signing Intermediate CI Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Identity: “Intermediate CI Key <maintainers@spack.io>” |
+---------------------------------------------------------------------------------------------------------+
| Signatures: None |
+---------------------------------------------------------------------------------------------------------+
The *Root Intermediate CI Private Key* is stripped out of the GPG key and
stored offline, completely separate from Spack's infrastructure. This allows the
core development team to append revocation certificates to the GPG key and
issue new sub-keys for use in the pipeline. It is our expectation that this
will happen on a semi-regular basis. A corollary of this is that *this key
should not be used to verify package integrity outside the internal CI process.*
----------------
Reputational Key
----------------
The Reputational Key is the public-facing key used to sign complete groups of
development and release packages. Only one key pair exists in this class of
keys. In contrast to the Intermediate CI Key, the Reputational Key *should* be
used to verify package integrity. At the end of a develop or release pipeline, a
final pipeline job pulls down all signed package metadata built by the pipeline,
verifies that they were signed with an Intermediate CI Key, then strips the
Intermediate CI Key signature from each package and re-signs them with the
Signing Reputational Private Key. The officially signed packages are then
uploaded back to the AWS S3 mirror. Please note that separating use of the
reputational key into this final job is done to prevent leakage of the key in a
Spack package. Because the Signing Reputational Private Key is never exposed to
a build job, it cannot accidentally end up in any built package.
+---------------------------------------------------------------------------------------------------------+
| **Reputational Key (GPG)** |
+==================================================+======================================================+
| Root Reputational Private Key (RSA 4096)# | Root Reputational Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Signing Reputational Private Key (RSA 4096) | Signing Reputational Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Identity: “Spack Project <maintainers@spack.io>” |
+---------------------------------------------------------------------------------------------------------+
| Signatures: Signed by core development team [#f1]_ |
+---------------------------------------------------------------------------------------------------------+
The Root Reputational Private Key is stripped out of the GPG key and stored
offline, completely separate from Spack's infrastructure. This allows the core
development team to append revocation certificates to the GPG key in the
unlikely event that the Signing Reputational Private Key is compromised. In
general, it is the expectation that rotating this key will happen infrequently,
if at all. This should allow relatively transparent verification for the
end-user community without needing deep familiarity with GnuPG or Public Key
Infrastructure.
.. _build_cache_format:
------------------
Build Cache Format
------------------
A binary package consists of a metadata file unambiguously defining the
built package (and including other details such as how to relocate it)
and the installation directory of the package stored as a compressed
archive file. The metadata files can either be unsigned, in which case
the contents are simply the json-serialized concrete spec plus metadata,
or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built, as well as the compiler
used to build it, and the package's name and version. For example::
linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
would contain the concrete spec and binary metadata for a binary package
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
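As a hedged aside (this helper is illustrative, not part of Spack's API), the
id can be recovered from such a filename with a few lines of Python, since it
is always the last ``-``-separated component before the ``.spec.json.sig``
suffix::

    # sketch: extract the package id from a build cache metadata filename
    name = (
        "linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-"
        "llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig"
    )
    stem = name.removesuffix(".spec.json.sig")  # Python 3.9+
    package_id = stem.rsplit("-", 1)[-1]
    assert package_id.startswith("llv2ys")
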
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::
-----BEGIN PGP SIGNED MESSAGE-----
Hash: SHA512
{
"spec": {
<concrete-spec-contents-omitted>
},
"buildcache_layout_version": 1,
"binary_cache_checksum": {
"hash_algorithm": "sha256",
"hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
},
"buildinfo": {
"relative_prefix":
"linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
"relative_rpaths": false
}
}
-----BEGIN PGP SIGNATURE-----
iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
j3DXty57
=3gvm
-----END PGP SIGNATURE-----
If a user has trusted the public key associated with the private key
used to sign the above spec file, the signature can be verified with
gpg, as follows::
$ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
The metadata (regardless of whether it is signed or unsigned) contains the checksum
of the ``.spack`` file containing the actual installation. The checksum should
be compared to a checksum computed locally on the ``.spack`` file to ensure the
contents have not changed since the binary spec plus metadata were signed. The
``.spack`` files are actually tarballs containing the compressed archive of the
install tree. These files, along with the metadata files, live within the
``build_cache`` directory of the mirror, and together are organized as follows::
build_cache/
# unsigned metadata (for indexing, contains sha256 of .spack file)
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
# clearsigned metadata (same as above, but signed)
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
<arch>/
<compiler>/
<name>-<ver>/
# tar.gz-compressed prefix (may support more compression formats later)
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack
Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of Spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file and a nested tarball
containing the install tree.
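The checksum comparison itself is mechanical. A minimal sketch (file names are
hypothetical, and this assumes the unsigned ``.spec.json`` metadata described
above)::

    import hashlib
    import json

    # sketch: compare the locally computed digest of the .spack archive
    # against the checksum recorded in the metadata
    with open("zlib.spec.json") as f:
        metadata = json.load(f)

    digest = hashlib.sha256()  # metadata names the algorithm; sha256 here
    with open("zlib.spack", "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)

    assert digest.hexdigest() == metadata["binary_cache_checksum"]["hash"]
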
.. _internal_implementation:
-----------------------
Internal Implementation
-----------------------
The technical implementation of the pipeline signing process includes components
defined in Amazon Web Services, the Kubernetes cluster, at affiliated
institutions, and the GitLab/GitLab Runner deployment. We present the technical
implementation in two interdependent sections. The first addresses how secrets
are managed through the lifecycle of a develop or release pipeline. The second
section describes how GitLab Runner and pipelines are configured and managed to
support secure automated signing.
Secrets Management
^^^^^^^^^^^^^^^^^^
As stated above the Root Private Keys (intermediate and reputational)
are stripped from the GPG keys and stored outside Spack's
infrastructure.
.. warning::
**TODO**
- Explanation here about where and how access is handled for these keys.
- Both Root private keys are protected with strong passwords
- Who has access to these and how?
**Intermediate CI Key**
-----------------------
Multiple intermediate CI signing keys exist: one Intermediate CI Key for jobs
run in AWS, and one key for each affiliated institution (e.g. University of
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:
The Intermediate CI Key (including the Signing Intermediate CI Private Key) is
exported as an ASCII-armored file and stored in a Kubernetes secret called
``spack-intermediate-ci-signing-key``. For convenience's sake, this same secret
contains an ASCII-armored export of just the *public* components of the
Reputational Key. This secret also contains the *public* components of each of
the affiliated institutions' Intermediate CI Keys. These are potentially needed
to verify dependent packages which may have been found in the public mirror or
built by a protected job running on an affiliated institution's infrastructure
in an earlier stage of the pipeline.
Procedurally, the ``spack-intermediate-ci-signing-key`` secret is used in
the following way:
1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
a job tagged ``protected`` from a protected GitLab branch. (See
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
2. Based on its configuration, the runner creates a job Pod in the
pipeline namespace and mounts the ``spack-intermediate-ci-signing-key``
Kubernetes secret into the build container.
3. The Intermediate CI Key, affiliated institutions' public keys and the
Reputational Public Key are imported into a keyring by the ``spack gpg …``
sub-command. This is initiated by the job's build script, which is created by
the generate job at the beginning of the pipeline.
4. Assuming the package has dependencies, those specs are verified using
the keyring.
5. The package is built and the spec.json is generated.
6. The spec.json is signed by the keyring and uploaded to the mirror's
build cache.
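At the gpg level, steps 3-6 amount to roughly the following (a hedged sketch
with hypothetical file names; in practice the keyring handling is driven by
the generated build script via the ``spack gpg …`` sub-command)::

    import subprocess

    # import the intermediate CI key material into the job's keyring
    subprocess.run(["gpg", "--import", "/mnt/keys/intermediate_ci.asc"], check=True)
    # verify a dependency's clearsigned metadata before trusting it
    subprocess.run(["gpg", "--verify", "dep.spec.json.sig"], check=True)
    # after the build, wrap the freshly generated spec.json in a
    # cleartext signature
    subprocess.run(["gpg", "--clearsign", "zlib.spec.json"], check=True)
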
**Reputational Key**
--------------------
Because of the increased impact to end users in the case of a private
key breach, the Reputational Key is managed separately from the
Intermediate CI Keys and has additional controls. First, the Reputational
Key was generated outside of Spack's infrastructure and has been signed
by the core development team. The Reputational Key (along with the
Signing Reputational Private Key) was then ASCII-armor exported to a
file. Unlike the Intermediate CI Key, this exported file is not stored as
a base64-encoded secret in Kubernetes. Instead, *the key file
itself* is encrypted and stored in Kubernetes as the
``spack-signing-key-encrypted`` secret in the pipeline namespace.
The encryption of the exported Reputational Key (including the Signing
Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
keys
<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
The private key material is decrypted and imported at the time of signing into a
memory mounted temporary directory holding the keychain. The signing job uses
the `AWS Encryption SDK
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
decrypt the key is granted to the job Pod through a Kubernetes service account
specifically used for this, and only this, function. Finally, for convenience's
sake, this same secret contains an ASCII-armored export of the *public*
components of the Intermediate CI Keys and the Reputational Key. This allows the
signing script to verify that packages were built by the pipeline (both on AWS
and at affiliated institutions), or signed previously as a part of a different
pipeline. This is done *before* decrypting and importing the
Signing Reputational Private Key material and officially signing the packages.
Procedurally, the ``spack-signing-key-encrypted`` secret is used in the
following way:
1. The ``spack-package-signing-gitlab-runner`` protected runner picks
up a job tagged ``notary`` from a protected GitLab branch (See
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
2. Based on its configuration, the runner creates a job pod in the
pipeline namespace. The job is run in a stripped-down, purpose-built
Docker image, ``ghcr.io/spack/notary:latest``. The runner is
configured to only allow running jobs with this image.
3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
a path on disk. Note that this becomes several files on disk: the
public components of the Intermediate CI Keys, the public components
of the Reputational Key, and an AWS KMS-encrypted file containing the
Signing Reputational Private Key.
4. In addition to the secret, the runner creates a tmpfs memory-mounted
directory where the GnuPG keyring will be created to verify, and
then re-sign, the package specs.
5. The job script syncs all spec.json.sig files from the build cache to
a working directory in the job's execution environment.
6. The job script then runs the ``sign.sh`` script built into the
notary Docker image.
7. The ``sign.sh`` script imports the public components of the
Reputational and Intermediate CI Keys and uses them to verify good
signatures on the spec.json.sig files. If any signed spec does not
verify, the job immediately fails.
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
the spec json data from the signed file in preparation for being
re-signed with the Reputational Key.
9. The private components of the Reputational Key are decrypted to
standard out using ``aws-encryption-cli`` directly into a ``gpg
import …`` statement, which imports the key into the
keyring mounted in-memory (a rough sketch of this follows the list).
10. The private key is then used to sign each of the json specs and the
keyring is removed from disk.
11. The re-signed json specs are resynced to the AWS S3 Mirror and the
public signing of the packages for the develop or release pipeline
that created them is complete.
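A rough sketch of step 9 (the ``aws-encryption-cli`` arguments and paths are
illustrative assumptions, not the actual ``sign.sh`` contents): the decrypted
key material is piped straight into gpg so the private key never touches disk::

    import subprocess

    # decrypt the exported Reputational Key to standard out ...
    decrypt = subprocess.Popen(
        ["aws-encryption-cli", "--decrypt",
         "-i", "/secrets/signing_key.encrypted", "-o", "-"],
        stdout=subprocess.PIPE,
    )
    # ... and import it directly into the tmpfs-backed keyring
    subprocess.run(
        ["gpg", "--homedir", "/tmp/keyring", "--import"],
        stdin=decrypt.stdout,
        check=True,
    )
    decrypt.wait()
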
Non-service-account access to the private components of the Reputational
Key is managed through access to the symmetric secret in KMS used
to encrypt the data key (which in turn is used to encrypt the GnuPG key;
see the `Encryption SDK
Documentation <https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
A small, trusted subset of the core development team are the only
individuals with access to this symmetric key.
.. _protected_runners:
Protected Runners and Reserved Tags
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack has a large number of GitLab Runners operating in its build farm.
These include runners deployed in the AWS Kubernetes cluster as well as
runners deployed at affiliated institutions. The majority of runners are
shared runners that operate across projects in gitlab.spack.io. These
runners pick up jobs primarily from the spack/spack project and execute
them in PR pipelines.
A small number of runners operating on AWS and at affiliated institutions are
registered as specific *protected* runners on the spack/spack project. In
addition to protected runners, there are protected branches on the spack/spack
project. These are the ``develop`` branch, any release branch (i.e. managed with
the ``releases/v*`` wildcard) and any tag branch (managed with the ``v*``
wildcard). Finally, Spack's pipeline generation code reserves certain tags to make
sure jobs are routed to the correct runners; these tags are ``public``,
``protected``, and ``notary``. Understanding how all this works together to
protect secrets and provide integrity assurances can be a little confusing, so
let's break these down:
- **Protected Branches** - Protected branches in Spack prevent anyone
other than Maintainers in GitLab from pushing code. In the case of
Spack, the only Maintainer-level entity pushing code to protected
branches is Spack bot. Protecting branches also marks them in such a
way that Protected Runners will only run jobs from those branches.
- **Protected Runners** - Protected Runners only run jobs from protected
branches. Because protected runners have access to secrets, it's critical
that they not run jobs from untrusted code (i.e. PR branches). If they did, it
would be possible for a PR branch to tag a job in such a way that a protected
runner executed that job and mounted secrets into a code execution
environment that had not been reviewed by Spack maintainers. Note, however,
that in the absence of tagging used to route jobs, public runners *could* run
jobs from protected branches. No secrets would be at risk of being breached
because non-protected runners do not have access to those secrets; lack of
secrets would, however, cause the jobs to fail.
- **Reserved Tags** - To mitigate the issue of public runners picking up
protected jobs, Spack uses a small set of “reserved” job tags (note that these
are *job* tags, not git tags). These tags are “public”, “protected”, and
“notary”. The majority of jobs executed in Spack's GitLab instance are
executed via a ``generate`` job. The generate job code systematically ensures
that no user-defined configuration sets these tags. Instead, the ``generate``
job sets these tags based on rules related to the branch where this pipeline
originated. If the job is part of a pipeline on a PR branch, it sets the
``public`` tag. If the job is part of a pipeline on a protected branch, it
sets the ``protected`` tag. Finally, if the job is the package signing job and
it is running on a pipeline that is part of a protected branch, then it sets
the ``notary`` tag.
Protected Runners are configured to only run jobs from protected branches. Only
jobs running in pipelines on protected branches are tagged with ``protected`` or
``notary`` tags. This tightly couples jobs on protected branches to protected
runners that provide access to the secrets required to sign the built packages.
The secrets can **only** be accessed via:
1. Runners under direct control of the core development team.
2. Runners under direct control of trusted maintainers at affiliated institutions.
3. By code running the automated pipeline that has been reviewed by the
Spack maintainers and judged to be appropriate.
Other attempts (either through malicious intent or incompetence) can at
worst grab jobs intended for protected runners, which will cause those
jobs to fail, alerting both Spack maintainers and the core development
team.
.. [#f1]
The Reputational Key has also cross-signed core development team
keys.

View File

@@ -252,7 +252,7 @@ def initconfig_hardware_entries(self):
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
archs = spec.variants["cuda_arch"].value
if archs[0] != "none":
if archs != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
@@ -269,7 +269,7 @@ def initconfig_hardware_entries(self):
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
)
archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none":
if archs != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))

View File

@@ -5,7 +5,6 @@
import collections.abc
import inspect
import os
import pathlib
import platform
import re
import sys
@@ -16,6 +15,7 @@
import spack.build_environment
import spack.builder
import spack.package_base
import spack.util.path
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
@@ -271,7 +271,7 @@ def std_args(pkg, generator=None):
args = [
"-G",
generator,
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
define("CMAKE_INSTALL_PREFIX", pkg.prefix),
define("CMAKE_BUILD_TYPE", build_type),
define("BUILD_TESTING", pkg.run_tests),
]

View File

@@ -751,7 +751,7 @@ def generate_gitlab_ci_yaml(
env.concretize()
env.write()
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
yaml_root = ev.config_dict(env.manifest)
# Get the joined "ci" config with all of the current scopes resolved
ci_config = cfg.get("ci")
@@ -946,7 +946,7 @@ def generate_gitlab_ci_yaml(
# Add config scopes to environment
env_includes = env_yaml_root["spack"].get("include", [])
cli_scopes = [
os.path.relpath(s.path, concrete_env_dir)
os.path.abspath(s.path)
for s in cfg.scopes().values()
if type(s) == cfg.ImmutableConfigScope
and s.path not in env_includes

View File

@@ -228,7 +228,7 @@ def ci_reindex(args):
Use the active, gitlab-enabled environment to rebuild the buildcache
index for the associated mirror."""
env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
yaml_root = ev.config_dict(env.manifest)
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
tty.die("spack ci rebuild-index requires an env containing a mirror")

View File

@@ -349,7 +349,7 @@ def install_status():
"-I",
"--install-status",
action="store_true",
default=True,
default=False,
help="show install status of packages. packages can be: "
"installed [+], missing and needed by an installed package [-], "
"installed in and upstream instance [^], "
@@ -357,17 +357,6 @@ def install_status():
)
@arg
def no_install_status():
return Args(
"--no-install-status",
dest="install_status",
action="store_false",
default=True,
help="do not show install status annotations",
)
@arg
def no_checksum():
return Args(

View File

@@ -53,7 +53,7 @@ def setup_parser(subparser):
"--scope",
choices=scopes,
metavar=scopes_metavar,
default=None,
default=spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
@@ -106,21 +106,19 @@ def compiler_find(args):
def compiler_remove(args):
compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
if not candidate_compilers:
tty.die("No compilers match spec %s" % compiler_spec)
if not args.all and len(candidate_compilers) > 1:
tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
cspec = spack.spec.CompilerSpec(args.compiler_spec)
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
if not compilers:
tty.die("No compilers match spec %s" % cspec)
elif not args.all and len(compilers) > 1:
tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
colify(reversed(sorted([c.spec.display_str for c in compilers])), indent=4)
tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
sys.exit(1)
for current_compiler in candidate_compilers:
spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
tty.msg(f"{current_compiler.spec.display_str} has been removed")
for compiler in compilers:
spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
tty.msg("Removed compiler %s" % compiler.spec.display_str)
def compiler_info(args):

View File

@@ -44,11 +44,7 @@ def setup_parser(subparser):
)
# Below are arguments w.r.t. spec display (like spack spec)
arguments.add_common_arguments(subparser, ["long", "very_long"])
install_status_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
subparser.add_argument(
"-y",
"--yaml",

View File

@@ -31,11 +31,7 @@ def setup_parser(subparser):
for further documentation regarding the spec syntax, see:
spack help --spec
"""
arguments.add_common_arguments(subparser, ["long", "very_long"])
install_status_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
"-y",

View File

@@ -37,6 +37,7 @@
"implicit_rpaths",
"extra_rpaths",
]
_cache_config_file = []
# TODO: Caches at module level make it difficult to mock configurations in
# TODO: unit tests. It might be worth reworking their implementation.
@@ -154,65 +155,52 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
compiler_config = get_compiler_config(scope, init_config)
for compiler in compilers:
compiler_config.append(_to_dict(compiler))
global _cache_config_file
_cache_config_file = compiler_config
spack.config.set("compilers", compiler_config, scope=scope)
@_auto_compiler_spec
def remove_compiler_from_config(compiler_spec, scope=None):
"""Remove compilers from configuration by spec.
If scope is None, all the scopes are searched for removal.
"""Remove compilers from the config, by spec.
Arguments:
compiler_spec: compiler to be removed
scope: configuration scope to modify
compiler_specs: a list of CompilerSpec objects.
scope: configuration scope to modify.
"""
candidate_scopes = [scope]
if scope is None:
candidate_scopes = spack.config.config.scopes.keys()
# Need a better way for this
global _cache_config_file
removal_happened = False
for current_scope in candidate_scopes:
removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)
return removal_happened
def _remove_compiler_from_scope(compiler_spec, scope):
"""Removes a compiler from a specific configuration scope.
Args:
compiler_spec: compiler to be removed
scope: configuration scope under consideration
Returns:
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(scope)
config_length = len(compiler_config)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
comp
for comp in compiler_config
if not spack.spec.parse_with_version_concrete(
compiler_entry["compiler"]["spec"], compiler=True
comp["compiler"]["spec"], compiler=True
).satisfies(compiler_spec)
]
if len(filtered_compiler_config) == len(compiler_config):
return False
# We need to preserve the YAML type for comments, hence we are copying the
# items in the list that has just been retrieved
compiler_config[:] = filtered_compiler_config
spack.config.set("compilers", compiler_config, scope=scope)
return True
# Update the cache for changes
_cache_config_file = filtered_compiler_config
if len(filtered_compiler_config) == config_length: # No items removed
CompilerSpecInsufficientlySpecificError(compiler_spec)
spack.config.set("compilers", filtered_compiler_config, scope=scope)
def all_compilers_config(scope=None, init_config=True):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
return get_compiler_config(scope, init_config)
# Get compilers for this architecture.
# Create a cache of the config file so we don't load all the time.
global _cache_config_file
if not _cache_config_file:
_cache_config_file = get_compiler_config(scope, init_config)
return _cache_config_file
else:
return _cache_config_file
def all_compiler_specs(scope=None, init_config=True):

View File

@@ -81,7 +81,7 @@
# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
all_schemas = copy.deepcopy(section_schemas)
all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
all_schemas.update(dict((key, spack.schema.env.schema) for key in spack.schema.env.keys))
#: Path to the default configuration
configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
@@ -111,6 +111,14 @@
overrides_base_name = "overrides-"
def first_existing(dictionary, keys):
"""Get the value of the first key in keys that is in the dictionary."""
try:
return next(k for k in keys if k in dictionary)
except StopIteration:
raise KeyError("None of %s is in dict!" % str(keys))
class ConfigScope(object):
"""This class represents a configuration scope.
@@ -830,10 +838,12 @@ def _config():
def add_from_file(filename, scope=None):
"""Add updates to a config from a filename"""
# Extract internal attributes, if we are dealing with an environment
import spack.environment as ev
# Get file as config dict
data = read_config_file(filename)
if spack.schema.env.TOP_LEVEL_KEY in data:
data = data[spack.schema.env.TOP_LEVEL_KEY]
if any(k in data for k in spack.schema.env.keys):
data = ev.config_dict(data)
# update all sections from config dict
# We have to iterate on keys to keep overrides from the file
@@ -1343,11 +1353,17 @@ def use_configuration(*scopes_or_paths):
configuration = _config_from(scopes_or_paths)
config.clear_caches(), configuration.clear_caches()
# Save and clear the current compiler cache
saved_compiler_cache = spack.compilers._cache_config_file
spack.compilers._cache_config_file = []
saved_config, config = config, configuration
try:
yield configuration
finally:
# Restore previous config files
spack.compilers._cache_config_file = saved_compiler_cache
config = saved_config

View File

@@ -37,7 +37,7 @@ def validate(configuration_file):
config = syaml.load(f)
# Ensure we have a "container" attribute with sensible defaults set
env_dict = config[ev.TOP_LEVEL_KEY]
env_dict = ev.config_dict(config)
env_dict.setdefault(
"container", {"format": "docker", "images": {"os": "ubuntu:22.04", "spack": "develop"}}
)

View File

@@ -17,7 +17,7 @@
"template": "container/fedora_38.dockerfile",
"image": "docker.io/fedora:38"
},
"os_package_manager": "dnf",
"os_package_manager": "yum",
"build": "spack/fedora38",
"build_tags": {
"develop": "latest"
@@ -31,7 +31,7 @@
"template": "container/fedora_37.dockerfile",
"image": "docker.io/fedora:37"
},
"os_package_manager": "dnf",
"os_package_manager": "yum",
"build": "spack/fedora37",
"build_tags": {
"develop": "latest"
@@ -45,7 +45,7 @@
"template": "container/rockylinux_9.dockerfile",
"image": "docker.io/rockylinux:9"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/rockylinux9",
"build_tags": {
"develop": "latest"
@@ -59,7 +59,7 @@
"template": "container/rockylinux_8.dockerfile",
"image": "docker.io/rockylinux:8"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/rockylinux8",
"build_tags": {
"develop": "latest"
@@ -73,7 +73,7 @@
"template": "container/almalinux_9.dockerfile",
"image": "quay.io/almalinux/almalinux:9"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/almalinux9",
"build_tags": {
"develop": "latest"
@@ -87,7 +87,7 @@
"template": "container/almalinux_8.dockerfile",
"image": "quay.io/almalinux/almalinux:8"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/almalinux8",
"build_tags": {
"develop": "latest"
@@ -101,7 +101,7 @@
"template": "container/centos_stream.dockerfile",
"image": "quay.io/centos/centos:stream"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/centos-stream",
"final": {
"image": "quay.io/centos/centos:stream"
@@ -185,16 +185,6 @@
"install": "apt-get -yqq install",
"clean": "rm -rf /var/lib/apt/lists/*"
},
"dnf": {
"update": "dnf update -y",
"install": "dnf install -y",
"clean": "rm -rf /var/cache/dnf && dnf clean all"
},
"dnf_epel": {
"update": "dnf update -y && dnf install -y epel-release && dnf update -y",
"install": "dnf install -y",
"clean": "rm -rf /var/cache/dnf && dnf clean all"
},
"yum": {
"update": "yum update -y && yum install -y epel-release && yum update -y",
"install": "yum install -y",

View File

@@ -50,7 +50,7 @@ def create(configuration, last_phase=None):
configuration (dict): how to generate the current recipe
last_phase (str): last phase to be printed or None to print them all
"""
name = configuration[ev.TOP_LEVEL_KEY]["container"]["format"]
name = ev.config_dict(configuration)["container"]["format"]
return _writer_factory[name](configuration, last_phase)
@@ -138,7 +138,7 @@ class PathContext(tengine.Context):
template_name: Optional[str] = None
def __init__(self, config, last_phase):
self.config = config[ev.TOP_LEVEL_KEY]
self.config = ev.config_dict(config)
self.container_config = self.config["container"]
# Operating system tag as written in the configuration file

View File

@@ -337,7 +337,6 @@
"""
from .environment import (
TOP_LEVEL_KEY,
Environment,
SpackEnvironmentError,
SpackEnvironmentViewError,
@@ -346,6 +345,7 @@
active_environment,
all_environment_names,
all_environments,
config_dict,
create,
create_in_dir,
deactivate,
@@ -369,7 +369,6 @@
)
__all__ = [
"TOP_LEVEL_KEY",
"Environment",
"SpackEnvironmentError",
"SpackEnvironmentViewError",
@@ -378,6 +377,7 @@
"active_environment",
"all_environment_names",
"all_environments",
"config_dict",
"create",
"create_in_dir",
"deactivate",

View File

@@ -53,7 +53,6 @@
import spack.version
from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec
from spack.spec_list import InvalidSpecConstraintError, SpecList
from spack.util.path import substitute_path_variables
@@ -362,6 +361,19 @@ def ensure_env_root_path_exists():
fs.mkdirp(env_root_path())
def config_dict(yaml_data):
"""Get the configuration scope section out of an spack.yaml"""
# TODO (env:): Remove env: as a possible top level keyword in v0.21
key = spack.config.first_existing(yaml_data, spack.schema.env.keys)
if key == "env":
msg = (
"using 'env:' as a top-level attribute of a Spack environment is deprecated and "
"will be removed in Spack v0.21. Please use 'spack:' instead."
)
warnings.warn(msg)
return yaml_data[key]
def all_environment_names():
"""List the names of environments that currently exist."""
# just return empty if the env path does not exist. A read-only
@@ -809,8 +821,8 @@ def write_transaction(self):
def _construct_state_from_manifest(self):
"""Read manifest file and set up user specs."""
self.spec_lists = collections.OrderedDict()
env_configuration = self.manifest[TOP_LEVEL_KEY]
for item in env_configuration.get("definitions", []):
for item in config_dict(self.manifest).get("definitions", []):
entry = copy.deepcopy(item)
when = _eval_conditional(entry.pop("when", "True"))
assert len(entry) == 1
@@ -822,13 +834,13 @@ def _construct_state_from_manifest(self):
else:
self.spec_lists[name] = user_specs
spec_list = env_configuration.get(user_speclist_name, [])
spec_list = config_dict(self.manifest).get(user_speclist_name, [])
user_specs = SpecList(
user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
)
self.spec_lists[user_speclist_name] = user_specs
enable_view = env_configuration.get("view")
enable_view = config_dict(self.manifest).get("view")
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
@@ -843,11 +855,14 @@ def _construct_state_from_manifest(self):
else:
self.views = {}
# Retrieve the current concretization strategy
configuration = config_dict(self.manifest)
# Retrieve unification scheme for the concretizer
self.unify = spack.config.get("concretizer:unify", False)
# Retrieve dev-build packages:
self.dev_specs = copy.deepcopy(env_configuration.get("develop", {}))
self.dev_specs = copy.deepcopy(configuration.get("develop", {}))
for name, entry in self.dev_specs.items():
# spec must include a concrete version
assert Spec(entry["spec"]).versions.concrete_range_as_version
@@ -967,7 +982,7 @@ def included_config_scopes(self):
# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
includes = config_dict(self.manifest).get("include", [])
missing = []
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
@@ -1060,7 +1075,10 @@ def env_file_config_scope(self):
"""Get the configuration scope for the environment's manifest file."""
config_name = self.env_file_config_scope_name()
return spack.config.SingleFileScope(
config_name, self.manifest_path, spack.schema.env.schema, [TOP_LEVEL_KEY]
config_name,
self.manifest_path,
spack.schema.env.schema,
[spack.config.first_existing(self.manifest, spack.schema.env.keys)],
)
def config_scopes(self):
@@ -2666,8 +2684,8 @@ def add_user_spec(self, user_spec: str) -> None:
Args:
user_spec: user spec to be appended
"""
self.pristine_configuration.setdefault("specs", []).append(user_spec)
self.configuration.setdefault("specs", []).append(user_spec)
config_dict(self.pristine_yaml_content).setdefault("specs", []).append(user_spec)
config_dict(self.yaml_content).setdefault("specs", []).append(user_spec)
self.changed = True
def remove_user_spec(self, user_spec: str) -> None:
@@ -2680,8 +2698,8 @@ def remove_user_spec(self, user_spec: str) -> None:
SpackEnvironmentError: when the user spec is not in the list
"""
try:
self.pristine_configuration["specs"].remove(user_spec)
self.configuration["specs"].remove(user_spec)
config_dict(self.pristine_yaml_content)["specs"].remove(user_spec)
config_dict(self.yaml_content)["specs"].remove(user_spec)
except ValueError as e:
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
raise SpackEnvironmentError(msg) from e
@@ -2698,8 +2716,8 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
SpackEnvironmentError: when the user spec cannot be overridden
"""
try:
self.pristine_configuration["specs"][idx] = user_spec
self.configuration["specs"][idx] = user_spec
config_dict(self.pristine_yaml_content)["specs"][idx] = user_spec
config_dict(self.yaml_content)["specs"][idx] = user_spec
except ValueError as e:
msg = f"cannot override {user_spec} from {self}"
raise SpackEnvironmentError(msg) from e
@@ -2715,14 +2733,14 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
Raises:
SpackEnvironmentError: if no valid definition exists already
"""
defs = self.pristine_configuration.get("definitions", [])
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
msg = f"cannot add {user_spec} to the '{list_name}' definition, no valid list exists"
for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
item[list_name].append(user_spec)
break
self.configuration["definitions"][idx][list_name].append(user_spec)
config_dict(self.yaml_content)["definitions"][idx][list_name].append(user_spec)
self.changed = True
def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2736,7 +2754,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
SpackEnvironmentError: if the user spec cannot be removed from the list,
or the list does not exist
"""
defs = self.pristine_configuration.get("definitions", [])
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
msg = (
f"cannot remove {user_spec} from the '{list_name}' definition, "
f"no valid list exists"
@@ -2749,7 +2767,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
except ValueError:
pass
self.configuration["definitions"][idx][list_name].remove(user_spec)
config_dict(self.yaml_content)["definitions"][idx][list_name].remove(user_spec)
self.changed = True
def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2764,7 +2782,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
Raises:
SpackEnvironmentError: if the user spec cannot be overridden
"""
defs = self.pristine_configuration.get("definitions", [])
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
msg = f"cannot override {user_spec} with {override} in the '{list_name}' definition"
for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
@@ -2775,7 +2793,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
except ValueError:
pass
self.configuration["definitions"][idx][list_name][sub_index] = override
config_dict(self.yaml_content)["definitions"][idx][list_name][sub_index] = override
self.changed = True
def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
@@ -2807,24 +2825,24 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
True the default view is used for the environment, if False there's no view.
"""
if isinstance(view, dict):
self.pristine_configuration["view"][default_view_name].update(view)
self.configuration["view"][default_view_name].update(view)
config_dict(self.pristine_yaml_content)["view"][default_view_name].update(view)
config_dict(self.yaml_content)["view"][default_view_name].update(view)
self.changed = True
return
if not isinstance(view, bool):
view = str(view)
self.pristine_configuration["view"] = view
self.configuration["view"] = view
config_dict(self.pristine_yaml_content)["view"] = view
config_dict(self.yaml_content)["view"] = view
self.changed = True
def remove_default_view(self) -> None:
"""Removes the default view from the manifest file"""
view_data = self.pristine_configuration.get("view")
view_data = config_dict(self.pristine_yaml_content).get("view")
if isinstance(view_data, collections.abc.Mapping):
self.pristine_configuration["view"].pop(default_view_name)
self.configuration["view"].pop(default_view_name)
config_dict(self.pristine_yaml_content)["view"].pop(default_view_name)
config_dict(self.yaml_content)["view"].pop(default_view_name)
self.changed = True
return
@@ -2841,10 +2859,12 @@ def add_develop_spec(self, pkg_name: str, entry: Dict[str, str]) -> None:
if entry["path"] == pkg_name:
entry.pop("path")
self.pristine_configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(
config_dict(self.pristine_yaml_content).setdefault("develop", {}).setdefault(
pkg_name, {}
).update(entry)
config_dict(self.yaml_content).setdefault("develop", {}).setdefault(pkg_name, {}).update(
entry
)
self.configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(entry)
self.changed = True
def remove_develop_spec(self, pkg_name: str) -> None:
@@ -2857,11 +2877,11 @@ def remove_develop_spec(self, pkg_name: str) -> None:
SpackEnvironmentError: if there is nothing to remove
"""
try:
del self.pristine_configuration["develop"][pkg_name]
del config_dict(self.pristine_yaml_content)["develop"][pkg_name]
except KeyError as e:
msg = f"cannot remove '{pkg_name}' from develop specs in {self}, entry does not exist"
raise SpackEnvironmentError(msg) from e
del self.configuration["develop"][pkg_name]
del config_dict(self.yaml_content)["develop"][pkg_name]
self.changed = True
def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
@@ -2872,11 +2892,11 @@ def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
init_file_dir: directory with the "spack.yaml" used to initialize the environment.
"""
init_file_dir = pathlib.Path(init_file_dir).absolute()
for _, entry in self.pristine_configuration.get("develop", {}).items():
for _, entry in config_dict(self.pristine_yaml_content).get("develop", {}).items():
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
entry["path"] = str(expanded_path)
for _, entry in self.configuration.get("develop", {}).items():
for _, entry in config_dict(self.yaml_content).get("develop", {}).items():
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
entry["path"] = str(expanded_path)
self.changed = True
@@ -2890,16 +2910,6 @@ def flush(self) -> None:
_write_yaml(self.pristine_yaml_content, f)
self.changed = False
@property
def pristine_configuration(self):
"""Return the dictionaries in the pristine YAML, without the top level attribute"""
return self.pristine_yaml_content[TOP_LEVEL_KEY]
@property
def configuration(self):
"""Return the dictionaries in the YAML, without the top level attribute"""
return self.yaml_content[TOP_LEVEL_KEY]
def __len__(self):
return len(self.yaml_content)

View File

@@ -209,7 +209,7 @@ def update(data):
# Warn if deprecated section is still in the environment
ci_env = ev.active_environment()
if ci_env:
env_config = ci_env.manifest[ev.TOP_LEVEL_KEY]
env_config = ev.config_dict(ci_env.manifest)
if "gitlab-ci" in env_config:
tty.die("Error: `gitlab-ci` section detected with `ci`, these are not compatible")

View File

@@ -15,8 +15,8 @@
import spack.schema.packages
import spack.schema.projections
#: Top level key in a manifest file
TOP_LEVEL_KEY = "spack"
#: legal first keys in the schema
keys = ("spack", "env")
spec_list_schema = {
"type": "array",
@@ -47,8 +47,8 @@
"title": "Spack environment file schema",
"type": "object",
"additionalProperties": False,
"properties": {
"spack": {
"patternProperties": {
"^env|spack$": {
"type": "object",
"default": {},
"additionalProperties": False,

View File

@@ -614,6 +614,23 @@ def multiple_values_error(self, attribute, pkg):
def no_value_error(self, attribute, pkg):
return f'Cannot select a single "{attribute}" for package "{pkg}"'
def _get_cause_tree(self, cause, conditions, condition_causes, literals, indent=" "):
parents = [c for e, c in condition_causes if e == cause]
local = "required because %s " % conditions[cause]
return [indent + local] + [
c
for parent in parents
for c in self._get_cause_tree(
parent, conditions, condition_causes, literals, indent=indent + " "
)
]
def get_cause_tree(self, cause):
conditions = dict(extract_args(self.model, "condition"))
condition_causes = list(extract_args(self.model, "condition_cause"))
return self._get_cause_tree(cause, conditions, condition_causes, [])
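The two methods above rebuild the chain of conditions behind an error from the model's condition/condition_cause symbols. A toy walk over hypothetical data, mirroring _get_cause_tree:

    # Hypothetical condition messages and (effect, cause) edges:
    conditions = {1: "hdf5 depends on mpi", 2: "mpich provides mpi"}
    condition_causes = [(2, 1)]  # condition 2 was triggered by condition 1

    def cause_tree(cause, indent="  "):
        parents = [c for e, c in condition_causes if e == cause]
        lines = [indent + "required because %s" % conditions[cause]]
        for parent in parents:
            lines.extend(cause_tree(parent, indent + "  "))
        return lines

    print("\n".join(cause_tree(2)))
    # prints:
    #   required because mpich provides mpi
    #     required because hdf5 depends on mpi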
def handle_error(self, msg, *args):
"""Handle an error state derived by the solver."""
if msg == "multiple_values_error":
@@ -622,14 +639,28 @@ def handle_error(self, msg, *args):
if msg == "no_value_error":
return self.no_value_error(*args)
try:
idx = args.index("startcauses")
except ValueError:
msg_args = args
cause_args = []
else:
msg_args = args[:idx]
cause_args = args[idx + 1 :]
msg = msg.format(*msg_args)
# For variant formatting, we sometimes have to construct specs
# to format values properly. Find/replace all occurrences of
# Spec(...) with the string representation of the spec mentioned
msg = msg.format(*args)
specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg)
for spec_str in specs_to_construct:
msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str)))
for cause in set(cause_args):
for c in self.get_cause_tree(cause):
msg += f"\n{c}"
return msg
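The error arguments may carry an optional trailing list of cause ids after a "startcauses" sentinel; a tiny sketch of the split performed above, with made-up arguments:

    args = ("hdf5", "mpi", "startcauses", 12, 7)
    idx = args.index("startcauses")
    msg_args, cause_args = args[:idx], args[idx + 1 :]
    assert msg_args == ("hdf5", "mpi")
    assert cause_args == (12, 7)  # condition ids fed to get_cause_tree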
def message(self, errors) -> str:
@@ -775,6 +806,8 @@ def visit(node):
self.control.load(os.path.join(parent_dir, "concretize.lp"))
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
self.control.load(os.path.join(parent_dir, "display.lp"))
if spack.error.debug:
self.control.load(os.path.join(parent_dir, "causation.lp"))
timer.stop("load")
# Grounding is the first step in the solve -- it turns our facts
@@ -835,7 +868,13 @@ def on_model(model):
# print any unknown functions in the model
for sym in best_model:
if sym.name not in ("attr", "error", "opt_criterion"):
if sym.name not in (
"attr",
"error",
"opt_criterion",
"condition",
"condition_cause",
):
tty.debug(
"UNKNOWN SYMBOL: %s(%s)" % (sym.name, ", ".join(stringify(sym.arguments)))
)
@@ -1266,7 +1305,11 @@ def package_provider_rules(self, pkg):
for when in whens:
msg = "%s provides %s when %s" % (pkg.name, provided, when)
condition_id = self.condition(when, provided, pkg.name, msg)
self.gen.fact(fn.provider_condition(condition_id, when.name, provided.name))
self.gen.fact(
fn.imposed_constraint(
condition_id, "virtual_condition_holds", pkg.name, provided.name
)
)
self.gen.newline()
def package_dependencies_rules(self, pkg):
@@ -1287,16 +1330,25 @@ def package_dependencies_rules(self, pkg):
if not deptypes:
continue
msg = "%s depends on %s" % (pkg.name, dep.spec.name)
msg = "%s depends on %s" % (pkg.name, dep.spec)
if cond != spack.spec.Spec():
msg += " when %s" % cond
condition_id = self.condition(cond, dep.spec, pkg.name, msg)
self.gen.fact(fn.dependency_condition(condition_id, pkg.name, dep.spec.name))
self.gen.fact(fn.condition_requirement(condition_id, "spack_installed", pkg.name))
for t in sorted(deptypes):
# there is a declared dependency of type t
self.gen.fact(fn.dependency_type(condition_id, t))
self.gen.fact(
fn.imposed_constraint(
condition_id, "dependency_holds", pkg.name, dep.spec.name, t
)
)
self.gen.fact(
fn.imposed_constraint(
condition_id, "virtual_node" if dep.spec.virtual else "node", dep.spec.name
)
)
self.gen.newline()
@@ -1450,7 +1502,11 @@ def external_packages(self):
for local_idx, spec in enumerate(external_specs):
msg = "%s available as external when satisfying %s" % (spec.name, spec)
condition_id = self.condition(spec, msg=msg)
self.gen.fact(fn.possible_external(condition_id, pkg_name, local_idx))
self.gen.fact(
fn.imposed_constraint(
condition_id, "external_conditions_hold", pkg_name, local_idx
)
)
self.possible_versions[spec.name].add(spec.version)
self.gen.newline()
@@ -2294,16 +2350,29 @@ def setup(self, driver, specs, reuse=None):
self.define_target_constraints()
def literal_specs(self, specs):
for idx, spec in enumerate(specs):
for spec in specs:
self.gen.h2("Spec: %s" % str(spec))
self.gen.fact(fn.literal(idx))
self.gen.fact(fn.literal(idx, "virtual_root" if spec.virtual else "root", spec.name))
# cannot use self.condition because it requires condition requirements
condition_id = next(self._condition_id_counter)
self.gen.fact(fn.condition(condition_id, "%s is provided as input spec" % spec))
self.gen.fact(fn.literal(condition_id))
self.gen.fact(fn.condition_requirement(condition_id, "literal_solved", condition_id))
self.gen.fact(
fn.imposed_constraint(
condition_id, "virtual_root" if spec.virtual else "root", spec.name
)
)
for clause in self.spec_clauses(spec):
self.gen.fact(fn.literal(idx, *clause.args))
self.gen.fact(fn.imposed_constraint(condition_id, *clause.args))
if clause.args[0] == "variant_set":
self.gen.fact(
fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:])
fn.imposed_constraint(
condition_id, "variant_default_value_from_cli", *clause.args[1:]
)
)
if self.concretize_everything:
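With this change an input spec is just another condition: it requires its own literal_solved atom and imposes its clauses. An illustrative fact set for a hypothetical input spec hdf5+mpi with a made-up condition id (the real clauses come from spec_clauses):

    facts = [
        ("condition", 7, "hdf5+mpi is provided as input spec"),
        ("literal", 7),
        ("condition_requirement", 7, "literal_solved", 7),
        ("imposed_constraint", 7, "root", "hdf5"),
        ("imposed_constraint", 7, "variant_set", "hdf5", "mpi", "True"),
    ]
    for name, *args in facts:
        print("%s(%s)." % (name, ", ".join(map(repr, args))))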
@@ -2397,6 +2466,8 @@ class SpecBuilder(object):
r"^root$",
r"^virtual_node$",
r"^virtual_root$",
r"^.*holds?$",
r"^literal.*$",
]
)
)

View File

@@ -0,0 +1,72 @@
% Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
% associated conditions by cause -> effect
condition_cause(Effect, Cause) :-
condition_holds(Effect), condition_holds(Cause),
attr(Name, A1),
condition_requirement(Effect, Name, A1),
imposed_constraint(Cause, Name, A1).
condition_cause(Effect, Cause) :-
condition_holds(Effect), condition_holds(Cause),
attr(Name, A1, A2),
condition_requirement(Effect, Name, A1, A2),
imposed_constraint(Cause, Name, A1, A2).
condition_cause(Effect, Cause) :-
condition_holds(Effect), condition_holds(Cause),
attr(Name, A1, A2, A3),
condition_requirement(Effect, Name, A1, A2, A3),
imposed_constraint(Cause, Name, A1, A2, A3).
condition_cause(Effect, Cause) :-
condition_holds(Effect), condition_holds(Cause),
attr(Name, A1, A2, A3, A4),
condition_requirement(Effect, Name, A1, A2, A3, A4),
imposed_constraint(Cause, Name, A1, A2, A3, A4).
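The four rules above differ only in arity; each links an effect to a cause when some attribute the effect required is one the cause imposed. A small Python model of that join, with hypothetical facts:

    condition_requirement = {(2, ("node", "mpi"))}  # effect 2 required node(mpi)
    imposed_constraint = {(1, ("node", "mpi"))}     # cause 1 imposed node(mpi)
    condition_holds = {1, 2}

    condition_cause = {
        (effect, req_attr)
        for (effect, req_attr) in ()
    }  # placeholder removed below

    condition_cause = {
        (effect, cause)
        for (effect, req) in condition_requirement
        for (cause, imp) in imposed_constraint
        if req == imp and {effect, cause} <= condition_holds
    }
    assert condition_cause == {(2, 1)}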
% At most one variant for single valued variants
error(0, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, Cause2)
:- attr("node", Package),
variant(Package, Variant),
variant_single_value(Package, Variant),
build(Package),
attr("variant_value", Package, Variant, Value1),
imposed_constraint(Cause1, "variant_set", Package, Variant, Value1),
condition_holds(Cause1),
attr("variant_value", Package, Variant, Value2),
imposed_constraint(Cause2, "variant_set", Package, Variant, Value2),
condition_holds(Cause2),
Value1 < Value2. % see[1] in concretize.lp
% We cannot have a version that violates another version constraint
error(0, "Version '{0}' of {1} does not satisfy '@{2}'", Version, Package, Constraint, startcauses, VersionCause, ConstraintCause)
:- attr("node", Package),
attr("version", Package, Version),
imposed_constraint(VersionCause, "node_version_satisfies", Package, Version),
condition_holds(VersionCause),
attr("node_version_satisfies", Package, Constraint),
imposed_constraint(ConstraintCause, "node_version_satisfies", Package, Constraint),
condition_holds(ConstraintCause),
not version_satisfies(Package, Constraint, Version).
% A virtual package may or may not have a version, but never has more than one
% Error to catch the case where it does happen
error(0, "Version '{0}' of {1} does not satisfy '@{2}'", Version, Virtual, Constraint, startcauses, VersionCause, ConstraintCause)
:- attr("virtual_node", Virtual),
attr("version", Virtual, Version),
imposed_constraint(VersionCause, "node_version_satisfies", Virtual, Version),
condition_holds(VersionCause),
attr("node_version_satisfies", Virtual, Constraint),
imposed_constraint(ConstraintCause, "node_version_satisfies", Virtual, Constraint),
condition_holds(ConstraintCause),
not version_satisfies(Virtual, Constraint, Version).
% More specific error message if the version cannot satisfy some constraint
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
error(0, "Cannot satisfy '{0}@{1}'", Package, Constraint, startcauses, ConstraintCause)
:- attr("node_version_satisfies", Package, Constraint),
imposed_constraint(ConstraintCause, "node_version_satisfies", Package, Constraint),
condition_holds(ConstraintCause),
attr("version", Package, Version),
not version_satisfies(Package, Constraint, Version).

View File

@@ -12,8 +12,8 @@
%-----------------------------------------------------------------------------
% Give clingo the choice to solve an input spec or not
{ literal_solved(ID) } :- literal(ID).
literal_not_solved(ID) :- not literal_solved(ID), literal(ID).
{ attr("literal_solved", ID) } :- literal(ID).
literal_not_solved(ID) :- not attr("literal_solved", ID), literal(ID).
% If concretize_everything() is a fact, then we cannot have unsolved specs
:- literal_not_solved(ID), concretize_everything.
@@ -21,24 +21,14 @@ literal_not_solved(ID) :- not literal_solved(ID), literal(ID).
% Make a problem with "zero literals solved" unsat. This is to trigger
% looking for solutions to the ASP problem with "errors", which results
% in better reporting for users. See #30669 for details.
1 { literal_solved(ID) : literal(ID) }.
1 { attr("literal_solved", ID) : literal(ID) }.
opt_criterion(300, "number of input specs not concretized").
#minimize{ 0@300: #true }.
#minimize { 1@300,ID : literal_not_solved(ID) }.
% Map constraint on the literal ID to the correct PSID
attr(Name, A1) :- literal(LiteralID, Name, A1), literal_solved(LiteralID).
attr(Name, A1, A2) :- literal(LiteralID, Name, A1, A2), literal_solved(LiteralID).
attr(Name, A1, A2, A3) :- literal(LiteralID, Name, A1, A2, A3), literal_solved(LiteralID).
attr(Name, A1, A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), literal_solved(LiteralID).
#defined concretize_everything/0.
#defined literal/1.
#defined literal/3.
#defined literal/4.
#defined literal/5.
#defined literal/6.
% Attributes for node packages which must have a single value
attr_single_value("version").
@@ -58,6 +48,13 @@ error(100, multiple_values_error, Attribute, Package)
attr_single_value(Attribute),
2 { attr(Attribute, Package, Version) }.
%-----------------------------------------------------------------------------
% Define functions for error handling
%-----------------------------------------------------------------------------
#defined error/9.
#defined condition_cause/2.
%-----------------------------------------------------------------------------
% Version semantics
%-----------------------------------------------------------------------------
@@ -96,7 +93,18 @@ version_satisfies(Package, Constraint, HashVersion) :- version_satisfies(Package
{ attr("version", Package, Version) : version_declared(Package, Version) }
:- attr("node", Package).
% Error to ensure structure of the program is not violated
error(2, "No version from '{0}' satisfies '@{1}' and '@{2}'", Package, Version1, Version2)
:- attr("node", Package),
attr("version", Package, Version1),
attr("version", Package, Version2),
Version1 < Version2. % see[1]
error(2, "No versions available for package '{0}'", Package)
:- attr("node", Package), not attr("version", Package, _).
% A virtual package may or may not have a version, but never has more than one
% fallback error for structure in case there's another way for it to happen
error(100, "Cannot select a single version for virtual '{0}'", Virtual)
:- attr("virtual_node", Virtual),
2 { attr("version", Virtual, Version) }.
@@ -150,8 +158,7 @@ possible_version_weight(Package, Weight)
:- attr("node_version_satisfies", Package, Constraint),
version_satisfies(Package, Constraint, _).
% More specific error message if the version cannot satisfy some constraint
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
% Error for structure of program
error(10, "Cannot satisfy '{0}@{1}'", Package, Constraint)
:- attr("node_version_satisfies", Package, Constraint),
attr("version", Package, Version),
@@ -182,9 +189,8 @@ condition_holds(ID) :-
attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3);
attr(Name, A1, A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4).
% condition_holds(ID) implies all imposed_constraints, unless do_not_impose(ID)
% is derived. This allows imposed constraints to be canceled in special cases.
impose(ID) :- condition_holds(ID), not do_not_impose(ID).
% condition_holds(ID) implies all imposed_constraints.
impose(ID) :- condition_holds(ID).
% conditions that hold impose constraints on other specs
attr(Name, A1) :- impose(ID), imposed_constraint(ID, Name, A1).
@@ -229,33 +235,19 @@ depends_on(Package, Dependency) :- attr("depends_on", Package, Dependency, _).
% a dependency holds if its condition holds and if it is not external or
% concrete. We chop off dependencies for externals, and dependencies of
% concrete specs don't need to be resolved -- they arise from the concrete
% specs themselves.
dependency_holds(Package, Dependency, Type) :-
dependency_condition(ID, Package, Dependency),
dependency_type(ID, Type),
build(Package),
not external(Package),
condition_holds(ID).
% We cut off dependencies of externals (as we don't really know them).
% Don't impose constraints on dependencies that don't exist.
do_not_impose(ID) :-
not dependency_holds(Package, Dependency, _),
dependency_condition(ID, Package, Dependency).
% specs themselves. This attr is used in constraints from dependency conditions
attr("spack_installed", Package) :- build(Package), not external(Package).
% declared dependencies are real if they're not virtual AND
% the package is not an external.
% They're only triggered if the associated dependency condition holds.
attr("depends_on", Package, Dependency, Type)
:- dependency_holds(Package, Dependency, Type),
:- attr("dependency_holds", Package, Dependency, Type),
not virtual(Dependency).
% every root must be a node
attr("node", Package) :- attr("root", Package).
% dependencies imply new nodes
attr("node", Dependency) :- attr("node", Package), depends_on(Package, Dependency).
% all nodes in the graph must be reachable from some root
% this ensures a user can't say `zlib ^libiconv` (neither of which have any
% dependencies) and get a two-node unconnected graph
@@ -296,14 +288,17 @@ error(1, Msg) :- attr("node", Package),
% if a package depends on a virtual, is not external, and we have a
% provider for that virtual, then it depends on the provider
attr("depends_on", Package, Provider, Type)
:- dependency_holds(Package, Virtual, Type),
:- attr("dependency_holds", Package, Virtual, Type),
provider(Provider, Virtual),
not external(Package).
% dependencies on virtuals also imply that the virtual is a virtual node
attr("virtual_node", Virtual)
:- dependency_holds(Package, Virtual, Type),
virtual(Virtual), not external(Package).
% If a package depends on a provider, the provider must be a node
% nodes that are not indirected by a virtual are instantiated
% directly from the imposed constraints of the dependency condition
attr("node", Provider)
:- attr("dependency_holds", Package, Virtual, Type),
provider(Provider, Virtual),
not external(Package).
% If there's a virtual node, we must select one and only one provider.
% The provider must be selected among the possible providers.
@@ -330,17 +325,11 @@ attr("root", Package) :- attr("virtual_root", Virtual), provider(Package, Virtua
% for environments that are concretized together (e.g. where we
% ask to install "mpich" and "hdf5+mpi" and we want "mpich" to
% be the mpi provider)
provider(Package, Virtual) :- attr("node", Package), virtual_condition_holds(Package, Virtual).
% The provider provides the virtual if some provider condition holds.
virtual_condition_holds(Provider, Virtual) :-
provider_condition(ID, Provider, Virtual),
condition_holds(ID),
virtual(Virtual).
provider(Package, Virtual) :- attr("node", Package), attr("virtual_condition_holds", Package, Virtual).
% A package cannot be the actual provider for a virtual if it does not
% fulfill the conditions to provide that virtual
:- provider(Package, Virtual), not virtual_condition_holds(Package, Virtual),
:- provider(Package, Virtual), not attr("virtual_condition_holds", Package, Virtual),
internal_error("Virtual when provides not respected").
#defined possible_provider/2.
@@ -382,14 +371,8 @@ possible_provider_weight(Dependency, Virtual, 100, "fallback") :- provider(Depen
% do not warn if generated program contains none of these.
#defined possible_provider/2.
#defined provider_condition/3.
#defined required_provider_condition/3.
#defined required_provider_condition/4.
#defined required_provider_condition/5.
#defined required_provider_condition/6.
#defined declared_dependency/3.
#defined virtual/1.
#defined virtual_condition_holds/2.
#defined external/1.
#defined external_spec/2.
#defined external_version_declared/4.
@@ -437,25 +420,15 @@ external(Package) :- attr("external_spec_selected", Package, _).
% determine if an external spec has been selected
attr("external_spec_selected", Package, LocalIndex) :-
external_conditions_hold(Package, LocalIndex),
attr("external_conditions_hold", Package, LocalIndex),
attr("node", Package),
not attr("hash", Package, _).
external_conditions_hold(Package, LocalIndex) :-
possible_external(ID, Package, LocalIndex), condition_holds(ID).
% it cannot happen that a spec is external, but none of the external spec
% conditions hold.
error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
:- external(Package),
not external_conditions_hold(Package, _).
#defined possible_external/3.
#defined external_spec_index/3.
#defined external_spec_condition/3.
#defined external_spec_condition/4.
#defined external_spec_condition/5.
#defined external_spec_condition/6.
not attr("external_conditions_hold", Package, _).
%-----------------------------------------------------------------------------
% Config required semantics
@@ -594,7 +567,6 @@ attr("variant_value", Package, Variant, Value) :-
variant(Package, Variant),
build(Package).
error(100, "'{0}' required multiple values for single-valued variant '{1}'", Package, Variant)
:- attr("node", Package),
variant(Package, Variant),
@@ -665,7 +637,7 @@ variant_default_not_used(Package, Variant, Value)
external_with_variant_set(Package, Variant, Value)
:- attr("variant_value", Package, Variant, Value),
condition_requirement(ID, "variant_value", Package, Variant, Value),
possible_external(ID, Package, _),
imposed_constraint(ID, "external_conditions_hold", Package, _),
external(Package),
attr("node", Package).

View File

@@ -23,5 +23,12 @@
#show error/4.
#show error/5.
#show error/6.
#show error/7.
#show error/8.
#show error/9.
% show cause -> effect data for errors
#show condition_cause/2.
#show condition/2.
% debug

View File

@@ -115,6 +115,9 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
spack.config.config, old_config = cfg, spack.config.config
spack.config.config.set("repos", [spack.paths.mock_packages_path])
# This is essential, otherwise the cache will create weird side effects
# that will compromise subsequent tests if compilers.yaml is modified
monkeypatch.setattr(spack.compilers, "_cache_config_file", [])
njobs = spack.config.get("config:build_jobs")
if not njobs:
spack.config.set("config:build_jobs", 4, scope="user")

View File

@@ -8,6 +8,8 @@
import pytest
import llnl.util.filesystem
import spack.compilers
import spack.main
import spack.version
@@ -16,8 +18,124 @@
@pytest.fixture
def compilers_dir(mock_executable):
"""Create a directory with some mock compiler scripts in it.
def mock_compiler_version():
return "4.5.3"
@pytest.fixture()
def mock_compiler_dir(tmpdir, mock_compiler_version):
"""Return a directory containing a fake, but detectable compiler."""
tmpdir.ensure("bin", dir=True)
bin_dir = tmpdir.join("bin")
gcc_path = bin_dir.join("gcc")
gxx_path = bin_dir.join("g++")
gfortran_path = bin_dir.join("gfortran")
gcc_path.write(
"""\
#!/bin/sh
for arg in "$@"; do
if [ "$arg" = -dumpversion ]; then
echo '%s'
fi
done
"""
% mock_compiler_version
)
# Create some mock compilers in the temporary directory
llnl.util.filesystem.set_executable(str(gcc_path))
gcc_path.copy(gxx_path, mode=True)
gcc_path.copy(gfortran_path, mode=True)
return str(tmpdir)
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
with open("gcc", "w") as f:
f.write(
"""\
#!/bin/sh
echo "0.0.0"
"""
)
os.chmod("gcc", 0o700)
os.environ["PATH"] = str(tmpdir)
output = compiler("find", "--scope=site")
assert "gcc" in output
@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
# make a script to emulate apple gcc's version args
with open("gcc", "w") as f:
f.write(
"""\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
"""
)
os.chmod("gcc", 0o700)
os.environ["PATH"] = str(tmpdir)
output = compiler("find", "--scope=site")
assert "gcc" not in output
def test_compiler_remove(mutable_config, mock_packages):
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
def test_compiler_add(mutable_config, mock_packages, mock_compiler_dir, mock_compiler_version):
# Compilers available by default.
old_compilers = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
all=None, compiler_spec=None, add_paths=[mock_compiler_dir], scope=None
)
spack.cmd.compiler.compiler_find(args)
# Ensure new compiler is in there
new_compilers = set(spack.compilers.all_compiler_specs())
new_compiler = new_compilers - old_compilers
assert any(c.version == spack.version.Version(mock_compiler_version) for c in new_compiler)
@pytest.fixture
def clangdir(tmpdir):
"""Create a directory with some dummy compiler scripts in it.
Scripts are:
- clang
@@ -27,9 +145,11 @@ def compilers_dir(mock_executable):
- gfortran-8
"""
clang_path = mock_executable(
"clang",
output="""
with tmpdir.as_cwd():
with open("clang", "w") as f:
f.write(
"""\
#!/bin/sh
if [ "$1" = "--version" ]; then
echo "clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
@@ -39,11 +159,12 @@ def compilers_dir(mock_executable):
echo "clang: error: no input files"
exit 1
fi
""",
)
shutil.copy(clang_path, clang_path.parent / "clang++")
"""
)
shutil.copy("clang", "clang++")
gcc_script = """
gcc_script = """\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "8"
elif [ "$1" = "-dumpfullversion" ]; then
@@ -57,111 +178,30 @@ def compilers_dir(mock_executable):
exit 1
fi
"""
mock_executable("gcc-8", output=gcc_script.format("gcc", "gcc-8"))
mock_executable("g++-8", output=gcc_script.format("g++", "g++-8"))
mock_executable("gfortran-8", output=gcc_script.format("GNU Fortran", "gfortran-8"))
with open("gcc-8", "w") as f:
f.write(gcc_script.format("gcc", "gcc-8"))
with open("g++-8", "w") as f:
f.write(gcc_script.format("g++", "g++-8"))
with open("gfortran-8", "w") as f:
f.write(gcc_script.format("GNU Fortran", "gfortran-8"))
os.chmod("clang", 0o700)
os.chmod("clang++", 0o700)
os.chmod("gcc-8", 0o700)
os.chmod("g++-8", 0o700)
os.chmod("gfortran-8", 0o700)
return clang_path.parent
yield tmpdir
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_executable):
"""Tests that 'spack compiler find' looks into PATH by default, if no specific path
is given.
"""
gcc_path = mock_executable("gcc", output='echo "0.0.0"')
os.environ["PATH"] = str(gcc_path.parent)
output = compiler("find", "--scope=site")
assert "gcc" in output
@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executable):
"""Tests that Spack won't mistake Apple's GCC as a "real" GCC, since it's really
Clang with a few tweaks.
"""
gcc_path = mock_executable(
"gcc",
output="""
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
""",
)
os.environ["PATH"] = str(gcc_path.parent)
output = compiler("find", "--scope=site")
assert "gcc" not in output
@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
"""Tests that we can remove a compiler from configuration."""
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.regression("37996")
def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
# Duplicate "site" scope into "user" scope
site_config = spack.config.get("compilers", scope="site")
spack.config.set("compilers", site_config, scope="user")
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_add(mutable_config, mock_packages, mock_executable):
"""Tests that we can add a compiler to configuration."""
expected_version = "4.5.3"
gcc_path = mock_executable(
"gcc",
output=f"""\
for arg in "$@"; do
if [ "$arg" = -dumpversion ]; then
echo '{expected_version}'
fi
done
""",
)
bin_dir = gcc_path.parent
root_dir = bin_dir.parent
compilers_before_find = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None
)
spack.cmd.compiler.compiler_find(args)
compilers_after_find = set(spack.compilers.all_compiler_specs())
compilers_added_by_find = compilers_after_find - compilers_before_find
assert len(compilers_added_by_find) == 1
new_compiler = compilers_added_by_find.pop()
assert new_compiler.version == spack.version.Version(expected_version)
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.regression("17590")
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
"""Ensure that we'll mix compilers with different suffixes when necessary."""
os.environ["PATH"] = str(compilers_dir)
os.environ["PATH"] = str(clangdir)
output = compiler("find", "--scope=site")
assert "clang@11.0.0" in output
@@ -171,33 +211,39 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
gfortran_path = str(compilers_dir / "gfortran-8")
gfortran_path = str(clangdir.join("gfortran-8"))
assert clang["paths"] == {
"cc": str(compilers_dir / "clang"),
"cxx": str(compilers_dir / "clang++"),
"cc": str(clangdir.join("clang")),
"cxx": str(clangdir.join("clang++")),
# we only auto-detect mixed clang on macos
"f77": gfortran_path if sys.platform == "darwin" else None,
"fc": gfortran_path if sys.platform == "darwin" else None,
}
assert gcc["paths"] == {
"cc": str(compilers_dir / "gcc-8"),
"cxx": str(compilers_dir / "g++-8"),
"cc": str(clangdir.join("gcc-8")),
"cxx": str(clangdir.join("g++-8")),
"f77": gfortran_path,
"fc": gfortran_path,
}
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.regression("17590")
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir):
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir):
"""Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""
clang_path = compilers_dir / "clang"
shutil.copy(clang_path, clang_path.parent / "clang-gpu")
shutil.copy(clang_path, clang_path.parent / "clang++-gpu")
with clangdir.as_cwd():
shutil.copy("clang", "clang-gpu")
shutil.copy("clang++", "clang++-gpu")
os.chmod("clang-gpu", 0o700)
os.chmod("clang++-gpu", 0o700)
os.environ["PATH"] = str(compilers_dir)
os.environ["PATH"] = str(clangdir)
output = compiler("find", "--scope=site")
assert "clang@11.0.0" in output
@@ -206,38 +252,46 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
config = spack.compilers.get_compiler_config("site", False)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
assert clang["paths"]["cc"] == str(compilers_dir / "clang")
assert clang["paths"]["cxx"] == str(compilers_dir / "clang++")
assert clang["paths"]["cc"] == str(clangdir.join("clang"))
assert clang["paths"]["cxx"] == str(clangdir.join("clang++"))
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir):
"""Ensure that we look for compilers in the same order as PATH, when there are duplicates"""
new_dir = compilers_dir / "first_in_path"
new_dir.mkdir()
for name in ("gcc-8", "g++-8", "gfortran-8"):
shutil.copy(compilers_dir / name, new_dir / name)
# Set PATH to have the new folder searched first
os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir))
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
def test_compiler_find_path_order(no_compilers_yaml, working_env, clangdir):
"""Ensure that we find compilers that come first in the PATH first"""
with clangdir.as_cwd():
os.mkdir("first_in_path")
shutil.copy("gcc-8", "first_in_path/gcc-8")
shutil.copy("g++-8", "first_in_path/g++-8")
shutil.copy("gfortran-8", "first_in_path/gfortran-8")
# the first_in_path folder should be searched first
os.environ["PATH"] = "{0}:{1}".format(str(clangdir.join("first_in_path")), str(clangdir))
compiler("find", "--scope=site")
config = spack.compilers.get_compiler_config("site", False)
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
assert gcc["paths"] == {
"cc": str(new_dir / "gcc-8"),
"cxx": str(new_dir / "g++-8"),
"f77": str(new_dir / "gfortran-8"),
"fc": str(new_dir / "gfortran-8"),
"cc": str(clangdir.join("first_in_path", "gcc-8")),
"cxx": str(clangdir.join("first_in_path", "g++-8")),
"f77": str(clangdir.join("first_in_path", "gfortran-8")),
"fc": str(clangdir.join("first_in_path", "gfortran-8")),
}
def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
"""Spack should not automatically search for compilers when listing them and none are
available. And when stdout is not a tty like in tests, there should be no output and
no error exit code.
"""
os.environ["PATH"] = str(compilers_dir)
def test_compiler_list_empty(no_compilers_yaml, working_env, clangdir):
# Spack should not automatically search for compilers when listing them and none
# are available. And when stdout is not a tty like in tests, there should be no
# output and no error exit code.
os.environ["PATH"] = str(clangdir)
out = compiler("list")
assert not out
assert compiler.returncode == 0

View File

@@ -32,7 +32,7 @@ def check_develop(self, env, spec, path=None):
assert dev_specs_entry["spec"] == str(spec)
# check yaml representation
yaml = env.manifest[ev.TOP_LEVEL_KEY]
yaml = ev.config_dict(env.manifest)
assert spec.name in yaml["develop"]
yaml_entry = yaml["develop"][spec.name]
assert yaml_entry["spec"] == str(spec)

View File

@@ -44,8 +44,9 @@ def define_plat_exe(exe):
def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
executables_found({str(cmake_path): define_plat_exe("cmake")})
executables_found(
{mock_executable("cmake", output="echo cmake version 1.foo"): define_plat_exe("cmake")}
)
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
@@ -70,7 +71,7 @@ def test_find_external_two_instances_same_package(
"cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
)
cmake_exe = define_plat_exe("cmake")
executables_found({str(cmake_path1): cmake_exe, str(cmake_path2): cmake_exe})
executables_found({cmake_path1: cmake_exe, cmake_path2: cmake_exe})
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
@@ -106,7 +107,7 @@ def test_get_executables(working_env, mock_executable):
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
path_to_exe = spack.detection.executables_in_path([os.path.dirname(cmake_path1)])
cmake_exe = define_plat_exe("cmake")
assert path_to_exe[str(cmake_path1)] == cmake_exe
assert path_to_exe[cmake_path1] == cmake_exe
external = SpackCommand("external")
@@ -333,7 +334,7 @@ def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _pla
assert "extra_attributes" in external_gcc
extra_attributes = external_gcc["extra_attributes"]
assert "prefix" not in extra_attributes
assert extra_attributes["compilers"]["c"] == str(gcc_exe)
assert extra_attributes["compilers"]["c"] == gcc_exe
def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):

View File

@@ -157,7 +157,7 @@ def _parse_types(string):
def test_spec_deptypes_nodes():
output = spec("--types", "--cover", "nodes", "--no-install-status", "dt-diamond")
output = spec("--types", "--cover", "nodes", "dt-diamond")
types = _parse_types(output)
assert types["dt-diamond"] == [" "]
@@ -167,7 +167,7 @@ def test_spec_deptypes_nodes():
def test_spec_deptypes_edges():
output = spec("--types", "--cover", "edges", "--no-install-status", "dt-diamond")
output = spec("--types", "--cover", "edges", "dt-diamond")
types = _parse_types(output)
assert types["dt-diamond"] == [" "]

View File

@@ -337,6 +337,8 @@ def test_compiler_flags_differ_identical_compilers(self):
# Get the compiler that matches the spec (
compiler = spack.compilers.compiler_for_spec("clang@=12.2.0", spec.architecture)
# Clear cache for compiler config since it has its own cache mechanism outside of config
spack.compilers._cache_config_file = []
# Configure spack to have two identical compilers with different flags
default_dict = spack.compilers._to_dict(compiler)
@@ -2135,7 +2137,7 @@ def test_compiler_with_custom_non_numeric_version(self, mock_executable):
{
"compiler": {
"spec": "gcc@foo",
"paths": {"cc": str(gcc_path), "cxx": str(gcc_path), "f77": None, "fc": None},
"paths": {"cc": gcc_path, "cxx": gcc_path, "f77": None, "fc": None},
"operating_system": "debian6",
"modules": [],
}

View File

@@ -1669,21 +1669,22 @@ def clear_directive_functions():
@pytest.fixture
def mock_executable(tmp_path):
def mock_executable(tmpdir):
"""Factory to create a mock executable in a temporary directory that
output a custom string when run.
"""
import jinja2
shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF"
def _factory(name, output, subdir=("bin",)):
executable_dir = tmp_path.joinpath(*subdir)
executable_dir.mkdir(parents=True, exist_ok=True)
executable_path = executable_dir / name
f = tmpdir.ensure(*subdir, dir=True).join(name)
if sys.platform == "win32":
executable_path = executable_dir / (name + ".bat")
executable_path.write_text(f"{ shebang }{ output }\n")
executable_path.chmod(0o755)
return executable_path
f += ".bat"
t = jinja2.Template("{{ shebang }}{{ output }}\n")
f.write(t.render(shebang=shebang, output=output))
f.chmod(0o755)
return str(f)
return _factory
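The jinja2 side of this fixture just splices a shebang in front of the requested output; a standalone sketch of that rendering:

    import jinja2

    shebang = "#!/bin/sh\n"
    template = jinja2.Template("{{ shebang }}{{ output }}\n")
    print(template.render(shebang=shebang, output='echo "8.4.0"'))
    # #!/bin/sh
    # echo "8.4.0"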

View File

@@ -935,7 +935,7 @@ def test_inclusion_upperbound():
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_git_version_repo_attached_after_serialization(
mock_git_version_info, mock_packages, config, monkeypatch
mock_git_version_info, mock_packages, monkeypatch
):
"""Test that a GitVersion instance can be serialized and deserialized
without losing its repository reference.
@@ -954,9 +954,7 @@ def test_git_version_repo_attached_after_serialization(
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_resolved_git_version_is_shown_in_str(
mock_git_version_info, mock_packages, config, monkeypatch
):
def test_resolved_git_version_is_shown_in_str(mock_git_version_info, mock_packages, monkeypatch):
"""Test that a GitVersion from a commit without a user supplied version is printed
as <hash>=<version>, and not just <hash>."""
repo_path, _, commits = mock_git_version_info
@@ -970,7 +968,7 @@ def test_resolved_git_version_is_shown_in_str(
assert str(spec.version) == f"{commit}=1.0-git.1"
def test_unresolvable_git_versions_error(config, mock_packages):
def test_unresolvable_git_versions_error(mock_packages):
"""Test that VersionLookupError is raised when a git prop is not set on a package."""
with pytest.raises(VersionLookupError):
# The package exists, but does not have a git property set. When dereferencing

View File

@@ -4,11 +4,6 @@ default:
image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }
# CI Platform-Arch
.cray_zen4:
variables:
SPACK_TARGET_PLATFORM: "cray"
SPACK_TARGET_ARCH: "zen4"
.darwin_x86_64:
variables:
SPACK_TARGET_PLATFORM: "darwin"
@@ -329,7 +324,7 @@ gpu-tests-build:
e4s-oneapi-generate:
extends: [ ".e4s-oneapi", ".generate"]
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023.06.01
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
e4s-oneapi-build:
extends: [ ".e4s-oneapi", ".build" ]
@@ -736,15 +731,12 @@ deprecated-ci-build:
# Use gcc from local container buildcache
- - . "./share/spack/setup-env.sh"
- . /etc/profile.d/modules.sh
- spack buildcache rebuild-index /bootstrap/local-cache/
- spack mirror add local-cache /bootstrap/local-cache
- spack gpg trust /bootstrap/public-key
- cd "${CI_PROJECT_DIR}" && curl -sOL https://raw.githubusercontent.com/spack/spack-configs/main/AWS/parallelcluster/postinstall.sh
- sed -i -e "s/spack arch -t/echo ${SPACK_TARGET_ARCH}/g" postinstall.sh
- sed -i.bkp s/"spack install gcc"/"spack install --cache-only --reuse gcc"/ postinstall.sh
- diff postinstall.sh postinstall.sh.bkp || echo Done
- /bin/bash postinstall.sh -fg
- spack config --scope site add "packages:all:target:\"${SPACK_TARGET_ARCH}\""
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
after_script:
- - mv "${CI_PROJECT_DIR}/postinstall.sh" "${CI_PROJECT_DIR}/jobs_scratch_dir/"
@@ -823,89 +815,3 @@ aws-pcluster-build-neoverse_v1:
needs:
- artifacts: True
job: aws-pcluster-generate-neoverse_v1
# Cray definitions
.base-cray-job:
variables:
SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-cray/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
AWS_ACCESS_KEY_ID: ${CRAY_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${CRAY_MIRRORS_AWS_SECRET_ACCESS_KEY}
rules:
- if: $CI_COMMIT_REF_NAME == "develop"
# Pipelines on develop only rebuild what is missing from the mirror
when: always
variables:
SPACK_PIPELINE_TYPE: "spack_protected_branch"
- if: $CI_COMMIT_REF_NAME =~ /^pr[\d]+_.*$/
# Pipelines on PR branches rebuild only what's missing, and do extra pruning
when: always
variables:
SPACK_PIPELINE_TYPE: "spack_pull_request"
SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-cray/prs/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
SPACK_PRUNE_UNTOUCHED: "True"
SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH: "1"
.generate-cray:
tags: [ "cce@15.0.1", "cray-zen4", "public" ]
extends: [ ".base-cray-job" ]
stage: generate
script:
- echo $PATH
- module avail
- module list
- export SPACK_DISABLE_LOCAL_CONFIG=1
- export SPACK_USER_CACHE_PATH=$(pwd)/_user_cache
- uname -a || true
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
- nproc || true
- . "./share/spack/setup-env.sh"
- spack --version
- cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
- spack env activate --without-view .
- export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs"
- spack
--config-scope "${SPACK_CI_CONFIG_ROOT}"
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
${CI_STACK_CONFIG_SCOPES}
ci generate --check-index-only
--buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}"
--artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
--output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
after_script:
- cat /proc/loadavg || true
artifacts:
paths:
- "${CI_PROJECT_DIR}/jobs_scratch_dir"
interruptible: true
timeout: 60 minutes
retry:
max: 2
when:
- always
.build-cray:
extends: [ ".base-cray-job" ]
stage: build
#######################################
# E4S - Cray
#######################################
.e4s-cray:
extends: [ ".cray_zen4" ]
variables:
SPACK_CI_STACK_NAME: e4s-cray
e4s-cray-generate:
extends: [ ".generate-cray", ".e4s-cray" ]
e4s-cray-build:
extends: [ ".build-cray", ".e4s-cray" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: e4s-cray-generate
strategy: depend
needs:
- artifacts: True
job: e4s-cray-generate

View File

@@ -1,295 +0,0 @@
ci:
pipeline-gen:
- build-job-remove:
image: no-image
- build-job:
script+:
# AWS runners mount E4S public key (verification), UO runners mount public/private (signing/verification)
- if [[ -r /etc/protected-runner/e4s.gpg ]]; then spack gpg trust /etc/protected-runner/e4s.gpg; fi
# UO runners mount intermediate ci public key (verification), AWS runners mount public/private (signing/verification)
- if [[ -r /etc/protected-runner/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /etc/protected-runner/intermediate_ci_signing_key.gpg; fi
- if [[ -r /etc/protected-runner/spack_public_key.gpg ]]; then spack gpg trust /etc/protected-runner/spack_public_key.gpg; fi
- match_behavior: first
submapping:
- match:
- hipblas
- llvm
- llvm-amdgpu
- pango
- paraview
- py-tensorflow
- py-torch
- qt
- rocblas
- visit
build-job:
tags: [ "spack", "huge" ]
variables:
CI_JOB_SIZE: huge
SPACK_BUILD_JOBS: "12"
- match:
- ascent
- atk
- axom
- cistem
- cmake
- ctffind
- cuda
- dealii
- dray
- dyninst
- ecp-data-vis-sdk
- gcc
- ginkgo
- hdf5
- hpx
- kokkos-kernels
- kokkos-nvcc-wrapper
- lbann
- magma
- mesa
- mfem
- mpich
- netlib-lapack
- nvhpc
- oce
- openblas
- openfoam
- openturns
- parallelio
- plumed
- precice
#- py-tensorflow
#- qt
- raja
- relion
#- rocblas
- rocfft
- rocsolver
- rocsparse
- rust
- slate
- strumpack
- sundials
- trilinos
- umpire
#- visit
- vtk
- vtk-h
- vtk-m
- warpx
- wrf
- wxwidgets
build-job:
tags: [ "spack", "large" ]
variables:
CI_JOB_SIZE: large
SPACK_BUILD_JOBS: "8"
- match:
- adios2
- amrex
- archer
- ascent
- autoconf-archive
- axom
- binutils
- blaspp
- blt
- boost
- butterflypack
- cabana
- caliper
- camp
- chai
- conduit
- curl
- datatransferkit
- double-conversion
- dray
- eigen
- faodel
- ffmpeg
- fftw
- fortrilinos
- gettext
- gperftools
- gptune
- hdf5
- heffte
- hpctoolkit
- hwloc
- hydrogen
- hypre
- kokkos
- lammps
- lapackpp
- legion
- libtool
- libxml2
- libzmq
- llvm-openmp-ompt
- mbedtls
- mfem
- mpich
- mvapich2
- nasm
- netlib-scalapack
- omega-h
- openblas
- openjpeg
- openmpi
- openpmd-api
- pagmo2
- papyrus
- parsec
- pdt
- pegtl
- petsc
- pumi
- py-beniget
- py-cinemasci
- pygmo
- py-ipython-genutils
- py-packaging
- py-petsc4py
- py-scipy
- py-statsmodels
- py-warlock
- py-warpx
- raja
- samrai
- slepc
- slurm
- sqlite
- strumpack
- sundials
- superlu-dist
- tasmanian
- tau
- upcxx
- vtk
- vtk-h
- vtk-m
- zfp
build-job:
tags: [ "spack", "medium" ]
variables:
CI_JOB_SIZE: "medium"
SPACK_BUILD_JOBS: "2"
- match:
- alsa-lib
- ant
- antlr
- argobots
- autoconf-archive
- automake
- berkeley-db
- bison
- blt
- bzip2
- camp
- cmake
- curl
- czmq
- darshan-util
- diffutils
- docbook-xml
- exmcutils
- expat
- findutils
- flit
- freetype
- gawk
- gdbm
- gettext
- glib
- gmake
- gotcha
- hpcviewer
- hwloc
- jansson
- json-c
- libbsd
- libedit
- libevent
- libfabric
- libffi
- libgcrypt
- libiconv
- libidn2
- libjpeg-turbo
- libmd
- libnrm
- libpciaccess
- libpng
- libsigsegv
- libsodium
- libunistring
- libunwind
- libxml2
- libyaml
- libzmq
- lua
- lua-luaposix
- lz4
- m4
- meson
- metis
- mpfr
- ncurses
- ninja
- numactl
- openblas
- openjdk
- openssh
- openssl
- papi
- parallel-netcdf
- pcre
- pcre2
- pdsh
- perl
- perl-data-dumper
- pkgconf
- py-alembic
- py-cffi
- py-cycler
- py-decorator
- py-idna
- py-jsonschema
- py-kiwisolver
- py-mistune
- py-pycparser
- py-setuptools
- py-setuptools-scm
- py-six
- py-testpath
- py-wheel
- qhull
- readline
- sed
- slurm
- snappy
- sqlite
- superlu
- swig
- tar
- tcl
- texinfo
- tut
- unzip
- util-linux-uuid
- util-macros
- xz
- yaml-cpp
- zfp
- zlib
- zstd
build-job:
tags: [ "spack", "small" ]
variables:
CI_JOB_SIZE: "small"
SPACK_BUILD_JOBS: "1"

View File

@@ -1,9 +0,0 @@
config:
concretizer: clingo
db_lock_timeout: 120
install_tree:
root: $spack/opt/spack
padded_length: 256
projections:
all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'

View File

@@ -1,4 +0,0 @@
ci:
pipeline-gen:
- build-job:
tags: ["cce@15.0.1", "cray-zen4"]

View File

@@ -42,11 +42,8 @@ spack:
- spack --version
- spack arch
# Use gcc from local container buildcache
- - spack buildcache rebuild-index /bootstrap/local-cache/
- sed -i.bkp s/"spack install gcc"/"spack install --cache-only --reuse gcc"/ /bootstrap/postinstall.sh
- spack mirror add local-cache /bootstrap/local-cache
- - spack mirror add local-cache /bootstrap/local-cache
- spack gpg trust /bootstrap/public-key
- diff /bootstrap/postinstall.sh /bootstrap/postinstall.sh.bkp || echo Done
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
- signing-job:

View File

@@ -44,11 +44,8 @@ spack:
- spack --version
- spack arch
# Use gcc from local container buildcache
- - spack buildcache rebuild-index /bootstrap/local-cache/
- sed -i.bkp s/"spack install gcc"/"spack install --cache-only --reuse gcc"/ /bootstrap/postinstall.sh
- spack mirror add local-cache /bootstrap/local-cache
- - spack mirror add local-cache /bootstrap/local-cache
- spack gpg trust /bootstrap/public-key
- diff /bootstrap/postinstall.sh /bootstrap/postinstall.sh.bkp || echo Done
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
- signing-job:

View File

@@ -44,11 +44,8 @@ spack:
- spack --version
- spack arch
# Use gcc from local container buildcache
- - spack buildcache rebuild-index /bootstrap/local-cache/
- sed -i.bkp s/"spack install gcc"/"spack install --cache-only --reuse gcc"/ /bootstrap/postinstall.sh
- spack mirror add local-cache /bootstrap/local-cache
- - spack mirror add local-cache /bootstrap/local-cache
- spack gpg trust /bootstrap/public-key
- diff /bootstrap/postinstall.sh /bootstrap/postinstall.sh.bkp || echo Done
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
- signing-job:

View File

@@ -42,11 +42,8 @@ spack:
- spack --version
- spack arch
# Use gcc from local container buildcache
- - spack buildcache rebuild-index /bootstrap/local-cache/
- sed -i.bkp s/"spack install gcc"/"spack install --cache-only --reuse gcc"/ /bootstrap/postinstall.sh
- spack mirror add local-cache /bootstrap/local-cache
- - spack mirror add local-cache /bootstrap/local-cache
- spack gpg trust /bootstrap/public-key
- diff /bootstrap/postinstall.sh /bootstrap/postinstall.sh.bkp || echo Done
- - /bin/bash "${SPACK_ARTIFACTS_ROOT}/postinstall.sh" -fg
- spack config --scope site add "packages:all:target:\"target=${SPACK_TARGET_ARCH}\""
- signing-job:

View File

@@ -14,7 +14,7 @@ spack:
llvm:
require: "@14:"
# Minimize LLVM
variants: "~lldb~lld~polly~gold libunwind=none compiler-rt=none"
variants: ~lldb~lld~internal_unwind~polly~compiler-rt~gold
all:
require: target=x86_64_v3

View File

@@ -1,102 +0,0 @@
spack:
view: false
concretizer:
reuse: false
unify: false
compilers:
- compiler:
spec: cce@15.0.1
paths:
cc: cc
cxx: CC
f77: ftn
fc: ftn
flags: {}
operating_system: rhel8
target: any
modules:
- PrgEnv-cray/8.3.3
- cce/15.0.1
environment:
set:
MACHTYPE: x86_64
- compiler:
spec: gcc@11.2.0
paths:
cc: gcc
cxx: g++
f77: gfortran
fc: gfortran
flags: {}
operating_system: rhel8
target: any
modules:
- PrgEnv-gnu
- gcc/11.2.0
environment: {}
packages:
all:
require: '%cce@15.0.1'
compiler: [cce@15.0.1]
providers:
blas: [cray-libsci]
lapack: [cray-libsci]
mpi: [cray-mpich]
tbb: [intel-tbb]
scalapack: [netlib-scalapack]
target: [zen4]
variants: +mpi
binutils:
variants: +ld +gold +headers +libiberty ~nls
hdf5:
variants: +fortran +hl +shared
libunwind:
variants: +pic +xz
ncurses:
require: '@6.3 +termlib'
openblas:
require: '@0.3.20'
variants: threads=openmp
python:
require: '@3.7.15'
xz:
variants: +pic
elfutils:
variants: +bzip2 ~nls +xz
require: '%gcc'
# EXTERNALS
cray-mpich:
buildable: false
externals:
- spec: cray-mpich@8.1.25 %cce@15.0.1
prefix: /opt/cray/pe/mpich/8.1.25/ofi/cray/10.0
modules:
- cray-mpich/8.1.25
cray-libsci:
buildable: false
externals:
- spec: cray-libsci@23.02.1.1 %cce@15.0.1
prefix: /opt/cray/pe/libsci/23.02.1.1/CRAY/9.0/x86_64/
modules:
- cray-libsci/23.02.1.1
specs:
- butterflypack
- hypre
- kokkos
- kokkos-kernels
- petsc
- raja
- slepc
- superlu-dist
- tau
mirrors: { "mirror": "s3://spack-binaries-cray/develop/e4s-cray" }
cdash:
build-group: E4S Cray

View File

@@ -1,87 +1,35 @@
spack:
view: false
concretizer:
reuse: false
unify: false
packages:
all:
require: '%oneapi'
providers:
blas: [openblas]
mpi: [mpich]
tbb: [intel-tbb]
target: [x86_64]
variants: +mpi
elfutils:
variants: +bzip2 ~nls +xz
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]
hdf5:
require: "%gcc"
variants: +fortran +hl +shared
mpi:
require: mpich
mpich:
require: '@4.1.1 ~wrapperrpath ~hwloc'
python:
require: '@3.7.15'
py-cryptography:
require: '@38.0.1'
unzip:
require: '%gcc'
binutils:
require: '%gcc'
variants: +ld +gold +headers +libiberty ~nls
llvm:
require: '%gcc'
ruby:
require: '%gcc'
rust:
require: '%gcc'
krb5:
require: '%gcc'
papi:
require: '%gcc'
openssh:
require: '%gcc'
bison:
require: '%gcc'
libffi:
require: "@3.4.4"
dyninst:
require: "%gcc"
compilers:
- compiler:
spec: oneapi@2023.1.0
spec: dpcpp@2023.0.0
paths:
cc: /opt/intel/oneapi/compiler/2023.1.0/linux/bin/icx
cxx: /opt/intel/oneapi/compiler/2023.1.0/linux/bin/icpx
f77: /opt/intel/oneapi/compiler/2023.1.0/linux/bin/ifx
fc: /opt/intel/oneapi/compiler/2023.1.0/linux/bin/ifx
cc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/icx
cxx: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/dpcpp
f77: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
fc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
flags: {}
operating_system: ubuntu20.04
target: x86_64
modules: []
environment: {}
modules: [compiler]
environment:
prepend_path:
LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2023.0.0/linux/compiler/lib/intel64_lin
extra_rpaths: []
- compiler:
spec: oneapi@2023.0.0
paths:
cc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/icx
cxx: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/icpx
f77: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
fc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
flags: {}
operating_system: ubuntu20.04
target: x86_64
modules: [compiler]
environment:
prepend_path:
LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2023.0.0/linux/compiler/lib/intel64_lin
extra_rpaths: []
- compiler:
spec: gcc@11.1.0
@@ -97,53 +45,108 @@ spack:
environment: {}
extra_rpaths: []
packages:
all:
require: '%oneapi'
providers:
blas: [openblas]
mpi: [mpich]
target: [x86_64]
variants: +mpi
elfutils:
variants: +bzip2 ~nls +xz
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
mpich:
variants: ~wrapperrpath
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
python:
version: [3.8.13]
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
vtk-m:
require: ~openmp
xz:
variants: +pic
mesa:
version: [21.3.8]
binutils:
require: '%gcc'
variants: +ld +gold +headers +libiberty ~nls
bison:
require: '%gcc'
krb5:
require: '%gcc'
llvm:
require: '%gcc'
m4:
require: '%gcc'
openssh:
require: '%gcc'
papi:
require: '%gcc'
py-scipy:
require: '%gcc'
ruby:
require: '%gcc'
rust:
require: '%gcc'
unzip:
require: '%gcc'
specs:
# CPU
- adios
- adios2
- alquimia
- aml
- amrex
- arborx
- archer
- argobots
- ascent
- amrex
- axom
- bolt
- boost
- bricks ~cuda
- butterflypack
- cabana
- caliper
- chai ~benchmarks ~tests
- charliecloud
- conduit
- darshan-runtime
- darshan-util
- datatransferkit
- exaworks
- flecsi
- faodel
- flit
- flux-core
- fortrilinos
- gasnet
- globalarrays
- gmp
- gotcha
- h5bench
- hdf5-vol-async
- hdf5-vol-log
- hdf5 +fortran +hl +shared
- heffte +fftw
- hpx networking=mpi
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- kokkos +openmp
- lammps
- lbann
- legion
- libnrm
- libnrm
- libquo
- libunwind
- loki
- mercury
- metall
- mfem
- mgard +serial +openmp +timing +unstructured ~cuda
- mpark-variant
- mpifileutils ~xattr
- nccmp
@@ -154,19 +157,17 @@ spack:
- openpmd-api
- papi
- papyrus
- parallel-netcdf
- parsec ~cuda
- pdt
- petsc
- phist
- plasma
- plumed
- precice
- pumi
- py-h5py
- py-libensemble
- py-petsc4py
- qthreads scheduler=distrib
- quantum-espresso
- raja
- rempi
- slate ~cuda
@@ -174,80 +175,104 @@ spack:
- stc
- strumpack ~slate
- sundials
- superlu
- superlu-dist
- swig@4.0.2-fortran
- sz3
- superlu
- swig
- sz
- tasmanian
- trilinos@13.0.1 +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- trilinos@13.0.1 +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
- umpire
- variorum
- unifyfs
- upcxx
- veloc
- vtk-m ~openmp # can't build +openmp w/ %oneapi: https://github.com/spack/spack/issues/31830
- wannier90
# INCLUDED IN ECP DAV CPU
# - adios2
# - ascent
# - darshan-runtime
# - darshan-util
# - faodel
# - hdf5
# - libcatalyst
# - parallel-netcdf
# - paraview
# - py-cinemasci
# - sz
# - unifyfs
# - veloc
# - visit
# - vtk-m ~openmp # https://github.com/spack/spack/issues/31830
# - zfp
# --
# - alquimia # pflotran: pflotran/hdf5_aux.F90(5): error #7013: This module file was not generated by any release of this compiler. [HDF5]
# - dealii # intel-tbb: icpx: error: unknown argument: '-flifetime-dse=1'
# - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 +paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp # sz: hdf5-filter/H5Z-SZ/src/H5Z_SZ.c:24:9: error: call to undeclared function 'gettimeofday'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration]
# - geopm # geopm: In file included from src/ProfileTable.cpp:34: ./src/ProfileTable.hpp:79:45: error: no type named 'string' in namespace 'std'
# - ginkgo # ginkgo: icpx: error: clang frontend command failed with exit code 139 (use -v to see invocation)
# - gptune ~mpispawn # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__'
# - hdf5-vol-cache # /H5VLcache_ext.c:580:9: error: incompatible function pointer types initializing 'herr_t (*)(const void *, uint64_t *)' (aka 'int (*)(const void *, unsigned long *)') with an expression of type 'herr_t (const void *, unsigned int *)' (aka 'int (const void *, unsigned int *)') [-Wincompatible-function-pointer-types]
# - hpctoolkit # intel-tbb: icpx: error: unknown argument: '-flifetime-dse=1'
# - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs: c-blosc/internal-complibs/zlib-1.2.8/gzread.c:30:15: error: call to undeclared function 'read'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration]
# - nrm # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__'
# - openfoam # adios2: patch failed
# - pruners-ninja # pruners-ninja: ninja_test_pingpong.c:79:5: error: call to undeclared library function 'memset' with type 'void *(void *, int, unsigned long)'; ISO C99 and later do not support implicit function declarations [-Wimplicit-function-declaration]
# - py-jupyterhub # py-ruamel-yaml-clib: setuptools/dist.py:287: SetuptoolsDeprecationWarning: The namespace_packages parameter is deprecated, consider using implicit namespaces instead (PEP 420). See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages
# - py-warpx ^warpx dims=2 # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__'
# - py-warpx ^warpx dims=3 # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__'
# - py-warpx ^warpx dims=rz # py-scipy: for_main.c:(.text+0x19): undefined reference to `MAIN__'
# - scr # libyogrt: configure: error: slurm is not in specified location!
# - tau +mpi +python # tau: x86_64/lib/Makefile.tau-icpx-papi-mpi-pthread-python-pdt: No such file or directory
# - upcxx # upcxx: /opt/intel/oneapi/mpi/2021.9.0//libfabric/bin/fi_info: error while loading shared libraries: libfabric.so.1: cannot open shared object file: No such file or directory
# - xyce +mpi +shared +pymi +pymi_static_tpls ^trilinos~shylu # cmake/tps.cmake:220 (message): Unable to compile against Trilinos. It is possible Trilinos was not properly configured, or the environment has changed since Trilinos was installed. See the CMake log files for more information.
- zfp
# GPU
- aml +ze
- amrex +sycl
- arborx +sycl ^kokkos +sycl +openmp std=17 +tests +examples
- cabana +sycl ^kokkos +sycl +openmp std=17 +tests +examples
- kokkos +sycl +openmp std=17 +tests +examples
- kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp std=17 +tests +examples
- kokkos +sycl +openmp std=17 +tests +examples %oneapi
- kokkos-kernels build_type=Release ^kokkos +sycl +openmp std=17 +tests +examples %oneapi
# CPU FAILURES
# - bricks # bricks
# - charliecloud # charliecloud
# - dyninst # old intel-tbb
# - exaworks # py-setuptools-scm
# - flux-core # py-setuptools-scm
# - geopm # geopm
# - ginkgo # ginkgo
# - gptune # py-scipy@1.3.3
# - h5bench # h5bench
# - hpctoolkit # dyninst
# - hpx max_cpu_count=512 networking=mpi # boost cxxstd=17
# - nrm # py-scipy
# - paraview +qt # qt
# - phist # phist
# - pruners-ninja # pruners-ninja
# - py-cinemasci # py-scipy@1.3.3, py-setuptools-scm
# - py-jupyterhub # py-setuptools-scm
# - py-warpx ^warpx dims=2 # py-scipy@1.5.4
# - py-warpx ^warpx dims=3 # py-scipy@1.5.4
# - py-warpx ^warpx dims=rz # py-scipy@1.5.4
# - scr # libyogrt
# - swig@4.0.2-fortran # swig
# - tau +mpi +python # tau
# - variorum # variorum
# --
# amrex: /opt/intel/oneapi/compiler/2023.0.0/linux/bin-llvm/../include/sycl/detail/defines_elementary.hpp:52:40: note: expanded from macro '__SYCL2020_DEPRECATED'
# binutils: gold/powerpc.cc:3590: undefined reference to `gold::Sized_symbol<64>::Value_type gold::Symbol_table::compute_final_value<64>(gold::Sized_symbol<64> const*, gold::Symbol_table::Compute_final_value_status*) const'
# boost cxxstd=17: ./boost/mpl/aux_/integral_wrapper.hpp:73:31: error: integer value -1 is outside the valid range of values [0, 3] for this enumeration type [-Wenum-constexpr-conversion]
# bricks: cc1plus: error: bad value ('OFF') for '-mtune=' switch
# charliecloud: autoreconf phase: RuntimeError: configure script not found in ...
# flux-sched: include/yaml-cpp/emitter.h:164:9: error: comparison with NaN always evaluates to false in fast floating point modes [-Werror,-Wtautological-constant-compare]
# flux-sched: include/yaml-cpp/emitter.h:171:24: error: comparison with infinity always evaluates to false in fast floating point modes [-Werror,-Wtautological-constant-compare]
# ginkgo: icpx: error: clang frontend command failed with exit code 139
# h5bench: commons/h5bench_util.h:196: multiple definition of `has_vol_async';
# intel-tbb: clang++: error: unknown argument: '-flifetime-dse=1'
# libyogrt: configure: error: slurm is not in specified location!
# phist: fortran_bindings/test/kernels.F90(63): error #8284: If the actual argument is scalar, the dummy argument shall be scalar unless the actual argument is of type character or is an element of an array that is not assumed shape, pointer, or polymorphic. [ARGV]
# pruners-ninja: test/ninja_test_util.c:34: multiple definition of `a';
# py-cryptography: ??
# py-scipy@1.3.3: gcc: error: unrecognized command-line option '-fp-model=strict'
# py-scipy@1.5.4: gcc: error: unrecognized command-line option '-fp-model=strict'
# py-setuptools-scm: ??
# ruby: limits.c:415:34: error: invalid suffix 'D' on floating constant
# rust: /usr/bin/ld: /opt/intel/oneapi/compiler/2022.1.0/linux/bin-llvm/../compiler/lib/intel64_lin/libimf.a(libm_feature_flag.o): in function `__libm_feature_flag_init': libm_feature_flag.c:(.text+0x25): undefined reference to `__intel_cpu_feature_indicator_x'
# swig@4.0.2-fortran: /spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-11.1.0/m4-1.4.19-p3otmjixpi6zibdsyoqib5dpzfshq3nj/bin/m4:/spack/opt/spack/linux-ubuntu20.04-x86_64/oneapi-2023.0.0/bison-3.8.2-xca2sot4jhd72hvj2m2b3ajchagczvau/share/bison/skeletons/yacc.c:420: undefined macro `b4_symbol(103, tag)'
# tau: Error: Unable to identify ifort lib directory
# variorum: ld: Intel/CMakeFiles/variorum_intel.dir/msr_core.c.o:(.bss+0x0): multiple definition of `g_platform'; CMakeFiles/variorum.dir/config_architecture.c.o:(.bss+0x0): first defined here
# vtk-m +openmp: clang++: error: clang frontend command failed with exit code 139 (use -v to see invocation)
# GPU FAILURES
# - amrex +sycl # amrex
# - ginkgo +oneapi # ginkgo
# - hpctoolkit +level_zero # dyninst
# - sundials +sycl cxxstd=17 # sundials
# - tau +mpi +opencl +level_zero ~pdt # tau
# --
# - ginkgo +oneapi # InstallError: Ginkgo's oneAPI backend requires the DPC++ compiler as main CXX compiler.
# - hpctoolkit +level_zero # intel-tbb: icpx: error: unknown argument: '-flifetime-dse=1'
# - sundials +sycl cxxstd=17 # sundials: include/sunmemory/sunmemory_sycl.h:20:10: fatal error: 'CL/sycl.hpp' file not found
# - tau +mpi +opencl +level_zero ~pdt # builds ok in container, but needs libdrm, will update container
# SKIPPED
# - nvhpc
# - dyninst # only %gcc
# - flecsi # dependency pfunit marks oneapi as an unsupported compiler
mirrors: { "mirror": "s3://spack-binaries/develop/e4s-oneapi" }
ci:
pipeline-gen:
- build-job:
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023.06.01
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
before_script:
- . /bootstrap/runner/view/lmod/lmod/init/bash
- module use /opt/intel/oneapi/modulefiles
- module load compiler
cdash:
build-group: E4S OneAPI

View File

@@ -26,7 +26,7 @@ spack:
- py-transformers
# JAX
# - py-jax # bazel codesign
- py-jax
# - py-jaxlib # bazel codesign
# Keras

View File

@@ -1695,7 +1695,7 @@ _spack_restage() {
_spack_solve() {
if $list_options
then
SPACK_COMPREPLY="-h --help --show -l --long -L --very-long -I --install-status --no-install-status -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats -U --fresh --reuse --reuse-deps"
SPACK_COMPREPLY="-h --help --show -l --long -L --very-long -I --install-status -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats -U --fresh --reuse --reuse-deps"
else
_all_packages
fi
@@ -1704,7 +1704,7 @@ _spack_solve() {
_spack_spec() {
if $list_options
then
SPACK_COMPREPLY="-h --help -l --long -L --very-long -I --install-status --no-install-status -y --yaml -j --json --format -c --cover -N --namespaces -t --types -U --fresh --reuse --reuse-deps"
SPACK_COMPREPLY="-h --help -l --long -L --very-long -I --install-status -y --yaml -j --json --format -c --cover -N --namespaces -t --types -U --fresh --reuse --reuse-deps"
else
_all_packages
fi

View File

@@ -37,7 +37,7 @@ RUN find -L {{ paths.view }}/* -type f -exec readlink -f '{}' \; | \
# Modifications to the environment that are necessary to run
RUN cd {{ paths.environment }} && \
spack env activate --sh -d . > activate.sh
spack env activate --sh -d . >> /etc/profile.d/z10_spack_environment.sh
{% if extra_instructions.build %}
{{ extra_instructions.build }}
@@ -53,13 +53,7 @@ COPY --from=builder {{ paths.environment }} {{ paths.environment }}
COPY --from=builder {{ paths.store }} {{ paths.store }}
COPY --from=builder {{ paths.hidden_view }} {{ paths.hidden_view }}
COPY --from=builder {{ paths.view }} {{ paths.view }}
RUN { \
echo '#!/bin/sh' \
&& echo '.' {{ paths.environment }}/activate.sh \
&& echo 'exec "$@"'; \
} > /entrypoint.sh \
&& chmod a+x /entrypoint.sh
COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh
{% block final_stage %}
@@ -76,6 +70,6 @@ RUN {% if os_package_update %}{{ os_packages_final.update }} \
{% for label, value in labels.items() %}
LABEL "{{ label }}"="{{ value }}"
{% endfor %}
ENTRYPOINT [ "/entrypoint.sh" ]
ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l", "-c", "$*", "--" ]
CMD [ "/bin/bash" ]
{% endif %}

View File

@@ -1,7 +1,7 @@
{% extends "container/bootstrap-base.dockerfile" %}
{% block install_os_packages %}
RUN dnf update -y \
&& dnf install -y \
RUN yum update -y \
&& yum install -y \
bzip2 \
curl \
file \
@@ -23,6 +23,6 @@ RUN dnf update -y \
unzip \
zstd \
&& pip3 install boto3 \
&& rm -rf /var/cache/dnf \
&& dnf clean all
&& rm -rf /var/cache/yum \
&& yum clean all
{% endblock %}

View File

@@ -1,9 +1,9 @@
{% extends "container/bootstrap-base.dockerfile" %}
{% block install_os_packages %}
RUN dnf update -y \
&& dnf install -y epel-release \
&& dnf update -y \
&& dnf --enablerepo epel install -y \
RUN yum update -y \
&& yum install -y epel-release \
&& yum update -y \
&& yum --enablerepo epel install -y \
bzip2 \
curl-minimal \
file \
@@ -25,6 +25,6 @@ RUN dnf update -y \
unzip \
zstd \
&& pip3 install boto3 \
&& rm -rf /var/cache/dnf \
&& dnf clean all
&& rm -rf /var/cache/yum \
&& yum clean all
{% endblock %}

View File

@@ -1,13 +1,13 @@
{% extends "container/bootstrap-base.dockerfile" %}
{% block install_os_packages %}
RUN dnf update -y \
RUN yum update -y \
# See https://fedoraproject.org/wiki/EPEL#Quickstart for powertools
&& dnf install -y dnf-plugins-core \
&& yum install -y dnf-plugins-core \
&& dnf config-manager --set-enabled powertools \
&& dnf install -y epel-release \
&& dnf update -y \
&& dnf --enablerepo epel groupinstall -y "Development Tools" \
&& dnf --enablerepo epel install -y \
&& yum install -y epel-release \
&& yum update -y \
&& yum --enablerepo epel groupinstall -y "Development Tools" \
&& yum --enablerepo epel install -y \
curl \
findutils \
gcc-c++ \
@@ -24,6 +24,6 @@ RUN dnf update -y \
python38-setuptools \
unzip \
&& pip3 install boto3 \
&& rm -rf /var/cache/dnf \
&& dnf clean all
&& rm -rf /var/cache/yum \
&& yum clean all
{% endblock %}

View File

@@ -1,7 +1,7 @@
{% extends "container/bootstrap-base.dockerfile" %}
{% block install_os_packages %}
RUN dnf update -y \
&& dnf install -y \
RUN yum update -y \
&& yum install -y \
bzip2 \
curl \
file \
@@ -24,6 +24,6 @@ RUN dnf update -y \
zstd \
xz \
&& pip3 install boto3 \
&& rm -rf /var/cache/dnf \
&& dnf clean all
&& rm -rf /var/cache/yum \
&& yum clean all
{% endblock %}

View File

@@ -1,7 +1,7 @@
{% extends "container/bootstrap-base.dockerfile" %}
{% block install_os_packages %}
RUN dnf update -y \
&& dnf install -y \
RUN yum update -y \
&& yum install -y \
bzip2 \
curl \
file \
@@ -24,6 +24,6 @@ RUN dnf update -y \
xz \
zstd \
&& pip3 install boto3 \
&& rm -rf /var/cache/dnf \
&& dnf clean all
&& rm -rf /var/cache/yum \
&& yum clean all
{% endblock %}

View File

@@ -1,7 +1,7 @@
{% extends "container/bootstrap-base.dockerfile" %}
{% block install_os_packages %}
RUN dnf update -y \
&& dnf install -y \
RUN yum update -y \
&& yum install -y \
bzip2 \
curl \
file \
@@ -24,6 +24,6 @@ RUN dnf update -y \
xz \
zstd \
&& pip3 install boto3 \
&& rm -rf /var/cache/dnf \
&& dnf clean all
&& rm -rf /var/cache/yum \
&& yum clean all
{% endblock %}

View File

@@ -1,9 +1,9 @@
{% extends "container/bootstrap-base.dockerfile" %}
{% block install_os_packages %}
RUN dnf update -y \
&& dnf install -y epel-release \
&& dnf update -y \
&& dnf --enablerepo epel install -y \
RUN yum update -y \
&& yum install -y epel-release \
&& yum update -y \
&& yum --enablerepo epel install -y \
bzip2 \
curl-minimal \
file \
@@ -26,6 +26,6 @@ RUN dnf update -y \
xz \
zstd \
&& pip3 install boto3 \
&& rm -rf /var/cache/dnf \
&& dnf clean all
&& rm -rf /var/cache/yum \
&& yum clean all
{% endblock %}

View File

@@ -27,8 +27,7 @@ class Abinit(AutotoolsPackage):
homepage = "https://www.abinit.org/"
url = "https://www.abinit.org/sites/default/files/packages/abinit-8.6.3.tar.gz"
version("9.8.4", sha256="a086d5045f0093b432e6a044d5f71f7edf5a41a62d67b3677cb0751d330c564a")
version("9.8.3", sha256="de823878aea2c20098f177524fbb4b60de9b1b5971b2e835ec244dfa3724589b")
version("9.8.3", sha256="65fb93217336a72d1554cc6991127203958cc7df59921782251a86569e33a357")
version("9.6.1", sha256="b6a12760fd728eb4aacca431ae12150609565bedbaa89763f219fcd869f79ac6")
version("9.4.2", sha256="d40886f5c8b138bb4aa1ca05da23388eb70a682790cfe5020ecce4db1b1a76bc")
version("8.10.3", sha256="ed626424b4472b93256622fbb9c7645fa3ffb693d4b444b07d488771ea7eaa75")

View File

@@ -24,7 +24,6 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
maintainers("WeiqunZhang", "asalmgren", "etpalmer63")
version("develop", branch="development")
version("23.06", sha256="3bddcb07cce3e65e06cac35005c30820d311ce47ae54b46e4af333fa272b236b")
version("23.05", sha256="a4bf5ad5322e706b9fae46ff52043e2cca5ddba81479647816251e9ab21c0027")
version("23.04", sha256="b070949611abd2156208e675e40e5e73ed405bf83e3b1e8ba70fbb451a9e7dd7")
version("23.03", sha256="e17c721b1aba4f66e467723f61b59e56c02cf1b72cab5a2680b13ff6e79ef903")

View File

@@ -14,7 +14,6 @@ class Armadillo(CMakePackage):
homepage = "http://arma.sourceforge.net/"
url = "http://sourceforge.net/projects/arma/files/armadillo-8.100.1.tar.xz"
version("12.4.0", sha256="9905282781ced3f99769b0e45a705ecb50192ca1622300707b3302ea167dc883")
version("12.2.0", sha256="b0dce042297e865add3351dad77f78c2c7638d6632f58357b015e50edcbd2186")
version("12.0.1", sha256="230a5c75daad52dc47e1adce8f5a50f9aa4e4354e0f1bb18ea84efa2e70e20df")
version("10.5.0", sha256="ea990c34dc6d70d7c95b4354d9f3b0819bde257dbb67796348e91e196082cb87")

View File

@@ -12,7 +12,6 @@ class Asdcplib(AutotoolsPackage):
homepage = "https://github.com/cinecert/asdcplib"
url = "https://github.com/cinecert/asdcplib/archive/rel_2_10_35.tar.gz"
version("2_10_38", sha256="f8cb3b1fecfe18f1a64e12e96e5696480631509e9088e29f5a259eb25b1b1656")
version("2_10_35", sha256="a68eec9ae0cc363f75331dc279c6dd6d3a9999a9e5f0a4405fd9afa8a29ca27b")
version("2_10_34", sha256="faa54ee407c1afceb141e08dae9ebf83b3f839e9c49a1793ac741ec6cdee5c3c")
version("2_10_33", sha256="16fafb5da3d46b0f44570ef9780c85dd82cca60106a9e005e538809ea1a95373")

View File

@@ -16,7 +16,6 @@ class AtSpi2Core(MesonPackage):
list_url = "http://ftp.gnome.org/pub/gnome/sources/at-spi2-core"
list_depth = 1
version("2.48.3", sha256="37316df43ca9989ce539d54cf429a768c28bb38a0b34950beadd0421827edf55")
version("2.48.0", sha256="905a5b6f1790b68ee803bffa9f5fab4ceb591fb4fae0b2f8c612c54f1d4e8a30")
version("2.47.90", sha256="71189c21af7bd084a12ab85b229c2d798936470b12fb0c3f177e37181fb6c00c")
version("2.40.1", sha256="9f66e3a4ee42db897af478a826b1366d7011a6d55ddb7e9d4bfeb3300ab23856")

View File

@@ -14,7 +14,6 @@ class Azcopy(Package):
homepage = "https://github.com/Azure/azure-storage-azcopy"
url = "https://github.com/Azure/azure-storage-azcopy/archive/refs/tags/v10.18.1.tar.gz"
version("10.19.0", sha256="33ce1539b56a4e9a38140374630bd9640157bb44d0c57b3224a5e5f592ab5399")
version("10.18.1", sha256="80292625d7f1a6fc41688c5948b3a20cfdae872464d37d831e20999430819c3f")
depends_on("go", type="build")

View File

@@ -11,22 +11,13 @@ class BdwGc(AutotoolsPackage):
collecting replacement for C malloc or C++ new."""
homepage = "https://www.hboehm.info/gc/"
url = "https://github.com/ivmai/bdwgc/releases/download/v8.2.4/gc-8.2.4.tar.gz"
url = "https://github.com/ivmai/bdwgc/releases/download/v8.2.2/gc-8.2.2.tar.gz"
version("8.2.4", sha256="3d0d3cdbe077403d3106bb40f0cbb563413d6efdbb2a7e1cd6886595dec48fc2")
version("8.2.2", sha256="f30107bcb062e0920a790ffffa56d9512348546859364c23a14be264b38836a0")
version("8.0.6", sha256="3b4914abc9fa76593596773e4da671d7ed4d5390e3d46fbf2e5f155e121bea11")
version("8.0.0", sha256="8f23f9a20883d00af2bff122249807e645bdf386de0de8cbd6cce3e0c6968f04")
version(
"7.6.0",
sha256="a14a28b1129be90e55cd6f71127ffc5594e1091d5d54131528c24cd0c03b7d90",
url="http://www.hboehm.info/gc/gc_source/gc-7.6.0.tar.gz",
)
version(
"7.4.4",
sha256="e5ca9b628b765076b6ab26f882af3a1a29cde786341e08b9f366604f74e4db84",
url="http://www.hboehm.info/gc/gc_source/gc-7.4.4.tar.gz",
)
version("7.6.0", sha256="a14a28b1129be90e55cd6f71127ffc5594e1091d5d54131528c24cd0c03b7d90")
version("7.4.4", sha256="e5ca9b628b765076b6ab26f882af3a1a29cde786341e08b9f366604f74e4db84")
variant("libatomic-ops", default=True, description="Use external libatomic-ops")
variant(

View File

@@ -1,25 +0,0 @@
https://github.com/arq5x/bedtools2/pull/1045
From 7d7fb513b9b05b7a0512a83520e9f60036e5ff9a Mon Sep 17 00:00:00 2001
From: David Seifert <soap@gentoo.org>
Date: Tue, 18 Apr 2023 11:59:58 +0200
Subject: [PATCH] Add missing <cstdint> include
* breaks build with GCC 13:
https://bugs.gentoo.org/895860
---
src/utils/general/ParseTools.h | 1 +
1 file changed, 1 insertion(+)
diff --git a/src/utils/general/ParseTools.h b/src/utils/general/ParseTools.h
index e056c149..3418eff1 100644
--- a/src/utils/general/ParseTools.h
+++ b/src/utils/general/ParseTools.h
@@ -16,6 +16,7 @@
#include "string.h"
#include <cstdio>
#include <cstdlib>
+#include <cstdint>
using namespace std;

View File

@@ -29,7 +29,5 @@ class Bedtools2(Package):
depends_on("xz", when="@2.29:")
depends_on("python", type="build")
patch("bedtools-gcc13.patch", level=1, when="@2.27:%gcc@13:")
def install(self, spec, prefix):
make("prefix=%s" % prefix, "install")

View File

@@ -13,7 +13,6 @@ class Bismark(Package):
homepage = "https://www.bioinformatics.babraham.ac.uk/projects/bismark"
url = "https://github.com/FelixKrueger/Bismark/archive/0.23.0.tar.gz"
version("0.24.1", sha256="c5409f5fa470ea5ac07327ced28c60b793f5ef88c5a7bc75b71dde0f52f39894")
version("0.23.0", sha256="ea1625808487c1442dbf825d9cbe5c0cbc37ea5bd1460f59e1e0ccc80cc01c9e")
version("0.19.0", sha256="91707737f96a0574956a282b635abad7560e7d90bee188a67a7807b2470deae2")
version("0.18.2", sha256="83391c5b5af33047178e7774ac25f5a69ce9315c13ae02f016baf7c50b73e702")
@@ -21,7 +20,6 @@ class Bismark(Package):
depends_on("bowtie2", type="run")
depends_on("perl", type="run")
depends_on("samtools", type="run")
depends_on("hisat2", type="run", when="@0.21.0:")
def install(self, spec, prefix):
mkdirp(prefix.bin)

View File

@@ -23,7 +23,6 @@ class Blt(Package):
# if you export targets this could cause problems in downstream
# projects if not handled properly. More info here:
# https://llnl-blt.readthedocs.io/en/develop/tutorial/exporting_targets.html
version("0.5.3", sha256="75d17caac98e78432ce25371c50d45ad3e7053820976bc5ed210bbef998f1732")
version("0.5.2", sha256="95b924cfbb2bddd9b1a92e96603b2fd485a19721d59ddf8ff50baefc1714d7ea")
version("0.5.1", sha256="ff7e87eefc48704a0721b66174612b945955adaa0a56aa69dd0473074fa4badf")
version("0.5.0", sha256="5f680ef922d0e0a7ff1b1a5fc8aa107cd4f543ad888cbc9b12639bea72a6ab1f")

View File

@@ -14,9 +14,6 @@ class Botan(MakefilePackage):
maintainers("aumuell")
version("3.0.0", sha256="5da552e00fa1c047a90c22eb5f0247ec27e7432b68b78e10a7ce0955269ccad7")
version("2.19.3", sha256="dae047f399c5a47f087db5d3d9d9e8f11ae4985d14c928d71da1aff801802d55")
version("2.19.2", sha256="3af5f17615c6b5cd8b832d269fb6cb4d54ec64f9eb09ddbf1add5093941b4d75")
version("2.19.1", sha256="e26e00cfefda64082afdd540d3c537924f645d6a674afed2cd171005deff5560")
version("2.19.0", sha256="240d9e56e6acb91ef4cf06a8a1c6c0f101c61d40cf48cccf139faef821d7040b")
version("2.18.2", sha256="541a3b13f1b9d30f977c6c1ae4c7bfdfda763cda6e44de807369dce79f42307e")
@@ -36,8 +33,6 @@ class Botan(MakefilePackage):
variant("doc", default=False, description="Build documentation")
executables = ["^botan$"]
depends_on("python", type="build")
depends_on("py-sphinx@1.2:", type="build", when="+doc")
@@ -53,8 +48,3 @@ def configure_args(self):
else:
args.append("--without-documentation")
return args
@classmethod
def determine_version(cls, exe):
output = Executable(exe)("--version", output=str, error=str)
return output

View File

@@ -14,8 +14,6 @@ class Bowtie2(MakefilePackage):
homepage = "http://bowtie-bio.sourceforge.net/bowtie2/index.shtml"
url = "http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.3.1/bowtie2-2.3.1-source.zip"
version("2.5.1", sha256="cb6cbbbb5a7167a2f21a3d63cb9774336361f540e1ec3d8ff907f955c35f71b8")
version("2.4.5", sha256="d3cbd5f323393b5649aea10325d7c4b77f02035a8b204e5ac18eba95236e076a")
version("2.4.2", sha256="4cc555eeeeb8ae2d47aaa1551f3f01b57f567a013e4e0d1f30e90f462865027e")
version("2.4.1", sha256="566d6fb01a361883747103d797308ee4bdb70f6db7d27bfc72a520587815df22")
version("2.3.5.1", sha256="335c8dafb1487a4a9228ef922fbce4fffba3ce8bc211e2d7085aac092155a53f")

View File

@@ -15,7 +15,6 @@ class CBlosc(CMakePackage):
homepage = "https://www.blosc.org"
url = "https://github.com/Blosc/c-blosc/archive/v1.11.1.tar.gz"
version("1.21.4", sha256="e72bd03827b8564bbb3dc3ea0d0e689b4863871ce3861d946f2efd7a186ecf3e")
version("1.21.2", sha256="e5b4ddb4403cbbad7aab6e9ff55762ef298729c8a793c6147160c771959ea2aa")
version("1.21.1", sha256="f387149eab24efa01c308e4cba0f59f64ccae57292ec9c794002232f7903b55b")
version("1.21.0", sha256="b0ef4fda82a1d9cbd11e0f4b9685abf14372db51703c595ecd4d76001a8b342d")

View File

@@ -6,17 +6,17 @@
import os
from spack.package import *
from spack.pkg.builtin.boost import Boost
class Casacore(CMakePackage):
"""A suite of c++ libraries for radio astronomy data processing."""
homepage = "https://github.com/casacore/casacore"
url = "https://github.com/casacore/casacore/archive/refs/tags/v3.5.0.tar.gz"
url = "https://github.com/casacore/casacore/archive/v2.4.1.tar.gz"
maintainers("mpokorny")
version("3.5.0", sha256="63f1c8eff932b0fcbd38c598a5811e6e5397b72835b637d6f426105a183b3f91")
version("3.4.0", sha256="31f02ad2e26f29bab4a47a2a69e049d7bc511084a0b8263360e6157356f92ae1")
version("3.3.0", sha256="3a714644b908ef6e81489b792cc9b80f6d8267a275e15d38a42a6a5137d39d3d")
version("3.2.0", sha256="ae5d3786cb6dfdd7ebc5eecc0c724ff02bbf6929720bc23be43a027978e79a5f")
@@ -28,17 +28,15 @@ class Casacore(CMakePackage):
depends_on("cmake@3.7.1:", type="build")
variant("openmp", default=False, description="Build OpenMP support")
variant("shared", default=True, description="Build shared libraries")
variant("readline", default=True, description="Build readline support")
# see note below about the reason for disabling the "sofa" variant
# variant('sofa', default=False, description='Build SOFA support')
variant("adios2", default=False, description="Build ADIOS2 support")
variant("dysco", default=True, when="@3.5.0:", description="Build Dysco storage manager")
variant("fftpack", default=False, description="Build FFTPack")
variant("hdf5", default=False, description="Build HDF5 support")
variant("mpi", default=False, description="Use MPI for parallel I/O")
variant("openmp", default=False, description="Build OpenMP support")
variant("python", default=False, description="Build python support")
variant("readline", default=True, description="Build readline support")
variant("shared", default=True, description="Build shared libraries")
variant("tablelocking", default=True, description="Enable table locking")
variant("threads", default=True, description="Use mutex thread synchronization")
# Force dependency on readline in v3.2 and earlier. Although the
# presence of readline is tested in CMakeLists.txt, and casacore
@@ -55,34 +53,33 @@ class Casacore(CMakePackage):
depends_on("wcslib@4.20:+cfitsio")
depends_on("fftw@3.0.0: precision=float,double", when="@3.4.0:")
depends_on("fftw@3.0.0: precision=float,double", when="~fftpack")
# SOFA dependency suffers the same problem in CMakeLists.txt as readline;
# force a dependency when building unit tests
depends_on("sofa-c", type="test")
depends_on("hdf5", when="+hdf5")
depends_on("adios2+mpi", when="+adios2")
depends_on("mpi", when="+mpi")
depends_on("mpi", when="+adios2")
depends_on("python@2.6:", when="+python")
depends_on("boost +python", when="+python")
depends_on("boost +system +filesystem", when="+dysco")
depends_on("py-numpy", when="+python")
depends_on("gsl", when="+dysco")
depends_on("boost+python", when="+python")
conflicts("~mpi", when="+adios2")
conflicts("+tablelocking", when="+mpi")
conflicts("~threads", when="+openmp")
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants, when="+python")
depends_on("py-numpy", when="+python")
def cmake_args(self):
args = []
spec = self.spec
args.append(self.define_from_variant("BUILD_DYSCO", "dysco"))
args.append(self.define_from_variant("ENABLE_TABLELOCKING", "tablelocking"))
args.append(self.define_from_variant("ENABLE_SHARED", "shared"))
args.append(self.define_from_variant("USE_THREADS", "threads"))
args.append(self.define_from_variant("USE_OPENMP", "openmp"))
args.append(self.define_from_variant("USE_READLINE", "readline"))
args.append(self.define_from_variant("USE_HDF5", "hdf5"))
args.append(self.define_from_variant("USE_ADIOS2", "adios2"))
args.append(self.define_from_variant("USE_MPI", "mpi"))
args.append("-DPORTABLE=ON") # let Spack determine arch build flags
args.append(self.define_from_variant("USE_MPI", "adios2"))
if spec.satisfies("+adios2"):
args.append(self.define("ENABLE_TABLELOCKING", False))
# fftw3 is required by casacore starting with v3.4.0, but the
# old fftpack is still available. For v3.4.0 and later, we
@@ -92,6 +89,7 @@ def cmake_args(self):
if spec.satisfies("@3.4.0:"):
if spec.satisfies("+fftpack"):
args.append("-DBUILD_FFTPACK_DEPRECATED=YES")
args.append(self.define("USE_FFTW3", True))
else:
args.append(self.define("USE_FFTW3", spec.satisfies("~fftpack")))

View File

@@ -16,7 +16,6 @@ class Chemfiles(CMakePackage):
maintainers("RMeli")
version("0.10.4", sha256="b8232ddaae2953538274982838aa6c2df87d300f7e2f80e92c171581e06325ba")
version("0.10.3", sha256="5f53d87a668a85bebf04e0e8ace0f1db984573de1c54891ba7d37d31cced0408")
variant("shared", default=False, description="Build shared libraries")

View File

@@ -436,22 +436,6 @@ def setup_dependent_package(self, module, dependent_spec):
module.cmake = Executable(self.spec.prefix.bin.cmake)
module.ctest = Executable(self.spec.prefix.bin.ctest)
@property
def libs(self):
"""CMake has no libraries, so if you ask for `spec['cmake'].libs`
(which happens automatically for packages that depend on CMake as
a link dependency) the default implementation of ``.libs`` will
search the entire root prefix recursively before failing.
The longer term solution is for all dependents of CMake to change
their deptype. For now, this returns an empty set of libraries.
"""
return LibraryList([])
@property
def headers(self):
return HeaderList([])
def run_version_check(self, bin):
"""Runs and checks output of the installed binary."""
exe_path = join_path(self.prefix.bin, bin)
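For reference, the pattern the removed docstring describes is small enough to sketch in full. A hypothetical package that installs executables but no libraries can short-circuit the default recursive prefix scan like this (a sketch, not code from this diff; `LibraryList` and `HeaderList` are part of the standard `spack.package` namespace):

    from spack.package import *

    class MyCliTool(Package):
        """Hypothetical package that ships binaries but no libraries."""

        @property
        def libs(self):
            # Dependents asking for spec["my-cli-tool"].libs would otherwise
            # trigger a recursive search of the entire install prefix.
            return LibraryList([])

        @property
        def headers(self):
            # Same reasoning applies to header lookups.
            return HeaderList([])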

View File

@@ -13,7 +13,6 @@ class CniPlugins(Package):
url = "https://github.com/containernetworking/plugins/archive/v1.0.1.tar.gz"
maintainers("bernhardkaindl")
version("1.3.0", sha256="f9871b9f6ccb51d2b264532e96521e44f926928f91434b56ce135c95becf2901")
version("1.2.0", sha256="f3496ddda9c7770a0b695b67ae7ee80a4ee331ac2745af4830054b81627f79b7")
version("1.1.1", sha256="c86c44877c47f69cd23611e22029ab26b613f620195b76b3ec20f589367a7962")
version("1.0.1", sha256="2ba3cd9f341a7190885b60d363f6f23c6d20d975a7a0ab579dd516f8c6117619")

View File

@@ -159,10 +159,6 @@ class Comgr(CMakePackage):
root_cmakelists_dir = join_path("lib", "comgr")
def cmake_args(self):
args = [self.define("BUILD_TESTING", self.run_tests)]
return args
@classmethod
def determine_version(cls, lib):
match = re.search(r"lib\S*\.so\.\d+\.\d+\.(\d)(\d\d)(\d\d)", lib)

View File

@@ -91,7 +91,7 @@ def configure_args(self):
return extra_args
@property
def tests_log_path(self):
def test_log_path(self):
if self.version < Version("3.4.1"):
return join_path(
"darshan-test",
@@ -105,23 +105,36 @@ def tests_log_path(self):
@run_after("install")
def _copy_test_inputs(self):
test_inputs = [self.tests_log_path]
test_inputs = [self.test_log_path]
self.cache_extra_test_sources(test_inputs)
def test_parser(self):
"""process example log and check counters"""
# TODO: Switch to loading the expected strings from the darshan source in future
def _test_parser(self):
purpose = "Verify darshan-parser can parse an example log \
and check some expected counter values"
# Switch to loading the expected strings from the darshan source in future
# filename = self.test_suite.current_test_cache_dir.
# join(join_path(self.basepath, "mpi-io-test-spack-expected.txt"))
# expected_output = self.get_escaped_text_output(filename)
expected_output = [
r"POSIX\s+-1\s+\w+\s+POSIX_OPENS\s+\d+",
r"MPI-IO\s+-1\s+\w+\s+MPIIO_INDEP_OPENS\s+\d+",
r"STDIO\s+0\s+\w+\s+STDIO_OPENS\s+\d+",
]
logname = self.test_suite.current_test_cache_dir.join(self.tests_log_path)
parser = which(join_path(self.prefix.bin, "darshan-parser"))
out = parser(logname, output=str.split, error=str.split)
check_outputs(expected_output, out)
logname = self.test_suite.current_test_cache_dir.join(self.test_log_path)
exe = "darshan-parser"
options = [logname]
status = [0]
installed = True
self.run_test(
exe,
options,
expected_output,
status,
installed,
purpose,
skip_missing=False,
work_dir=None,
)
def test(self):
self._test_parser()
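The two halves of this hunk contrast the old `self.run_test(...)` helper with the newer convention of standalone `test_*` methods. Reduced to a sketch with a hypothetical package binary and log file, the new style looks like:

    def test_parser(self):
        """check that the installed parser handles a sample log"""
        parser = which(join_path(self.prefix.bin, "mytool-parser"))
        # Regex patterns keep the check robust to counter values
        # that differ from run to run.
        expected = [r"POSIX\s+-1\s+\w+\s+POSIX_OPENS\s+\d+"]
        out = parser("sample.log", output=str.split, error=str.split)
        check_outputs(expected, out)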

View File

@@ -14,7 +14,6 @@ class Diamond(CMakePackage):
url = "https://github.com/bbuchfink/diamond/archive/v2.0.9.tar.gz"
maintainers("snehring")
version("2.1.7", sha256="2dcaba0e79ecb02c3d2a6816d317e714767118a9a056721643abff4c586ca95b")
version("2.1.6", sha256="852d27c7535d53f1ce59db0625ff23ac3bf17e57f7a3b1c46c08718f77e19c54")
version("2.0.15", sha256="cc8e1f3fd357d286cf6585b21321bd25af69aae16ae1a8f605ea603c1886ffa4")
version("2.0.14", sha256="3eaef2b957e4ba845eac27a2ca3249aae4259ff1fe0ff5a21b094481328fdc53")

View File

@@ -52,8 +52,8 @@ def cache_test_sources(self):
"""Save off benchmark files for stand-alone tests."""
self.cache_extra_test_sources("benchmark")
def test_benchmarks(self):
"""run benchmark checks"""
def test(self):
"""Perform stand-alone/smoke tests using pre-built benchmarks."""
# NOTE: This package would ideally build the test program using
# the installed software *each* time the tests are run since
# this package installs a library.
@@ -61,17 +61,20 @@ def test_benchmarks(self):
test_cache_dir = join_path(self.test_suite.current_test_cache_dir, "benchmark")
test_data_dir = self.test_suite.current_test_data_dir
with working_dir(test_data_dir):
opts = [
"run-test.sh",
self.spec["mpi"].prefix.bin.mpirun,
"-n",
"1",
join_path(test_cache_dir, "eigenexa_benchmark"),
"-f",
join_path(test_cache_dir, "IN"),
]
env["OMP_NUM_THREADS"] = "1"
sh = which("sh")
out = sh(*opts, output=str.split, error=str.split)
assert "EigenExa Test Passed !" in out
opts = [
"run-test.sh",
self.spec["mpi"].prefix.bin.mpirun,
"-n",
"1",
join_path(test_cache_dir, "eigenexa_benchmark"),
"-f",
join_path(test_cache_dir, "IN"),
]
env["OMP_NUM_THREADS"] = "1"
self.run_test(
"sh",
options=opts,
expected="EigenExa Test Passed !",
purpose="test: running benchmark checks",
work_dir=test_data_dir,
)

View File

@@ -1,31 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Ensmallen(CMakePackage):
"""ensmallen is a high-quality C++ library for non-linear numerical
optimization.
ensmallen provides many types of optimizers that can be used for
virtually any numerical optimization task. This includes gradient
descent techniques, gradient-free optimizers, and constrained
optimization. ensmallen also allows optional callbacks to customize
the optimization process."""
homepage = "https://ensmallen.org"
url = "https://github.com/mlpack/ensmallen/archive/refs/tags/2.19.1.tar.gz"
version("2.19.1", sha256="f36ad7f08b0688d2a8152e1c73dd437c56ed7a5af5facf65db6ffd977b275b2e")
variant("openmp", default=True, description="Use OpenMP for parallelization")
depends_on("cmake@3.3.2:")
depends_on("armadillo@9.800.0:")
def cmake_args(self):
args = [self.define_from_variant("USE_OPENMP", "openmp")]
return args

View File

@@ -1,17 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class EprosimaFastcdr(CMakePackage):
"""eProsima Fast CDR is a C++ library that provides two serialization mechanisms.
One is the standard CDR serialization mechanism, while the other is a
faster implementation that modifies the standard."""
homepage = "https://www.eprosima.com/"
url = "https://github.com/eProsima/Fast-CDR/archive/v1.0.27.tar.gz"
version("1.0.27", sha256="a9bc8fd31a2c2b95e6d2fb46e6ce1ad733e86dc4442f733479e33ed9cdc54bf6")

View File

@@ -1,25 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class EprosimaFastdds(CMakePackage):
"""eprosima Fast DDS (formerly Fast RTPS) is a C++ implementation of the DDS
(Data Distribution Service) standard of the OMG (Object Management Group).
eProsima Fast DDS implements the RTPS (Real Time Publish Subscribe) protocol,
which provides publisher-subscriber communications over unreliable transports
such as UDP, as defined and maintained by the Object Management Group (OMG) consortium."""
homepage = "https://www.eprosima.com/"
url = "https://github.com/eProsima/Fast-DDS/archive/v2.10.1.tar.gz"
version("2.10.1", sha256="2cc2682db5dc7e87684b7f23166e2f32faf8d5c4b4a8c94c6c21211a8a38f553")
depends_on("asio")
depends_on("tinyxml2")
depends_on("openssl")
depends_on("foonathan-memory")
depends_on("eprosima-fastcdr")

View File

@@ -1,31 +0,0 @@
--- fastqc.orig 2023-05-26 08:02:49.123718275 +0100
+++ fastqc 2023-05-26 08:12:19.980320240 +0100
@@ -38,13 +38,21 @@
$delimiter = ';';
}
-if ($ENV{CLASSPATH}) {
- $ENV{CLASSPATH} .= "$delimiter$RealBin$delimiter$RealBin/htsjdk.jar$delimiter$RealBin/jbzip2-0.9.jar$delimiter$RealBin/cisd-jhdf5.jar";
-}
-else {
- $ENV{CLASSPATH} = "$RealBin$delimiter$RealBin/htsjdk.jar$delimiter$RealBin/jbzip2-0.9.jar$delimiter$RealBin/cisd-jhdf5.jar";
-}
-
+# The lib dir is $RealBin/../lib
+# start with list of jars we need and prefix them with the lib dir
+# then stick CLASSPATH onto the front (empty or otherwise...)
+# then filter out anything that's empty (perhaps CLASSPATH...)
+# then join all the remaining bits with the delimiter.
+use File::Basename;
+use File::Spec::Functions;
+my $_lib = catfile(dirname($RealBin), 'lib');
+$ENV{CLASSPATH} =
+ join($delimiter,
+ grep {$_}
+ ($ENV{CLASSPATH},
+ $_lib,
+ map {"$_lib/$_"}
+ qw(htsjdk.jar jbzip2-0.9.jar cisd-jhdf5.jar)));
# We need to find the java interpreter. We'll start from the assumption that this
# is included in the path.

View File

@@ -12,7 +12,6 @@ class Fastqc(Package):
homepage = "https://www.bioinformatics.babraham.ac.uk/projects/fastqc/"
url = "https://www.bioinformatics.babraham.ac.uk/projects/fastqc/fastqc_v0.11.5.zip"
version("0.12.1", sha256="5f4dba8780231a25a6b8e11ab2c238601920c9704caa5458d9de559575d58aa7")
version("0.11.9", sha256="15510a176ef798e40325b717cac556509fb218268cfdb9a35ea6776498321369")
version("0.11.7", sha256="59cf50876bbe5f363442eb989e43ae3eaab8d932c49e8cff2c1a1898dd721112")
version("0.11.5", sha256="dd7a5ad80ceed2588cf6d6ffe35e0f161c0d9977ed08355f5e4d9473282cbd66")
@@ -21,8 +20,7 @@ class Fastqc(Package):
depends_on("java", type="run")
depends_on("perl") # for fastqc "script", any perl will do
patch("fastqc_0.12.x.patch", level=0, when="@0.12:")
patch("fastqc_0.11.x.patch", level=0, when="@:0.11.9")
patch("fastqc.patch", level=0)
def patch(self):
filter_file("/usr/bin/perl", self.spec["perl"].command.path, "fastqc", backup=False)
@@ -31,12 +29,8 @@ def install(self, spec, prefix):
mkdir(prefix.bin)
mkdir(prefix.lib)
install("fastqc", prefix.bin)
install("cisd-jhdf5.jar", prefix.lib)
install("jbzip2-0.9.jar", prefix.lib)
if self.spec.satisfies("@:0.11.9"):
install("sam-1.103.jar", prefix.lib)
else:
install("htsjdk.jar", prefix.lib)
for j in ["cisd-jhdf5.jar", "jbzip2-0.9.jar", "sam-1.103.jar"]:
install(j, prefix.lib)
for d in ["Configuration", "net", "org", "Templates", "uk"]:
install_tree(d, join_path(prefix.lib, d))
chmod = which("chmod")

View File

@@ -16,7 +16,6 @@ class Fjcontrib(AutotoolsPackage):
tags = ["hep"]
version("1.052", sha256="bde63c28cbdf992bedea4ddedfc3cd52c9fec241a767cc455dd4ad10e8210c39")
version("1.051", sha256="76a2ec612c768db3eb6bbaf686d02b05ddb64dde477d185e20df563b52308473")
version("1.045", sha256="667f15556ca371cfaf185086fb41ac579658a233c18fb1e5153382114f9785f8")
version("1.044", sha256="de3f45c2c1bed6d7567483e4a774575a504de8ddc214678bac7f64e9d2e7e7a7")

View File

@@ -71,7 +71,7 @@ class Fms(CMakePackage):
when="@2022.02:",
)
variant(
"pic", default=False, description="Build with position independent code", when="@2022.02:"
"fpic", default=False, description="Build with position independent code", when="@2022.02:"
)
depends_on("netcdf-c")
@@ -86,9 +86,9 @@ def cmake_args(self):
self.define_from_variant("ENABLE_QUAD_PRECISION", "quad_precision"),
self.define_from_variant("WITH_YAML", "yaml"),
self.define_from_variant("CONSTANTS"),
self.define_from_variant("FPIC"),
self.define("32BIT", "precision=32" in self.spec),
self.define("64BIT", "precision=64" in self.spec),
self.define_from_variant("FPIC", "pic"),
]
args.append(self.define("CMAKE_C_COMPILER", self.spec["mpi"].mpicc))

View File

@@ -1,16 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class FoonathanMemory(CMakePackage):
"""STL compatible C++ memory allocator library using a new RawAllocator concept
that is similar to an Allocator but easier to use and write."""
homepage = "https://memory.foonathan.net/"
url = "https://github.com/foonathan/memory/archive/v0.7.tar.gz"
version("0.7", sha256="01a7cc5a5ebddbd71bec69c89562a4a2ecd7c29334c0a29d38d83e7f7f66eb53")

View File

@@ -19,7 +19,6 @@ class Gatk(Package):
list_url = "https://github.com/broadinstitute/gatk/releases"
maintainers("snehring")
version("4.4.0.0", sha256="444600f7b38b46ad0b3606b7d40ce921e0ff1910a50165872f1c73c7c4a1a390")
version("4.3.0.0", sha256="e2c27229b34c3e22445964adf00639a0909887bbfcc040f6910079177bc6e2dd")
version("4.2.6.1", sha256="1125cfc862301d437310506c8774d36c3a90d00d52c7b5d6b59dac7241203628")
version("4.2.2.0", sha256="ddd902441d1874493796566159288e9df178714ac18216ba05092136db1497fd")
@@ -62,8 +61,7 @@ class Gatk(Package):
# output.
variant("r", default=False, description="Use R for plotting")
depends_on("java@17", type="run", when="@4.4:")
depends_on("java@8", type="run", when="@:4.3")
depends_on("java@8:", type="run")
depends_on("python@2.6:2.8,3.6:", type="run", when="@4.0:")
depends_on("r@3.2:", type="run", when="@4.0: +r")

View File

@@ -16,8 +16,6 @@ class Ginkgo(CMakePackage, CudaPackage, ROCmPackage):
homepage = "https://ginkgo-project.github.io/"
git = "https://github.com/ginkgo-project/ginkgo.git"
test_requires_compiler = True
maintainers("tcojean", "hartwiganzt")
tags = ["e4s"]
@@ -162,32 +160,33 @@ def cmake_args(self):
)
return args
@property
def extra_install_tests(self):
return "test_install" if self.spec.satisfies("@1.3.0") else "test"
extra_install_tests = join_path("test", "test_install")
@run_after("install")
def cache_test_sources(self):
self.cache_extra_test_sources(self.extra_install_tests)
def _cached_tests_src_dir(self, script):
"""The cached smoke test source directory for the script."""
subdir = script if self.spec.satisfies("@1.4.0:") else ""
return join_path(self.test_suite.current_test_cache_dir, self.extra_install_tests, subdir)
@property
def _cached_tests_src_dir(self):
"""The cached smoke test source directory."""
return join_path(self.test_suite.current_test_cache_dir, self.extra_install_tests)
def _build_and_run_test(self, script):
"""Build and run the test against the installation."""
src_dir = self._cached_tests_src_dir(script)
@property
def _cached_tests_work_dir(self):
"""The working directory for cached test sources."""
return join_path(self._cached_tests_src_dir, "build")
def _build_test(self):
cmake_bin = join_path(self.spec["cmake"].prefix.bin, "cmake")
cmake_args = [
f"-DCMAKE_C_COMPILER={os.environ['CC']}",
f"-DCMAKE_CXX_COMPILER={os.environ['CXX']}",
src_dir,
"-DCMAKE_C_COMPILER={0}".format(self.compiler.cc),
"-DCMAKE_CXX_COMPILER={0}".format(self.compiler.cxx),
self._cached_tests_src_dir,
]
# Fix: For HIP tests, add the ARCH compilation flags when not present
if "+rocm" in self.spec:
src_path = join_path(src_dir, "CMakeLists.txt")
src_path = join_path(self._cached_tests_src_dir, "CMakeLists.txt")
cmakelists = open(src_path, "rt")
data = cmakelists.read()
data = data.replace(
@@ -199,39 +198,43 @@ def _build_and_run_test(self, script):
cmakelists.write(data)
cmakelists.close()
cmake = which(self.spec["cmake"].prefix.bin.cmake)
make = which("make")
with working_dir(src_dir):
cmake(*cmake_args)
make()
exe = which(script)
output = exe(output=str.split, error=str.split)
assert "correctly detected and is complete" in output
if not self.run_test(
cmake_bin,
options=cmake_args,
purpose="Generate the Makefile",
work_dir=self._cached_tests_work_dir,
):
print("Skipping Ginkgo test: failed to generate Makefile")
return
def test_install(self):
"""build, run and check results of test_install"""
if not self.spec.satisfies("@1.3.0:"):
raise SkipTest("Test is only available for v1.3.0:")
if not self.run_test(
"make", purpose="Build test software", work_dir=self._cached_tests_work_dir
):
print("Skipping Ginkgo test: failed to build test")
return
self._build_and_run_test("test_install")
def test(self):
"""Run the smoke tests."""
# For now only 1.4.0 and later releases support this scheme.
if self.spec.satisfies("@:1.3.0"):
print("SKIPPED: smoke tests not supported with this Ginkgo version.")
return
def test_install_cuda(self):
"""build, run and check results of test_install_cuda"""
if not self.spec.satisfies("@1.4.0: +cuda"):
raise SkipTest("Test is only available for v1.4.0: +cuda")
self._build_test()
self._build_and_run_test("test_install_cuda")
def test_install_hip(self):
"""build, run and check results of test_install_hip"""
if not self.spec.satisfies("@1.4.0: +rocm"):
raise SkipTest("Test is only available for v1.4.0: +rocm")
self._build_and_run_test("test_install_hip")
def test_exportbuild(self):
"""build, run and check results of test_exportbuild"""
if not self.spec.satisfies("@1.4.0:"):
raise SkipTest("Test is only available for v1.4.0:")
self._build_and_run_test("test_exportbuild")
# Perform the test(s) created by setup_build_tests.
files = [
("test_install", [r"REFERENCE", r"correctly detected and is complete"]),
("test_install_cuda", [r"CUDA", r"correctly detected and is complete"]),
("test_install_hip", [r"HIP", r"correctly detected and is complete"]),
]
for f, expected in files:
self.run_test(
f,
[],
expected,
skip_missing=True,
installed=False,
purpose="test: Running {0}".format(f),
work_dir=self._cached_tests_work_dir,
)
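The refactor above replaces one monolithic `test()` that printed and returned on unsupported specs with per-feature methods that raise `SkipTest`, so skips are reported as skips rather than silent passes. The overall shape, as a sketch with hypothetical version guards and output markers:

    def test_smoke(self):
        """rebuild the cached example against the installation and run it"""
        # Hypothetical guard: cached test sources only exist for v2.0+.
        if not self.spec.satisfies("@2.0:"):
            raise SkipTest("smoke test requires v2.0 or later")
        src = join_path(self.test_suite.current_test_cache_dir, "examples")
        with working_dir(src):
            cmake = which(self.spec["cmake"].prefix.bin.cmake)
            cmake(".")
            make = which("make")
            make()
            smoke = Executable(join_path(src, "smoke"))
            out = smoke(output=str.split, error=str.split)
        assert "all checks passed" in out  # hypothetical success marker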

View File

@@ -14,7 +14,6 @@ class Glab(Package):
maintainers("alecbcs")
version("1.30.0", sha256="d3c1a9ba723d94a0be10fc343717cf7b61732644f5c42922f1c8d81047164b99")
version("1.28.1", sha256="243a0f15e4400aab7b4d27ec71c6ae650bf782473c47520ffccd57af8d939c90")
version("1.28.0", sha256="9a0b433c02033cf3d257405d845592e2b7c2e38741027769bb97a8fd763aeeac")
version("1.27.0", sha256="26bf5fe24eeaeb0f861c89b31129498f029441ae11cc9958e14ad96ec1356d51")

View File

@@ -3,12 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.build_systems.autotools import AutotoolsBuilder
from spack.build_systems.cmake import CMakeBuilder
from spack.package import *
class Gmt(CMakePackage, AutotoolsPackage):
class Gmt(Package):
"""GMT (Generic Mapping Tools) is an open source collection of about 80
command-line tools for manipulating geographic and Cartesian data sets
(including filtering, trend fitting, gridding, projecting, etc.) and
@@ -23,17 +21,16 @@ class Gmt(CMakePackage, AutotoolsPackage):
maintainers("adamjstewart")
version("master", branch="master")
version("6.4.0", sha256="c39d23dbc8a85416457946f6b93c2b9a5f039f092453e7f4b1aaf88d4a288300")
version("6.3.0", sha256="48712279da8228a7960f36fd4b7b04cc1a66489c37b2a5c03f8336a631aa3b24")
version("6.2.0", sha256="b70786ca5ba7d1293acc4e901a0f82e1300d368b61009ef87f771f4bc99d058a")
version("6.1.1", sha256="4cb17f42ff10b8f5fe372956c23f1fa3ca21a8e94933a6c614894f0be33427c1")
version("6.1.0", sha256="f76ad7f444d407dfd7e5762644eec3a719c6aeb06d877bf746fe51abd79b1a9e")
version("6.0.0", sha256="7a733e670f01d99f8fc0da51a4337320d764c06a68746621f83ccf2e3453bcb7")
version("5.4.4", sha256="b593dfb101e6507c467619f3d2190a9f78b09d49fe2c27799750b8c4c0cd2da0")
version(
"4.5.18",
sha256="27c30b516c317fed8e44efa84a0262f866521d80cfe76a61bf12952efb522b63",
url="ftp://ftp.soest.hawaii.edu/gmt/gmt-4.5.18-src.tar.bz2",
"4.5.9",
sha256="9b13be96ccf4bbd38c14359c05dfa7eeeb4b5f06d6f4be9c33d6c3ea276afc86",
url="ftp://ftp.soest.hawaii.edu/gmt/legacy/gmt-4.5.9.tar.bz2",
deprecated=True,
)
variant(
@@ -41,7 +38,11 @@ class Gmt(CMakePackage, AutotoolsPackage):
default=False,
description="Ability to convert PostScript plots to PDF and rasters",
)
variant("geos", default=False, description="Ability to buffer lines and polygons")
variant(
"gdal",
default=False,
description="Ability to read and write numerous grid and image formats",
)
variant("pcre", default=False, description="Regular expression support")
variant("fftw", default=False, description="Fast FFTs")
variant("glib", default=False, description="GTHREAD support")
@@ -49,52 +50,29 @@ class Gmt(CMakePackage, AutotoolsPackage):
variant("blas", default=False, description="Fast matrix multiplications")
variant("graphicsmagick", default=False, description="Convert images to animated GIFs")
variant("ffmpeg", default=False, description="Convert images to videos")
variant("docs", default=False, description="Build manpage and HTML documentation")
# https://github.com/GenericMappingTools/gmt/blob/master/BUILDING.md
# Build system
build_system(
conditional("cmake", when="@5.0.1:"),
conditional("autotools", when="@:5.0.0"),
default="cmake",
)
# https://github.com/GenericMappingTools/gmt/blob/master/MAINTENANCE.md
# Required dependencies
with when("build_system=cmake"):
generator("ninja")
depends_on("cmake@2.8.12:", type="build")
depends_on("cmake@2.8.12:", type="build", when="@5:")
depends_on("netcdf-c@4:")
depends_on("curl", when="@5.4:")
depends_on("gdal")
# Optional dependencies
depends_on("ghostscript", when="+ghostscript")
depends_on("pcre2", when="+pcre")
depends_on("fftw@3.3:", when="+fftw")
depends_on("glib@2.32:", when="+glib")
depends_on("gdal", when="+gdal")
depends_on("pcre", when="+pcre")
depends_on("fftw", when="+fftw")
depends_on("glib", when="+glib")
depends_on("lapack", when="+lapack")
depends_on("blas", when="+blas")
depends_on("graphicsmagick", when="+graphicsmagick")
depends_on("ffmpeg", when="+ffmpeg")
depends_on("py-sphinx@1.4:", when="+docs", type="build")
depends_on("graphicsmagick", type="test")
depends_on("py-dvc", type="test")
resource(
name="gshhg",
url="https://github.com/GenericMappingTools/gshhg-gmt/releases/download/2.3.7/gshhg-gmt-2.3.7.tar.gz",
sha256="9bb1a956fca0718c083bef842e625797535a00ce81f175df08b042c2a92cfe7f",
destination="share",
placement="gshhg",
)
resource(
name="dcw",
url="https://github.com/GenericMappingTools/dcw-gmt/releases/download/2.1.1/dcw-gmt-2.1.1.tar.gz",
sha256="d4e208dca88fbf42cba1bb440fbd96ea2f932185c86001f327ed0c7b65d27af1",
destination="share",
placement="dcw",
)
# https://github.com/spack/spack/issues/26661
conflicts(
@@ -105,7 +83,7 @@ class Gmt(CMakePackage, AutotoolsPackage):
# https://github.com/GenericMappingTools/gmt/pull/3603
patch("regexp.patch", when="@6.1.0")
patch("type.patch", when="@4")
patch("type.patch", when="@4.5.9")
executables = ["^gmt-config$"]
@@ -113,72 +91,78 @@ class Gmt(CMakePackage, AutotoolsPackage):
def determine_version(cls, exe):
return Executable(exe)("--version", output=str, error=str).rstrip()
@when("@5:")
def install(self, spec, prefix):
with working_dir("spack-build", create=True):
args = std_cmake_args
class CMakeBuilder(CMakeBuilder):
def cmake_args(self):
spec = self.spec
args.extend(
[
"-DNETCDF_CONFIG={0}".format(spec["netcdf-c"].prefix.bin.join("nc-config")),
"-DNETCDF_INCLUDE_DIR={0}".format(spec["netcdf-c"].headers.directories[0]),
"-DNETCDF_LIBRARY={0}".format(spec["netcdf-c"].libs[0]),
]
)
# If these options aren't explicitly disabled,
# CMake will search OS for dependencies
if "+ghostscript" in spec:
args.append("-DGS={0}".format(spec["ghostscript"].prefix.bin.gs))
else:
args.append("-DGS=")
if "+gdal" in spec:
args.extend(
[
"-DGDAL_TRANSLATE={0}".format(spec["gdal"].prefix.bin.gdal_translate),
"-DOGR2OGR={0}".format(spec["gdal"].prefix.bin.ogr2ogr),
]
)
else:
args.extend(["-DGDAL_TRANSLATE=", "-DOGR2OGR="])
if "graphicsmagick" in spec:
args.extend(
[
"-DGM={0}".format(spec["graphicsmagick"].prefix.bin.gm),
"-DGRAPHICSMAGICK={0}".format(spec["graphicsmagick"].prefix.bin.gm),
]
)
else:
args.extend(["-DGM=", "-DGRAPHICSMAGICK="])
if "+ffmpeg" in spec:
args.append("-DFFMPEG={0}".format(spec["ffmpeg"].prefix.bin.ffmpeg))
else:
args.append("-DFFMPEG=")
cmake("..", *args)
make()
if self.run_tests:
make("check")
make("install")
@when("@:4")
def install(self, spec, prefix):
args = [
self.define("NETCDF_CONFIG", spec["netcdf-c"].prefix.bin.join("nc-config")),
self.define("GDAL_CONFIG", spec["gdal"].prefix.bin.join("gdal-config")),
self.define("PCRE_CONFIG", ""),
self.define("GSHHG_PATH", "gshhg"),
self.define("DCW_PATH", "dcw"),
]
if "+ghostscript" in spec:
args.append(self.define("GS", spec["ghostscript"].prefix.bin.gs))
if "+geos" in spec:
args.append(self.define("GEOS_CONFIG", spec["geos"].prefix.bin.join("geos-config")))
if "+pcre" in spec:
args.append(self.define("PCRE2_CONFIG", spec["pcre2"].prefix.bin.join("pcre2-config")))
if "+fftw" in spec:
args.extend(
[
self.define("FFTW3_INCLUDE_DIR", spec["fftw"].headers.directories[0]),
self.define("FFTW3F_LIBRARY", spec["fftw"].libs.directories[0]),
]
)
if "+glib" in spec:
args.extend(
[
self.define("GLIB_INCLUDE_DIR", spec["glib"].headers.directories[0]),
self.define("GLIB_LIBRARIES", spec["glib"].libs[0]),
]
)
if "graphicsmagick" in spec:
args.extend(
[
self.define("GM", spec["graphicsmagick"].prefix.bin.gm),
self.define("GRAPHICSMAGICK", spec["graphicsmagick"].prefix.bin.gm),
]
)
if "+ffmpeg" in spec:
args.append(self.define("FFMPEG", spec["ffmpeg"].prefix.bin.ffmpeg))
return args
class AutotoolsBuilder(AutotoolsBuilder):
def configure_args(self):
return [
"--enable-netcdf={0}".format(self.spec["netcdf-c"].prefix),
"--enable-gdal",
"--prefix={0}".format(prefix),
"--enable-netcdf={0}".format(spec["netcdf-c"].prefix),
"--enable-shared",
"--without-x",
]
def build(self, pkg, spec, prefix):
if "+gdal" in spec:
args.append("--enable-gdal")
else:
args.append("--disable-gdal")
configure(*args)
# Building in parallel results in dozens of errors like:
# *** No rule to make target `../libgmtps.so', needed by `pssegyz'.
make(parallel=False)
def install(self, pkg, spec, prefix):
# Installing in parallel results in dozens of errors like:
# /usr/bin/install: cannot create directory '...': File exists
make("install", parallel=False)

View File

@@ -12,7 +12,6 @@ class Grep(AutotoolsPackage):
homepage = "https://www.gnu.org/software/grep/"
url = "https://ftp.gnu.org/gnu/grep/grep-3.3.tar.xz"
version("3.11", sha256="1db2aedde89d0dea42b16d9528f894c8d15dae4e190b59aecc78f5a951276eab")
version("3.10", sha256="24efa5b595fb5a7100879b51b8868a0bb87a71c183d02c4c602633b88af6855b")
version("3.9", sha256="abcd11409ee23d4caf35feb422e53bbac867014cfeed313bb5f488aca170b599")
version("3.7", sha256="5c10da312460aec721984d5d83246d24520ec438dd48d7ab5a05dbc0d6d6823c")

View File

@@ -232,13 +232,6 @@ class Gromacs(CMakePackage, CudaPackage):
for gmx_ver, plumed_vers in plumed_patches.items():
depends_on("plumed@{0}".format(plumed_vers), when="@{0}+plumed".format(gmx_ver))
variant(
"intel_provided_gcc",
default=False,
description="Use this if Intel compiler is installed through spack."
+ "The g++ location is written to icp{c,x}.cfg",
)
depends_on("fftw-api@3")
depends_on("cmake@2.8.8:3", type="build")
depends_on("cmake@3.4.3:3", type="build", when="@2018:")
@@ -251,8 +244,7 @@ class Gromacs(CMakePackage, CudaPackage):
depends_on("sycl", when="+sycl")
depends_on("lapack", when="+lapack")
depends_on("blas", when="+blas")
depends_on("gcc", when="%oneapi ~intel_provided_gcc")
depends_on("gcc", when="%intel ~intel_provided_gcc")
depends_on("gcc", when="%oneapi")
depends_on("hwloc@1.0:1", when="+hwloc@2016:2018")
depends_on("hwloc", when="+hwloc@2019:")
@@ -262,14 +254,6 @@ class Gromacs(CMakePackage, CudaPackage):
depends_on("nvhpc", when="+cufftmp")
requires(
"%intel",
"%oneapi",
policy="one_of",
when="+intel_provided_gcc",
msg="Only attempt to find gcc libs for Intel compiler if Intel compiler is used.",
)
patch("gmxDetectCpu-cmake-3.14.patch", when="@2018:2019.3^cmake@3.14.0:")
patch("gmxDetectSimd-cmake-3.14.patch", when="@5.0:2017^cmake@3.14.0:")
@@ -447,16 +431,8 @@ def cmake_args(self):
if self.spec.satisfies("@2020:"):
options.append("-DGMX_INSTALL_LEGACY_API=ON")
if self.spec.satisfies("%oneapi") or self.spec.satisfies("%intel"):
# If intel-oneapi-compilers was installed through spack the gcc is added to the
# configuration file.
if self.spec.satisfies("+intel_provided_gcc") and os.path.exists(
".".join([os.environ["SPACK_CXX"], "cfg"])
):
with open(".".join([os.environ["SPACK_CXX"], "cfg"]), "r") as f:
options.append("-DCMAKE_CXX_FLAGS={}".format(f.read()))
else:
options.append("-DGMX_GPLUSPLUS_PATH=%s/g++" % self.spec["gcc"].prefix.bin)
if self.spec.satisfies("%oneapi"):
options.append("-DGMX_GPLUSPLUS_PATH=%s/g++" % self.spec["gcc"].prefix.bin)
if "+double" in self.spec:
options.append("-DGMX_DOUBLE:BOOL=ON")

View File

@@ -26,12 +26,19 @@ class Hdf5VolAsync(CMakePackage):
version("develop", branch="develop")
version("1.6", tag="v1.6")
version("1.5", tag="v1.5")
version("1.4", tag="v1.4")
version("1.3", tag="v1.3")
version("1.2", tag="v1.2")
version("1.1", tag="v1.1")
version("1.0", tag="v1.0")
variant("memcpy", default=False, description="Enable buffer copy for dataset write")
depends_on("mpi")
depends_on("argobots@1.1:")
depends_on("hdf5@1.14.0: +mpi +threadsafe")
depends_on("hdf5 +mpi +threadsafe")
depends_on("hdf5@1.13.0:1.13.2", when="@:1.3")
depends_on("hdf5@1.13.3:", when="@1.4:")
# Require MPI_THREAD_MULTIPLE.
depends_on("openmpi +thread_multiple", when="^openmpi")

View File

@@ -19,7 +19,6 @@ class Hssp(AutotoolsPackage):
homepage = "https://github.com/cmbi/hssp"
url = "https://github.com/cmbi/hssp/archive/3.0.10.tar.gz"
version("3.1.5", sha256="9462608ce6b5b92f13a3a8d94b780d85a3cac68ab38449116193754cc22dc5d0")
version("3.0.10", sha256="9b2cba9c498e65fd48730f0fc86ca2b480bf12903a2c131521023f3a161fe870")
version("3.0.9", sha256="2f67743ffd233ed9c4cd298e8fc65a332b863052945fb62bd61d7f1776274da9")
version("3.0.8", sha256="56c926d2e43a3dd6324de558dde868751355f385d1b60fd85586a0a2c2bc82e0")

View File

@@ -18,7 +18,6 @@ class Hugo(Package):
maintainers("alecbcs")
version("0.112.7", sha256="d706e52c74f0fb00000caf4e95b98e9d62c3536a134d5e26b433b1fa1e2a74aa")
version("0.111.3", sha256="b6eeb13d9ed2e5d5c6895bae56480bf0fec24a564ad9d17c90ede14a7b240999")
version("0.111.2", sha256="66500ae3a03cbf51a6ccf7404d01f42fdc454aa1eaea599c934860bbf0aa2fc5")
version("0.111.1", sha256="a71d4e1f49ca7156d3811c0b10957816b75ff2e01b35ef326e7af94dfa554ec0")

View File

@@ -14,7 +14,6 @@ class Hyperfine(Package):
maintainers("michaelkuhn")
version("1.16.1", sha256="ffb3298945cbe2c068ca1a074946d55b9add83c9df720eda2ed7f3d94d7e65d2")
version("1.14.0", sha256="59018c22242dd2ad2bd5fb4a34c0524948b7921d02aa79419ccec4c1ffd3da14")
version("1.13.0", sha256="6e57c8e51962dd24a283ab46dde6fe306da772f4ef9bad86f8c89ac3a499c87e")
version("1.12.0", sha256="2120870a97e68fa3426eac5646a071c9646e96d2309220e3c258bf588e496454")

Some files were not shown because too many files have changed in this diff.