Compare commits

..

1 Commits

Author SHA1 Message Date
Gregory Becker
03084d2ff8 avoid quadratic expansion of targets in the solver 2023-01-12 12:25:15 -08:00
178 changed files with 1151 additions and 3219 deletions

View File

@@ -19,8 +19,8 @@ jobs:
package-audits:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages

View File

@@ -24,7 +24,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- name: Setup repo
@@ -158,7 +158,7 @@ jobs:
run: |
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
run: |
brew install tree
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
- name: Bootstrap clingo
run: |
set -ex
@@ -204,7 +204,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- name: Setup repo
@@ -247,7 +247,7 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh

View File

@@ -50,7 +50,7 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
- name: Set Container Tag Normal (Nightly)
run: |
@@ -106,7 +106,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@37abcedcc1da61a57767b7588cb9d03eb57e28b3 # @v2
uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}

View File

@@ -35,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0

View File

@@ -47,10 +47,10 @@ jobs:
on_develop: false
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -94,10 +94,10 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -133,7 +133,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -151,10 +151,10 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -185,10 +185,10 @@ jobs:
matrix:
python-version: ["3.10"]
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages

View File

@@ -18,8 +18,8 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
with:
python-version: '3.11'
cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
with:
python-version: '3.11'
cache: 'pip'

View File

@@ -15,10 +15,10 @@ jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
with:
python-version: 3.9
- name: Install Python packages
@@ -39,10 +39,10 @@ jobs:
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
with:
python-version: 3.9
- name: Install Python packages
@@ -63,10 +63,10 @@ jobs:
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
- uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
with:
fetch-depth: 0
- uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
- uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
with:
python-version: 3.9
- name: Install Python packages
@@ -87,10 +87,10 @@ jobs:
# git config --global core.symlinks false
# shell:
# powershell
# - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
# - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
# with:
# fetch-depth: 0
# - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
# - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
# with:
# python-version: 3.9
# - name: Install Python packages
@@ -121,7 +121,7 @@ jobs:
# run:
# shell: pwsh
# steps:
# - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
# - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
# with:
# python-version: 3.9
# - name: Install Python packages

View File

@@ -54,11 +54,6 @@ config:
# are that it precludes its use as a system package and its ability to be
# pip installable.
#
# In Spack environment files, chaining onto existing system Spack
# installations, the $env variable can be used to download, cache and build
# into user-writable paths that are relative to the currently active
# environment.
#
# In any case, if the username is not already in the path, Spack will append
# the value of `$user` in an attempt to avoid potential conflicts between
# users in shared temporary spaces.

View File

@@ -13,51 +13,49 @@ Some sites may encourage users to set up their own test environments
before carrying out central installations, or some users may prefer to set
up these environments on their own motivation. To reduce the load of
recompiling otherwise identical package specs in different installations,
installed packages can be put into build cache tarballs, pushed to
installed packages can be put into build cache tarballs, uploaded to
your Spack mirror and then downloaded and installed by others.
Whenever a mirror provides prebuilt packages, Spack will take these packages
into account during concretization and installation, making ``spack install``
significantly faster.
--------------------------
Creating build cache files
--------------------------
.. note::
We use the terms "build cache" and "mirror" often interchangeably. Mirrors
are used during installation both for sources and prebuilt packages. Build
caches refer to mirrors that provide prebuilt packages.
----------------------
Creating a build cache
----------------------
A compressed tarball of an installed package is created. Tarballs are created
for all of its link and run dependency packages as well. Compressed tarballs are
signed with gpg and signature and tarball and put in a ``.spack`` file. Optionally,
the rpaths (and ids and deps on macOS) can be changed to paths relative to
the Spack install tree before the tarball is created.
Build caches are created via:
.. code-block:: console
$ spack buildcache create <path/url/mirror name> <spec>
$ spack buildcache create <spec>
This command takes the locally installed spec and its dependencies, and
creates tarballs of their install prefixes. It also generates metadata files,
signed with GPG. These tarballs and metadata files are then pushed to the
provided binary cache, which can be a local directory or a remote URL.
Here is an example where a build cache is created in a local directory named
"spack-cache", to which we push the "ninja" spec:
If you wanted to create a build cache in a local directory, you would provide
the ``-d`` argument to target that directory, again also specifying the spec.
Here is an example creating a local directory, "spack-cache" and creating
build cache files for the "ninja" spec:
.. code-block:: console
$ spack buildcache create --allow-root ./spack-cache ninja
==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache
$ mkdir -p ./spack-cache
$ spack buildcache create -d ./spack-cache ninja
==> Buildcache files will be output to file:///home/spackuser/spack/spack-cache/build_cache
gpgconf: socketdir is '/run/user/1000/gnupg'
gpg: using "E6DF6A8BD43208E4D6F392F23777740B7DBD643D" as default secret key for signing
Note that ``ninja`` must be installed locally for this to work.
Note that the targeted spec must already be installed. Once you have a build cache,
you can add it as a mirror, discussed next.
We're using the ``--allow-root`` flag to tell Spack that is OK when any of
the binaries we're pushing contain references to the local Spack install
directory.
.. warning::
Once you have a build cache, you can add it as a mirror, discussed next.
Spack improved the format used for binary caches in v0.18. The entire v0.18 series
will be able to verify and install binary caches both in the new and in the old format.
Support for using the old format is expected to end in v0.19, so we advise users to
recreate relevant buildcaches using Spack v0.18 or higher.
---------------------------------------
Finding or installing build cache files
@@ -68,10 +66,10 @@ with:
.. code-block:: console
$ spack mirror add <name> <url or path>
$ spack mirror add <name> <url>
Both web URLs and local paths on the filesystem can be specified. In the previous
Note that the url can be a web url _or_ a local filesystem location. In the previous
example, you might add the directory "spack-cache" and call it ``mymirror``:
@@ -96,7 +94,7 @@ this new build cache as follows:
.. code-block:: console
$ spack buildcache update-index ./spack-cache
$ spack buildcache update-index -d spack-cache/
Now you can use list:
@@ -107,38 +105,46 @@ Now you can use list:
-- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
ninja@1.10.2
With ``mymirror`` configured and an index available, Spack will automatically
use it during concretization and installation. That means that you can expect
``spack install ninja`` to fetch prebuilt packages from the mirror. Let's
verify by re-installing ninja:
Great! So now let's say you have a different spack installation, or perhaps just
a different environment for the same one, and you want to install a package from
that build cache. Let's first uninstall the actual library "ninja" to see if we can
re-install it from the cache.
.. code-block:: console
$ spack uninstall ninja
$ spack install ninja
==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig
gpg: Signature made Do 12 Jan 2023 16:01:04 CET
gpg: using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6
gpg: Good signature from "example (GPG created for Spack) <example@example.com>" [ultimate]
==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache
==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
Search: 0.00s. Fetch: 0.17s. Install: 0.12s. Total: 0.29s
[+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
It worked! You've just completed a full example of creating a build cache with
a spec of interest, adding it as a mirror, updating its index, listing the contents,
and finally, installing from it.
By default Spack falls back to building from sources when the mirror is not available
or when the package is simply not already available. To force Spack to only install
prebuilt packages, you can use
And now reinstall from the buildcache
.. code-block:: console
$ spack install --use-buildcache only <package>
$ spack buildcache install ninja
==> buildcache spec(s) matching ninja
==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-i4e5luour7jxdpc3bkiykd4imke3mkym.spack
####################################################################################################################################### 100.0%
==> Installing buildcache for spec ninja@1.10.2%gcc@9.3.0 arch=linux-ubuntu20.04-skylake
gpgconf: socketdir is '/run/user/1000/gnupg'
gpg: Signature made Tue 23 Mar 2021 10:16:29 PM MDT
gpg: using RSA key E6DF6A8BD43208E4D6F392F23777740B7DBD643D
gpg: Good signature from "spackuser (GPG created for Spack) <spackuser@noreply.users.github.com>" [ultimate]
It worked! You've just completed a full example of creating a build cache with
a spec of interest, adding it as a mirror, updating its index, listing the contents,
and finally, installing from it.
Note that the above command is intended to install a particular package to a
build cache you have created, and not to install a package from a build cache.
For the latter, once a mirror is added, by default when you do ``spack install`` the ``--use-cache``
flag is set, and you will install a package from a build cache if it is available.
If you want to always use the cache, you can do:
.. code-block:: console
$ spack install --cache-only <package>
For example, to combine all of the commands above to add the E4S build cache
and then install from it exclusively, you would do:
@@ -147,7 +153,7 @@ and then install from it exclusively, you would do:
$ spack mirror add E4S https://cache.e4s.io
$ spack buildcache keys --install --trust
$ spack install --use-buildcache only <package>
$ spack install --cache-only <package>
We use ``--install`` and ``--trust`` to say that we are installing keys to our
keyring, and trusting all downloaded keys.

View File

@@ -272,7 +272,7 @@ Selection of the target microarchitectures
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The options under the ``targets`` attribute control which targets are considered during a solve.
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
Currently the options in this section are only configurable from the ``concretization.yaml`` file
and there are no corresponding command line arguments to enable them for a single solve.
The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.

View File

@@ -2410,7 +2410,7 @@ package, and a `canonical hash <https://github.com/spack/spack/pull/28156>`_ of
the ``package.py`` recipes). ``test`` dependencies do not affect the package
hash, as they are only used to construct a test environment *after* building and
installing a given package installation. Older versions of Spack did not include
build dependencies in the hash, but this has been
build dependencies in the hash, but this has been
`fixed <https://github.com/spack/spack/pull/28504>`_ as of |Spack v0.18|_.
.. |Spack v0.18| replace:: Spack ``v0.18``
@@ -3604,70 +3604,6 @@ In the example above ``Cp2k`` inherits all the conflicts and variants that ``Cud
.. _install-environment:
--------------------------------
Package Inheritance
--------------------------------
Spack packages are Python classes, and you can use inheritance with them just as you can
with any Python class. This is common when you have your own package :ref:`repository
<repositories>` with packages that extend Spack's ``builtin`` packages.
You can extend a ``builtin`` package like this:
.. code-block:: python
from spack.pkg.builtin.mpich import Mpich
class MyPackage(Mpich):
version("1.0", "0209444070d9c8af9b62c94095a217e3bc6843692d1e3fdc1ff5371e03aac47c")
version("2.0", "5dda192154047d6296ba14a4ab2d869c6926fd7f44dce8ce94f63aae2e359c5b")
Every repository registered with Spack ends up in a submodule of ``spack.pkg`` with a
name corresponding to its :ref:`namespace <namespaces>`. So, if you have a different
repository with namespace ``myrepo`` you want to import packages from, you might write:
.. code-block:: python
from spack.pkg.myrepo.my_package import MyPackage
class NewPackage(MyPackage):
version("3.0", "08721a102fefcea2ae4add8c9cc548df77e9224f5385ad0872a9150fdd26a415")
version("4.0", "9cc39dd33dd4227bb82301d285437588d705290846d22ab6b8791c7e631ce385")
^^^^^^^^^^^^^^^^^^^^^^^^
``disinherit``
^^^^^^^^^^^^^^^^^^^^^^^^
When you inherit from a package in Spack, you inherit all the metadata from its
directives, including ``version``, ``provides``, ``depends_on``, ``conflicts``, etc. For
example, ``NewPackage`` above will have four versions: ``1.0`` and ``2.0`` inherited
from ``MyPackage``, as well as, ``3.0``, and ``4.0`` defined in ``NewPackage``.
If you do not want your package to define all the same things as its base class, you can
use the ``disinherit`` directive to start fresh in your subclass:
.. code-block:: python
from spack.pkg.myrepo.my_package import MyPackage
class NewerPackage(MyPackage):
disinherit("versions") # don't inherit any versions from MyPackage
version("5.0", "08721a102fefcea2ae4add8c9cc548df77e9224f5385ad0872a9150fdd26a415")
version("6.0", "9cc39dd33dd4227bb82301d285437588d705290846d22ab6b8791c7e631ce385")
Now, ``NewerPackage`` will have **only** versions ``5.0`` and ``6.0``, and will not
inherit ``1.0`` or ``2.0`` from ``MyPackage``. You can ``disinherit`` many different
properties from base packages. The full list of options is:
* ``conflicts``
* ``dependencies``
* ``extendees``
* ``patches``
* ``provided``
* ``resources``
* ``variants``
* ``versions``
-----------------------
The build environment
-----------------------

View File

@@ -91,8 +91,6 @@ packages and use the first valid file:
to eventually support URLs in ``repos.yaml``, so that you can easily
point to remote package repositories, but that is not yet implemented.
.. _namespaces:
---------------------
Namespaces
---------------------
@@ -428,3 +426,36 @@ By path:
$ spack repo list
==> 1 package repository.
builtin ~/spack/var/spack/repos/builtin
--------------------------------
Repo namespaces and Python
--------------------------------
You may have noticed that namespace notation for repositories is similar
to the notation for namespaces in Python. As it turns out, you *can*
treat Spack repositories like Python packages; this is how they are
implemented.
You could, for example, extend a ``builtin`` package in your own
repository:
.. code-block:: python
from spack.pkg.builtin.mpich import Mpich
class MyPackage(Mpich):
...
Spack repo namespaces are actually Python namespaces tacked on under
``spack.pkg``. The search semantics of ``repos.yaml`` are actually
implemented using Python's built-in `sys.path
<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.
.. warning::
The mechanism for extending packages is not yet extensively tested,
and extending packages across repositories imposes inter-repo
dependencies, which may be hard to manage. Use this feature at your
own risk, but let us know if you have a use case for it.

View File

@@ -1050,7 +1050,7 @@ def generate_package_index(cache_prefix, concurrency=32):
try:
file_list, read_fn = _spec_files_from_cache(cache_prefix)
except ListMirrorSpecsError as err:
tty.error("Unable to generate package index, {0}".format(err))
tty.error("Unabled to generate package index, {0}".format(err))
return
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))

View File

@@ -94,15 +94,22 @@ class Bootstrapper:
def __init__(self, conf):
self.conf = conf
self.name = conf["name"]
self.url = conf["info"]["url"]
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
# Promote (relative) paths to file urls
url = conf["info"]["url"]
if spack.util.url.is_path_instead_of_url(url):
if not os.path.isabs(url):
url = os.path.join(self.metadata_dir, url)
url = spack.util.url.path_to_file_url(url)
self.url = url
@property
def mirror_url(self):
"""Mirror url associated with this bootstrapper"""
# Absolute paths
if os.path.isabs(self.url):
return spack.util.url.format(self.url)
# Check for :// and assume it's an url if we find it
if "://" in self.url:
return self.url
# Otherwise, it's a relative path
return spack.util.url.format(os.path.join(self.metadata_dir, self.url))
@property
def mirror_scope(self):
@@ -110,7 +117,7 @@ def mirror_scope(self):
this bootstrapper.
"""
return spack.config.InternalConfigScope(
self.config_scope_name, {"mirrors:": {self.name: self.url}}
self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}}
)
def try_import(self, module: str, abstract_spec_str: str) -> bool:

View File

@@ -427,15 +427,15 @@ def _do_patch_libtool(self):
x.filter(regex="-nostdlib", repl="", string=True)
rehead = r"/\S*/"
for o in [
r"fjhpctag\.o",
r"fjcrt0\.o",
r"fjlang08\.o",
r"fjomp\.o",
r"crti\.o",
r"crtbeginS\.o",
r"crtendS\.o",
"fjhpctag.o",
"fjcrt0.o",
"fjlang08.o",
"fjomp.o",
"crti.o",
"crtbeginS.o",
"crtendS.o",
]:
x.filter(regex=(rehead + o), repl="")
x.filter(regex=(rehead + o), repl="", string=True)
elif self.pkg.compiler.name == "dpcpp":
# Hack to filter out spurious predep_objects when building with Intel dpcpp
# (see https://github.com/spack/spack/issues/32863):

View File

@@ -128,12 +128,7 @@ def __init__(self, wrapped_pkg_object, root_builder):
new_cls = type(
new_cls_name,
bases,
{
"run_tests": property(lambda x: x.wrapped_package_object.run_tests),
"test_log_file": property(lambda x: x.wrapped_package_object.test_log_file),
"test_failures": property(lambda x: x.wrapped_package_object.test_failures),
"test_suite": property(lambda x: x.wrapped_package_object.test_suite),
},
{"run_tests": property(lambda x: x.wrapped_package_object.run_tests)},
)
new_cls.__module__ = package_cls.__module__
self.__class__ = new_cls

View File

@@ -727,19 +727,13 @@ def generate_gitlab_ci_yaml(
# --check-index-only, then the override mirror needs to be added to
# the configured mirrors when bindist.update() is run, or else we
# won't fetch its index and include in our local cache.
spack.mirror.add(
spack.mirror.Mirror(remote_mirror_override, name="ci_pr_mirror"),
cfg.default_modify_scope(),
)
spack.mirror.add("ci_pr_mirror", remote_mirror_override, cfg.default_modify_scope())
shared_pr_mirror = None
if spack_pipeline_type == "spack_pull_request":
stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
spack.mirror.add(
spack.mirror.Mirror(shared_pr_mirror, name="ci_shared_pr_mirror"),
cfg.default_modify_scope(),
)
spack.mirror.add("ci_shared_pr_mirror", shared_pr_mirror, cfg.default_modify_scope())
pipeline_artifacts_dir = artifacts_root
if not pipeline_artifacts_dir:
@@ -1488,7 +1482,7 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
tty.debug("Creating buildcache ({0})".format("unsigned" if unsigned else "signed"))
hashes = env.all_hashes() if env else None
matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
push_url = spack.mirror.push_url_from_mirror_url(mirror_url)
spec_kwargs = {"include_root": True, "include_dependencies": False}
kwargs = {"force": True, "allow_root": True, "unsigned": unsigned}
bindist.push(matches, push_url, spec_kwargs, **kwargs)
@@ -2370,6 +2364,7 @@ def report_skipped(self, spec, directory_name, reason):
site=self.site,
buildstamp=self.build_stamp,
track=None,
ctest_parsing=False,
)
reporter = CDash(configuration=configuration)
reporter.test_skipped_report(directory_name, spec, reason)

View File

@@ -43,8 +43,6 @@
"The sha256 checksum of binaries is checked before installation."
),
"info": {
# This is a mis-nomer since it's not a URL; but file urls cannot
# represent relative paths, so we have to live with it for now.
"url": os.path.join("..", "..", LOCAL_MIRROR_DIR),
"homepage": "https://github.com/spack/spack-bootstrap-mirrors",
"releases": "https://github.com/spack/spack-bootstrap-mirrors/releases",
@@ -60,11 +58,7 @@
SOURCE_METADATA = {
"type": "install",
"description": "Mirror with software needed to bootstrap Spack",
"info": {
# This is a mis-nomer since it's not a URL; but file urls cannot
# represent relative paths, so we have to live with it for now.
"url": os.path.join("..", "..", LOCAL_MIRROR_DIR)
},
"info": {"url": os.path.join("..", "..", LOCAL_MIRROR_DIR)},
}

View File

@@ -8,6 +8,7 @@
import shutil
import sys
import tempfile
import urllib.parse
import llnl.util.tty as tty
@@ -65,37 +66,27 @@ def setup_parser(subparser):
create.add_argument(
"-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
)
output = create.add_mutually_exclusive_group(required=False)
# TODO: remove from Spack 0.21
output = create.add_mutually_exclusive_group(required=True)
output.add_argument(
"-d",
"--directory",
metavar="directory",
dest="mirror_flag",
type=arguments.mirror_directory,
help="local directory where buildcaches will be written. (deprecated)",
type=str,
help="local directory where buildcaches will be written.",
)
# TODO: remove from Spack 0.21
output.add_argument(
"-m",
"--mirror-name",
metavar="mirror-name",
dest="mirror_flag",
type=arguments.mirror_name,
help="name of the mirror where buildcaches will be written. (deprecated)",
type=str,
help="name of the mirror where buildcaches will be written.",
)
# TODO: remove from Spack 0.21
output.add_argument(
"--mirror-url",
metavar="mirror-url",
dest="mirror_flag",
type=arguments.mirror_url,
help="URL of the mirror where buildcaches will be written. (deprecated)",
type=str,
help="URL of the mirror where buildcaches will be written.",
)
# Unfortunately we cannot add this to the mutually exclusive group above,
# because we have further positional arguments.
# TODO: require from Spack 0.21
create.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
create.add_argument(
"--rebuild-index",
action="store_true",
@@ -188,7 +179,7 @@ def setup_parser(subparser):
"-m",
"--mirror-url",
default=None,
help="Override any configured mirrors with this mirror URL",
help="Override any configured mirrors with this mirror url",
)
check.add_argument(
@@ -275,108 +266,55 @@ def setup_parser(subparser):
help="A quoted glob pattern identifying copy manifest files",
)
source = sync.add_mutually_exclusive_group(required=False)
# TODO: remove in Spack 0.21
source.add_argument(
"--src-directory",
metavar="DIRECTORY",
dest="src_mirror_flag",
type=arguments.mirror_directory,
help="Source mirror as a local file path (deprecated)",
"--src-directory", metavar="DIRECTORY", type=str, help="Source mirror as a local file path"
)
# TODO: remove in Spack 0.21
source.add_argument(
"--src-mirror-name",
metavar="MIRROR_NAME",
dest="src_mirror_flag",
type=arguments.mirror_name,
help="Name of the source mirror (deprecated)",
"--src-mirror-name", metavar="MIRROR_NAME", type=str, help="Name of the source mirror"
)
# TODO: remove in Spack 0.21
source.add_argument(
"--src-mirror-url",
metavar="MIRROR_URL",
dest="src_mirror_flag",
type=arguments.mirror_url,
help="URL of the source mirror (deprecated)",
)
# TODO: only support this in 0.21
source.add_argument(
"src_mirror",
metavar="source mirror",
type=arguments.mirror_name_or_url,
help="Source mirror name, path, or URL",
nargs="?",
"--src-mirror-url", metavar="MIRROR_URL", type=str, help="URL of the source mirror"
)
dest = sync.add_mutually_exclusive_group(required=False)
# TODO: remove in Spack 0.21
dest.add_argument(
"--dest-directory",
metavar="DIRECTORY",
dest="dest_mirror_flag",
type=arguments.mirror_directory,
help="Destination mirror as a local file path (deprecated)",
type=str,
help="Destination mirror as a local file path",
)
# TODO: remove in Spack 0.21
dest.add_argument(
"--dest-mirror-name",
metavar="MIRROR_NAME",
type=arguments.mirror_name,
dest="dest_mirror_flag",
help="Name of the destination mirror (deprecated)",
type=str,
help="Name of the destination mirror",
)
# TODO: remove in Spack 0.21
dest.add_argument(
"--dest-mirror-url",
metavar="MIRROR_URL",
dest="dest_mirror_flag",
type=arguments.mirror_url,
help="URL of the destination mirror (deprecated)",
)
# TODO: only support this in 0.21
dest.add_argument(
"dest_mirror",
metavar="destination mirror",
type=arguments.mirror_name_or_url,
help="Destination mirror name, path, or URL",
nargs="?",
"--dest-mirror-url", metavar="MIRROR_URL", type=str, help="URL of the destination mirror"
)
sync.set_defaults(func=sync_fn)
# Update buildcache index without copying any additional packages
update_index = subparsers.add_parser("update-index", help=update_index_fn.__doc__)
update_index_out = update_index.add_mutually_exclusive_group(required=True)
# TODO: remove in Spack 0.21
update_index_out.add_argument(
"-d",
"--directory",
metavar="directory",
dest="mirror_flag",
type=arguments.mirror_directory,
help="local directory where buildcaches will be written (deprecated)",
type=str,
help="local directory where buildcaches will be written.",
)
# TODO: remove in Spack 0.21
update_index_out.add_argument(
"-m",
"--mirror-name",
metavar="mirror-name",
dest="mirror_flag",
type=arguments.mirror_name,
help="name of the mirror where buildcaches will be written (deprecated)",
type=str,
help="name of the mirror where buildcaches will be written.",
)
# TODO: remove in Spack 0.21
update_index_out.add_argument(
"--mirror-url",
metavar="mirror-url",
dest="mirror_flag",
type=arguments.mirror_url,
help="URL of the mirror where buildcaches will be written (deprecated)",
)
# TODO: require from Spack 0.21
update_index_out.add_argument(
"mirror",
type=arguments.mirror_name_or_url,
help="Destination mirror name, path, or URL",
nargs="?",
type=str,
help="URL of the mirror where buildcaches will be written.",
)
update_index.add_argument(
"-k",
@@ -388,17 +326,26 @@ def setup_parser(subparser):
update_index.set_defaults(func=update_index_fn)
def _matching_specs(specs, spec_file):
def _mirror_url_from_args(args):
if args.directory:
return spack.mirror.push_url_from_directory(args.directory)
if args.mirror_name:
return spack.mirror.push_url_from_mirror_name(args.mirror_name)
if args.mirror_url:
return spack.mirror.push_url_from_mirror_url(args.mirror_url)
def _matching_specs(args):
"""Return a list of matching specs read from either a spec file (JSON or YAML),
a query over the store or a query over the active environment.
"""
env = ev.active_environment()
hashes = env.all_hashes() if env else None
if spec_file:
return spack.store.specfile_matches(spec_file, hashes=hashes)
if args.spec_file:
return spack.store.specfile_matches(args.spec_file, hashes=hashes)
if specs:
constraints = spack.cmd.parse_specs(specs)
if args.specs:
constraints = spack.cmd.parse_specs(args.specs)
return spack.store.find(constraints, hashes=hashes)
if env:
@@ -436,30 +383,10 @@ def _concrete_spec_from_args(args):
def create_fn(args):
"""create a binary package and push it to a mirror"""
if args.mirror_flag:
mirror = args.mirror_flag
elif not args.mirror:
raise ValueError("No mirror provided")
else:
mirror = arguments.mirror_name_or_url(args.mirror)
push_url = _mirror_url_from_args(args)
matches = _matching_specs(args)
if args.mirror_flag:
tty.warn(
"Using flags to specify mirrors is deprecated and will be removed in "
"Spack 0.21, use positional arguments instead."
)
# TODO: remove this in 0.21. If we have mirror_flag, the first
# spec is in the positional mirror arg due to argparse limitations.
specs = args.specs
if args.mirror_flag and args.mirror:
specs.insert(0, args.mirror)
url = mirror.push_url
matches = _matching_specs(specs, args.spec_file)
msg = "Pushing binary packages to {0}/build_cache".format(url)
msg = "Pushing binary packages to {0}/build_cache".format(push_url)
tty.msg(msg)
specs_kwargs = {
"include_root": "package" in args.things_to_install,
@@ -473,7 +400,7 @@ def create_fn(args):
"allow_root": args.allow_root,
"regenerate_index": args.rebuild_index,
}
bindist.push(matches, url, specs_kwargs, **kwargs)
bindist.push(matches, push_url, specs_kwargs, **kwargs)
def install_fn(args):
@@ -666,24 +593,51 @@ def sync_fn(args):
manifest_copy(glob.glob(args.manifest_glob))
return 0
# If no manifest_glob, require a source and dest mirror.
# TODO: Simplify in Spack 0.21
if not (args.src_mirror_flag or args.src_mirror) or not (
args.dest_mirror_flag or args.dest_mirror
):
raise ValueError("Source and destination mirror are required.")
# Figure out the source mirror
source_location = None
if args.src_directory:
source_location = args.src_directory
scheme = urllib.parse.urlparse(source_location, scheme="<missing>").scheme
if scheme != "<missing>":
raise ValueError('"--src-directory" expected a local path; got a URL, instead')
# Ensure that the mirror lookup does not mistake this for named mirror
source_location = url_util.path_to_file_url(source_location)
elif args.src_mirror_name:
source_location = args.src_mirror_name
result = spack.mirror.MirrorCollection().lookup(source_location)
if result.name == "<unnamed>":
raise ValueError('no configured mirror named "{name}"'.format(name=source_location))
elif args.src_mirror_url:
source_location = args.src_mirror_url
scheme = urllib.parse.urlparse(source_location, scheme="<missing>").scheme
if scheme == "<missing>":
raise ValueError('"{url}" is not a valid URL'.format(url=source_location))
if args.src_mirror_flag or args.dest_mirror_flag:
tty.warn(
"Using flags to specify mirrors is deprecated and will be removed in "
"Spack 0.21, use positional arguments instead."
)
src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
src_mirror_url = url_util.format(src_mirror.fetch_url)
src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror
# Figure out the destination mirror
dest_location = None
if args.dest_directory:
dest_location = args.dest_directory
scheme = urllib.parse.urlparse(dest_location, scheme="<missing>").scheme
if scheme != "<missing>":
raise ValueError('"--dest-directory" expected a local path; got a URL, instead')
# Ensure that the mirror lookup does not mistake this for named mirror
dest_location = url_util.path_to_file_url(dest_location)
elif args.dest_mirror_name:
dest_location = args.dest_mirror_name
result = spack.mirror.MirrorCollection().lookup(dest_location)
if result.name == "<unnamed>":
raise ValueError('no configured mirror named "{name}"'.format(name=dest_location))
elif args.dest_mirror_url:
dest_location = args.dest_mirror_url
scheme = urllib.parse.urlparse(dest_location, scheme="<missing>").scheme
if scheme == "<missing>":
raise ValueError('"{url}" is not a valid URL'.format(url=dest_location))
src_mirror_url = src_mirror.fetch_url
dest_mirror_url = dest_mirror.push_url
dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
dest_mirror_url = url_util.format(dest_mirror.fetch_url)
# Get the active environment
env = spack.cmd.require_active_env(cmd_name="buildcache sync")
@@ -744,28 +698,38 @@ def manifest_copy(manifest_file_list):
copy_buildcache_file(copy_file["src"], copy_file["dest"])
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
url = mirror.push_url
def update_index(mirror_url, update_keys=False):
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
outdir = url_util.format(mirror.push_url)
bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))
bindist.generate_package_index(url_util.join(outdir, bindist.build_cache_relative_path()))
if update_keys:
keys_url = url_util.join(
url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
outdir, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
)
bindist.generate_key_index(keys_url)
def _mirror_url_from_args_deprecated_format(args):
# In Spack 0.19 the -d flag was equivalent to --mirror-url.
# Spack 0.20 deprecates this, so in 0.21 -d means --directory.
if args.directory and url_util.validate_scheme(urllib.parse.urlparse(args.directory).scheme):
tty.warn(
"Passing a URL to `update-index -d <url>` is deprecated "
"and will be removed in Spack 0.21. "
"Use `update-index --mirror-url <url>` instead."
)
return spack.mirror.push_url_from_mirror_url(args.directory)
else:
return _mirror_url_from_args(args)
def update_index_fn(args):
"""Update a buildcache index."""
if args.mirror_flag:
tty.warn(
"Using flags to specify mirrors is deprecated and will be removed in "
"Spack 0.21, use positional arguments instead."
)
mirror = args.mirror_flag if args.mirror_flag else args.mirror
update_index(mirror, update_keys=args.keys)
push_url = _mirror_url_from_args_deprecated_format(args)
update_index(push_url, update_keys=args.keys)
def buildcache(parser, args):

View File

@@ -241,9 +241,8 @@ def ci_reindex(args):
ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0]
mirror = spack.mirror.Mirror(remote_mirror_url)
buildcache.update_index(mirror, update_keys=True)
buildcache.update_index(remote_mirror_url, update_keys=True)
def ci_rebuild(args):
@@ -453,8 +452,9 @@ def ci_rebuild(args):
# mirror now so it's used when we check for a hash match already
# built for this spec.
if pipeline_mirror_url:
mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
spack.mirror.add(mirror, cfg.default_modify_scope())
spack.mirror.add(
spack_ci.TEMP_STORAGE_MIRROR_NAME, pipeline_mirror_url, cfg.default_modify_scope()
)
pipeline_mirrors.append(pipeline_mirror_url)
# Check configured mirrors for a built spec with a matching hash
@@ -469,10 +469,7 @@ def ci_rebuild(args):
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
spack.mirror.add(
spack.mirror.Mirror(remote_mirror_override, name="mirror_override"),
cfg.default_modify_scope(),
)
spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
pipeline_mirrors.append(remote_mirror_override)
if spack_pipeline_type == "spack_pull_request":

View File

@@ -5,7 +5,6 @@
import argparse
import os.path
from llnl.util.lang import stable_partition
@@ -13,7 +12,6 @@
import spack.config
import spack.dependency as dep
import spack.environment as ev
import spack.mirror
import spack.modules
import spack.reporters
import spack.spec
@@ -155,6 +153,7 @@ def installed_specs(args):
site=namespace.cdash_site,
buildstamp=namespace.cdash_buildstamp,
track=namespace.cdash_track,
ctest_parsing=getattr(namespace, "ctest_parsing", False),
)
return spack.reporters.CDash(configuration=configuration)
@@ -553,42 +552,3 @@ def is_valid(arg):
dependencies = val
return package, dependencies
def mirror_name_or_url(m):
# Look up mirror by name or use anonymous mirror with path/url.
# We want to guard against typos in mirror names, to avoid pushing
# accidentally to a dir in the current working directory.
# If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m:
return spack.mirror.Mirror(m)
# Otherwise, the named mirror is required to exist.
try:
return spack.mirror.require_mirror_name(m)
except ValueError as e:
raise argparse.ArgumentTypeError(
str(e) + ". Did you mean {}?".format(os.path.join(".", m))
)
def mirror_url(url):
try:
return spack.mirror.Mirror.from_url(url)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
def mirror_directory(path):
try:
return spack.mirror.Mirror.from_local_path(path)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
def mirror_name(name):
try:
return spack.mirror.require_mirror_name(name)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))

View File

@@ -55,6 +55,13 @@ def setup_parser(subparser):
choices=formatters,
help="format to be used to print the output [default: name_only]",
)
subparser.add_argument(
"--update",
metavar="FILE",
default=None,
action="store",
help="write output to the specified file, if any package is newer",
)
subparser.add_argument(
"-v",
"--virtuals",
@@ -64,22 +71,6 @@ def setup_parser(subparser):
)
arguments.add_common_arguments(subparser, ["tags"])
# Doesn't really make sense to update in count mode.
count_or_update = subparser.add_mutually_exclusive_group()
count_or_update.add_argument(
"--count",
action="store_true",
default=False,
help="display the number of packages that would be listed",
)
count_or_update.add_argument(
"--update",
metavar="FILE",
default=None,
action="store",
help="write output to the specified file, if any package is newer",
)
def filter_by_name(pkgs, args):
"""
@@ -329,9 +320,6 @@ def list(parser, args):
with open(args.update, "w") as f:
formatter(sorted_packages, f)
elif args.count:
# just print the number of packages in the result
print(len(sorted_packages))
else:
# print formatted package list
# Print to stdout
formatter(sorted_packages, sys.stdout)

View File

@@ -9,7 +9,6 @@
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
import spack.caches
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.concretize
@@ -19,6 +18,7 @@
import spack.repo
import spack.spec
import spack.util.path
import spack.util.url as url_util
import spack.util.web as web_util
from spack.error import SpackError
from spack.util.spack_yaml import syaml_dict
@@ -145,26 +145,8 @@ def setup_parser(subparser):
def mirror_add(args):
"""Add a mirror to Spack."""
if (
args.s3_access_key_id
or args.s3_access_key_secret
or args.s3_access_token
or args.s3_profile
or args.s3_endpoint_url
):
connection = {"url": args.url}
if args.s3_access_key_id and args.s3_access_key_secret:
connection["access_pair"] = (args.s3_access_key_id, args.s3_access_key_secret)
if args.s3_access_token:
connection["access_token"] = args.s3_access_token
if args.s3_profile:
connection["profile"] = args.s3_profile
if args.s3_endpoint_url:
connection["endpoint_url"] = args.s3_endpoint_url
mirror = spack.mirror.Mirror(fetch_url=connection, push_url=connection, name=args.name)
else:
mirror = spack.mirror.Mirror(args.url, name=args.name)
spack.mirror.add(mirror, args.scope)
url = url_util.format(args.url)
spack.mirror.add(args.name, url, args.scope, args)
def mirror_remove(args):
@@ -174,7 +156,7 @@ def mirror_remove(args):
def mirror_set_url(args):
"""Change the URL of a mirror."""
url = args.url
url = url_util.format(args.url)
mirrors = spack.config.get("mirrors", scope=args.scope)
if not mirrors:
mirrors = syaml_dict()
@@ -374,9 +356,11 @@ def versions_per_spec(args):
return num_versions
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
tty.msg("Summary for mirror in {}".format(path))
def create_mirror_for_individual_specs(mirror_specs, directory_hint, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(
directory_hint, mirror_specs, skip_unstable_versions
)
tty.msg("Summary for mirror in {}".format(directory_hint))
process_mirror_stats(present, mirrored, error)
@@ -394,6 +378,19 @@ def process_mirror_stats(present, mirrored, error):
sys.exit(1)
def local_mirror_url_from_user(directory_hint):
"""Return a file:// url pointing to the local mirror to be used.
Args:
directory_hint (str or None): directory where to create the mirror. If None,
defaults to "config:source_cache".
"""
mirror_directory = spack.util.path.canonicalize_path(
directory_hint or spack.config.get("config:source_cache")
)
return url_util.path_to_file_url(mirror_directory)
def mirror_create(args):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
@@ -424,12 +421,9 @@ def mirror_create(args):
"The option '--all' already implies mirroring all versions for each package.",
)
# When no directory is provided, the source dir is used
path = args.directory or spack.caches.fetch_cache_location()
if args.all and not ev.active_environment():
create_mirror_for_all_specs(
path=path,
directory_hint=args.directory,
skip_unstable_versions=args.skip_unstable_versions,
selection_fn=not_excluded_fn(args),
)
@@ -437,7 +431,7 @@ def mirror_create(args):
if args.all and ev.active_environment():
create_mirror_for_all_specs_inside_environment(
path=path,
directory_hint=args.directory,
skip_unstable_versions=args.skip_unstable_versions,
selection_fn=not_excluded_fn(args),
)
@@ -446,15 +440,16 @@ def mirror_create(args):
mirror_specs = concrete_specs_from_user(args)
create_mirror_for_individual_specs(
mirror_specs,
path=path,
directory_hint=args.directory,
skip_unstable_versions=args.skip_unstable_versions,
)
def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
def create_mirror_for_all_specs(directory_hint, skip_unstable_versions, selection_fn):
mirror_specs = all_specs_with_all_versions(selection_fn=selection_fn)
local_push_url = local_mirror_url_from_user(directory_hint=directory_hint)
mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
path, skip_unstable_versions=skip_unstable_versions
local_push_url, skip_unstable_versions=skip_unstable_versions
)
for candidate in mirror_specs:
pkg_cls = spack.repo.path.get_pkg_class(candidate.name)
@@ -464,11 +459,13 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
process_mirror_stats(*mirror_stats.stats())
def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
def create_mirror_for_all_specs_inside_environment(
directory_hint, skip_unstable_versions, selection_fn
):
mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
create_mirror_for_individual_specs(
mirror_specs,
path=path,
directory_hint=directory_hint,
skip_unstable_versions=skip_unstable_versions,
)

View File

@@ -53,7 +53,6 @@ class OpenMpi(Package):
"version",
"conflicts",
"depends_on",
"disinherit",
"extends",
"provides",
"patch",
@@ -236,12 +235,12 @@ class Foo(Package):
if isinstance(dicts, str):
dicts = (dicts,)
if dicts is not None:
if not isinstance(dicts, collections.abc.Sequence):
raise TypeError(f"dicts arg must be list, tuple, or string. Found {type(dicts)}")
if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))
# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)
# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)
# This decorator just returns the directive functions
def _decorator(decorated_function):
@@ -768,35 +767,6 @@ def build_system(*values, **kwargs):
)
@directive()
def disinherit(dict_name: str):
"""Clear all values in a dict inherited from base packages.
You can use this to, e.g., remove all inherited versions from a base package:
disinherit("versions") # this package doesn't share any versions w/parents
version("2.0", ...) # new versions specific to this package
version("3.0", ...)
The dictionary name is checked, so you can't call this on somethign that's not a
valid directive dictonary.
Arguments:
dict_name: name of directive dictionary to clear.
"""
if dict_name not in DirectiveMeta._directive_dict_names:
names = ", ".join(DirectiveMeta._directive_dict_names)
raise DirectiveError(f"Can't disinherit '{dict_name}'. Options are: {names}")
def _execute_disinherit(pkg):
dictionary = getattr(pkg, dict_name, None)
if dictionary:
dictionary.clear()
return _execute_disinherit
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""

View File

@@ -13,9 +13,9 @@
import spack.error
import spack.paths
import spack.util.prefix
import spack.util.spack_json as sjson
from spack.spec import Spec
from spack.util.prefix import Prefix
test_suite_filename = "test_suite.lock"
results_filename = "results.txt"
@@ -145,7 +145,6 @@ def __init__(self, specs, alias=None):
self.alias = alias
self._hash = None
self._stage = None
self.fails = 0
@@ -248,19 +247,8 @@ def ensure_stage(self):
@property
def stage(self):
"""The root test suite stage directory.
Returns:
str: the spec's test stage directory path
"""
if not self._stage:
self._stage = Prefix(fs.join_path(get_test_stage_dir(), self.content_hash))
return self._stage
@stage.setter
def stage(self, value):
"""Set the value of a non-default stage directory."""
self._stage = value if isinstance(value, Prefix) else Prefix(value)
"""The root test suite stage directory."""
return spack.util.prefix.Prefix(os.path.join(get_test_stage_dir(), self.content_hash))
@property
def results_file(self):
@@ -311,7 +299,7 @@ def test_dir_for_spec(self, spec):
Returns:
str: the spec's test stage directory path
"""
return Prefix(self.stage.join(self.test_pkg_id(spec)))
return self.stage.join(self.test_pkg_id(spec))
@classmethod
def tested_file_name(cls, spec):
@@ -334,7 +322,7 @@ def tested_file_for_spec(self, spec):
Returns:
str: the spec's test status file path
"""
return fs.join_path(self.stage, self.tested_file_name(spec))
return self.stage.join(self.tested_file_name(spec))
@property
def current_test_cache_dir(self):

View File

@@ -364,13 +364,14 @@ def _process_external_package(pkg, explicit):
def _process_binary_cache_tarball(
pkg, explicit, unsigned, mirrors_for_spec=None, timer=timer.NULL_TIMER
pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None, timer=timer.NULL_TIMER
):
"""
Process the binary cache tarball.
Args:
pkg (spack.package_base.PackageBase): the package being installed
binary_spec (spack.spec.Spec): the spec whose cache has been confirmed
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False``
@@ -382,24 +383,30 @@ def _process_binary_cache_tarball(
bool: ``True`` if the package was extracted from binary cache,
else ``False``
"""
with timer.measure("fetch"):
download_result = binary_distribution.download_tarball(
pkg.spec, unsigned, mirrors_for_spec
)
timer.start("fetch")
download_result = binary_distribution.download_tarball(
binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec
)
timer.stop("fetch")
# see #10063 : install from source if tarball doesn't exist
if download_result is None:
tty.msg("{0} exists in binary cache but with different hash".format(pkg.name))
return False
if download_result is None:
return False
pkg_id = package_id(pkg)
tty.msg("Extracting {0} from binary cache".format(pkg_id))
tty.msg("Extracting {0} from binary cache".format(package_id(pkg)))
with timer.measure("install"), spack.util.path.filter_padding():
# don't print long padded paths while extracting/relocating binaries
timer.start("install")
with spack.util.path.filter_padding():
binary_distribution.extract_tarball(
pkg.spec, download_result, allow_root=False, unsigned=unsigned, force=False
binary_spec, download_result, allow_root=False, unsigned=unsigned, force=False
)
pkg.installed_from_binary_cache = True
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
return True
pkg.installed_from_binary_cache = True
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
timer.stop("install")
return True
def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NULL_TIMER):
@@ -417,13 +424,16 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NU
if not spack.mirror.MirrorCollection():
return False
tty.debug("Searching for binary cache of {0}".format(package_id(pkg)))
pkg_id = package_id(pkg)
tty.debug("Searching for binary cache of {0}".format(pkg_id))
with timer.measure("search"):
matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
timer.start("search")
matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
timer.stop("search")
return _process_binary_cache_tarball(
pkg,
pkg.spec,
explicit,
unsigned,
mirrors_for_spec=matches,

View File

@@ -31,15 +31,15 @@
import spack.mirror
import spack.spec
import spack.url as url
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
from spack.util.spack_yaml import syaml_dict
from spack.version import VersionList
#: What schemes do we support
supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs")
def _is_string(url):
return isinstance(url, str)
def _display_mirror_entry(size, name, url, type_=None):
@@ -51,19 +51,6 @@ def _display_mirror_entry(size, name, url, type_=None):
print("%-*s%s%s" % (size + 4, name, url, type_))
def _url_or_path_to_url(url_or_path: str) -> str:
"""For simplicity we allow mirror URLs in config files to be local, relative paths.
This helper function takes care of distinguishing between URLs and paths, and
canonicalizes paths before transforming them into file:// URLs."""
# Is it a supported URL already? Then don't do path-related canonicalization.
parsed = urllib.parse.urlparse(url_or_path)
if parsed.scheme in supported_url_schemes:
return url_or_path
# Otherwise we interpret it as path, and we should promote it to file:// URL.
return url_util.path_to_file_url(spack.util.path.canonicalize_path(url_or_path))
class Mirror(object):
"""Represents a named location for storing source tarballs and binary
packages.
@@ -103,26 +90,7 @@ def from_json(stream, name=None):
except Exception as e:
raise sjson.SpackJSONError("error parsing JSON mirror:", str(e)) from e
@staticmethod
def from_local_path(path: str):
return Mirror(fetch_url=url_util.path_to_file_url(path))
@staticmethod
def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL."""
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError(
'"{}" is not a valid mirror URL. Scheme must be once of {}.'.format(
url, ", ".join(supported_url_schemes)
)
)
return Mirror(fetch_url=url)
def to_dict(self):
# Keep it a key-value pair <name>: <url> when possible.
if isinstance(self._fetch_url, str) and self._push_url is None:
return self._fetch_url
if self._push_url is None:
return syaml_dict([("fetch", self._fetch_url), ("push", self._fetch_url)])
else:
@@ -233,11 +201,7 @@ def set_access_token(self, url_type, connection_token):
@property
def fetch_url(self):
"""Get the valid, canonicalized fetch URL"""
url_or_path = (
self._fetch_url if isinstance(self._fetch_url, str) else self._fetch_url["url"]
)
return _url_or_path_to_url(url_or_path)
return self._fetch_url if _is_string(self._fetch_url) else self._fetch_url["url"]
@fetch_url.setter
def fetch_url(self, url):
@@ -246,12 +210,9 @@ def fetch_url(self, url):
@property
def push_url(self):
"""Get the valid, canonicalized push URL. Returns fetch URL if no custom
push URL is defined"""
if self._push_url is None:
return self.fetch_url
url_or_path = self._push_url if isinstance(self._push_url, str) else self._push_url["url"]
return _url_or_path_to_url(url_or_path)
return self._fetch_url if _is_string(self._fetch_url) else self._fetch_url["url"]
return self._push_url if _is_string(self._push_url) else self._push_url["url"]
@push_url.setter
def push_url(self, url):
@@ -560,17 +521,30 @@ def mirror_cache_and_stats(path, skip_unstable_versions=False):
return mirror_cache, mirror_stats
def add(mirror: Mirror, scope=None):
def add(name, url, scope, args={}):
"""Add a named mirror in the given scope"""
mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml_dict()
if mirror.name in mirrors:
tty.die("Mirror with name {} already exists.".format(mirror.name))
if name in mirrors:
tty.die("Mirror with name %s already exists." % name)
items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (mirror.name, mirror.to_dict()))
mirror_data = url
key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
# On creation, assume connection data is set for both
if any(value for value in key_values if value in args):
url_dict = {
"url": url,
"access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
"access_token": args.s3_access_token,
"profile": args.s3_profile,
"endpoint_url": args.s3_endpoint_url,
}
mirror_data = {"fetch": url_dict, "push": url_dict}
items.insert(0, (name, mirror_data))
mirrors = syaml_dict(items)
spack.config.set("mirrors", mirrors, scope=scope)
@@ -689,12 +663,31 @@ def create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats):
return True
def require_mirror_name(mirror_name):
"""Find a mirror by name and raise if it does not exist"""
mirror = spack.mirror.MirrorCollection().get(mirror_name)
if not mirror:
def push_url_from_directory(output_directory):
"""Given a directory in the local filesystem, return the URL on
which to push binary packages.
"""
if url_util.validate_scheme(urllib.parse.urlparse(output_directory).scheme):
raise ValueError("expected a local path, but got a URL instead")
mirror_url = url_util.path_to_file_url(output_directory)
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
return url_util.format(mirror.push_url)
def push_url_from_mirror_name(mirror_name):
"""Given a mirror name, return the URL on which to push binary packages."""
mirror = spack.mirror.MirrorCollection().lookup(mirror_name)
if mirror.name == "<unnamed>":
raise ValueError('no mirror named "{0}"'.format(mirror_name))
return mirror
return url_util.format(mirror.push_url)
def push_url_from_mirror_url(mirror_url):
"""Given a mirror URL, return the URL on which to push binary packages."""
if not url_util.validate_scheme(urllib.parse.urlparse(mirror_url).scheme):
raise ValueError('"{0}" is not a valid URL'.format(mirror_url))
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
return url_util.format(mirror.push_url)
class MirrorError(spack.error.SpackError):

View File

@@ -651,12 +651,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: List of test failures encountered during a smoke/install test run.
test_failures = None
#: TestSuite instance used to manage smoke/install tests for one or more specs.
#: TestSuite instance used to manage smoke/install tests for one or more
#: specs.
test_suite = None
#: Path to the log file used for tests
test_log_file = None
def __init__(self, spec):
# this determines how the package should be built.
self.spec = spec
@@ -1886,10 +1884,7 @@ def _setup_test(self, verbose, externals):
pkg_id = self.test_suite.test_pkg_id(self.spec)
else:
self.test_log_file = fsys.join_path(self.stage.path, _spack_install_test_log)
self.test_suite = TestSuite([self.spec])
self.test_suite.stage = self.stage.path
pkg_id = self.spec.format("{name}-{version}-{hash:7}")
fsys.touch(self.test_log_file) # Otherwise log_parse complains
with tty.log.log_output(self.test_log_file, verbose) as logger:
@@ -2446,11 +2441,6 @@ def run_test_callbacks(builder, method_names, callback_type="install"):
with builder.pkg._setup_test(verbose=False, externals=False) as logger:
# Report running each of the methods in the build log
print_test_message(logger, "Running {0}-time tests".format(callback_type), True)
builder.pkg.test_suite.current_test_spec = builder.pkg.spec
builder.pkg.test_suite.current_base_spec = builder.pkg.spec
if "test" in method_names:
_copy_cached_test_files(builder.pkg, builder.pkg.spec)
for name in method_names:
try:
@@ -2499,25 +2489,6 @@ def print_test_message(logger, msg, verbose):
tty.msg(msg)
def _copy_cached_test_files(pkg, spec):
    """Copy any cached stand-alone test-related files.

    Stages two kinds of files for ``spec`` into ``pkg``'s test suite
    directories: install-time test sources (concrete specs only) and the
    package repository's ``test`` data directory.
    """
    # Installed test sources are only available once the spec is concrete.
    if spec.concrete:
        source = spec.package.install_test_root
        target = pkg.test_suite.current_test_cache_dir
        if os.path.isdir(source) and not os.path.exists(target):
            fsys.install_tree(source, target)

    # Copy the package's repository test data into the test data dir.
    data_source = Prefix(spec.package.package_dir).test
    data_dir = pkg.test_suite.current_test_data_dir
    if os.path.isdir(data_source) and not os.path.exists(data_dir):
        # We assume data dir is used read-only; maybe enforce this later.
        shutil.copytree(data_source, data_dir)
def test_process(pkg, kwargs):
verbose = kwargs.get("verbose", False)
externals = kwargs.get("externals", False)
@@ -2556,7 +2527,20 @@ def test_process(pkg, kwargs):
except spack.repo.UnknownPackageError:
continue
_copy_cached_test_files(pkg, spec)
# copy installed test sources cache into test cache dir
if spec.concrete:
cache_source = spec_pkg.install_test_root
cache_dir = pkg.test_suite.current_test_cache_dir
if os.path.isdir(cache_source) and not os.path.exists(cache_dir):
fsys.install_tree(cache_source, cache_dir)
# copy test data into test data dir
data_source = Prefix(spec_pkg.package_dir).test
data_dir = pkg.test_suite.current_test_data_dir
if os.path.isdir(data_source) and not os.path.exists(data_dir):
# We assume data dir is used read-only
# maybe enforce this later
shutil.copytree(data_source, data_dir)
# grab the function for each method so we can call
# it with the package

View File

@@ -59,7 +59,6 @@
import enum
import pathlib
import re
import sys
from typing import Iterator, List, Match, Optional
from llnl.util.tty import color
@@ -69,7 +68,6 @@
import spack.variant
import spack.version
IS_WINDOWS = sys.platform == "win32"
#: Valid name for specs and variants. Here we are not using
#: the previous "w[\w.-]*" since that would match most
#: characters that can be part of a word in any language
@@ -82,15 +80,8 @@
HASH = r"[a-zA-Z_0-9]+"
#: A filename starts either with a "." or a "/" or a "{name}/,
# or on Windows, a drive letter followed by a colon and "\"
# or "." or {name}\
WINDOWS_FILENAME = r"(\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)([a-zA-Z0-9-_\.\\]*)(\.json|\.yaml)"
UNIX_FILENAME = r"(\.|\/|[a-zA-Z0-9-_]*\/)([a-zA-Z0-9-_\.\/]*)(\.json|\.yaml)"
if not IS_WINDOWS:
FILENAME = UNIX_FILENAME
else:
FILENAME = WINDOWS_FILENAME
#: A filename starts either with a "." or a "/" or a "{name}/"
FILENAME = r"(\.|\/|[a-zA-Z0-9-_]*\/)([a-zA-Z0-9-_\.\/]*)(\.json|\.yaml)"
VALUE = r"([a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
QUOTED_VALUE = r"[\"']+([a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import itertools
import multiprocessing.pool
import os
import re
@@ -300,24 +299,17 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
if idpath:
new_idpath = paths_to_paths.get(idpath, None)
if new_idpath and not idpath == new_idpath:
args += [("-id", new_idpath)]
args += ["-id", new_idpath]
for dep in deps:
new_dep = paths_to_paths.get(dep)
if new_dep and dep != new_dep:
args += [("-change", dep, new_dep)]
args += ["-change", dep, new_dep]
new_rpaths = []
for orig_rpath in rpaths:
new_rpath = paths_to_paths.get(orig_rpath)
if new_rpath and not orig_rpath == new_rpath:
args_to_add = ("-rpath", orig_rpath, new_rpath)
if args_to_add not in args and new_rpath not in new_rpaths:
args += [args_to_add]
new_rpaths.append(new_rpath)
args += ["-rpath", orig_rpath, new_rpath]
# Deduplicate and flatten
args = list(itertools.chain.from_iterable(llnl.util.lang.dedupe(args)))
if args:
args.append(str(cur_path))
install_name_tool = executable.Executable("install_name_tool")

View File

@@ -48,7 +48,7 @@
CDashConfiguration = collections.namedtuple(
"CDashConfiguration",
["upload_url", "packages", "build", "site", "buildstamp", "track"],
["upload_url", "packages", "build", "site", "buildstamp", "track", "ctest_parsing"],
)
@@ -106,6 +106,7 @@ def __init__(self, configuration: CDashConfiguration):
self.revision = git("rev-parse", "HEAD", output=str).strip()
self.generator = "spack-{0}".format(spack.main.get_version())
self.multiple_packages = False
self.ctest_parsing = configuration.ctest_parsing
def report_build_name(self, pkg_name):
return (
@@ -264,6 +265,68 @@ def build_report(self, directory_name, specs):
self.build_report_for_package(directory_name, package, duration)
self.finalize_report()
def extract_ctest_test_data(self, package, phases, report_data):
    """Extract ctest test data for the package.

    Fills ``report_data[phase]`` for each requested phase with log lines,
    start/end times, and parsed error/warning events for CDash upload.

    NOTE(review): assumes ``package`` is a dict providing "name", "stdout"
    and "result" keys -- confirm against the caller.
    """
    # Track the phases we perform so we know what reports to create.
    # We always report the update step because this is how we tell CDash
    # what revision of Spack we are using.
    assert "update" in phases

    # Initialize per-phase report skeletons.
    for phase in phases:
        report_data[phase] = {}
        report_data[phase]["loglines"] = []
        report_data[phase]["status"] = 0
        report_data[phase]["endtime"] = self.endtime

    # Generate a report for this package.
    # The first line just says "Testing package name-hash"
    report_data["test"]["loglines"].append(
        str("{0} output for {1}:".format("test", package["name"]))
    )
    # Remaining stdout lines are escaped for XML embedding.
    for line in package["stdout"].splitlines()[1:]:
        report_data["test"]["loglines"].append(xml.sax.saxutils.escape(line))

    for phase in phases:
        report_data[phase]["starttime"] = self.starttime
        report_data[phase]["log"] = "\n".join(report_data[phase]["loglines"])
        errors, warnings = parse_log_events(report_data[phase]["loglines"])
        # Cap the number of errors and warnings at 50 each.
        # NOTE(review): the [0:49] slice actually keeps 49 entries, not 50
        # as the comment claims -- confirm which limit is intended.
        errors = errors[0:49]
        warnings = warnings[0:49]

        if phase == "test":
            # Convert log output from ASCII to Unicode and escape for XML.
            def clean_log_event(event):
                event = vars(event)
                event["text"] = xml.sax.saxutils.escape(event["text"])
                event["pre_context"] = xml.sax.saxutils.escape("\n".join(event["pre_context"]))
                event["post_context"] = xml.sax.saxutils.escape(
                    "\n".join(event["post_context"])
                )
                # source_file and source_line_no are either strings or
                # the tuple (None,). Distinguish between these two cases.
                if event["source_file"][0] is None:
                    event["source_file"] = ""
                    event["source_line_no"] = ""
                else:
                    event["source_file"] = xml.sax.saxutils.escape(event["source_file"])
                return event

            # Convert errors to warnings if the package reported success.
            if package["result"] == "success":
                warnings = errors + warnings
                errors = []

            report_data[phase]["errors"] = []
            report_data[phase]["warnings"] = []
            for error in errors:
                report_data[phase]["errors"].append(clean_log_event(error))
            for warning in warnings:
                report_data[phase]["warnings"].append(clean_log_event(warning))

        if phase == "update":
            # This is how CDash learns which Spack revision produced the report.
            report_data[phase]["revision"] = self.revision
def extract_standalone_test_data(self, package, phases, report_data):
"""Extract stand-alone test outputs for the package."""
@@ -298,7 +361,7 @@ def report_test_data(self, directory_name, package, phases, report_data):
tty.debug("Preparing to upload {0}".format(phase_report))
self.upload(phase_report)
def test_report_for_package(self, directory_name, package, duration):
def test_report_for_package(self, directory_name, package, duration, ctest_parsing=False):
if "stdout" not in package:
# Skip reporting on packages that did not generate any output.
tty.debug("Skipping report for {0}: No generated output".format(package["name"]))
@@ -314,8 +377,12 @@ def test_report_for_package(self, directory_name, package, duration):
report_data = self.initialize_report(directory_name)
report_data["hostname"] = socket.gethostname()
phases = ["testing"]
self.extract_standalone_test_data(package, phases, report_data)
if ctest_parsing:
phases = ["test", "update"]
self.extract_ctest_test_data(package, phases, report_data)
else:
phases = ["testing"]
self.extract_standalone_test_data(package, phases, report_data)
self.report_test_data(directory_name, package, phases, report_data)
@@ -327,7 +394,12 @@ def test_report(self, directory_name, specs):
if "time" in spec:
duration = int(spec["time"])
for package in spec["packages"]:
self.test_report_for_package(directory_name, package, duration)
self.test_report_for_package(
directory_name,
package,
duration,
self.ctest_parsing,
)
self.finalize_report()
@@ -342,7 +414,7 @@ def test_skipped_report(self, directory_name, spec, reason=None):
"result": "skipped",
"stdout": output,
}
self.test_report_for_package(directory_name, package, duration=0.0)
self.test_report_for_package(directory_name, package, duration=0.0, ctest_parsing=False)
def concretization_report(self, directory_name, msg):
self.buildname = self.base_buildname

View File

@@ -6,8 +6,10 @@
"""Schema for mirrors.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/mirrors.py
:lines: 13-
"""
#: Properties for inclusion in other schemas
properties = {
"mirrors": {

View File

@@ -1800,8 +1800,9 @@ def target_defaults(self, specs):
for target in candidate_targets:
self.gen.fact(fn.target(target.name))
self.gen.fact(fn.target_family(target.name, target.family.name))
for parent in sorted(target.parents):
self.gen.fact(fn.target_parent(target.name, parent.name))
self.gen.fact(fn.target_compatible(target.name, target.name))
for ancestor in sorted(target.ancestors):
self.gen.fact(fn.target_compatible(target.name, ancestor.name))
# prefer best possible targets; weight others poorly so
# they're not used unless set explicitly

View File

@@ -816,18 +816,11 @@ node_target_compatible(Package, Target)
:- attr("node_target", Package, MyTarget),
target_compatible(Target, MyTarget).
% target_compatible(T1, T2) means code for T2 can run on T1
% NOTE: target_compatible(T1, T2) means code for T2 can run on T1
% This order is dependent -> dependency in the node DAG, which
% is contravariant with the target DAG.
target_compatible(Target, Target) :- target(Target).
target_compatible(Child, Parent) :- target_parent(Child, Parent).
target_compatible(Descendent, Ancestor)
:- target_parent(Target, Ancestor),
target_compatible(Descendent, Target),
target(Target).
#defined target_satisfies/2.
#defined target_parent/2.
% can't use targets on node if the compiler for the node doesn't support them
error(2, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)

View File

@@ -154,17 +154,3 @@ def test_monkey_patching_wrapped_pkg():
s.package.run_tests = True
assert builder.pkg.run_tests is True
assert builder.pkg_with_dispatcher.run_tests is True
@pytest.mark.regression("34440")
@pytest.mark.usefixtures("builder_test_repository", "config", "working_env")
def test_monkey_patching_test_log_file():
    """Setting test_log_file on the package is visible through the builder."""
    spec = spack.spec.Spec("old-style-autotools").concretized()
    builder = spack.builder.create(spec.package)

    # Initially unset on the package and on both builder views of it.
    for obj in (spec.package, builder.pkg, builder.pkg_with_dispatcher):
        assert obj.test_log_file is None

    spec.package.test_log_file = "/some/file"
    assert builder.pkg.test_log_file == "/some/file"
    assert builder.pkg_with_dispatcher.test_log_file == "/some/file"

View File

@@ -23,6 +23,22 @@
import spack.util.spack_yaml as syaml
@pytest.fixture
def tmp_scope():
    """Creates a temporary configuration scope"""
    base_name = "internal-testing-scope"
    # Names of existing scopes that share the base name, to avoid clashes.
    taken = set(scope.name for scope in cfg.config.matching_scopes(r"^{0}".format(base_name)))

    scope_name = base_name
    suffix = 0
    while scope_name in taken:
        scope_name = "{0}{1}".format(base_name, suffix)
        suffix += 1

    with cfg.override(cfg.InternalConfigScope(scope_name)):
        yield scope_name
def test_urlencode_string():
s = "Spack Test Project"

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import errno
import os
import platform
@@ -267,3 +268,13 @@ def test_buildcache_create_install(
tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))
def test_deprecation_mirror_url_dir_flag(capfd):
    """Passing `update-index -d <url>` must emit a deprecation warning."""
    parser = argparse.ArgumentParser()
    spack.cmd.buildcache.setup_parser(parser)

    cwd_url = spack.util.url.path_to_file_url(os.getcwd())
    parsed = parser.parse_args(["update-index", "-d", cwd_url])
    spack.cmd.buildcache._mirror_url_from_args_deprecated_format(parsed)

    stderr = capfd.readouterr()[1]
    assert "Passing a URL to `update-index -d <url>` is deprecated" in stderr

View File

@@ -1218,7 +1218,7 @@ def test_push_mirror_contents(
working_dir = tmpdir.join("working_dir")
mirror_dir = working_dir.join("mirror")
mirror_url = url_util.path_to_file_url(mirror_dir.strpath)
mirror_url = "file://{0}".format(mirror_dir.strpath)
ci.import_signing_key(_signing_key())

View File

@@ -25,6 +25,25 @@
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture
def tmp_scope():
    """Creates a temporary configuration scope"""
    base_name = "internal-testing-scope"
    # Names of existing scopes sharing the base name, to avoid clashes.
    taken = set(
        scope.name
        for scope in spack.config.config.matching_scopes(r"^{0}".format(base_name))
    )

    scope_name = base_name
    suffix = 0
    while scope_name in taken:
        scope_name = "{0}{1}".format(base_name, suffix)
        suffix += 1

    with spack.config.override(spack.config.InternalConfigScope(scope_name)):
        yield scope_name
# test gpg command detection
@pytest.mark.parametrize(
"cmd_name,version",
@@ -62,7 +81,7 @@ def test_no_gpg_in_path(tmpdir, mock_gnupghome, monkeypatch, mutable_config):
@pytest.mark.maybeslow
def test_gpg(tmpdir, mutable_config, mock_gnupghome):
def test_gpg(tmpdir, tmp_scope, mock_gnupghome):
# Verify a file with an empty keyring.
with pytest.raises(ProcessError):
gpg("verify", os.path.join(mock_gpg_data_path, "content.txt"))
@@ -192,6 +211,6 @@ def test_gpg(tmpdir, mutable_config, mock_gnupghome):
test_path = tmpdir.join("named_cache")
os.makedirs("%s" % test_path)
mirror_url = "file://%s" % test_path
mirror("add", "gpg", mirror_url)
mirror("add", "--scope", tmp_scope, "gpg", mirror_url)
gpg("publish", "--rebuild-index", "-m", "gpg")
assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)

View File

@@ -6,7 +6,6 @@
import sys
from textwrap import dedent
import spack.repo
from spack.main import SpackCommand
list = SpackCommand("list")
@@ -124,13 +123,3 @@ def test_list_tags(mock_packages):
output = list("--tag", "tag3")
assert "mpich\n" not in output
assert "mpich2" in output
def test_list_count(mock_packages):
    """`spack list --count` reports the number of matching packages."""
    all_names = spack.repo.all_package_names()

    # Unfiltered count equals the full package list.
    assert int(list("--count").strip()) == len(all_names)

    # Filtered count only includes names containing the query substring.
    expected = sum(1 for name in all_names if "py-" in name)
    assert int(list("--count", "py-").strip()) == expected

View File

@@ -26,6 +26,25 @@
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture
def tmp_scope():
    """Creates a temporary configuration scope"""
    base_name = "internal-testing-scope"
    # Avoid colliding with any scope already registered under this base name.
    taken = set(
        scope.name
        for scope in spack.config.config.matching_scopes(r"^{0}".format(base_name))
    )

    scope_name = base_name
    counter = 0
    while scope_name in taken:
        scope_name = "{0}{1}".format(base_name, counter)
        counter += 1

    with spack.config.override(spack.config.InternalConfigScope(scope_name)):
        yield scope_name
@pytest.mark.disable_clean_stage_check
@pytest.mark.regression("8083")
def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config):
@@ -135,44 +154,48 @@ def test_exclude_file(mock_packages, tmpdir, config):
assert not any(spec.satisfies(y) for spec in mirror_specs for y in expected_exclude)
def test_mirror_crud(mutable_config, capsys):
def test_mirror_crud(tmp_scope, capsys):
with capsys.disabled():
mirror("add", "mirror", "http://spack.io")
mirror("add", "--scope", tmp_scope, "mirror", "http://spack.io")
output = mirror("remove", "mirror")
output = mirror("remove", "--scope", tmp_scope, "mirror")
assert "Removed mirror" in output
mirror("add", "mirror", "http://spack.io")
mirror("add", "--scope", tmp_scope, "mirror", "http://spack.io")
# no-op
output = mirror("set-url", "mirror", "http://spack.io")
output = mirror("set-url", "--scope", tmp_scope, "mirror", "http://spack.io")
assert "No changes made" in output
output = mirror("set-url", "--push", "mirror", "s3://spack-public")
output = mirror("set-url", "--scope", tmp_scope, "--push", "mirror", "s3://spack-public")
assert "Changed (push) url" in output
# no-op
output = mirror("set-url", "--push", "mirror", "s3://spack-public")
output = mirror("set-url", "--scope", tmp_scope, "--push", "mirror", "s3://spack-public")
assert "No changes made" in output
output = mirror("remove", "mirror")
output = mirror("remove", "--scope", tmp_scope, "mirror")
assert "Removed mirror" in output
# Test S3 connection info token
mirror(
"add",
"--scope",
tmp_scope,
"--s3-access-token",
"aaaaaazzzzz",
"mirror",
"s3://spack-public",
)
output = mirror("remove", "mirror")
output = mirror("remove", "--scope", tmp_scope, "mirror")
assert "Removed mirror" in output
# Test S3 connection info id/key
mirror(
"add",
"--scope",
tmp_scope,
"--s3-access-key-id",
"foo",
"--s3-access-key-secret",
@@ -181,12 +204,14 @@ def test_mirror_crud(mutable_config, capsys):
"s3://spack-public",
)
output = mirror("remove", "mirror")
output = mirror("remove", "--scope", tmp_scope, "mirror")
assert "Removed mirror" in output
# Test S3 connection info with endpoint URL
mirror(
"add",
"--scope",
tmp_scope,
"--s3-access-token",
"aaaaaazzzzz",
"--s3-endpoint-url",
@@ -195,32 +220,32 @@ def test_mirror_crud(mutable_config, capsys):
"s3://spack-public",
)
output = mirror("remove", "mirror")
output = mirror("remove", "--scope", tmp_scope, "mirror")
assert "Removed mirror" in output
output = mirror("list")
output = mirror("list", "--scope", tmp_scope)
assert "No mirrors configured" in output
# Test GCS Mirror
mirror("add", "mirror", "gs://spack-test")
mirror("add", "--scope", tmp_scope, "mirror", "gs://spack-test")
output = mirror("remove", "mirror")
output = mirror("remove", "--scope", tmp_scope, "mirror")
assert "Removed mirror" in output
def test_mirror_nonexisting(mutable_config):
def test_mirror_nonexisting(tmp_scope):
with pytest.raises(SpackCommandError):
mirror("remove", "not-a-mirror")
mirror("remove", "--scope", tmp_scope, "not-a-mirror")
with pytest.raises(SpackCommandError):
mirror("set-url", "not-a-mirror", "http://spack.io")
mirror("set-url", "--scope", tmp_scope, "not-a-mirror", "http://spack.io")
def test_mirror_name_collision(mutable_config):
mirror("add", "first", "1")
def test_mirror_name_collision(tmp_scope):
mirror("add", "--scope", tmp_scope, "first", "1")
with pytest.raises(SpackCommandError):
mirror("add", "first", "1")
mirror("add", "--scope", tmp_scope, "first", "1")
def test_mirror_destroy(
@@ -301,6 +326,20 @@ def test_error_conditions(self, cli_args, error_str):
with pytest.raises(spack.error.SpackError, match=error_str):
spack.cmd.mirror.mirror_create(args)
@pytest.mark.parametrize(
    "cli_args,expected_end",
    [
        ({"directory": None}, os.path.join("source")),
        ({"directory": os.path.join("foo", "bar")}, os.path.join("foo", "bar")),
    ],
)
def test_mirror_path_is_valid(self, cli_args, expected_end, config):
    """local_mirror_url_from_user must produce an absolute file: URL."""
    args = MockMirrorArgs(**cli_args)
    push_url = spack.cmd.mirror.local_mirror_url_from_user(args.directory)
    assert push_url.startswith("file:")
    # The path portion of the URL must be absolute and end as requested.
    assert os.path.isabs(push_url.replace("file://", ""))
    assert push_url.endswith(expected_end)
@pytest.mark.parametrize(
"cli_args,not_expected",
[

View File

@@ -68,21 +68,3 @@ def test_error_on_anonymous_dependency(config, mock_packages):
pkg = spack.repo.path.get_pkg_class("a")
with pytest.raises(spack.directives.DependencyError):
spack.directives._depends_on(pkg, "@4.5")
def test_disinherited_version(config, mock_packages):
    """Versions inherited from the disinherited base class must be absent."""
    pkg = spack.repo.path.get_pkg_class("disinherit")

    # these would be inherited from A if disinherit failed
    assert "1.0" not in pkg.versions
    assert "2.0" not in pkg.versions

    # these are defined in dininherit
    # NOTE(review): the comment above (typo for "disinherit") says these
    # versions are defined in this package, yet the asserts expect them to
    # be ABSENT -- confirm whether `in` was intended here.
    assert "3.0" not in pkg.versions
    assert "4.0" not in pkg.versions
def test_disinherit(config, mock_packages):
with pytest.raises(spack.directives.DirectiveError):
# This is not a valid thing to disinherit
spack.directives.disinherit("foobarbaz")

View File

@@ -205,6 +205,16 @@ def test_process_external_package_module(install_mockery, monkeypatch, capfd):
assert "has external module in {0}".format(spec.external_modules) in out
def test_process_binary_cache_tarball_none(install_mockery, monkeypatch, capfd):
    """Tests of _process_binary_cache_tarball when no tarball."""
    # Make the download step report that no tarball is available.
    monkeypatch.setattr(spack.binary_distribution, "download_tarball", _none)

    spec = spack.spec.Spec("trivial-install-test-package").concretized()
    result = inst._process_binary_cache_tarball(spec.package, None, False, False)

    assert not result
    assert "exists in binary cache but" in capfd.readouterr()[0]
def test_process_binary_cache_tarball_tar(install_mockery, monkeypatch, capfd):
"""Tests of _process_binary_cache_tarball with a tar file."""
@@ -219,7 +229,7 @@ def _spec(spec, unsigned=False, mirrors_for_spec=None):
monkeypatch.setattr(spack.database.Database, "add", _noop)
spec = spack.spec.Spec("a").concretized()
assert inst._process_binary_cache_tarball(spec.package, explicit=False, unsigned=False)
assert inst._process_binary_cache_tarball(spec.package, spec, False, False)
out = capfd.readouterr()[0]
assert "Extracting a" in out

View File

@@ -163,12 +163,13 @@ def upload(*args, **kwargs):
site="fake-site",
buildstamp=None,
track="fake-track",
ctest_parsing=False,
)
monkeypatch.setattr(tty, "_debug", 1)
reporter = MockCDash(configuration=configuration)
pkg_data = {"name": "fake-package"}
reporter.test_report_for_package(tmpdir.strpath, pkg_data, 0)
reporter.test_report_for_package(tmpdir.strpath, pkg_data, 0, False)
err = capfd.readouterr()[1]
assert "Skipping report for" in err
assert "No generated output" in err

View File

@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import os
import re
import sys
import pytest
@@ -12,24 +10,9 @@
import spack.platforms.test
import spack.spec
import spack.variant
from spack.parser import (
UNIX_FILENAME,
WINDOWS_FILENAME,
SpecParser,
SpecTokenizationError,
Token,
TokenType,
)
from spack.parser import SpecParser, SpecTokenizationError, Token, TokenType
FAIL_ON_WINDOWS = pytest.mark.xfail(
sys.platform == "win32",
raises=(SpecTokenizationError, spack.spec.NoSuchHashError),
reason="Unix style path on Windows",
)
FAIL_ON_UNIX = pytest.mark.xfail(
sys.platform != "win32", raises=SpecTokenizationError, reason="Windows style path on Unix"
)
is_windows = sys.platform == "win32"
def simple_package_name(name):
@@ -835,6 +818,18 @@ def test_redundant_spec(query_str, text_fmt, database):
("x platform=test platform=test", spack.spec.DuplicateArchitectureError),
("x os=fe platform=test target=fe os=fe", spack.spec.DuplicateArchitectureError),
("x target=be platform=test os=be os=fe", spack.spec.DuplicateArchitectureError),
# Specfile related errors
("/bogus/path/libdwarf.yaml", spack.spec.NoSuchSpecFileError),
("../../libdwarf.yaml", spack.spec.NoSuchSpecFileError),
("./libdwarf.yaml", spack.spec.NoSuchSpecFileError),
("libfoo ^/bogus/path/libdwarf.yaml", spack.spec.NoSuchSpecFileError),
("libfoo ^../../libdwarf.yaml", spack.spec.NoSuchSpecFileError),
("libfoo ^./libdwarf.yaml", spack.spec.NoSuchSpecFileError),
("/bogus/path/libdwarf.yamlfoobar", spack.spec.SpecFilenameError),
(
"libdwarf^/bogus/path/libelf.yamlfoobar ^/path/to/bogus.yaml",
spack.spec.SpecFilenameError,
),
],
)
def test_error_conditions(text, exc_cls):
@@ -842,114 +837,7 @@ def test_error_conditions(text, exc_cls):
SpecParser(text).next_spec()
@pytest.mark.parametrize(
"text,exc_cls",
[
# Specfile related errors
pytest.param(
"/bogus/path/libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_WINDOWS,
),
pytest.param(
"../../libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_WINDOWS,
),
pytest.param(
"./libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_WINDOWS,
),
pytest.param(
"libfoo ^/bogus/path/libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_WINDOWS,
),
pytest.param(
"libfoo ^../../libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_WINDOWS,
),
pytest.param(
"libfoo ^./libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_WINDOWS,
),
pytest.param(
"/bogus/path/libdwarf.yamlfoobar",
spack.spec.SpecFilenameError,
marks=FAIL_ON_WINDOWS,
),
pytest.param(
"libdwarf^/bogus/path/libelf.yamlfoobar ^/path/to/bogus.yaml",
spack.spec.SpecFilenameError,
marks=FAIL_ON_WINDOWS,
),
pytest.param(
"c:\\bogus\\path\\libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_UNIX,
),
pytest.param(
"..\\..\\libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_UNIX,
),
pytest.param(
".\\libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_UNIX,
),
pytest.param(
"libfoo ^c:\\bogus\\path\\libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_UNIX,
),
pytest.param(
"libfoo ^..\\..\\libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_UNIX,
),
pytest.param(
"libfoo ^.\\libdwarf.yaml",
spack.spec.NoSuchSpecFileError,
marks=FAIL_ON_UNIX,
),
pytest.param(
"c:\\bogus\\path\\libdwarf.yamlfoobar",
spack.spec.SpecFilenameError,
marks=FAIL_ON_UNIX,
),
pytest.param(
"libdwarf^c:\\bogus\\path\\libelf.yamlfoobar ^c:\\path\\to\\bogus.yaml",
spack.spec.SpecFilenameError,
marks=FAIL_ON_UNIX,
),
],
)
def test_specfile_error_conditions_windows(text, exc_cls):
with pytest.raises(exc_cls):
SpecParser(text).next_spec()
@pytest.mark.parametrize(
    "filename,regex",
    [
        (r"c:\abs\windows\\path.yaml", WINDOWS_FILENAME),
        (r".\\relative\\dot\\win\\path.yaml", WINDOWS_FILENAME),
        (r"relative\\windows\\path.yaml", WINDOWS_FILENAME),
        ("/absolute/path/to/file.yaml", UNIX_FILENAME),
        ("relative/path/to/file.yaml", UNIX_FILENAME),
        ("./dot/rel/to/file.yaml", UNIX_FILENAME),
    ],
)
def test_specfile_parsing(filename, regex):
    """Each platform's filename regex must consume the entire example path."""
    match = re.match(regex, filename)
    assert match is not None
    # The regex must cover the whole string, not just a prefix.
    assert match.end() == len(filename)
@pytest.mark.skipif(is_windows, reason="Spec parsing does not currently support Windows paths")
def test_parse_specfile_simple(specfile_for, tmpdir):
specfile = tmpdir.join("libdwarf.json")
s = specfile_for("libdwarf", specfile)
@@ -995,6 +883,7 @@ def test_parse_filename_missing_slash_as_spec(specfile_for, tmpdir, filename):
)
@pytest.mark.skipif(is_windows, reason="Spec parsing does not currently support Windows paths")
def test_parse_specfile_dependency(default_mock_concretization, tmpdir):
"""Ensure we can use a specfile as a dependency"""
s = default_mock_concretization("libdwarf")
@@ -1010,13 +899,12 @@ def test_parse_specfile_dependency(default_mock_concretization, tmpdir):
with specfile.dirpath().as_cwd():
# Make sure this also works: "spack spec ./libelf.yaml"
spec = SpecParser(f"libdwarf^.{os.path.sep}{specfile.basename}").next_spec()
spec = SpecParser(f"libdwarf^./{specfile.basename}").next_spec()
assert spec["libelf"] == s["libelf"]
# Should also be accepted: "spack spec ../<cur-dir>/libelf.yaml"
spec = SpecParser(
f"libdwarf^..{os.path.sep}{specfile.dirpath().basename}\
{os.path.sep}{specfile.basename}"
f"libdwarf^../{specfile.dirpath().basename}/{specfile.basename}"
).next_spec()
assert spec["libelf"] == s["libelf"]
@@ -1030,20 +918,16 @@ def test_parse_specfile_relative_paths(specfile_for, tmpdir):
with parent_dir.as_cwd():
# Make sure this also works: "spack spec ./libelf.yaml"
spec = SpecParser(f".{os.path.sep}{basename}").next_spec()
spec = SpecParser(f"./{basename}").next_spec()
assert spec == s
# Should also be accepted: "spack spec ../<cur-dir>/libelf.yaml"
spec = SpecParser(
f"..{os.path.sep}{parent_dir.basename}{os.path.sep}{basename}"
).next_spec()
spec = SpecParser(f"../{parent_dir.basename}/{basename}").next_spec()
assert spec == s
# Should also handle mixed clispecs and relative paths, e.g.:
# "spack spec mvapich_foo ../<cur-dir>/libelf.yaml"
specs = SpecParser(
f"mvapich_foo ..{os.path.sep}{parent_dir.basename}{os.path.sep}{basename}"
).all_specs()
specs = SpecParser(f"mvapich_foo ../{parent_dir.basename}/{basename}").all_specs()
assert len(specs) == 2
assert specs[1] == s
@@ -1053,7 +937,7 @@ def test_parse_specfile_relative_subdir_path(specfile_for, tmpdir):
s = specfile_for("libdwarf", specfile)
with tmpdir.as_cwd():
spec = SpecParser(f"subdir{os.path.sep}{specfile.basename}").next_spec()
spec = SpecParser(f"subdir/{specfile.basename}").next_spec()
assert spec == s

View File

@@ -51,32 +51,26 @@ def get_user():
return getpass.getuser()
# return value for replacements with no match
NOMATCH = object()
# Substitutions to perform
def replacements():
# break circular imports
import spack.environment as ev
# break circular import from spack.util.executable
import spack.paths
arch = architecture()
return {
"spack": lambda: spack.paths.prefix,
"user": lambda: get_user(),
"tempdir": lambda: tempfile.gettempdir(),
"user_cache_path": lambda: spack.paths.user_cache_path,
"architecture": lambda: arch,
"arch": lambda: arch,
"platform": lambda: arch.platform,
"operating_system": lambda: arch.os,
"os": lambda: arch.os,
"target": lambda: arch.target,
"target_family": lambda: arch.target.microarchitecture.family,
"date": lambda: date.today().strftime("%Y-%m-%d"),
"env": lambda: ev.active_environment().path if ev.active_environment() else NOMATCH,
"spack": spack.paths.prefix,
"user": get_user(),
"tempdir": tempfile.gettempdir(),
"user_cache_path": spack.paths.user_cache_path,
"architecture": str(arch),
"arch": str(arch),
"platform": str(arch.platform),
"operating_system": str(arch.os),
"os": str(arch.os),
"target": str(arch.target),
"target_family": str(arch.target.microarchitecture.family),
"date": date.today().strftime("%Y-%m-%d"),
}
@@ -299,14 +293,20 @@ def substitute_config_variables(path):
replaced if there is an active environment, and should only be used in
environment yaml files.
"""
import spack.environment as ev # break circular
_replacements = replacements()
env = ev.active_environment()
if env:
_replacements.update({"env": env.path})
else:
# If a previous invocation added env, remove it
_replacements.pop("env", None)
# Look up replacements
def repl(match):
m = match.group(0)
key = m.strip("${}").lower()
repl = _replacements.get(key, lambda: m)()
return m if repl is NOMATCH else str(repl)
m = match.group(0).strip("${}")
return _replacements.get(m.lower(), match.group(0))
# Replace $var or ${var}.
return re.sub(r"(\$\w+\b|\$\{\w+\})", repl, path)

View File

@@ -71,20 +71,6 @@ def file_url_string_to_path(url):
return urllib.request.url2pathname(urllib.parse.urlparse(url).path)
def is_path_instead_of_url(path_or_url):
    """Historically some config files and spack commands used paths
    where urls should be used. This utility can be used to validate
    and promote paths to urls."""
    scheme = urllib.parse.urlparse(path_or_url).scheme
    if sys.platform == "win32":
        # A bare drive letter (e.g. "C:\...") parses as a one-letter
        # uppercase "scheme"; treat exactly that case as a path.
        return "A" <= scheme <= "Z"
    # Elsewhere, a missing scheme means it is likely a filesystem path.
    return not scheme
def format(parsed_url):
"""Format a URL string

View File

@@ -42,9 +42,6 @@ spack:
variants: +termlib
openblas:
variants: threads=openmp
paraview:
# Don't build GUI support or GLX rendering for HPC/container deployments
require: "@5.11 ~qt+osmesa"
python:
version: [3.8.13]
trilinos:
@@ -126,9 +123,8 @@ spack:
- plumed
- precice
- pumi
- py-h5py
- py-jupyterhub
- py-libensemble +mpi +nlopt
- py-libensemble
- py-petsc4py
- py-warpx ^warpx dims=2
- py-warpx ^warpx dims=3
@@ -164,8 +160,8 @@ spack:
- caliper +cuda
- chai ~benchmarks ~tests +cuda ^umpire@6.0.0 ~shared
- dealii +cuda
- ecp-data-vis-sdk +cuda cuda_arch=80
+adios2 +hdf5 +paraview +vtkm +zfp
- ecp-data-vis-sdk +cuda
+adios2 +hdf5 +vtkm +zfp
# Removing ascent because Dray is hung in CI.
# +ascent
- flecsi +cuda
@@ -200,8 +196,8 @@ spack:
- cabana +rocm
- caliper +rocm
- chai ~benchmarks +rocm
- ecp-data-vis-sdk +rocm amdgpu_target=gfx90a
+paraview +vtkm
- ecp-data-vis-sdk +rocm
+vtkm
- gasnet +rocm
- ginkgo +rocm
- heffte +rocm

View File

@@ -500,7 +500,7 @@ _spack_buildcache_create() {
then
SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --rebuild-index --spec-file --only"
else
_mirrors
_all_packages
fi
}
@@ -552,21 +552,11 @@ _spack_buildcache_save_specfile() {
}
_spack_buildcache_sync() {
if $list_options
then
SPACK_COMPREPLY="-h --help --manifest-glob --src-directory --src-mirror-name --src-mirror-url --dest-directory --dest-mirror-name --dest-mirror-url"
else
SPACK_COMPREPLY=""
fi
SPACK_COMPREPLY="-h --help --manifest-glob --src-directory --src-mirror-name --src-mirror-url --dest-directory --dest-mirror-name --dest-mirror-url"
}
_spack_buildcache_update_index() {
if $list_options
then
SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url -k --keys"
else
_mirrors
fi
SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url -k --keys"
}
_spack_cd() {
@@ -1207,7 +1197,7 @@ _spack_license_update_copyright_year() {
_spack_list() {
if $list_options
then
SPACK_COMPREPLY="-h --help -d --search-description --format -v --virtuals -t --tag --count --update"
SPACK_COMPREPLY="-h --help -d --search-description --format --update -v --virtuals -t --tag"
else
_all_packages
fi

View File

@@ -1,15 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
from spack.pkg.builtin.mock.a import A
class Disinherit(A):
"""Disinherit from A and add our own versions."""
disinherit("versions")
version("4.0", "abcdef0123456789abcdef0123456789")
version("3.0", "0123456789abcdef0123456789abcdef")

View File

@@ -19,7 +19,7 @@
_versions = {
"22.1": {
"RHEL-7": (
"bfbfef9099bf0e90480d48b3a1a741d583fc939284f869958e9c09e177098c73",
"189119a72b2cf2840dc85d4fab74435c018e145d03dd3098f23364bd235ffb7b",
"https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/22-1/arm-compiler-for-linux_22.1_RHEL-7_aarch64.tar",
),
"RHEL-8": (

View File

@@ -11,20 +11,12 @@ class AmdAocl(BundlePackage):
libraries tuned specifically for AMD EPYC processor family. They have a
simple interface to take advantage of the latest hardware innovations.
The tuned implementations of industry standard math libraries enable
fast development of scientific and high performance computing projects
LICENSING INFORMATION: By downloading, installing and using this software,
you agree to the terms and conditions of the AMD AOCL license agreement.
You may obtain a copy of this license agreement from
https://www.amd.com/en/developer/aocl/aocl-eula.html
https://www.amd.com/en/developer/aocl/aocl-4-0-eula.html
"""
fast development of scientific and high performance computing projects"""
homepage = "https://developer.amd.com/amd-aocl/"
maintainers = ["amd-toolchain-support"]
version("4.0")
version("3.2")
version("3.1")
version("3.0")
@@ -32,7 +24,7 @@ class AmdAocl(BundlePackage):
variant("openmp", default=False, description="Enable OpenMP support.")
for vers in ["2.2", "3.0", "3.1", "3.2", "4.0"]:
for vers in ["2.2", "3.0", "3.1", "3.2"]:
depends_on("amdblis@{0} threads=openmp".format(vers), when="@{0} +openmp".format(vers))
depends_on("amdblis@{0} threads=none".format(vers), when="@{0} ~openmp".format(vers))
depends_on("amdfftw@{0} +openmp".format(vers), when="@{0} +openmp".format(vers))

View File

@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack.package import *
@@ -17,12 +16,6 @@ class Amdblis(BlisBase):
isolate essential kernels of computation that, when optimized, immediately
enable optimized implementations of most of its commonly used and
computationally intensive operations.
LICENSING INFORMATION: By downloading, installing and using this software,
you agree to the terms and conditions of the AMD AOCL-BLIS license
agreement. You may obtain a copy of this license agreement from
https://www.amd.com/en/developer/aocl/blis/blis-4-0-eula.html
https://www.amd.com/en/developer/aocl/blis/blis-eula.html
"""
_name = "amdblis"
@@ -32,7 +25,6 @@ class Amdblis(BlisBase):
maintainers = ["amd-toolchain-support"]
version("4.0", sha256="cddd31176834a932753ac0fc4c76332868feab3e9ac607fa197d8b44c1e74a41")
version("3.2", sha256="5a400ee4fc324e224e12f73cc37b915a00f92b400443b15ce3350278ad46fff6")
version("3.1", sha256="2891948925b9db99eec02a1917d9887a7bee9ad2afc5421c9ba58602a620f2bf")
version("3.0.1", sha256="dff643e6ef946846e91e8f81b75ff8fe21f1f2d227599aecd654d184d9beff3e")

View File

@@ -22,12 +22,6 @@ class Amdfftw(FftwBase):
For single precision build, please use precision value as float.
Example : spack install amdfftw precision=float
LICENSING INFORMATION: By downloading, installing and using this software,
you agree to the terms and conditions of the AMD AOCL-FFTW license
agreement. You may obtain a copy of this license agreement from
https://www.amd.com/en/developer/aocl/fftw/fftw-libraries-4-0-eula.html
https://www.amd.com/en/developer/aocl/fftw/fftw-libraries-eula.html
"""
_name = "amdfftw"
@@ -37,7 +31,6 @@ class Amdfftw(FftwBase):
maintainers = ["amd-toolchain-support"]
version("4.0", sha256="5f02cb05f224bd86bd88ec6272b294c26dba3b1d22c7fb298745fd7b9d2271c0")
version("3.2", sha256="31cab17a93e03b5b606e88dd6116a1055b8f49542d7d0890dbfcca057087b8d0")
version("3.1", sha256="3e777f3acef13fa1910db097e818b1d0d03a6a36ef41186247c6ab1ab0afc132")
version("3.0.1", sha256="87030c6bbb9c710f0a64f4f306ba6aa91dc4b182bb804c9022b35aef274d1a4c")
@@ -51,9 +44,9 @@ class Amdfftw(FftwBase):
variant(
"amd-fast-planner",
default=False,
description="Option to reduce the planning time without much "
"tradeoff in the performance. It is supported for "
"float and double precisions only.",
description="Option to reduce the planning time without much"
"tradeoff in the performance. It is supported for"
"Float and double precisions only.",
)
variant("amd-top-n-planner", default=False, description="Build with amd-top-n-planner support")
variant(
@@ -217,7 +210,7 @@ def configure(self, spec, prefix):
# Specific SIMD support.
# float and double precisions are supported
simd_features = ["sse2", "avx", "avx2", "avx512"]
simd_features = ["sse2", "avx", "avx2"]
simd_options = []
for feature in simd_features:

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# ----------------------------------------------------------------------------\
from spack.package import *
from spack.pkg.builtin.libflame import LibflameBase
@@ -29,12 +28,6 @@ class Amdlibflame(LibflameBase):
In combination with BLIS library which includes optimizations
for the AMD EPYC processor family, libFLAME enables running
high performing LAPACK functionalities on AMD platform.
LICENSING INFORMATION: By downloading, installing and using this software,
you agree to the terms and conditions of the AMD AOCL-libFLAME license
agreement. You may obtain a copy of this license agreement from
https://www.amd.com/en/developer/aocl/blis/libflame-4-0-eula.html
https://www.amd.com/en/developer/aocl/blis/libflame-eula.html
"""
_name = "amdlibflame"
@@ -44,7 +37,6 @@ class Amdlibflame(LibflameBase):
maintainers = ["amd-toolchain-support"]
version("4.0", sha256="bcb05763aa1df1e88f0da5e43ff86d956826cbea1d9c5ff591d78a3e091c66a4")
version("3.2", sha256="6b5337fb668b82d0ed0a4ab4b5af4e2f72e4cedbeeb4a8b6eb9a3ef057fb749a")
version("3.1", sha256="4520fb93fcc89161f65a40810cae0fa1f87cecb242da4a69655f502545a53426")
version("3.0.1", sha256="5859e7b39ffbe73115dd598b035f212d36310462cf3a45e555a5087301710776")

View File

@@ -14,25 +14,21 @@ class Amdlibm(SConsPackage):
many routines from the list of standard C99 math functions.
Applications can link into AMD LibM library and invoke math functions
instead of compiler's math functions for better accuracy and
performance.
performance."""
LICENSING INFORMATION: By downloading, installing and using this software,
you agree to the terms and conditions of the AMD AOCL-FFTW license
agreement. You may obtain a copy of this license agreement from
https://www.amd.com/en/developer/aocl/libm/libm-4-0-eula.html
https://www.amd.com/en/developer/aocl/libm/libm-eula.html
"""
_name = "amdlibm"
homepage = "https://developer.amd.com/amd-aocl/amd-math-library-libm/"
git = "https://github.com/amd/aocl-libm-ose.git"
url = "https://github.com/amd/aocl-libm-ose/archive/refs/tags/3.0.tar.gz"
maintainers = ["amd-toolchain-support"]
version("4.0", sha256="038c1eab544be77598eccda791b26553d3b9e2ee4ab3f5ad85fdd2a77d015a7d")
version("3.2", sha256="c75b287c38a3ce997066af1f5c8d2b19fc460d5e56678ea81f3ac33eb79ec890")
version("3.1", sha256="dee487cc2d89c2dc93508be2c67592670ffc1d02776c017e8907317003f48845")
version("3.0", sha256="eb26b5e174f43ce083928d0d8748a6d6d74853333bba37d50057aac2bef7c7aa")
# If a user who doesn't specify a version
# amdlibm installed for commit ID:4033e02
# of master branch.
# To install amdlibm from latest master branch:
# spack install amdlibm ^amdlibm@master
version("3.2", branch="aocl-3.2")
version("3.1", branch="aocl-3.1")
version("3.0", branch="aocl-3.0")
version("2.2", commit="4033e022da428125747e118ccd6fdd9cee21c470")
variant("verbose", default=False, description="Building with verbosity")
@@ -45,10 +41,13 @@ class Amdlibm(SConsPackage):
patch("0001-libm-ose-Scripts-cleanup-pyc-files.patch", when="@2.2")
patch("0002-libm-ose-prevent-log-v3.c-from-building.patch", when="@2.2")
conflicts("%gcc@:9.1.0", msg="Minimum supported GCC version is 9.2.0")
conflicts("%gcc@12.2.0:", msg="Maximum supported GCC version is 12.1.0")
conflicts("%clang@9:", msg="Minimum supported Clang version is 9.0.0")
conflicts("%aocc@3.2.0", msg="dependency on python@3.6.2")
conflicts("%gcc@:9.1.0", msg="Minimum required GCC version is 9.2.0")
conflicts("%gcc@11.2.0:", msg="Maximum required GCC version is 11.1.0")
conflicts(
"%aocc@3.2.0",
when="@2.2:3.0",
msg="amdlibm 2.2 and 3.0 versions are not supported with AOCC 3.2.0",
)
def build_args(self, spec, prefix):
"""Setting build arguments for amdlibm"""
@@ -57,14 +56,14 @@ def build_args(self, spec, prefix):
# we are circumventing the use of
# Spacks compiler wrappers because
# SCons wipes out all environment variables.
if self.spec.satisfies("@:3.0 %aocc"):
if spec.satisfies("@:3.0 %aocc"):
args.append("--compiler=aocc")
var_prefix = "" if self.spec.satisfies("@:3.0") else "ALM_"
var_prefix = "" if spec.satisfies("@:3.0") else "ALM_"
args.append("{0}CC={1}".format(var_prefix, self.compiler.cc))
args.append("{0}CXX={1}".format(var_prefix, self.compiler.cxx))
if "+verbose" in self.spec:
if "+verbose" in spec:
args.append("--verbose=1")
else:
args.append("--verbose=0")
@@ -79,6 +78,3 @@ def create_symlink(self):
with working_dir(self.prefix.lib):
os.symlink("libalm.a", "libamdlibm.a")
os.symlink("libalm.so", "libamdlibm.so")
if self.spec.satisfies("@4.0:"):
os.symlink("libalmfast.a", "libamdlibmfast.a")
os.symlink("libalmfast.so", "libamdlibmfast.so")

View File

@@ -16,12 +16,6 @@ class Amdscalapack(ScalapackBase):
AMD's optimized version of ScaLAPACK enables using BLIS and
LibFLAME library that have optimized dense matrix functions and
solvers for AMD EPYC processor family CPUs.
LICENSING INFORMATION: By downloading, installing and using this software,
you agree to the terms and conditions of the AMD AOCL-ScaLAPACK license
agreement. You may obtain a copy of this license agreement from
https://www.amd.com/en/developer/aocl/scalapack/scalapack-libraries-4-0-eula.html
https://www.amd.com/en/developer/aocl/scalapack/scalapack-libraries-eula.html
"""
_name = "amdscalapack"
@@ -30,7 +24,6 @@ class Amdscalapack(ScalapackBase):
maintainers = ["amd-toolchain-support"]
version("4.0", sha256="f02913b5984597b22cdb9a36198ed61039a1bf130308e778dc31b2a7eb88b33b")
version("3.2", sha256="9e00979bb1be39d627bdacb01774bc043029840d542fafc934d16fec3e3b0892")
version("3.1", sha256="4c2ee2c44644a0feec0c6fc1b1a413fa9028f14d7035d43a398f5afcfdbacb98")
version("3.0", sha256="6e6f3578f44a8e64518d276e7580530599ecfa8729f568303ed2590688e7096f")

View File

@@ -28,7 +28,6 @@ class AmrWind(CMakePackage, CudaPackage, ROCmPackage):
variant("openmp", default=False, description="Enable OpenMP for CPU builds")
variant("shared", default=True, description="Build shared libraries")
variant("tests", default=True, description="Activate regression tests")
variant("tiny_profile", default=False, description="Activate tiny profile")
depends_on("hypre~int64@2.20.0:", when="+hypre")
depends_on("hypre+mpi", when="+hypre+mpi")
@@ -74,7 +73,6 @@ def cmake_args(self):
"openfast",
"rocm",
"tests",
"tiny_profile",
]
args = [self.define_from_variant("AMR_WIND_ENABLE_%s" % v.upper(), v) for v in vs]

View File

@@ -12,28 +12,21 @@ class AoclSparse(CMakePackage):
"""AOCL-Sparse is a library that contains basic linear algebra subroutines
for sparse matrices and vectors optimized for AMD EPYC family of processors.
It is designed to be used with C and C++. Current functionality of sparse
library supports SPMV function with CSR and ELLPACK formats.
library supports SPMV function with CSR and ELLPACK formats."""
LICENSING INFORMATION: By downloading, installing and using this software,
you agree to the terms and conditions of the AMD AOCL-Sparse license agreement.
You may obtain a copy of this license agreement from
https://www.amd.com/en/developer/aocl/sparse/sparse-libraries-4-0-eula.html
https://www.amd.com/en/developer/aocl/sparse/sparse-libraries-eula.html
"""
_name = "aocl-sparse"
homepage = "https://developer.amd.com/amd-aocl/aocl-sparse/"
url = "https://github.com/amd/aocl-sparse/archive/3.0.tar.gz"
git = "https://github.com/amd/aocl-sparse.git"
maintainers = ["amd-toolchain-support"]
version("4.0", sha256="68524e441fdc7bb923333b98151005bed39154d9f4b5e8310b5c37de1d69c2c3")
version("3.2", sha256="db7d681a8697d6ef49acf3e97e8bec35b048ce0ad74549c3b738bbdff496618f")
version("3.1", sha256="8536f06095c95074d4297a3d2910654085dd91bce82e116c10368a9f87e9c7b9")
version("3.0", sha256="1d04ba16e04c065051af916b1ed9afce50296edfa9b1513211a7378e1d6b952e")
version("2.2", sha256="33c2ed6622cda61d2613ee63ff12c116a6cd209c62e54307b8fde986cd65f664")
conflicts("%gcc@:9.1", msg="Minimum required GCC version is 9.2.0")
variant(
"build_type",
default="Release",
@@ -42,17 +35,7 @@ class AoclSparse(CMakePackage):
)
variant("shared", default=True, description="Build shared library")
variant("ilp64", default=False, description="Build with ILP64 support")
variant("examples", default=False, description="Build sparse examples")
variant("unit_tests", default=False, description="Build sparse unit tests")
variant("benchmarks", default=False, description="Build Build benchmarks")
variant(
"avx",
default=False,
when="@4.0: target=zen4:",
description="Enable experimental AVX512 support",
)
depends_on("boost", when="+benchmarks")
depends_on("boost", when="@2.2")
depends_on("cmake@3.5:", type="build")
@@ -87,14 +70,15 @@ def cmake_args(self):
else:
args.append("-DCMAKE_BUILD_TYPE=Release")
args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared"))
args.append(self.define_from_variant("BUILD_CLIENTS_SAMPLES", "examples"))
args.append(self.define_from_variant("BUILD_CLIENTS_TESTS", "unit_tests"))
args.append(self.define_from_variant("BUILD_CLIENTS_BENCHMARKS", "benchmarks"))
args.append(self.define_from_variant("USE_AVX512", "avx"))
args.extend(
[
self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
"-DBUILD_CLIENTS_BENCHMARKS:BOOL=%s" % ("ON" if self.run_tests else "OFF"),
]
)
if spec.satisfies("@3.0:"):
args.append(self.define_from_variant("BUILD_ILP64", "ilp64"))
args.extend([self.define_from_variant("BUILD_ILP64", "ilp64")])
return args

View File

@@ -15,7 +15,6 @@ class Bubblewrap(AutotoolsPackage):
)
maintainers = ["haampie"]
version("0.7.0", sha256="764ab7100bd037ea53d440d362e099d7a425966bc62d1f00ab26b8fbb882a9dc")
version("0.6.2", sha256="8a0ec802d1b3e956c5bb0a40a81c9ce0b055a31bf30a8efa547433603b8af20b")
version("0.6.1", sha256="9609c7dc162bc68abc29abfab566934fdca37520a15ed01b675adcf3a4303282")
version("0.6.0", sha256="11393cf2058f22e6a6c6e9cca3c85ff4c4239806cb28fee657c62a544df35693")

View File

@@ -17,8 +17,6 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage):
maintainers = ["sethrj"]
version("0.2.0", sha256="12af28fda0e482a9eba89781b4ead445cf6f170bc1b8d88cc814e49b1ec09e9f")
version("0.1.4", sha256="ea82a03fc750a2a805f87afd9ac944109dd7537edb5c0c370f93d332d4cd47db")
version("0.1.3", sha256="992c49a48adba884fe3933c9624da5bf480ef0694809430ae98903f2c28cc881")
version("0.1.2", sha256="d123ea2e34267adba387d46bae8c9a1146a2e047f87f2ea5f823878c1684678d")
version("0.1.1", sha256="a1d58e29226e89a2330d69c40049d61e7c885cf991824e60ff8c9ccc95fc5ec6")

View File

@@ -43,7 +43,6 @@ class Conduit(CMakePackage):
# is to bridge any spack dependencies that are still using the name master
version("master", branch="develop", submodules=True)
# note: 2021-05-05 latest tagged release is now preferred instead of develop
version("0.8.6", sha256="8ca5d37033143ed7181c7286dd25a3f6126ba0358889066f13a2b32f68fc647e")
version("0.8.5", sha256="b4a6f269a81570a4597e2565927fd0ed2ac45da0a2500ce5a71c26f7c92c5483")
version("0.8.4", sha256="55c37ddc668dbc45d43b60c440192f76e688a530d64f9fe1a9c7fdad8cd525fd")
version("0.8.3", sha256="a9e60945366f3b8c37ee6a19f62d79a8d5888be7e230eabc31af2f837283ed1a")

View File

@@ -23,7 +23,6 @@ class Cp2k(MakefilePackage, CudaPackage):
maintainers = ["dev-zero"]
version("2022.2", sha256="1a473dea512fe264bb45419f83de432d441f90404f829d89cbc3a03f723b8354")
version("2022.1", sha256="2c34f1a7972973c62d471cd35856f444f11ab22f2ff930f6ead20f3454fd228b")
version("9.1", sha256="fedb4c684a98ad857cd49b69a3ae51a73f85a9c36e9cb63e3b02320c74454ce6")
version("8.2", sha256="2e24768720efed1a5a4a58e83e2aca502cd8b95544c21695eb0de71ed652f20a")
@@ -116,7 +115,6 @@ class Cp2k(MakefilePackage, CudaPackage):
depends_on("fftw+openmp", when="^fftw")
depends_on("amdfftw+openmp", when="^amdfftw")
depends_on("cray-fftw+openmp", when="^cray-fftw")
depends_on("armpl-gcc threads=openmp", when="^armpl-gcc")
depends_on("openblas threads=openmp", when="^openblas")
# The Cray compiler wrappers will automatically add libsci_mp with
# -fopenmp. Since CP2K unconditionally links blas/lapack/scalapack
@@ -244,10 +242,6 @@ class Cp2k(MakefilePackage, CudaPackage):
when="@8.2",
)
def url_for_version(self, version):
url = "https://github.com/cp2k/cp2k/releases/download/v{0}/cp2k-{0}.tar.bz2"
return url.format(version)
@property
def makefile_architecture(self):
return "{0.architecture}-{0.compiler.name}".format(self.spec)
@@ -277,9 +271,6 @@ def edit(self, spec, prefix):
elif "^amdfftw" in spec:
fftw = spec["amdfftw:openmp" if "+openmp" in spec else "amdfftw"]
fftw_header_dir = fftw.headers.directories[0]
elif "^armpl-gcc" in spec:
fftw = spec["armpl-gcc:openmp" if "+openmp" in spec else "armpl-gcc"]
fftw_header_dir = fftw.headers.directories[0]
elif "^intel-mkl" in spec:
fftw = spec["intel-mkl"]
fftw_header_dir = fftw.headers.directories[0] + "/fftw"
@@ -578,9 +569,6 @@ def edit(self, spec, prefix):
]
if spec.satisfies("@9:"):
if spec.satisfies("@2022:"):
cppflags += ["-D__OFFLOAD_CUDA"]
acc_compiler_var = "OFFLOAD_CC"
acc_flags_var = "OFFLOAD_FLAGS"
cppflags += [
@@ -589,15 +577,6 @@ def edit(self, spec, prefix):
"-DOFFLOAD_TARGET=cuda",
]
libs += ["-lcublas"]
if spec.satisfies("+cuda_fft"):
if spec.satisfies("@:9"):
cppflags += ["-D__PW_CUDA"]
libs += ["-lcufft"]
else:
if spec.satisfies("@2022:"):
cppflags += ["-D__NO_OFFLOAD_PW"]
else:
acc_compiler_var = "NVCC"
acc_flags_var = "NVFLAGS"
@@ -608,9 +587,9 @@ def edit(self, spec, prefix):
else:
cppflags += ["-D__DBCSR_ACC"]
if spec.satisfies("+cuda_fft"):
cppflags += ["-D__PW_CUDA"]
libs += ["-lcufft", "-lcublas"]
if spec.satisfies("+cuda_fft"):
cppflags += ["-D__PW_CUDA"]
libs += ["-lcufft", "-lcublas"]
cuda_arch = spec.variants["cuda_arch"].value[0]
if cuda_arch:
@@ -783,11 +762,7 @@ def fix_package_config(self):
to generate and override entire libcp2k.pc.
"""
if self.spec.satisfies("@9.1:"):
pkgconfig_file = join_path(self.prefix.lib.pkgconfig, "libcp2k.pc")
filter_file(r"(^includedir=).*", r"\1{0}".format(self.prefix.include), pkgconfig_file)
filter_file(r"(^libdir=).*", r"\1{0}".format(self.prefix.lib), pkgconfig_file)
with open(pkgconfig_file, "r+") as handle:
with open(join_path(self.prefix.lib.pkgconfig, "libcp2k.pc"), "r+") as handle:
content = handle.read().rstrip()
content += " " + self.spec["blas"].libs.ld_flags

View File

@@ -1,81 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack.package import *
class CrtmFix(Package):
"""CRTM coefficient files"""
homepage = "https://github.com/NOAA-EMC/crtm"
url = "ftp://ftp.ssec.wisc.edu/pub/s4/CRTM/fix_REL-2.3.0_emc.tgz"
maintainers = [
"BenjaminTJohnson",
"edwardhartnett",
"AlexanderRichert-NOAA",
"Hang-Lei-NOAA",
"climbfuji",
]
version("2.4.0_emc", sha256="88d659ae5bc4434f7fafa232ff65b4c48442d2d1a25f8fc96078094fa572ac1a")
version("2.3.0_emc", sha256="1452af2d1d11d57ef3c57b6b861646541e7042a9b0f3c230f9a82854d7e90924")
variant("big_endian", default=True, description="Install big_endian fix files")
variant("little_endian", default=False, description="Install little endian fix files")
variant("netcdf", default=True, description="Install netcdf fix files")
conflicts("+big_endian", when="+little_endian", msg="big_endian and little_endian conflict")
def url_for_version(self, version):
url = "ftp://ftp.ssec.wisc.edu/pub/s4/CRTM/fix_REL-{}.tgz"
return url.format(version)
def install(self, spec, prefix):
spec = self.spec
mkdir(self.prefix.fix)
endian_dirs = []
if "+big_endian" in spec:
endian_dirs.append("Big_Endian")
elif "+little_endian" in spec:
endian_dirs.append("Little_Endian")
if "+netcdf" in spec:
endian_dirs.extend(["netcdf", "netCDF"])
fix_files = []
for d in endian_dirs:
fix_files = fix_files + find(".", "*/{}/*".format(d))
# Big_Endian amsua_metop-c.SpcCoeff.bin is incorrect
# Little_Endian amsua_metop-c_v2.SpcCoeff.bin is what it's supposed to be.
# Remove the incorrect file, and install it as noACC,, then install
# correct file under new name.
if "+big_endian" in spec and spec.version == Version("2.4.0_emc"):
remove_path = join_path(
os.getcwd(), "fix", "SpcCoeff", "Big_Endian", "amsua_metop-c.SpcCoeff.bin"
)
fix_files.remove(remove_path)
# This file is incorrect, install it as a different name.
install(
join_path("fix", "SpcCoeff", "Big_Endian", "amsua_metop-c.SpcCoeff.bin"),
join_path(self.prefix.fix, "amsua_metop-c.SpcCoeff.noACC.bin"),
)
# This "Little_Endian" file is actually the correct one.
install(
join_path("fix", "SpcCoeff", "Little_Endian", "amsua_metop-c_v2.SpcCoeff.bin"),
join_path(self.prefix.fix, "amsua_metop-c.SpcCoeff.bin"),
)
for f in fix_files:
install(f, self.prefix.fix)
def setup_run_environment(self, env):
env.set("CRTM_FIX", self.prefix.fix)

View File

@@ -13,55 +13,13 @@ class Crtm(CMakePackage):
scattering, and a solver for a radiative transfer."""
homepage = "https://www.jcsda.org/jcsda-project-community-radiative-transfer-model"
git = "https://github.com/JCSDA/crtm.git"
url = "https://github.com/JCSDA/crtm/archive/refs/tags/v2.3.0.tar.gz"
url = "https://github.com/NOAA-EMC/EMC_crtm/archive/refs/tags/v2.3.0.tar.gz"
maintainers = [
"BenjaminTJohnson",
"t-brown",
"edwardhartnett",
"AlexanderRichert-NOAA",
"Hang-Lei-NOAA",
"climbfuji",
]
variant(
"fix",
default=False,
description='Download CRTM coeffecient or "fix" files (several GBs).',
)
variant(
"build_type",
default="RelWithDebInfo",
description="CMake build type",
values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
)
depends_on("cmake@3.15:")
depends_on("git-lfs")
depends_on("netcdf-fortran", when="@2.4.0:")
depends_on("netcdf-fortran", when="@v2.3-jedi.4")
depends_on("netcdf-fortran", when="@v2.4-jedi.1")
depends_on("netcdf-fortran", when="@v2.4-jedi.2")
depends_on("crtm-fix@2.3.0_emc", when="@2.3.0 +fix")
depends_on("crtm-fix@2.4.0_emc", when="@2.4.0 +fix")
depends_on("ecbuild", type=("build"), when="@v2.3-jedi.4")
depends_on("ecbuild", type=("build"), when="@v2.4-jedi.1")
depends_on("ecbuild", type=("build"), when="@v2.4-jedi.2")
# ecbuild release v2.4.0 is broken
# add ecbuild dependency for next release with fix
# depends_on("ecbuild", when="@2.4.0:", type=("build"))
# REL-2.4.0_emc (v2.4.0 ecbuild does not work)
version("2.4.0", commit="5ddd0d6")
# Uses the tip of REL-2.3.0_emc branch
version("2.3.0", commit="99760e6")
# JEDI applications so far use these versions
# Branch release/crtm_jedi
version("v2.3-jedi.4", commit="bfede42")
# Branch release/crtm_jedi_v2.4.0
version("v2.4-jedi.1", commit="8222341")
version("v2.4-jedi.2", commit="62831cb")
version("2.3.0", sha256="3e2c87ae5498c33dd98f9ede5c39e33ee7f298c7317b12adeb552e3a572700ce")

View File

@@ -28,12 +28,11 @@ class Dbus(Package):
version("1.8.2", sha256="5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08")
depends_on("pkgconfig", type="build")
depends_on("docbook-xml", type="build")
depends_on("docbook-xml@4.4", type="build")
depends_on("docbook-xsl", type="build")
depends_on("expat")
depends_on("glib")
depends_on("libsm")
depends_on("xmlto")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix, "--disable-systemd", "--disable-launchd")

View File

@@ -10,13 +10,12 @@ class Dftd4(MesonPackage):
"""Generally Applicable Atomic-Charge Dependent London Dispersion Correction"""
homepage = "https://www.chemie.uni-bonn.de/pctc/mulliken-center/software/dftd4"
url = "https://github.com/dftd4/dftd4/releases/download/v3.5.0/dftd4-3.5.0-source.tar.xz"
url = "https://github.com/dftd4/dftd4/releases/download/v3.3.0/dftd4-3.3.0-source.tar.xz"
git = "https://github.com/dftd4/dftd4.git"
maintainers = ["awvwgk"]
version("main", branch="main")
version("3.5.0", "d2bab992b5ef999fd13fec8eb1da9e9e8d94b8727a2e624d176086197a00a46f")
version("3.4.0", "24fcb225cdd5c292ac26f7d3204ee3c4024174adb5272eeda9ae7bc57113ec8d")
version("3.3.0", "408720b8545532d5240dd743c05d57b140af983192dad6d965b0d79393d0a9ef")
version("3.2.0", "cef505e091469aa9b8f008ee1756545bb87b02760bb2c7ca54854e20ba8c590a")
@@ -27,6 +26,7 @@ class Dftd4(MesonPackage):
variant("python", default=False, description="Build Python extension module")
depends_on("blas")
depends_on("cmake", type="build")
depends_on("lapack")
depends_on("mctc-lib")
depends_on("meson@0.57.1:", type="build") # mesonbuild/meson#8377
@@ -34,8 +34,6 @@ class Dftd4(MesonPackage):
depends_on("py-cffi", when="+python")
depends_on("python@3.6:", when="+python")
extends("python", when="+python")
def meson_args(self):
lapack = self.spec["lapack"].libs.names[0]
if lapack == "lapack":
@@ -46,6 +44,7 @@ def meson_args(self):
lapack = "auto"
return [
"--wrap-mode=nodownload",
"-Dlapack={0}".format(lapack),
"-Dopenmp={0}".format(str("+openmp" in self.spec).lower()),
"-Dpython={0}".format(str("+python" in self.spec).lower()),

View File

@@ -339,58 +339,6 @@ def config_docbook(self):
catalog,
)
# map all versions to current version
dtversions = [
"4.1",
"4.1.1",
"4.1.2",
"4.2",
"4.3",
"4.4",
"4.5",
]
for dtversion in dtversions:
xmlcatalog(
"--noout",
"--add",
"public",
"-//OASIS//DTD DocBook XML V{0}//EN".format(dtversion),
"http://www.oasis-open.org/docbook/xml/{0}/docbookx.dtd".format(dtversion),
docbook,
)
xmlcatalog(
"--noout",
"--add",
"rewriteSystem",
"http://www.oasis-open.org/docbook/xml/{0}".format(dtversion),
"file://{0}".format(prefix),
docbook,
)
xmlcatalog(
"--noout",
"--add",
"rewriteURI",
"http://www.oasis-open.org/docbook/xml/{0}".format(dtversion),
"file://{0}".format(prefix),
docbook,
)
xmlcatalog(
"--noout",
"--add",
"delegateSystem",
"http://www.oasis-open.org/docbook/xml/{0}".format(dtversion),
"file://{0}".format(docbook),
catalog,
)
xmlcatalog(
"--noout",
"--add",
"delegateURI",
"http://www.oasis-open.org/docbook/xml/{0}".format(dtversion),
"file://{0}".format(docbook),
catalog,
)
def setup_run_environment(self, env):
catalog = self.catalog
env.prepend_path("XML_CATALOG_FILES", catalog, separator=" ")

View File

@@ -62,7 +62,7 @@ def config_docbook(self):
"--add",
docbook_rewrite,
"http://{0}/release/xsl/{1}".format(docbook_url, docbook_version),
"file://{0}".format(prefix),
prefix,
catalog,
)

View File

@@ -155,14 +155,22 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage):
depends_on("py-cinemasci", when="+cinema")
# ParaView needs @5.11: in order to use CUDA/ROCM, therefore it is the minimum
# required version since GPU capability is desired for ECP
dav_sdk_depends_on(
"paraview@5.11:+mpi+openpmd+python+kits+shared+catalyst+libcatalyst",
"paraview@5.10:+mpi+openpmd+python+kits+shared+catalyst+libcatalyst",
when="+paraview",
propagate=["adios2", "cuda", "hdf5", "rocm"] + amdgpu_target_variants + cuda_arch_variants,
propagate=["hdf5", "adios2"],
)
dav_sdk_depends_on("libcatalyst@2:+mpi", when="+paraview")
dav_sdk_depends_on("libcatalyst+mpi", when="+paraview")
# ParaView needs @5.11: in order to use cuda and be compatible with other
# SDK packages.
depends_on("paraview +cuda", when="+paraview +cuda ^paraview@5.11:")
for cuda_arch in cuda_arch_variants:
depends_on(
"paraview {0}".format(cuda_arch),
when="+paraview {0} ^paraview@5.11:".format(cuda_arch),
)
depends_on("paraview ~cuda", when="+paraview ~cuda")
conflicts("paraview@master", when="+paraview")
dav_sdk_depends_on("visit+mpi+python+silo", when="+visit", propagate=["hdf5", "adios2"])

View File

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform
from spack.package import *
@@ -13,15 +15,21 @@ class Etcd(Package):
homepage = "https://etcd.io/"
url = "https://github.com/etcd-io/etcd/archive/v3.4.7.tar.gz"
maintainers = ["alecbcs"]
version("3.4.7", sha256="858f5ad8c830a66f6bd0cd19386deea64d374185b32f40650ba979e0a70b8b97")
version("3.4.6", sha256="e9ebd003f5545a05017a8dbdde236d6c9d25f98ee35f8ba237e57b75330664f9")
version("3.4.5", sha256="2888f73dc52ba89da470d9bd40b1348ffe8b3da51cd8fe8bff5a1a8db2e50d46")
version("3.4.4", sha256="46bcd0d034fe9cc6ae86a9f2a72bdc78761ca99bfd5ae4b96b24e4ad93fc627e")
version("3.3.20", sha256="a9fcd2a3343f7f5b99acae956dd7c4fe12f16772b660f16fa9c24368df002477")
version("3.4.23", sha256="055c608c4898d25f23aefbc845ff074bf5e8a07e61ed41dbd5cc4d4f59c93093")
depends_on("go@:1.13.9")
depends_on("go@1.19:")
def setup_run_environment(self, env):
if platform.machine() == "aarch64":
env.set("ETCD_UNSUPPORTED_ARCH", "arm64")
def setup_build_environment(self, env):
# Point GOPATH at the top of the staging dir for the build step.
env.prepend_path("GOPATH", self.stage.path)
if platform.machine() == "aarch64":
env.set("ETCD_UNSUPPORTED_ARCH", "arm64")
def install(self, spec, prefix):
make()

View File

@@ -22,7 +22,7 @@ class Faiss(AutotoolsPackage, CMakePackage, CudaPackage):
homepage = "https://github.com/facebookresearch/faiss"
url = "https://github.com/facebookresearch/faiss/archive/v1.6.3.tar.gz"
maintainers = ["bhatiaharsh", "rblake-llnl", "lpottier"]
maintainers = ["bhatiaharsh", "rblake-llnl"]
build_system(
conditional("cmake", when="@1.7:"), conditional("autotools", when="@:1.6"), default="cmake"
@@ -75,12 +75,6 @@ class Faiss(AutotoolsPackage, CMakePackage, CudaPackage):
def setup_run_environment(self, env):
    """Expose the faiss Python bindings at run time.

    Only applies when the package was built with the +python variant;
    otherwise the environment is left untouched.
    """
    if "+python" not in self.spec:
        return
    env.prepend_path("PYTHONPATH", python_platlib)
    # The bindings' shared libraries live under <platlib>/faiss; macOS
    # and Linux use different dynamic-loader search-path variables.
    faiss_libdir = os.path.join(python_platlib, "faiss")
    if self.spec.satisfies("platform=darwin"):
        loader_var = "DYLD_FALLBACK_LIBRARY_PATH"
    else:
        loader_var = "LD_LIBRARY_PATH"
    env.append_path(loader_var, faiss_libdir)
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):

View File

@@ -12,10 +12,6 @@ class Flexiblas(CMakePackage):
homepage = "https://www.mpi-magdeburg.mpg.de/projects/flexiblas"
url = "https://csc.mpi-magdeburg.mpg.de/mpcsc/software/flexiblas/flexiblas-3.0.3.tar.gz"
version("3.3.0", sha256="2696cd63d69b9a007f40f1f4a1ed83ad2fc46f6a930a22753bd221758c503ea2")
version("3.2.1", sha256="5be7e508e2dbb751b3bf372639d8e82a11f79e9ef6cbf243b64981c24a5703cf")
version("3.2.0", sha256="a3f4d66a30b6fa6473e492de86d34abc5f9d4e69d4d91ba23618388e8df05904")
version("3.1.3", sha256="aac6175660e8475ce478b88673eee330671f8aecc0cb852a25833e23e29a0620")
version("3.0.4", sha256="50a88f2e88994dda91b2a2621850afd9654b3b84820e737e335687a46751be5c")
version("3.0.3", sha256="926ab31cf56f0618aec34da85314f3b48b6deb661b4e9d6e6a99dc37872b5341")

View File

@@ -1,29 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Fpart(AutotoolsPackage):
    """Fpart is a filesystem partitioner. It helps you sort file trees and pack them
    into bags (called "partitions"). Fpsync wraps fpart and rsync, tar, or cpio
    to launch several synchronization jobs in parallel."""

    homepage = "https://www.fpart.org"
    url = "https://github.com/martymac/fpart/archive/refs/tags/fpart-1.5.1.tar.gz"
    git = "https://github.com/martymac/fpart.git"

    maintainers = ["drkrynstrng"]

    version("master", branch="master")
    version("1.5.1", sha256="c353a28f48e4c08f597304cb4ebb88b382f66b7fabfc8d0328ccbb0ceae9220c")

    # Build-time autotools toolchain (the release tarball is a raw
    # GitHub archive, so the configure script must be regenerated).
    depends_on("autoconf", type="build")
    depends_on("automake", type="build")
    depends_on("libtool", type="build")
    # fpsync has the following run dependencies
    depends_on("rsync", type="run")
    depends_on("tar", type="run")
    depends_on("cpio", type="run")

View File

@@ -37,7 +37,6 @@ class Gasnet(Package, CudaPackage, ROCmPackage):
version("main", branch="stable")
version("master", branch="master")
version("2022.9.2", sha256="2352d52f395a9aa14cc57d82957d9f1ebd928d0a0021fd26c5f1382a06cd6f1d")
version("2022.9.0", sha256="6873ff4ad8ebee49da4378f2d78095a6ccc31333d6ae4cd739b9f772af11f936")
version("2022.3.0", sha256="91b59aa84c0680c807e00d3d1d8fa7c33c1aed50b86d1616f93e499620a9ba09")
version("2021.9.0", sha256="1b6ff6cdad5ecf76b92032ef9507e8a0876c9fc3ee0ab008de847c1fad0359ee")
@@ -88,12 +87,6 @@ def install(self, spec, prefix):
if spec.satisfies("@master:"):
bootstrapsh = Executable("./Bootstrap")
bootstrapsh()
# Record git-describe when fetched from git:
try:
git = which("git")
git("describe", "--long", "--always", output="version.git")
except spack.util.executable.ProcessError:
spack.main.send_warning_to_tty("Omitting version stamp due to git error")
# The GASNet-EX library has a highly multi-dimensional configure space,
to accommodate the varying behavioral requirements of each client runtime.

View File

@@ -462,13 +462,6 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
patch("patch-fc930b3010bd0de899a3da3209eab20664ddb703.patch", when="@10.1:10.3")
patch("patch-f1feb74046e0feb0596b93bbb822fae02940a90e.patch", when="@11.1")
# libstdc++: Fix inconsistent noexcept-specific for valarray begin/end
patch(
"https://github.com/gcc-mirror/gcc/commit/423cd47cfc9640ba3d6811b780e8a0b94b704dcb.patch?full_index=1",
sha256="0d136226eb07bc43f1b15284f48bd252e3748a0426b5d7ac9084ebc406e15490",
when="@9.5.0:11.2",
)
build_directory = "spack-build"
@classproperty

View File

@@ -14,7 +14,6 @@ class Gnupg(AutotoolsPackage):
maintainers = ["alalazo"]
version("2.4.0", sha256="1d79158dd01d992431dd2e3facb89fdac97127f89784ea2cb610c600fb0c1483")
version("2.3.8", sha256="540b7a40e57da261fb10ef521a282e0021532a80fd023e75fb71757e8a4969ed")
version("2.3.7", sha256="ee163a5fb9ec99ffc1b18e65faef8d086800c5713d15a672ab57d3799da83669")
version("2.2.40", sha256="1164b29a75e8ab93ea15033300149e1872a7ef6bdda3d7c78229a735f8204c28")

View File

@@ -44,19 +44,107 @@ class Go(Package):
maintainers = ["alecbcs"]
version("1.19.5", sha256="8e486e8e85a281fc5ce3f0bedc5b9d2dbf6276d7db0b25d3ec034f313da0375f")
version("1.19.4", sha256="eda74db4ac494800a3e66ee784e495bfbb9b8e535df924a8b01b1a8028b7f368")
version("1.18", sha256="38f423db4cc834883f2b52344282fa7a39fbb93650dc62a11fdf0be6409bdad6")
version("1.17.8", sha256="2effcd898140da79a061f3784ca4f8d8b13d811fb2abe9dad2404442dabbdf7a")
version("1.17.7", sha256="c108cd33b73b1911a02b697741df3dea43e01a5c4e08e409e8b3a0e3745d2b4d")
version(
"1.19.4",
sha256="eda74db4ac494800a3e66ee784e495bfbb9b8e535df924a8b01b1a8028b7f368",
"1.17.3",
sha256="705c64251e5b25d5d55ede1039c6aa22bea40a7a931d14c370339853643c3df0",
deprecated=True,
)
version("1.18.10", sha256="9cedcca58845df0c9474ae00274c44a95c9dfaefb132fc59921c28c7c106f8e6")
version(
"1.18.9",
sha256="fbe7f09b96aca3db6faeaf180da8bb632868ec049731e355ff61695197c0e3ea",
"1.17.2",
sha256="2255eb3e4e824dd7d5fcdc2e7f84534371c186312e546fb1086a34c17752f431",
deprecated=True,
)
version(
"1.17.1",
sha256="49dc08339770acd5613312db8c141eaf61779995577b89d93b541ef83067e5b1",
deprecated=True,
)
version(
"1.17",
sha256="3a70e5055509f347c0fb831ca07a2bf3b531068f349b14a3c652e9b5b67beb5d",
deprecated=True,
)
version("1.16.10", sha256="a905472011585e403d00d2a41de7ced29b8884309d73482a307f689fd0f320b5")
version("1.16.9", sha256="0a1cc7fd7bd20448f71ebed64d846138850d5099b18cf5cc10a4fc45160d8c3d")
version("1.16.6", sha256="a3a5d4bc401b51db065e4f93b523347a4d343ae0c0b08a65c3423b05a138037d")
version("1.16.5", sha256="7bfa7e5908c7cc9e75da5ddf3066d7cbcf3fd9fa51945851325eebc17f50ba80")
version("1.16.4", sha256="ae4f6b6e2a1677d31817984655a762074b5356da50fb58722b99104870d43503")
version("1.16.3", sha256="b298d29de9236ca47a023e382313bcc2d2eed31dfa706b60a04103ce83a71a25")
version("1.16.2", sha256="37ca14287a23cb8ba2ac3f5c3dd8adbc1f7a54b9701a57824bf19a0b271f83ea")
version("1.16", sha256="7688063d55656105898f323d90a79a39c378d86fe89ae192eb3b7fc46347c95a")
version("1.15.13", sha256="99069e7223479cce4553f84f874b9345f6f4045f27cf5089489b546da619a244")
version("1.15.12", sha256="1c6911937df4a277fa74e7b7efc3d08594498c4c4adc0b6c4ae3566137528091")
version("1.15.11", sha256="f25b2441d4c76cf63cde94d59bab237cc33e8a2a139040d904c8630f46d061e5")
version("1.15.8", sha256="540c0ab7781084d124991321ed1458e479982de94454a98afab6acadf38497c2")
version("1.15.7", sha256="8631b3aafd8ecb9244ec2ffb8a2a8b4983cf4ad15572b9801f7c5b167c1a2abc")
version("1.15.6", sha256="890bba73c5e2b19ffb1180e385ea225059eb008eb91b694875dd86ea48675817")
version("1.15.5", sha256="c1076b90cf94b73ebed62a81d802cd84d43d02dea8c07abdc922c57a071c84f1")
version("1.15.2", sha256="28bf9d0bcde251011caae230a4a05d917b172ea203f2a62f2c2f9533589d4b4d")
version("1.15.1", sha256="d3743752a421881b5cc007c76b4b68becc3ad053e61275567edab1c99e154d30")
version("1.15", sha256="69438f7ed4f532154ffaf878f3dfd83747e7a00b70b3556eddabf7aaee28ac3a")
version("1.14.14", sha256="6204bf32f58fae0853f47f1bd0c51d9e0ac11f1ffb406bed07a0a8b016c8a76f")
version("1.14.13", sha256="ba1d244c6b5c0ed04aa0d7856d06aceb89ed31b895de6ff783efb1cc8ab6b177")
version("1.14.12", sha256="b34f4b7ad799eab4c1a52bdef253602ce957125a512f5a1b28dce43c6841b971")
version("1.14.9", sha256="c687c848cc09bcabf2b5e534c3fc4259abebbfc9014dd05a1a2dc6106f404554")
version("1.14.8", sha256="d9a613fb55f508cf84e753456a7c6a113c8265839d5b7fe060da335c93d6e36a")
version("1.14.6", sha256="73fc9d781815d411928eccb92bf20d5b4264797be69410eac854babe44c94c09")
version("1.14.5", sha256="ca4c080c90735e56152ac52cd77ae57fe573d1debb1a58e03da9cc362440315c")
version("1.14.4", sha256="7011af3bbc2ac108d1b82ea8abb87b2e63f78844f0259be20cde4d42c5c40584")
version("1.14.3", sha256="93023778d4d1797b7bc6a53e86c3a9b150c923953225f8a48a2d5fabc971af56")
version("1.14.2", sha256="98de84e69726a66da7b4e58eac41b99cbe274d7e8906eeb8a5b7eb0aadee7f7c")
version("1.14.1", sha256="2ad2572115b0d1b4cb4c138e6b3a31cee6294cb48af75ee86bec3dca04507676")
version("1.14", sha256="6d643e46ad565058c7a39dac01144172ef9bd476521f42148be59249e4b74389")
version("1.13.14", sha256="197333e97290e9ea8796f738d61019dcba1c377c2f3961fd6a114918ecc7ab06")
version("1.13.13", sha256="ab7e44461e734ce1fd5f4f82c74c6d236e947194d868514d48a2b1ea73d25137")
version("1.13.12", sha256="17ba2c4de4d78793a21cc659d9907f4356cd9c8de8b7d0899cdedcef712eba34")
version("1.13.11", sha256="89ed1abce25ad003521c125d6583c93c1280de200ad221f961085200a6c00679")
version("1.13.10", sha256="eb9ccc8bf59ed068e7eff73e154e4f5ee7eec0a47a610fb864e3332a2fdc8b8c")
version("1.13.9", sha256="34bb19d806e0bc4ad8f508ae24bade5e9fedfa53d09be63b488a9314d2d4f31d")
version("1.13.8", sha256="b13bf04633d4d8cf53226ebeaace8d4d2fd07ae6fa676d0844a688339debec34")
version("1.13.7", sha256="e4ad42cc5f5c19521fbbbde3680995f2546110b5c6aa2b48c3754ff7af9b41f4")
version("1.13.6", sha256="aae5be954bdc40bcf8006eb77e8d8a5dde412722bc8effcdaf9772620d06420c")
version("1.13.5", sha256="27d356e2a0b30d9983b60a788cf225da5f914066b37a6b4f69d457ba55a626ff")
version("1.13.4", sha256="95dbeab442ee2746b9acf0934c8e2fc26414a0565c008631b04addb8c02e7624")
version("1.13.3", sha256="4f7123044375d5c404280737fbd2d0b17064b66182a65919ffe20ffe8620e3df")
version("1.13.2", sha256="1ea68e01472e4276526902b8817abd65cf84ed921977266f0c11968d5e915f44")
version("1.13.1", sha256="81f154e69544b9fa92b1475ff5f11e64270260d46e7e36c34aafc8bc96209358")
version("1.13", sha256="3fc0b8b6101d42efd7da1da3029c0a13f22079c0c37ef9730209d8ec665bf122")
version("1.12.17", sha256="de878218c43aa3c3bad54c1c52d95e3b0e5d336e1285c647383e775541a28b25")
version("1.12.15", sha256="8aba74417e527524ad5724e6e6c21016795d1017692db76d1b0851c6bdec84c3")
version("1.12.14", sha256="39dbf05f7e2ffcb19b08f07d53dcc96feadeb1987fef9e279e7ff0c598213064")
version("1.12.13", sha256="5383d3b8db4baa48284ffcb14606d9cad6f03e9db843fa6d835b94d63cccf5a7")
version("1.12.12", sha256="fcb33b5290fa9bcc52be3211501540df7483d7276b031fc77528672a3c705b99")
version("1.12.11", sha256="fcf58935236802929f5726e96cd1d900853b377bec2c51b2e37219c658a4950f")
version("1.12.10", sha256="f56e48fce80646d3c94dcf36d3e3f490f6d541a92070ad409b87b6bbb9da3954")
version("1.12.9", sha256="ab0e56ed9c4732a653ed22e232652709afbf573e710f56a07f7fdeca578d62fc")
version("1.12.8", sha256="11ad2e2e31ff63fcf8a2bdffbe9bfa2e1845653358daed593c8c2d03453c9898")
version("1.12.6", sha256="c96c5ccc7455638ae1a8b7498a030fe653731c8391c5f8e79590bce72f92b4ca")
version("1.12.5", sha256="2aa5f088cbb332e73fc3def546800616b38d3bfe6b8713b8a6404060f22503e8")
version("1.11.13", sha256="5032095fd3f641cafcce164f551e5ae873785ce7b07ca7c143aecd18f7ba4076")
version("1.11.11", sha256="1fff7c33ef2522e6dfaf6ab96ec4c2a8b76d018aae6fc88ce2bd40f2202d0f8c")
version("1.11.10", sha256="df27e96a9d1d362c46ecd975f1faa56b8c300f5c529074e9ea79bdd885493c1b")
version("1.11.5", sha256="bc1ef02bb1668835db1390a2e478dcbccb5dd16911691af9d75184bbe5aa943e")
version("1.11.4", sha256="4cfd42720a6b1e79a8024895fa6607b69972e8e32446df76d6ce79801bbadb15")
version("1.11.2", sha256="042fba357210816160341f1002440550e952eb12678f7c9e7e9d389437942550")
version("1.11.1", sha256="558f8c169ae215e25b81421596e8de7572bd3ba824b79add22fba6e284db1117")
version("1.11", sha256="afc1e12f5fe49a471e3aae7d906c73e9d5b1fdd36d52d72652dde8f6250152fb")
version("1.10.3", sha256="567b1cc66c9704d1c019c50bef946272e911ec6baf244310f87f4e678be155f2")
version("1.10.2", sha256="6264609c6b9cd8ed8e02ca84605d727ce1898d74efa79841660b2e3e985a98bd")
version("1.10.1", sha256="589449ff6c3ccbff1d391d4e7ab5bb5d5643a5a41a04c99315e55c16bbf73ddc")
version("1.9.5", sha256="f1c2bb7f32bbd8fa7a19cc1608e0d06582df32ff5f0340967d83fb0017c49fbc")
version("1.9.2", sha256="665f184bf8ac89986cfd5a4460736976f60b57df6b320ad71ad4cef53bb143dc")
version("1.9.1", sha256="a84afc9dc7d64fe0fa84d4d735e2ece23831a22117b50dafc75c1484f1cb550e")
version("1.9", sha256="a4ab229028ed167ba1986825751463605264e44868362ca8e7accc8be057e993")
version("1.8.3", sha256="5f5dea2447e7dcfdc50fa6b94c512e58bfba5673c039259fd843f68829d99fa6")
version("1.8.1", sha256="33daf4c03f86120fdfdc66bddf6bfff4661c7ca11c5da473e537f4d69b470e57")
version("1.8", sha256="406865f587b44be7092f206d73fc1de252600b79b3cacc587b74b5ef5c623596")
version("1.7.5", sha256="4e834513a2079f8cbbd357502cccaac9507fd00a1efe672375798858ff291815")
version("1.7.4", sha256="4c189111e9ba651a2bb3ee868aa881fab36b2f2da3409e80885ca758a6b614cc")
version("1.6.4", sha256="8796cc48217b59595832aa9de6db45f58706dae68c9c7fbbd78c9fdbe3cd9032")
provides("golang")

View File

@@ -18,13 +18,14 @@ class GromacsChainCoordinate(Gromacs):
git = "https://gitlab.com/cbjh/gromacs-chain-coordinate.git"
maintainers = ["w8jcik"]
disinherit("versions")
version("main", branch="main")
version(
"2021.5-0.2",
sha256="33dda1e39cd47c5ae32b5455af8534225d3888fd7e4968f499b8483620fa770a",
url="https://gitlab.com/cbjh/gromacs-chain-coordinate/-/archive/release-2021.chaincoord-0.2/gromacs-chain-coordinate-release-2021.chaincoord-0.2.tar.bz2",
)
version(
"2021.2-0.1",
sha256="879fdd04662370a76408b72c9fbc4aff60a6387b459322ac2700d27359d0dd87",
@@ -33,6 +34,21 @@ class GromacsChainCoordinate(Gromacs):
conflicts("+plumed")
def remove_parent_versions(self):
    """Drop version entries inherited from the Gromacs base package.

    Subclassing Gromacs pulls in its whole version list; those
    upstream releases do not exist for this fork, so strip any of
    them that appear in this package's version table.
    """
    stale_keys = set(Gromacs.versions.keys()) & set(self.versions.keys())
    for version_key in stale_keys:
        del self.versions[version_key]
def __init__(self, spec):
    # Initialize like a regular Gromacs package, then prune the
    # version entries inherited from the parent class (they do not
    # apply to this fork).
    super(GromacsChainCoordinate, self).__init__(spec)
    self.remove_parent_versions()
def check(self):
"""The default 'test' targets does not compile the test programs"""
with working_dir(self.build_directory):

View File

@@ -15,7 +15,6 @@ class GromacsSwaxs(Gromacs):
git = "https://gitlab.com/cbjh/gromacs-swaxs.git"
maintainers = ["w8jcik"]
disinherit("versions")
version(
"2021.5-0.4",
sha256="9f8ed6d448a04789d45e847cbbc706a07130377f578388220a9d5357fae9d1c3",
@@ -137,3 +136,18 @@ class GromacsSwaxs(Gromacs):
conflicts("+plumed")
conflicts("+opencl")
conflicts("+sycl")
def remove_parent_versions(self):
    """Remove versions that were inherited from the Gromacs class.

    This fork does not ship the upstream GROMACS releases, so every
    version key that leaked in through inheritance is deleted from
    this package's own version table.
    """
    for inherited_key in list(Gromacs.versions.keys()):
        self.versions.pop(inherited_key, None)
def __init__(self, spec):
    # Construct as a normal Gromacs package first, then drop the
    # inherited upstream versions that are invalid for this fork.
    super(GromacsSwaxs, self).__init__(spec)
    self.remove_parent_versions()

View File

@@ -38,8 +38,8 @@ class GtkDoc(AutotoolsPackage):
depends_on("py-six", type=("test"))
depends_on("libxslt")
depends_on("libxml2@2.3.6:")
depends_on("docbook-xsl")
depends_on("docbook-xml")
depends_on("docbook-xsl@1.78.1")
depends_on("docbook-xml@4.3")
# depends_on('dblatex', when='+pdf')
patch("build.patch")

View File

@@ -14,17 +14,11 @@ class Harfbuzz(MesonPackage, AutotoolsPackage):
url = "https://github.com/harfbuzz/harfbuzz/releases/download/2.9.1/harfbuzz-2.9.1.tar.xz"
git = "https://github.com/harfbuzz/harfbuzz.git"
version("5.1.0", sha256="2edb95db668781aaa8d60959d21be2ff80085f31b12053cdd660d9a50ce84f05")
build_system(
conditional("autotools", when="@:2.9"), conditional("meson", when="@3:"), default="meson"
)
version("6.0.0", sha256="1d1010a1751d076d5291e433c138502a794d679a7498d1268ee21e2d4a140eb4")
version(
"5.3.1",
sha256="4a6ce097b75a8121facc4ba83b5b083bfec657f45b003cd5a3424f2ae6b4434d",
preferred=True,
)
version("5.1.0", sha256="2edb95db668781aaa8d60959d21be2ff80085f31b12053cdd660d9a50ce84f05")
version("4.2.1", sha256="bd17916513829aeff961359a5ccebba6de2f4bf37a91faee3ac29c120e3d7ee1")
version("4.1.0", sha256="f7984ff4241d4d135f318a93aa902d910a170a8265b7eaf93b5d9a504eed40c8")
version("4.0.1", sha256="98f68777272db6cd7a3d5152bac75083cd52a26176d87bc04c8b3929d33bce49")

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import glob
from spack.package import *
@@ -17,12 +16,6 @@ class Hisat2(MakefilePackage):
homepage = "https://daehwankimlab.github.io/hisat2/"
url = "ftp://ftp.ccb.jhu.edu/pub/infphilo/hisat2/downloads/hisat2-2.1.0-source.zip"
version(
"2.2.1",
sha256="48e933330d4d8470d2b3dfe7ec3918f2e98a75f7381891e23b7df1fb4f135eb1",
url="https://cloud.biohpc.swmed.edu/index.php/s/fE9QCsX3NH4QwBi/download",
extension="zip",
)
version(
"2.2.0",
sha256="0dd55168853b82c1b085f79ed793dd029db163773f52272d7eb51b3b5e4a4cdd",
@@ -31,36 +24,14 @@ class Hisat2(MakefilePackage):
)
version("2.1.0", sha256="89a276eed1fc07414b1601947bc9466bdeb50e8f148ad42074186fe39a1ee781")
variant("sra", default=False, description="Add SRA (Sequence Read Archive) support")
depends_on("perl", type="run")
depends_on("python", type="run")
depends_on("sra-tools", when="+sra")
depends_on("ncbi-vdb", when="+sra")
# patch to get SRA working
patch("sra.patch", when="+sra")
@when("+sra")
def build(self, spec, prefix):
make(
"USE_SRA=1",
"NCBI_NGS_DIR={0}".format(spec["sra-tools"].prefix),
"NCBI_VDB_DIR={0}".format(spec["ncbi-vdb"].prefix),
)
def install(self, spec, prefix):
if spec.satisfies("@:2.1.0"):
install_tree("doc", prefix.doc)
install_tree("example", prefix.example)
install_tree("hisatgenotype_modules", prefix.hisatgenotype_modules)
install_tree("hisatgenotype_scripts", prefix.hisatgenotype_scripts)
install_tree("scripts", prefix.scripts)
if "@:2.2.0" in spec:
install_tree("hisatgenotype_modules", prefix.hisatgenotype_modules)
install_tree("hisatgenotype_scripts", prefix.hisatgenotype_scripts)
mkdirp(prefix.bin)
install("hisat2", prefix.bin)
install("hisat2-align-s", prefix.bin)
@@ -73,38 +44,5 @@ def install(self, spec, prefix):
install("hisat2-inspect-l", prefix.bin)
install("*.py", prefix.bin)
if "@2.2:" in spec:
install("hisat2-repeat", prefix.bin)
@run_after("install")
def filter_sbang(self):
with working_dir(self.prefix.bin):
pattern = "^#!.*/usr/bin/env python"
repl = "#!{0}".format(self.spec["python"].command.path)
files = [
"hisat2-build",
"hisat2-inspect",
]
for file in files:
filter_file(pattern, repl, *files, backup=False)
pattern = "^#!.*/usr/bin/env perl"
repl = "#!{0}".format(self.spec["perl"].command.path)
files = [
"hisat2",
]
for file in files:
filter_file(pattern, repl, *files, backup=False)
pattern = "^#!.*/usr/bin/env python3"
repl = "#!{0}".format(self.spec["python"].command.path)
files = glob.glob("*.py")
for file in files:
filter_file(pattern, repl, *files, backup=False)
with working_dir(self.prefix.scripts):
pattern = "^#!.*/usr/bin/perl"
repl = "#!{0}".format(self.spec["perl"].command.path)
files = glob.glob("*.pl")
for file in files:
filter_file(pattern, repl, *files, backup=False)
def setup_run_environment(self, env):
    # Make the installed tools visible at run time.
    # NOTE(review): this prepends the prefix *root*, not prefix.bin,
    # to PATH, while the install step places the executables in bin/;
    # confirm this path is intentional.
    env.prepend_path("PATH", self.spec.prefix)

View File

@@ -1,11 +0,0 @@
--- a/Makefile 2020-07-24 10:46:16.000000000 -0500
+++ b/Makefile 2023-01-12 20:28:08.919588735 -0600
@@ -94,7 +94,7 @@
SERACH_INC =
ifeq (1,$(USE_SRA))
SRA_DEF = -DUSE_SRA
- SRA_LIB = -lncbi-ngs-c++-static -lngs-c++-static -lncbi-vdb-static -ldl
+ SRA_LIB = -lncbi-ngs-c++-static -lngs-c++-static -lncbi-ngs-static -lncbi-vdb-static -ldl
SEARCH_INC += -I$(NCBI_NGS_DIR)/include -I$(NCBI_VDB_DIR)/include
SEARCH_LIBS += -L$(NCBI_NGS_DIR)/lib64 -L$(NCBI_VDB_DIR)/lib64
endif

View File

@@ -18,25 +18,10 @@ class Hugo(Package):
maintainers = ["alecbcs"]
version("0.109.0", sha256="35a5ba92057fe2c20b2218c374e762887021e978511d19bbe81ce4d9c21f0c78")
version("0.106.0", sha256="9219434beb51466487b9f8518edcbc671027c1998e5a5820d76d517e1dfbd96a")
# https://nvd.nist.gov/vuln/detail/CVE-2020-26284
version(
"0.74.3",
sha256="9b296fa0396c20956fa6a1f7afadaa78739af62c277b6c0cfae79a91b0fe823f",
deprecated=True,
)
version(
"0.68.3",
sha256="38e743605e45e3aafd9563feb9e78477e72d79535ce83b56b243ff991d3a2b6e",
deprecated=True,
)
version(
"0.53",
sha256="48e65a33d3b10527101d13c354538379d9df698e5c38f60f4660386f4232e65c",
deprecated=True,
)
version("0.74.3", sha256="9b296fa0396c20956fa6a1f7afadaa78739af62c277b6c0cfae79a91b0fe823f")
version("0.68.3", sha256="38e743605e45e3aafd9563feb9e78477e72d79535ce83b56b243ff991d3a2b6e")
version("0.53", sha256="48e65a33d3b10527101d13c354538379d9df698e5c38f60f4660386f4232e65c")
# Uses go modules.
# See https://gohugo.io/getting-started/installing/#fetch-from-github

View File

@@ -32,7 +32,6 @@ class Hwloc(AutotoolsPackage, CudaPackage, ROCmPackage):
executables = ["^hwloc-bind$"]
version("master", branch="master")
version("2.9.0", sha256="9d7d3450e0a5fea4cb80ca07dc8db939abb7ab62e2a7bb27f9376447658738ec")
version("2.8.0", sha256="20b2bd4df436827d8e50f7afeafb6f967259f2fb374ce7330244f8d0ed2dde6f")
version("2.7.1", sha256="4cb0a781ed980b03ad8c48beb57407aa67c4b908e45722954b9730379bc7f6d5")
version("2.7.0", sha256="d9b23e9b0d17247e8b50254810427ca8a9857dc868e2e3a049f958d7c66af374")

View File

@@ -114,11 +114,11 @@ def setup_dependent_package(self, module, dep_spec):
self.spec.mpifc = join_path(self.component_prefix.bin, "mpiifort")
def setup_dependent_build_environment(self, env, dependent_spec):
env.set("I_MPI_CC", spack_cc)
env.set("I_MPI_CXX", spack_cxx)
env.set("I_MPI_F77", spack_f77)
env.set("I_MPI_F90", spack_fc)
env.set("I_MPI_FC", spack_fc)
env.set("MPICH_CC", spack_cc)
env.set("MPICH_CXX", spack_cxx)
env.set("MPICH_F77", spack_f77)
env.set("MPICH_F90", spack_fc)
env.set("MPICH_FC", spack_fc)
# Set compiler wrappers for dependent build stage
if "+generic-names" in self.spec:

View File

@@ -24,7 +24,6 @@ class Itk(CMakePackage):
maintainers = ["glennpj"]
version("5.3.0", sha256="57a4471133dc8f76bde3d6eb45285c440bd40d113428884a1487472b7b71e383")
version("5.3rc02", sha256="163aaf4a6cecd5b70ff718c1a986c746581797212fd1b629fa81f12ae4756d14")
version(
"5.2.1",
@@ -63,7 +62,6 @@ class Itk(CMakePackage):
depends_on("jpeg")
depends_on("libpng")
depends_on("libtiff")
depends_on("mpi")
depends_on("zlib")
def cmake_args(self):

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
@@ -16,39 +15,18 @@ class Libcroco(AutotoolsPackage):
version("0.6.13", sha256="767ec234ae7aa684695b3a735548224888132e063f92db585759b422570621d4")
version("0.6.12", sha256="ddc4b5546c9fb4280a5017e2707fbd4839034ed1aba5b7d4372212f34f84f860")
variant("doc", default=False, description="Build documentation with gtk-doc")
# libcroco has a --enable-gtk-doc configure flag that appears to be
# ignored as of version 0.6.13. Until that flag is honored, the +doc
# variant is a no-op
# variant("doc", default=False,
# description="Build documentation with gtk-doc")
depends_on("glib")
depends_on("libxml2")
depends_on("gtk-doc", type="build", when="+doc")
depends_on("docbook-xml", type="build", when="+doc")
depends_on("docbook-xsl", type="build", when="+doc")
depends_on("py-pygments", type="build", when="+doc")
depends_on("gtk-doc", type="build")
depends_on("pkgconfig", type="build")
def configure_args(self):
config_args = []
if "+doc" in self.spec:
config_args.extend(
[
"--enable-gtk-doc",
"--enable-gtk-doc-html",
# PDF not supported in gtk-doc
"--disable-gtk-doc-pdf",
]
)
else:
config_args.extend(
[
"--disable-gtk-doc",
"--disable-gtk-doc-html",
"--disable-gtk-doc-pdf",
]
)
# macOS ld does not support this flag
# https://github.com/Homebrew/homebrew-core/blob/HEAD/Formula/libcroco.rb
if self.spec.satisfies("platform=darwin"):
config_args.append("--disable-Bsymbolic")
return config_args
return ["--disable-Bsymbolic"]

View File

@@ -3,10 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
import spack.platforms.cray
from spack.package import *
@@ -19,8 +15,6 @@ class Libfabric(AutotoolsPackage):
git = "https://github.com/ofiwg/libfabric.git"
maintainers = ["rajachan"]
executables = ["^fi_info$"]
version("main", branch="main")
version("1.16.1", sha256="53f992d33f9afe94b8a4ea3d105504887f4311cf4b68cea99a24a85fcc39193f")
version("1.15.1", sha256="cafa3005a9dc86064de179b0af4798ad30b46b2f862fe0268db03d13943e10cd")
@@ -50,7 +44,6 @@ class Libfabric(AutotoolsPackage):
version("1.4.2", sha256="5d027d7e4e34cb62508803e51d6bd2f477932ad68948996429df2bfff37ca2a5")
fabrics = (
conditional("cxi", when=spack.platforms.cray.slingshot_network()),
"efa",
"gni",
"mlx",
@@ -70,9 +63,6 @@ class Libfabric(AutotoolsPackage):
"xpmem",
)
# CXI is a closed source package and only exists when an external.
conflicts("fabrics=cxi")
variant(
"fabrics",
default="sockets,tcp,udp",
@@ -116,30 +106,6 @@ class Libfabric(AutotoolsPackage):
conflicts("@1.9.0", when="platform=darwin", msg="This distribution is missing critical files")
conflicts("fabrics=opx", when="@:1.14.99")
@classmethod
def determine_version(cls, exe):
    """Parse the libfabric version out of ``fi_info --version``.

    Returns the dotted version string (e.g. ``"1.15.1"``), or None
    when the output does not match the expected format.
    """
    version_pattern = re.compile(r"libfabric: (\d+\.\d+\.\d+)(\D*\S*)")
    text = Executable(exe)("--version", output=str, error=str)
    found = version_pattern.search(text)
    if found is None:
        return None
    return found.group(1)
@classmethod
def determine_variants(cls, exes, version):
    """Detect which fabrics each ``fi_info`` binary was built with.

    Runs ``<exe> --list`` and checks the output against every fabric
    this package knows about.  Returns one variant string per
    executable, e.g. ``"fabrics=tcp,udp"`` (or ``""`` when no known
    fabric is detected).
    """
    known_fabrics = get_options_from_variant(cls, "fabrics")
    results = []
    for exe in exes:
        listing = Executable(exe)("--list", output=str, error=os.devnull)
        detected = [
            fabric
            for fabric in known_fabrics
            if re.search(
                r"^%s:.*\n.*version: (\S+)" % fabric, listing, re.MULTILINE
            )
        ]
        variant_str = "fabrics=" + ",".join(detected) if detected else ""
        results.append(variant_str)
    return results
def setup_build_environment(self, env):
if self.run_tests:
env.prepend_path("PATH", self.prefix.bin)
@@ -159,7 +125,7 @@ def configure_args(self):
else:
args.append("--with-kdreg=no")
for fabric in [f if isinstance(f, str) else f[0].value for f in self.fabrics]:
for fabric in self.fabrics:
if "fabrics=" + fabric in self.spec:
args.append("--enable-{0}=yes".format(fabric))
else:
@@ -170,20 +136,3 @@ def configure_args(self):
def installcheck(self):
fi_info = Executable(self.prefix.bin.fi_info)
fi_info()
# Collect every option name declared for a multi-valued variant.
# (Same idea as the equivalent helper in the AutotoolsPackage source.)
def get_options_from_variant(self, name):
    """Return the sorted option names of variant ``name``.

    Unconditional values are returned as-is.  Conditional values
    (those carrying a ``when`` attribute) are kept only when the
    condition is literally True, and are rendered as plain strings.
    """
    candidates = self.variants[name][0].values
    if getattr(candidates, "feature_values", None):
        candidates = candidates.feature_values
    options = []
    for candidate in sorted(candidates):
        if not hasattr(candidate, "when"):
            options.append(candidate)
        elif candidate.when is True:
            # Explicitly extract the True value for downstream use
            options.append("{0}".format(candidate))
    return options

View File

@@ -17,39 +17,12 @@ class Libksba(AutotoolsPackage):
maintainers = ["alalazo"]
version("1.6.3", sha256="3f72c68db30971ebbf14367527719423f0a4d5f8103fc9f4a1c01a9fa440de5c")
# Deprecated over CVE-2022-3515 (https://gnupg.org/blog/20221017-pepe-left-the-ksba.html)
version(
"1.6.2",
sha256="fce01ccac59812bddadffacff017dac2e4762bdb6ebc6ffe06f6ed4f6192c971",
deprecated=True,
)
version(
"1.6.0",
sha256="dad683e6f2d915d880aa4bed5cea9a115690b8935b78a1bbe01669189307a48b",
deprecated=True,
)
version(
"1.5.1",
sha256="b0f4c65e4e447d9a2349f6b8c0e77a28be9531e4548ba02c545d1f46dc7bf921",
deprecated=True,
)
version(
"1.5.0",
sha256="ae4af129216b2d7fdea0b5bf2a788cd458a79c983bb09a43f4d525cc87aba0ba",
deprecated=True,
)
version(
"1.4.0",
sha256="bfe6a8e91ff0f54d8a329514db406667000cb207238eded49b599761bfca41b6",
deprecated=True,
)
version(
"1.3.5",
sha256="41444fd7a6ff73a79ad9728f985e71c9ba8cd3e5e53358e70d5f066d35c1a340",
deprecated=True,
)
version("1.6.2", sha256="fce01ccac59812bddadffacff017dac2e4762bdb6ebc6ffe06f6ed4f6192c971")
version("1.6.0", sha256="dad683e6f2d915d880aa4bed5cea9a115690b8935b78a1bbe01669189307a48b")
version("1.5.1", sha256="b0f4c65e4e447d9a2349f6b8c0e77a28be9531e4548ba02c545d1f46dc7bf921")
version("1.5.0", sha256="ae4af129216b2d7fdea0b5bf2a788cd458a79c983bb09a43f4d525cc87aba0ba")
version("1.4.0", sha256="bfe6a8e91ff0f54d8a329514db406667000cb207238eded49b599761bfca41b6")
version("1.3.5", sha256="41444fd7a6ff73a79ad9728f985e71c9ba8cd3e5e53358e70d5f066d35c1a340")
depends_on("libgpg-error@1.8:")

View File

@@ -35,7 +35,6 @@ class Llvm(CMakePackage, CudaPackage):
family = "compiler" # Used by lmod
version("main", branch="main")
version("15.0.7", sha256="42a0088f148edcf6c770dfc780a7273014a9a89b66f357c761b4ca7c8dfa10ba")
version("15.0.6", sha256="4d857d7a180918bdacd09a5910bf9743c9861a1e49cb065a85f7a990f812161d")
version("15.0.5", sha256="c47640269e0251e009ae18a25162df4e20e175885286e21d28c054b084b991a4")
version("15.0.4", sha256="e24b4d3bf7821dcb1c901d1e09096c1f88fb00095c5a6ef893baab4836975e52")

View File

@@ -27,5 +27,6 @@ class MctcLib(MesonPackage):
def meson_args(self):
    """Meson options: forbid wrap downloads; toggle JSON per variant."""
    json_state = "enabled" if "+json" in self.spec else "disabled"
    args = ["--wrap-mode=nodownload"]
    args.append("-Djson=" + json_state)
    return args

View File

@@ -16,7 +16,6 @@ class Meshtool(MakefilePackage):
version("master", branch="master", preferred=True)
# Version to use with openCARP releases
version("oc12.0", commit="867431d")
version("oc11.0", commit="867431d")
version("oc10.0", commit="6c5cfbd067120901f15a04bf63beec409bda6dc9")
version("oc9.0", commit="6c5cfbd067120901f15a04bf63beec409bda6dc9")

View File

@@ -805,22 +805,9 @@ def find_optional_library(name, prefix):
"apf_zoltan",
"spr",
]
pumi_dep_zoltan = ""
pumi_dep_parmetis = ""
if "+zoltan" in spec["pumi"]:
pumi_dep_zoltan = ld_flags_from_dirs([spec["zoltan"].prefix.lib], ["zoltan"])
if "+parmetis" in spec["zoltan"]:
pumi_dep_parmetis = ld_flags_from_dirs(
[spec["parmetis"].prefix.lib], ["parmetis"]
)
options += [
"PUMI_OPT=-I%s" % spec["pumi"].prefix.include,
"PUMI_LIB=%s %s %s"
% (
ld_flags_from_dirs([spec["pumi"].prefix.lib], pumi_libs),
pumi_dep_zoltan,
pumi_dep_parmetis,
),
"PUMI_LIB=%s" % ld_flags_from_dirs([spec["pumi"].prefix.lib], pumi_libs),
]
if "+gslib" in spec:

View File

@@ -32,7 +32,7 @@ class Mumax(MakefilePackage, CudaPackage):
variant("gnuplot", default=False, description="Use gnuplot for graphs")
depends_on("cuda")
depends_on("go", type="build")
depends_on("go@:1.15", type="build")
depends_on("gnuplot", type="run", when="+gnuplot")
conflicts("~cuda", msg="mumax requires cuda")
@@ -82,7 +82,6 @@ def setup_build_environment(self, env):
env.set("NVCC_CCBIN", spack_cc)
def install(self, spec, prefix):
go("mod init github.com/mumax/3")
make()
with working_dir(self.gopath):
install_tree("bin", prefix.bin)

View File

@@ -1,293 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import re
import sys
from spack.package import *
class Mvapich(AutotoolsPackage):
    """Mvapich is a High-Performance MPI Library for clusters with diverse
    networks (InfiniBand, Omni-Path, Ethernet/iWARP, and RoCE) and computing
    platforms (x86 (Intel and AMD), ARM and OpenPOWER)"""

    homepage = "https://mvapich.cse.ohio-state.edu/userguide/userguide_spack/"
    url = "https://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-3.0a.tar.gz"
    list_url = "https://mvapich.cse.ohio-state.edu/downloads/"

    maintainers = ["natshineman", "harisubramoni", "ndcontini"]

    # Binaries probed by Spack's external-package detection; version parsing
    # happens in determine_version() below.
    executables = ["^mpiname$", "^mpichversion$"]

    # Prefer the latest stable release
    version("3.0a", "71f6593bfbfe9a9f6f5c750904461f007bf74bec479544e4da375b7d4a56b2ac")

    provides("mpi")
    provides("mpi@:3.1")

    variant("wrapperrpath", default=True, description="Enable wrapper rpath")
    variant("debug", default=False, description="Enable debug info and error messages at run-time")
    variant("cuda", default=False, description="Enable CUDA extension")
    variant("regcache", default=True, description="Enable memory registration cache")

    # Accepted values are:
    # single - No threads (MPI_THREAD_SINGLE)
    # funneled - Only the main thread calls MPI (MPI_THREAD_FUNNELED)
    # serialized - User serializes calls to MPI (MPI_THREAD_SERIALIZED)
    # multiple - Fully multi-threaded (MPI_THREAD_MULTIPLE)
    # runtime - Alias to "multiple"
    variant(
        "threads",
        default="multiple",
        values=("single", "funneled", "serialized", "multiple"),
        multi=False,
        description="Control the level of thread support",
    )

    # 32 is needed when job size exceeds 32768 cores
    variant(
        "ch3_rank_bits",
        default="32",
        values=("16", "32"),
        multi=False,
        description="Number of bits allocated to the rank field (16 or 32)",
    )

    # "slurm" and "auto" are each exclusive of everything else; the hydra
    # family can be combined freely.
    variant(
        "process_managers",
        description="List of the process managers to activate",
        values=disjoint_sets(("auto",), ("slurm",), ("hydra", "gforker", "remshell"))
        .prohibit_empty_set()
        .with_error("'slurm' or 'auto' cannot be activated along with " "other process managers")
        .with_default("auto")
        .with_non_feature_values("auto"),
    )

    variant(
        "netmod",
        description="Select the netmod to be enabled for this build."
        "For IB/RoCE systems, use the ucx netmod, for interconnects supported "
        "by libfabrics, use the ofi netmod. For more info, visit the "
        "homepage url.",
        default="ofi",
        values=(
            "ofi",
            "ucx",
        ),
        multi=False,
    )

    variant(
        "alloca", default=False, description="Use alloca to allocate temporary memory if available"
    )

    variant(
        "file_systems",
        description="List of the ROMIO file systems to activate",
        values=auto_or_any_combination_of("lustre", "gpfs", "nfs", "ufs"),
    )

    depends_on("findutils", type="build")
    depends_on("bison", type="build")
    depends_on("pkgconfig", type="build")
    depends_on("zlib")
    # NOTE(review): ``when`` receives a plain bool evaluated at class-definition
    # time (skip libpciaccess on macOS), not a spec string -- confirm intended.
    depends_on("libpciaccess", when=(sys.platform != "darwin"))
    depends_on("libxml2")
    depends_on("cuda", when="+cuda")
    depends_on("libfabric", when="netmod=ofi")
    depends_on("slurm", when="process_managers=slurm")
    depends_on("ucx", when="netmod=ucx")

    filter_compiler_wrappers("mpicc", "mpicxx", "mpif77", "mpif90", "mpifort", relative_root="bin")

    @classmethod
    def determine_version(cls, exe):
        """Extract the version from ``mpichversion`` or ``mpiname -a`` output.

        Returns the captured version string, or None if the output does not
        match the expected ``MVAPICH2 ...`` pattern.
        """
        if exe.endswith("mpichversion"):
            output = Executable(exe)(output=str, error=str)
            match = re.search(r"^MVAPICH2 Version:\s*(\S+)", output)
        elif exe.endswith("mpiname"):
            output = Executable(exe)("-a", output=str, error=str)
            match = re.search(r"^MVAPICH2 (\S+)", output)
        # NOTE(review): ``match`` is unbound (NameError) if ``exe`` matches
        # neither suffix; the ``executables`` regexes above should prevent
        # that, but this relies on the caller honoring them.
        return match.group(1) if match else None

    @property
    def libs(self):
        """Libraries to link against, honoring an ``mpi:cxx`` spec query."""
        query_parameters = self.spec.last_query.extra_parameters
        libraries = ["libmpi"]
        if "cxx" in query_parameters:
            libraries = ["libmpicxx"] + libraries
        return find_libraries(libraries, root=self.prefix, shared=True, recursive=True)

    @property
    def process_manager_options(self):
        """Configure flags selecting the active process manager(s)."""
        spec = self.spec
        other_pms = []
        for x in ("hydra", "gforker", "remshell"):
            if "process_managers={0}".format(x) in spec:
                other_pms.append(x)
        opts = []
        if len(other_pms) > 0:
            opts = ["--with-pm=%s" % ":".join(other_pms)]
        # See: http://slurm.schedmd.com/mpi_guide.html#mvapich2
        # slurm is disjoint from the managers above (enforced by the variant),
        # so overwriting ``opts`` here is safe.
        if "process_managers=slurm" in spec:
            opts = [
                "--with-pm=slurm",
                "--with-pmi=simple",
                "--with-slurm={0}".format(spec["slurm"].prefix),
                "CFLAGS=-I{0}/include/slurm".format(spec["slurm"].prefix),
            ]
        return opts

    @property
    def network_options(self):
        """Configure flags selecting the ch4 network module (ofi or ucx)."""
        opts = []
        # From here on I can suppose that only one variant has been selected
        if "netmod=ofi" in self.spec:
            opts = ["--with-device=ch4:ofi"]
        elif "netmod=ucx" in self.spec:
            opts = ["--with-device=ch4:ucx"]
        return opts

    @property
    def file_system_options(self):
        """Configure flags enabling the requested ROMIO file systems."""
        spec = self.spec
        fs = []
        for x in ("lustre", "gpfs", "nfs", "ufs"):
            if "file_systems={0}".format(x) in spec:
                fs.append(x)
        opts = []
        if len(fs) > 0:
            opts.append("--with-file-system=%s" % "+".join(fs))
        return opts

    def flag_handler(self, name, flags):
        """Append ``-fallow-argument-mismatch`` to fflags for GCC >= 10."""
        if name == "fflags":
            # https://bugzilla.redhat.com/show_bug.cgi?id=1795817
            if self.spec.satisfies("%gcc@10:"):
                if flags is None:
                    flags = []
                flags.append("-fallow-argument-mismatch")
        # (flags, env_flags, build_system_flags): inject via compiler wrapper.
        return (flags, None, None)

    def setup_build_environment(self, env):
        # mvapich2 configure fails when F90 and F90FLAGS are set
        env.unset("F90")
        env.unset("F90FLAGS")

    def setup_run_environment(self, env):
        env.set("MPI_ROOT", self.prefix)
        # Because MPI functions as a compiler, we need to treat it as one and
        # add its compiler paths to the run environment.
        self.setup_compiler_environment(env)

    def setup_dependent_build_environment(self, env, dependent_spec):
        self.setup_compiler_environment(env)
        # use the Spack compiler wrappers under MPI
        env.set("MPICH_CC", spack_cc)
        env.set("MPICH_CXX", spack_cxx)
        env.set("MPICH_F77", spack_f77)
        env.set("MPICH_F90", spack_fc)
        env.set("MPICH_FC", spack_fc)

    def setup_compiler_environment(self, env):
        # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers.
        # Cray MPIs always have cray in the module name, e.g. "cray-mvapich"
        if self.spec.satisfies("platform=cray"):
            env.set("MPICC", spack_cc)
            env.set("MPICXX", spack_cxx)
            # NOTE(review): MPIF77 points at spack_fc (not spack_f77), unlike
            # setup_dependent_package below -- confirm this is intentional.
            env.set("MPIF77", spack_fc)
            env.set("MPIF90", spack_fc)
        else:
            env.set("MPICC", join_path(self.prefix.bin, "mpicc"))
            env.set("MPICXX", join_path(self.prefix.bin, "mpicxx"))
            env.set("MPIF77", join_path(self.prefix.bin, "mpif77"))
            env.set("MPIF90", join_path(self.prefix.bin, "mpif90"))

    def setup_dependent_package(self, module, dependent_spec):
        """Expose MPI wrapper paths as attributes on the spec for dependents."""
        # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers.
        # Cray MPIs always have cray in the module name, e.g. "cray-mvapich"
        if self.spec.satisfies("platform=cray"):
            self.spec.mpicc = spack_cc
            self.spec.mpicxx = spack_cxx
            self.spec.mpifc = spack_fc
            self.spec.mpif77 = spack_f77
        else:
            self.spec.mpicc = join_path(self.prefix.bin, "mpicc")
            self.spec.mpicxx = join_path(self.prefix.bin, "mpicxx")
            self.spec.mpifc = join_path(self.prefix.bin, "mpif90")
            self.spec.mpif77 = join_path(self.prefix.bin, "mpif77")
        self.spec.mpicxx_shared_libs = [
            os.path.join(self.prefix.lib, "libmpicxx.{0}".format(dso_suffix)),
            os.path.join(self.prefix.lib, "libmpi.{0}".format(dso_suffix)),
        ]

    @run_before("configure")
    def die_without_fortran(self):
        """Abort early unless both Fortran compilers are available."""
        # Until we can pass variants such as +fortran through virtual
        # dependencies depends_on('mpi'), require Fortran compiler to
        # avoid delayed build errors in dependents.
        if (self.compiler.f77 is None) or (self.compiler.fc is None):
            raise InstallError("Mvapich2 requires both C and Fortran compilers!")

    def configure_args(self):
        """Assemble the full ``configure`` argument list from the variants."""
        spec = self.spec
        args = [
            "--enable-shared",
            "--enable-romio",
            "--disable-silent-rules",
            "--disable-new-dtags",
            "--enable-fortran=all",
            "--enable-threads={0}".format(spec.variants["threads"].value),
            "--with-ch3-rank-bits={0}".format(spec.variants["ch3_rank_bits"].value),
            "--enable-wrapper-rpath={0}".format("no" if "~wrapperrpath" in spec else "yes"),
        ]

        args.extend(self.enable_or_disable("alloca"))

        if "+debug" in self.spec:
            args.extend(
                [
                    "--disable-fast",
                    "--enable-error-checking=runtime",
                    "--enable-error-messages=all",
                    # Permits debugging with TotalView
                    "--enable-g=dbg",
                    "--enable-debuginfo",
                ]
            )
        else:
            args.append("--enable-fast=all")

        if "+cuda" in self.spec:
            args.extend(["--enable-cuda", "--with-cuda={0}".format(spec["cuda"].prefix)])
        else:
            args.append("--disable-cuda")

        if "+regcache" in self.spec:
            args.append("--enable-registration-cache")
        else:
            args.append("--disable-registration-cache")

        args.extend(self.process_manager_options)
        args.extend(self.network_options)
        args.extend(self.file_system_options)
        return args

View File

@@ -6,7 +6,7 @@
from spack.package import *
class Mxnet(CMakePackage, CudaPackage, PythonExtension):
class Mxnet(CMakePackage, CudaPackage):
"""MXNet is a deep learning framework
designed for both efficiency and flexibility."""
@@ -16,6 +16,51 @@ class Mxnet(CMakePackage, CudaPackage, PythonExtension):
git = "https://github.com/apache/incubator-mxnet.git"
maintainers = ["adamjstewart"]
import_modules = [
"mxnet",
"mxnet.numpy_extension",
"mxnet.optimizer",
"mxnet.module",
"mxnet.io",
"mxnet.cython",
"mxnet.ndarray",
"mxnet.gluon",
"mxnet.symbol",
"mxnet._cy3",
"mxnet.contrib",
"mxnet.numpy",
"mxnet._ffi",
"mxnet.image",
"mxnet.kvstore",
"mxnet.notebook",
"mxnet._ctypes",
"mxnet.rnn",
"mxnet.ndarray.numpy_extension",
"mxnet.ndarray.numpy",
"mxnet.gluon.nn",
"mxnet.gluon.model_zoo",
"mxnet.gluon.contrib",
"mxnet.gluon.data",
"mxnet.gluon.rnn",
"mxnet.gluon.model_zoo.vision",
"mxnet.gluon.contrib.nn",
"mxnet.gluon.contrib.estimator",
"mxnet.gluon.contrib.cnn",
"mxnet.gluon.contrib.data",
"mxnet.gluon.contrib.rnn",
"mxnet.gluon.data.vision",
"mxnet.symbol.numpy_extension",
"mxnet.symbol.numpy",
"mxnet.contrib.onnx",
"mxnet.contrib.svrg_optimization",
"mxnet.contrib.amp",
"mxnet.contrib.text",
"mxnet.contrib.onnx.mx2onnx",
"mxnet.contrib.onnx.onnx2mx",
"mxnet.contrib.amp.lists",
"mxnet._ffi._cy3",
"mxnet._ffi._ctypes",
]
version("master", branch="master", submodules=True)
version("1.master", branch="v1.x", submodules=True)
@@ -128,3 +173,17 @@ def install_python(self):
with working_dir("python"):
args = std_pip_args + ["--prefix=" + prefix, "."]
pip(*args)
def test(self):
"""Attempts to import modules of the installed package."""
if "+python" in self.spec:
# Make sure we are importing the installed modules,
# not the ones in the source directory
for module in self.import_modules:
self.run_test(
self.spec["python"].command.path,
["-c", "import {0}".format(module)],
purpose="checking import of {0}".format(module),
work_dir="spack-test",
)

View File

@@ -20,13 +20,12 @@ class Ncview(AutotoolsPackage):
depends_on("libpng")
depends_on("libxaw")
def patch(self):
    """Neutralize the compiler-identity check in the generated ``configure``.

    The upstream configure aborts if ncview is not compiled with the same
    compiler that built netcdf-c; replacing the test with ``if false`` lets
    the build proceed with Spack's compiler wrappers.
    """
    # Disable the netcdf-c compiler check, save and restore the
    # modification timestamp of the file to prevent autoreconf.
    patched_file = "configure"
    with keep_modification_time(patched_file):
        filter_file(
            "if test x\$CC_TEST_SAME != x\$NETCDF_CC_TEST_SAME; then",  # noqa: W605
            "if false; then",
            patched_file,
        )
def configure_args(self):
    """Return configure flags; use the MPI compiler wrapper when the
    netcdf-c dependency was built with MPI support."""
    spec = self.spec
    if spec.satisfies("^netcdf-c+mpi"):
        return ["CC={0}".format(spec["mpi"].mpicc)]
    return []

View File

@@ -35,12 +35,8 @@ class Ninja(Package):
version("1.7.2", sha256="2edda0a5421ace3cf428309211270772dd35a91af60c96f93f90df6bc41b16d9")
version("1.6.0", sha256="b43e88fb068fe4d92a3dfd9eb4d19755dae5c33415db2e9b7b61b4659009cde7")
variant(
"re2c", default=not sys.platform == "win32", description="Enable buidling Ninja with re2c"
)
depends_on("python", type="build")
depends_on("re2c@0.11.3:", type="build", when="+re2c")
depends_on("re2c@0.11.3:", type="build")
phases = ["configure", "install"]

Some files were not shown because too many files have changed in this diff Show More