Compare commits
91 Commits
packages/r
...
psakiev/ca

Commits (SHA1; author and date columns were not captured):
5986173e5b, 9e942bb3a3, 815cb4f5b2, 944b3dad3f, 9fe2796e9d, 200191cc3d,
63fe6fc893, 4f2a1806f9, 12a7e8d73a, 21d8c09c5e, 43596b4e23, 97edcb5acc,
fc268b0945, 0b4477c0df, eff4c14a09, f485a622c8, f151bc65f7, 99d849b2e6,
3d8f9a7b22, c88e7bc492, 931d034da4, a3a49daf8f, 2c05ce3607, 6587b2a231,
f1c743e235, 6a48121ed7, 9c03f15cbd, 6eda1b4d04, 0240120d4f, 88d7249141,
8d9af73d83, 6fb1ded7c3, b932c14008, 285f95a4d8, 3de68ef976, 5c7fe24bec,
ecb122f4c1, 6219780691, 8ec1369d2b, e3fcc41162, ae582c45c3, 252a4d1076,
df37a8ba76, 99d06b95a3, 38829b01df, 2a6a6602da, 1527e9703d, 4a22df5477,
2b4f2daa73, 02501bc4af, 7cd039d022, 1ff81c1c88, 3e3cb73446, 8e948c03fc,
572e790b3d, 1873d6909a, 4a24ab53df, 671c394d32, ce3b511f59, 03073a5fed,
787bff0d6a, 2504a76079, f665f4c41b, 4cab31323c, fcbe8c50cd, 37de90c98c,
5ccd9dc64b, 1f59ada2c2, a8a402115b, c2f3539a5e, cdeb67ec02, 2ddd8cd1aa,
5b352c3088, 95c26245c1, 6a0e03b81c, 858f70bf6f, 123c26c22d, b42ef1e7b8,
2f2c65f56b, 883d0739e6, f1a31fe5f7, c3785f4d30, cc8983cf82, 30cea3ce8a,
1252bd975c, 6547758b2f, c633149874, d640ce74e0, 6d2cc2d27a, 43f180c2c5,
0685c6277e
.flake8 (2 changes)

@@ -28,7 +28,7 @@ max-line-length = 99
# - F821: undefined name `name`
#
per-file-ignores =
-    var/spack/repos/*/package.py:F403,F405,F821
+    var/spack/*/package.py:F403,F405,F821
    *-ci-package.py:F403,F405,F821

# exclude things we usually do not want linting for.
.github/workflows/ci.yaml (22 changes, vendored)

@@ -42,17 +42,17 @@ jobs:
          # built-in repository or documentation
          filters: |
            bootstrap:
-             - 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
-             - 'var/spack/repos/builtin/packages/clingo/**'
-             - 'var/spack/repos/builtin/packages/python/**'
-             - 'var/spack/repos/builtin/packages/re2c/**'
-             - 'var/spack/repos/builtin/packages/gnupg/**'
-             - 'var/spack/repos/builtin/packages/libassuan/**'
-             - 'var/spack/repos/builtin/packages/libgcrypt/**'
-             - 'var/spack/repos/builtin/packages/libgpg-error/**'
-             - 'var/spack/repos/builtin/packages/libksba/**'
-             - 'var/spack/repos/builtin/packages/npth/**'
-             - 'var/spack/repos/builtin/packages/pinentry/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/clingo-bootstrap/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/clingo/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/python/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/re2c/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/gnupg/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/libassuan/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/libgcrypt/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/libgpg-error/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/libksba/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/npth/**'
+             - 'var/spack/repos/spack_repo/builtin/packages/pinentry/**'
              - 'lib/spack/**'
              - 'share/spack/**'
              - '.github/workflows/bootstrap.yml'
.github/workflows/prechecks.yml (8 changes, vendored)

@@ -34,7 +34,7 @@ jobs:
        vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
    - name: vermin (Repositories)
      run: |
-       vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
+       vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos var/spack/test_repos

  # Run style checks on the files that have been changed
  style:

@@ -65,7 +65,11 @@ jobs:
      python_version: '3.13'

  verify-checksums:
-   if: ${{ inputs.with_packages == 'true' }}
+   # do not run if the commit message or PR description contains [skip-verify-checksums]
+   if: >-
+     ${{ inputs.with_packages == 'true' &&
+     !contains(github.event.pull_request.body, '[skip-verify-checksums]') &&
+     !contains(github.event.head_commit.message, '[skip-verify-checksums]') }}
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
@@ -5,4 +5,4 @@ isort==6.0.1
mypy==1.15.0
types-six==1.17.0.20250403
vermin==1.6.0
-pylint==3.3.6
+pylint==3.3.7
.github/workflows/sync-packages.yaml (new file, 34 additions, vendored)

@@ -0,0 +1,34 @@
+name: sync with spack/spack-packages
+
+on:
+  push:
+    branches:
+      - develop
+
+jobs:
+  sync:
+    if: github.repository == 'spack/spack'
+    runs-on: ubuntu-latest
+    steps:
+    - name: Checkout spack/spack
+      run: git clone https://github.com/spack/spack.git
+    - name: Checkout spack/spack-packages
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        ssh-key: ${{ secrets.SYNC_PACKAGES_KEY }}
+        path: spack-packages
+        repository: spack/spack-packages
+    - name: Install git-filter-repo
+      run: |
+        curl -LfsO https://raw.githubusercontent.com/newren/git-filter-repo/refs/tags/v2.47.0/git-filter-repo
+        echo "67447413e273fc76809289111748870b6f6072f08b17efe94863a92d810b7d94 git-filter-repo" | sha256sum -c -
+        chmod +x git-filter-repo
+        sudo mv git-filter-repo /usr/local/bin/
+    - name: Sync spack/spack-packages with spack/spack
+      run: |
+        cd spack-packages
+        git-filter-repo --quiet --source ../spack --subdirectory-filter var/spack/repos --refs develop
+    - name: Push
+      run: |
+        cd spack-packages
+        git push git@github.com:spack/spack-packages.git develop:develop --force
@@ -11,4 +11,4 @@
# ~/.spack/repos.yaml
# -------------------------------------------------------------------------
repos:
-  - $spack/var/spack/repos/builtin
+  - $spack/var/spack/repos/spack_repo/builtin
@@ -1916,7 +1916,7 @@ diagnostics. Issues, if found, are reported to stdout:

   PKG-DIRECTIVES: 1 issue found
   1. lammps: wrong variant in "conflicts" directive
      the variant 'adios' does not exist
-     in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py
+     in /home/spack/spack/var/spack/repos/spack_repo/builtin/packages/lammps/package.py


------------
@@ -45,10 +45,14 @@ provided binary cache, which can be a local directory or a remote URL.
Here is an example where a build cache is created in a local directory named
"spack-cache", to which we push the "ninja" spec:

   ninja-1.12.1-vmvycib6vmiofkdqgrblo7zsvp7odwut

.. code-block:: console

   $ spack buildcache push ./spack-cache ninja
-   ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache
+   ==> Selected 30 specs to push to file:///home/spackuser/spack/spack-cache
+   ...
+   ==> [30/30] Pushed ninja@1.12.1/ngldn2k

Note that ``ninja`` must be installed locally for this to work.
@@ -98,9 +102,10 @@ Now you can use list:

.. code-block:: console

   $ spack buildcache list
-   ==> 1 cached build.
-   -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
-   ninja@1.10.2
+   ==> 24 cached builds.
+   -- linux-ubuntu22.04-sapphirerapids / gcc@12.3.0 ----------------
+   [ ... ]
+   ninja@1.12.1

With ``mymirror`` configured and an index available, Spack will automatically
use it during concretization and installation. That means that you can expect
@@ -111,17 +116,17 @@ verify by re-installing ninja:

   $ spack uninstall ninja
   $ spack install ninja
-   ==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
-   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig
-   gpg: Signature made Do 12 Jan 2023 16:01:04 CET
-   gpg: using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6
   [ ... ]
+   ==> Installing ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh [24/24]
+   gpg: Signature made Thu 06 Mar 2025 10:03:38 AM MST
+   gpg: using RSA key 75BC0528114909C076E2607418010FFAD73C9B07
+   gpg: Good signature from "example (GPG created for Spack) <example@example.com>" [ultimate]
-   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
-   ==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache
-   ==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
-   Search: 0.00s. Fetch: 0.17s. Install: 0.12s. Total: 0.29s
-   [+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz

+   ==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/f0/f08eb62661ad159d2d258890127fc6053f5302a2f490c1c7f7bd677721010ee0
+   ==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/c7/c79ac6e40dfdd01ac499b020e52e57aa91151febaea3ad183f90c0f78b64a31a
+   ==> Extracting ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh from binary cache
+   ==> ninja: Successfully installed ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh
+   Search: 0.00s. Fetch: 0.11s. Install: 0.11s. Extract: 0.10s. Relocate: 0.00s. Total: 0.22s
+   [+] /home/spackuser/spack/opt/spack/linux-ubuntu22.04-sapphirerapids/gcc-12.3.0/ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh

It worked! You've just completed a full example of creating a build cache with
a spec of interest, adding it as a mirror, updating its index, listing the contents,
@@ -344,19 +349,18 @@ which lets you get started quickly. See the following resources for more information.
^^^^^^^^^^^^^^^^^^^^^^^^^^^

Create tarball of installed Spack package and all dependencies.
-Tarballs are checksummed and signed if gpg2 is available.
-Places them in a directory ``build_cache`` that can be copied to a mirror.
-Commands like ``spack buildcache install`` will search Spack mirrors for build_cache to get the list of build caches.
+Tarballs and specfiles are compressed and checksummed, manifests are signed if gpg2 is available.
+Commands like ``spack buildcache install`` will search Spack mirrors to get the list of build caches.

============== ========================================================================================================================
Arguments      Description
============== ========================================================================================================================
``<specs>``    list of partial specs or hashes with a leading ``/`` to match from installed packages and used for creating build caches
-``-d <path>``  directory in which ``build_cache`` directory is created, defaults to ``.``
-``-f``         overwrite ``.spack`` file in ``build_cache`` directory if it exists
+``-d <path>``  directory in which ``v3`` and ``blobs`` directories are created, defaults to ``.``
+``-f``         overwrite compressed tarball and spec metadata files if they already exist
``-k <key>``   the key to sign package with. In the case where multiple keys exist, the package will be unsigned unless ``-k`` is used.
``-r``         make paths in binaries relative before creating tarball
-``-y``         answer yes to all create unsigned ``build_cache`` questions
+``-y``         answer yes to all questions about creating unsigned build caches
============== ========================================================================================================================

^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -397,6 +401,165 @@ List public keys available on Spack mirror.
========= ==============================================
Arguments Description
========= ==============================================
-``-i``    trust the keys downloaded with prompt for each
+``-it``   trust the keys downloaded with prompt for each
``-y``    answer yes to all trust all keys downloaded
========= ==============================================
.. _build_cache_layout:

------------------
Build Cache Layout
------------------

This section describes the structure and content of URL-style build caches, as
distinguished from OCI-style build caches.

The entry point for a binary package is a manifest json file that points to at
least two other files stored as content-addressed blobs. These files include a spec
metadata file, as well as the installation directory of the package stored as
a compressed archive file. Binary package manifest files are named to indicate
the package name and version, as well as the hash of the concrete spec. For
example::

   gcc-runtime-12.3.0-qyu2lvgt3nxh7izxycugdbgf5gsdpkjt.spec.manifest.json

would contain the manifest for a binary package of ``gcc-runtime@12.3.0``.
The id of the built package is defined to be the DAG hash of the concrete spec,
and exists in the name of the file as well. The id distinguishes a particular
binary package from all other binary packages with the same package name and
version. Below is an example binary package manifest file. Such a file would
live in the versioned spec manifests directory of a binary mirror, for example
``v3/manifests/spec/``::

   {
     "version": 3,
     "data": [
       {
         "contentLength": 10731083,
         "mediaType": "application/vnd.spack.install.v2.tar+gzip",
         "compression": "gzip",
         "checksumAlgorithm": "sha256",
         "checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210"
       },
       {
         "contentLength": 1000,
         "mediaType": "application/vnd.spack.spec.v5+json",
         "compression": "gzip",
         "checksumAlgorithm": "sha256",
         "checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041"
       }
     ]
   }

The manifest points to both the compressed tar file as well as the compressed
spec metadata file, and contains the checksum of each. This checksum
is also used as the address of the associated file, and hence, must be
known in order to locate the tarball or spec file within the mirror. Once the
tarball or spec metadata file is downloaded, the checksum should be computed locally
and compared to the checksum in the manifest to ensure the contents have not changed
since the binary package was pushed. Spack stores all data files (including compressed
tar files, spec metadata, indices, public keys, etc.) within a ``blobs/<hash-algorithm>/``
directory, using the first two characters of the checksum as a sub-directory
to reduce the number of files in a single folder. Here is a depiction of the
organization of binary mirror contents::

   mirror_directory/
     v3/
       layout.json
       manifests/
         spec/
           gcc-runtime/
             gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json
           gmake/
             gmake-4.4.1-lpr4j77rcgkg5536tmiuzwzlcjsiomph.spec.manifest.json
           compiler-wrapper/
             compiler-wrapper-1.0-s7ieuyievp57vwhthczhaq2ogowf3ohe.spec.manifest.json
         index/
           index.manifest.json
         key/
           75BC0528114909C076E2607418010FFAD73C9B07.key.manifest.json
           keys.manifest.json
     blobs/
       sha256/
         0f/
           0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210
         fb/
           fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041
         2a/
           2a21836d206ccf0df780ab0be63fdf76d24501375306a35daa6683c409b7922f
         ...

Files within the ``manifests`` directory are organized into subdirectories by
the type of entity they represent. Binary package manifests live in the ``spec/``
directory, binary cache index manifests live in the ``index/`` directory, and
manifests for public keys and their indices live in the ``key/`` subdirectory.
Regardless of the type of entity they represent, all manifest files are named
with an extension ``.manifest.json``.

Every manifest contains a ``data`` array, each element of which refers to an
associated file stored as a content-addressed blob. Considering the example spec
manifest shown above, the compressed installation archive can be found by
picking out the data blob with the appropriate ``mediaType``, which in this
case would be ``application/vnd.spack.install.v2.tar+gzip``. The associated
file is found by looking in the blobs directory under ``blobs/sha256/0f/`` for
the file named with the complete checksum value.
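To make the addressing scheme concrete, here is a small illustrative Python
sketch (not part of Spack; the mirror and manifest paths below are hypothetical)
that locates the blob referenced by a manifest entry and verifies its checksum:

```python
import hashlib
import json
import pathlib


def locate_and_verify_blob(mirror_root, manifest_path, media_type):
    """Find the blob for ``media_type`` in a manifest and check its sha256."""
    manifest = json.loads(pathlib.Path(manifest_path).read_text())
    entry = next(d for d in manifest["data"] if d["mediaType"] == media_type)
    assert entry["checksumAlgorithm"] == "sha256"  # only algorithm shown above
    checksum = entry["checksum"]
    # Blobs live at blobs/<algorithm>/<first two hex chars>/<full checksum>.
    blob = pathlib.Path(mirror_root, "blobs", "sha256", checksum[:2], checksum)
    if hashlib.sha256(blob.read_bytes()).hexdigest() != checksum:
        raise ValueError(f"checksum mismatch for {blob}")
    return blob


# Hypothetical usage, matching the example manifest above:
# locate_and_verify_blob(
#     "mirror_directory",
#     "mirror_directory/v3/manifests/spec/gcc-runtime/"
#     "gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json",
#     "application/vnd.spack.install.v2.tar+gzip",
# )
```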
As mentioned above, every entity in a binary mirror (aka build cache) is stored
as a content-addressed blob pointed to by a manifest. While an example spec
manifest (i.e. a manifest for a binary package) is shown above, here is what
the manifest of a build cache index looks like::

   {
     "version": 3,
     "data": [
       {
         "contentLength": 6411,
         "mediaType": "application/vnd.spack.db.v8+json",
         "compression": "none",
         "checksumAlgorithm": "sha256",
         "checksum": "225a3e9da24d201fdf9d8247d66217f5b3f4d0fc160db1498afd998bfd115234"
       }
     ]
   }

Some things to note about this manifest are that it points to a blob that is not
compressed (``compression: "none"``), and that the ``mediaType`` is one we have
not seen yet, ``application/vnd.spack.db.v8+json``. The decision not to compress
build cache indices stems from the fact that Spack does not yet sign build cache
index manifests. Once that changes, you may start to see these indices stored as
compressed blobs.

For completeness, here are examples of manifests for the other two types of entities
you might find in a Spack build cache. First, a public key manifest::

   {
     "version": 3,
     "data": [
       {
         "contentLength": 2472,
         "mediaType": "application/pgp-keys",
         "compression": "none",
         "checksumAlgorithm": "sha256",
         "checksum": "9fc18374aebc84deb2f27898da77d4d4410e5fb44c60c6238cb57fb36147e5c7"
       }
     ]
   }

Note the ``mediaType`` of ``application/pgp-keys``. Finally, a public key index manifest::

   {
     "version": 3,
     "data": [
       {
         "contentLength": 56,
         "mediaType": "application/vnd.spack.keyindex.v1+json",
         "compression": "none",
         "checksumAlgorithm": "sha256",
         "checksum": "29b3a0eb6064fd588543bc43ac7d42d708a69058dafe4be0859e3200091a9a1c"
       }
     ]
   }

Again note the ``mediaType`` of ``application/vnd.spack.keyindex.v1+json``. Also note
that both of the above manifests refer to uncompressed blobs; they are left uncompressed
for the same reason build cache indices are.
@@ -83,7 +83,7 @@ packages. You can quickly find examples by running:

.. code-block:: console

-   $ cd var/spack/repos/builtin/packages
+   $ cd var/spack/repos/spack_repo/builtin/packages
   $ grep -l QMakePackage */package.py
@@ -27,10 +27,10 @@ it could use the ``require`` directive as follows:

Spack has a number of built-in bundle packages, such as:

-* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
-* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
-* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
-* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
+* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_
+* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_
+* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/libc/package.py>`_
+* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/xsdk/package.py>`_

where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
``Libc`` is a virtual bundle package for the C standard library.
@@ -199,7 +199,7 @@ a variant to control this:

However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
missing or replaced. For example, the
-`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
+`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/dealii/package.py>`_
package overrides the default variant with:

.. code-block:: python
@@ -20,8 +20,8 @@ start is to look at the definitions of other build systems. This guide
focuses mostly on how Spack's build systems work.

In this guide, we will be using the
-`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/perl/package.py>`_ and
-`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cmake/package.py>`_
+`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/perl/package.py>`_ and
+`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cmake/package.py>`_
packages as examples. ``perl``'s build system is a hand-written
``Configure`` shell script, while ``cmake`` bootstraps itself during
installation. Both of these packages require custom build systems.
@@ -96,9 +96,9 @@ there are any other variables you need to set, you can do this in the

      env.set("BLASLIB", spec["blas"].libs.ld_flags)


-`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
+`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cbench/package.py>`_
is a good example of a simple package that does this, while
-`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/esmf/package.py>`_
+`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/esmf/package.py>`_
is a good example of a more complex package.

""""""""""""""""""""""
@@ -129,7 +129,7 @@ If you do need access to the spec, you can create a property like so:

      ]


-`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cloverleaf/package.py>`_
+`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cloverleaf/package.py>`_
is a good example of a package that uses this strategy.

"""""""""""""
@@ -152,7 +152,7 @@ and a ``filter`` method to help with this. For example:

      makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")


-`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
+`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/stream/package.py>`_
is a good example of a package that involves editing a Makefile to set
the appropriate variables.
@@ -192,7 +192,7 @@ well for storing variables:

      inc.write(f"{key} = {config[key]}\n")


-`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
+`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/elk/package.py>`_
is a good example of a package that uses a dictionary to store
configuration variables.
@@ -213,7 +213,7 @@ them in a list:

      inc.write(f"{var}\n")


-`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
+`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/hpl/package.py>`_
is a good example of a package that uses a list to store
configuration variables.
@@ -39,7 +39,7 @@ for "CRAN <package-name>" and you should quickly find what you want.
If it isn't on CRAN, try Bioconductor, another common R repository.

For the purposes of this tutorial, we will be walking through
-`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-caret/package.py>`_
+`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_caret/package.py>`_
as an example. If you search for "CRAN caret", you will quickly find what
you are looking for at https://cran.r-project.org/package=caret.
https://cran.r-project.org is the main CRAN website. However, CRAN also
@@ -337,7 +337,7 @@ Non-R dependencies
^^^^^^^^^^^^^^^^^^

Some packages depend on non-R libraries for linking. Check out the
-`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-stringi/package.py>`_
+`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_stringi/package.py>`_
package for an example: https://cloud.r-project.org/package=stringi.
If you search for the text "SystemRequirements", you will see:
@@ -352,7 +352,7 @@ Passing arguments to the installation

Some R packages provide additional flags that can be passed to
``R CMD INSTALL``, often to locate non-R dependencies.
-`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-rmpi/package.py>`_
+`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_rmpi/package.py>`_
is an example of this, and flags for linking to an MPI library. To pass
these to the installation command, you can override ``configure_args``
like so:
@@ -104,10 +104,10 @@ Finding available options

The first place to start when looking for a list of valid options to
build a package is ``scons --help``. Some packages like
-`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/kahip/package.py>`_
+`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/kahip/package.py>`_
don't bother overwriting the default SCons help message, so this isn't
very useful, but other packages like
-`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/serf/package.py>`_
+`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/serf/package.py>`_
print a list of valid command-line variables:

.. code-block:: console
@@ -177,7 +177,7 @@ print a list of valid command-line variables:


More advanced packages like
-`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cantera/package.py>`_
+`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cantera/package.py>`_
use ``scons --help`` to print a list of subcommands:

.. code-block:: console
@@ -225,10 +225,14 @@ def setup(sphinx):
    ("py:class", "llnl.util.lang.T"),
    ("py:class", "llnl.util.lang.KT"),
    ("py:class", "llnl.util.lang.VT"),
+    ("py:class", "llnl.util.lang.K"),
+    ("py:class", "llnl.util.lang.V"),
    ("py:class", "llnl.util.lang.ClassPropertyType"),
    ("py:obj", "llnl.util.lang.KT"),
    ("py:obj", "llnl.util.lang.VT"),
    ("py:obj", "llnl.util.lang.ClassPropertyType"),
+    ("py:obj", "llnl.util.lang.K"),
+    ("py:obj", "llnl.util.lang.V"),
]

# The reST default role (used for this markup: `text`) to use for all documents.
@@ -226,9 +226,9 @@ If all is well, you'll see something like this:

   Modified files:

-     var/spack/repos/builtin/packages/hdf5/package.py
-     var/spack/repos/builtin/packages/hdf/package.py
-     var/spack/repos/builtin/packages/netcdf/package.py
+     var/spack/repos/spack_repo/builtin/packages/hdf5/package.py
+     var/spack/repos/spack_repo/builtin/packages/hdf/package.py
+     var/spack/repos/spack_repo/builtin/packages/netcdf/package.py

   =======================================================
   Flake8 checks were clean.
@@ -236,9 +236,9 @@ However, if you aren't compliant with PEP 8, flake8 will complain:

.. code-block:: console

-   var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
-   var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
-   var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
+   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
+   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
+   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
   Flake8 found errors.

Most of the error messages are straightforward, but if you don't understand what
@@ -280,7 +280,7 @@ All of these can be installed with Spack, e.g.

.. warning::

-   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
+   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/py-sphinx/package.py>`_.
   If you're using a ``python`` from Spack and you installed
   ``py-sphinx`` and friends, you need to make them available to your
   ``python``. The easiest way to do this is to run:
@@ -154,9 +154,7 @@ Package-related modules

:mod:`spack.util.naming`
   Contains functions for mapping between Spack package names,
-   Python module names, and Python class names. Functions like
-   :func:`~spack.util.naming.mod_to_class` handle mapping package
-   module names to class names.
+   Python module names, and Python class names.

:mod:`spack.directives`
   *Directives* are functions that can be called inside a package definition
@@ -131,7 +131,7 @@ creates a simple python file:
It doesn't take much python coding to get from there to a working
package:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
   :lines: 5-

Spack also provides wrapper functions around common commands like
@@ -120,6 +120,16 @@ what it looks like:
Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and
copy it over to the machine you want it hosted on.

+Customization of the mirror contents can be done by selectively excluding
+specs using the ``--exclude-file`` or ``--exclude-specs`` flags with
+``spack mirror create``. Note that these only apply to source mirrors.
+
+You may additionally add an ``exclude`` or ``include``
+section to the ``mirrors`` configuration section for pushing to binary mirrors.
+These are lists of abstract or concrete specs to configure what gets pushed to your mirror.
+If overlapping inclusion and exclusions are applied then inclusion is preferred.
+

^^^^^^^^^^^^^^^^^^^
Custom package sets
^^^^^^^^^^^^^^^^^^^
@@ -369,9 +369,9 @@ If you have a collection of software expected to work well together with
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
Examples where bundle packages can be useful include defining suites of
applications (e.g, `EcpProxyApps
-<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
-(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
-and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
+<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_), commonly used libraries
+(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_),
+and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_data_vis_sdk/package.py>`_).

These versioned packages primarily consist of dependencies on the associated
software packages. They can include :ref:`variants <variants>` to ensure
@@ -443,7 +443,7 @@ lives in:

.. code-block:: console

   $ spack location -p gmp
-   ${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
+   ${SPACK_ROOT}/var/spack/repos/spack_repo/builtin/packages/gmp/package.py

but ``spack edit`` provides a much simpler shortcut and saves you the
trouble of typing the full path.
@@ -457,19 +457,19 @@ live in Spack's directory structure. In general, :ref:`cmd-spack-create`
handles creating package files for you, so you can skip most of the
details here.

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-``var/spack/repos/builtin/packages``
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+``var/spack/repos/spack_repo/builtin/packages``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A Spack installation directory is structured like a standard UNIX
install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
-Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
+Packages themselves live in ``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages``.

If you ``cd`` to that directory, you will see directories for each
package:

-.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages && ls
+.. command-output:: cd $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages && ls
   :shell:
   :ellipsis: 10
@@ -479,7 +479,7 @@ package lives in:

.. code-block:: none

-   $SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
+   $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py

Alongside the ``package.py`` file, a package may contain extra
directories or files (like patches) that it needs to build.
@@ -492,7 +492,7 @@ Packages are named after the directory containing ``package.py``. So,
``libelf``'s ``package.py`` lives in a directory called ``libelf``.
The ``package.py`` file defines a class called ``Libelf``, which
extends Spack's ``Package`` class. For example, here is
-``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
+``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py``:

.. code-block:: python
   :linenos:
@@ -520,7 +520,7 @@ these:

   $ spack install libelf@0.8.13

Spack sees the package name in the spec and looks for
-``libelf/package.py`` in ``var/spack/repos/builtin/packages``.
+``libelf/package.py`` in ``var/spack/repos/spack_repo/builtin/packages``.
Likewise, if you run ``spack install py-numpy``, Spack looks for
``py-numpy/package.py``.
@@ -686,7 +686,7 @@ https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.1.tar.bz2
In order to handle this, you can define a ``url_for_version()`` function
like so:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
   :pyobject: Openmpi.url_for_version

With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1``
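The referenced ``literalinclude`` is not rendered in this view. As a rough,
simplified sketch of the idea (the real ``Openmpi.url_for_version`` handles
more cases), such a method can look like:

```python
def url_for_version(self, version):
    # OpenMPI tarballs are grouped by minor series, e.g.
    # https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.1.tar.bz2
    url = "https://www.open-mpi.org/software/ompi/v{0}/downloads/openmpi-{1}.tar.bz2"
    return url.format(version.up_to(2), version)
```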
@@ -787,7 +787,7 @@ of GNU. For that, Spack goes a step further and defines a mixin class that
takes care of all of the plumbing and requires packagers to just define a proper
``gnu_mirror_path`` attribute:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/autoconf/package.py
   :lines: 9-18

^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1995,7 +1995,7 @@ structure like this:

.. code-block:: none

-   $SPACK_ROOT/var/spack/repos/builtin/packages/
+   $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/
      mvapich2/
         package.py
         ad_lustre_rwcontig_open_source.patch
@@ -2133,7 +2133,7 @@ handles ``RPATH``:

.. _pyside-patch:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/py-pyside/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/py_pyside/package.py
   :pyobject: PyPyside.patch
   :linenos:
@@ -2201,7 +2201,7 @@ using the ``spack resource show`` command::

   $ spack resource show 3877ab54
   3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00
-      path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/m4/gnulib-pgi.patch
+      path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/m4/gnulib-pgi.patch
      applies to: builtin.m4

``spack resource show`` looks up downloadable resources from package
@@ -2219,7 +2219,7 @@ wonder where the extra boost patches are coming from::

   ^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
   $ spack resource show b37164268
   b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
-      path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/dealii/boost_1.68.0.patch
+      path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/dealii/boost_1.68.0.patch
      applies to: builtin.boost
      patched by: builtin.dealii
@@ -2930,7 +2930,7 @@ this, Spack provides four different methods that can be overridden in a package:

The Qt package, for instance, uses this call:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/qt/package.py
   :pyobject: Qt.setup_dependent_build_environment
   :linenos:
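The included snippet is not rendered here; a minimal hypothetical sketch of the
pattern (not the actual Qt code) is:

```python
def setup_dependent_build_environment(self, env, dependent_spec):
    # Point dependents at this Qt installation while they build.
    env.set("QTDIR", self.prefix)
    env.prepend_path("PATH", self.prefix.bin)
```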
@@ -2958,7 +2958,7 @@ variables to be used by the dependent. This is done by implementing
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
example of this can be found in the ``Python`` package:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/python/package.py
   :pyobject: Python.setup_dependent_package
   :linenos:
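Again the snippet itself is not rendered in this view; its general shape, as a
hypothetical sketch, is to attach attributes to the dependent package's module:

```python
def setup_dependent_package(self, module, dependent_spec):
    # Hypothetical sketch: expose a ``python`` callable in the dependent's
    # module scope so its build phases can run ``python(...)`` directly.
    module.python = self.command
```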
||||
@@ -3785,7 +3785,7 @@ It is usually sufficient for a packager to override a few
|
||||
build system specific helper methods or attributes to provide, for instance,
|
||||
configure arguments:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/m4/package.py
|
||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/m4/package.py
|
||||
:pyobject: M4.configure_args
|
||||
:linenos:
|
||||
|
||||
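For readers without the source at hand, a typical ``configure_args``
implementation translates variants into flags; a hypothetical sketch (not
necessarily the real ``M4.configure_args``) looks like:

```python
def configure_args(self):
    spec = self.spec
    args = []
    # Translate a variant into the corresponding configure flag.
    if spec.satisfies("+sigsegv"):
        args.append(f"--with-libsigsegv-prefix={spec['libsigsegv'].prefix}")
    else:
        args.append("--without-libsigsegv-prefix")
    return args
```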
@@ -4110,7 +4110,7 @@ Shell command functions

Recall the install method from ``libelf``:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
   :pyobject: Libelf.install
   :linenos:
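The included method is not rendered in this view; in outline (a hypothetical
sketch, not the verbatim package code) it is the classic configure/make/install
sequence, using the shell command functions Spack injects into package modules:

```python
def install(self, spec, prefix):
    # ``configure`` and ``make`` are wrapper functions provided by Spack.
    configure("--prefix={0}".format(prefix), "--enable-compat", "--enable-shared")
    make()
    make("install", parallel=False)
```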
@@ -4901,7 +4901,7 @@ the one passed to install, only the MPI implementations all set some
additional properties on it to help you out. E.g., in openmpi, you'll
find this:

-.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
   :pyobject: Openmpi.setup_dependent_package

That code allows the ``openmpi`` package to associate an ``mpicc`` property
@@ -6001,16 +6001,16 @@ with those implemented in the package itself.

   * - Parent/Provider Package
     - Stand-alone Tests
   * - `C
-       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/c>`_
+       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/c>`_
     - Compiles ``hello.c`` and runs it
   * - `Cxx
-       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
+       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cxx>`_
     - Compiles and runs several ``hello`` programs
   * - `Fortran
-       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
+       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/fortran>`_
     - Compiles and runs ``hello`` programs (``F`` and ``f90``)
   * - `Mpi
-       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/mpi>`_
+       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/mpi>`_
     - Compiles and runs ``mpi_hello`` (``c``, ``fortran``)
   * - :ref:`PythonPackage <pythonpackage>`
     - Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball
@@ -6031,7 +6031,7 @@ maintainers provide additional stand-alone tests customized to the package.

One example of a package that adds its own stand-alone tests to those
"inherited" by the virtual package it provides an implementation for is
the `Openmpi package
-<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/openmpi/package.py>`_.
+<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py>`_.

Below are snippets from running and viewing the stand-alone test results
for ``openmpi``:
@@ -9,7 +9,7 @@ Package Repositories (repos.yaml)
=================================

Spack comes with thousands of built-in package recipes in
-``var/spack/repos/builtin/``. This is a **package repository** -- a
+``var/spack/repos/spack_repo/builtin/``. This is a **package repository** -- a
directory that Spack searches when it needs to find a package by name.
You may need to maintain packages for restricted, proprietary or
experimental software separately from the built-in repository. Spack
|
||||
.. code-block:: yaml
|
||||
|
||||
repos:
|
||||
- $spack/var/spack/repos/builtin
|
||||
- $spack/var/spack/repos/spack_repo/builtin
|
||||
|
||||
The file starts with ``repos:`` and contains a single ordered list of
|
||||
paths to repositories. Each path is on a separate line starting with
|
||||
@@ -78,16 +78,16 @@ paths to repositories. Each path is on a separate line starting with

.. code-block:: yaml

   repos:
-     - /opt/local-repo
-     - $spack/var/spack/repos/builtin
+     - /opt/repos/spack_repo/local_repo
+     - $spack/var/spack/repos/spack_repo/builtin

When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``,
it searches these repositories in order (first to last) to resolve each
package name. In this example, Spack will look for the following
packages and use the first valid file:

-1. ``/opt/local-repo/packages/mpich/package.py``
-2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
+1. ``/opt/repos/spack_repo/local_repo/packages/mpich/package.py``
+2. ``$spack/var/spack/repos/spack_repo/builtin/packages/mpich/package.py``
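The first-match rule is easy to picture with a standalone Python sketch (this
is an illustration, not Spack's actual implementation):

```python
import os


def find_package(name, repo_paths):
    """Return the first package.py for ``name``, searching repos in order."""
    for repo in repo_paths:
        candidate = os.path.join(repo, "packages", name, "package.py")
        if os.path.exists(candidate):
            return candidate
    raise LookupError(f"no configured repo provides package {name!r}")


# Mirrors the search order shown above (paths are illustrative):
# find_package("mpich", ["/opt/repos/spack_repo/local_repo",
#                        "/path/to/spack/var/spack/repos/spack_repo/builtin"])
```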
.. note::
@@ -101,14 +101,15 @@ Namespaces

Every repository in Spack has an associated **namespace** defined in its
top-level ``repo.yaml`` file. If you look at
-``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
+``var/spack/repos/spack_repo/builtin/repo.yaml`` in the built-in repository, you'll
see that its namespace is ``builtin``:

.. code-block:: console

-   $ cat var/spack/repos/builtin/repo.yaml
+   $ cat var/spack/repos/spack_repo/builtin/repo.yaml
   repo:
     namespace: builtin
+     api: v2.0

Spack records the repository namespace of each installed package. For
example, if you install the ``mpich`` package from the ``builtin`` repo,
@@ -217,15 +218,15 @@ Suppose you have three repositories: the builtin Spack repo
repo containing your own prototype packages (``proto``). Suppose they
contain packages as follows:

-+--------------+------------------------------------+-----------------------------+
-| Namespace    | Path to repo                       | Packages                    |
-+==============+====================================+=============================+
-| ``proto``    | ``~/proto``                        | ``mpich``                   |
-+--------------+------------------------------------+-----------------------------+
-| ``llnl``     | ``/usr/local/llnl``                | ``hdf5``                    |
-+--------------+------------------------------------+-----------------------------+
-| ``builtin``  | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
-+--------------+------------------------------------+-----------------------------+
++--------------+-----------------------------------------------+-----------------------------+
+| Namespace    | Path to repo                                  | Packages                    |
++==============+===============================================+=============================+
+| ``proto``    | ``~/my_spack_repos/spack_repo/proto``         | ``mpich``                   |
++--------------+-----------------------------------------------+-----------------------------+
+| ``llnl``     | ``/usr/local/repos/spack_repo/llnl``          | ``hdf5``                    |
++--------------+-----------------------------------------------+-----------------------------+
+| ``builtin``  | ``$spack/var/spack/repos/spack_repo/builtin`` | ``mpich``, ``hdf5``, others |
++--------------+-----------------------------------------------+-----------------------------+

Suppose that ``hdf5`` depends on ``mpich``. You can override the
built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
@@ -233,8 +234,8 @@ built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:

.. code-block:: yaml

   repos:
-     - /usr/local/llnl
-     - $spack/var/spack/repos/builtin
+     - /usr/local/repos/spack_repo/llnl
+     - $spack/var/spack/repos/spack_repo/builtin

``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.
@@ -243,9 +244,9 @@ If, instead, ``repos.yaml`` looks like this:

.. code-block:: yaml

   repos:
-     - ~/proto
-     - /usr/local/llnl
-     - $spack/var/spack/repos/builtin
+     - ~/my_spack_repos/spack_repo/proto
+     - /usr/local/repos/spack_repo/llnl
+     - $spack/var/spack/repos/spack_repo/builtin

``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.
@@ -326,8 +327,8 @@ files, use ``spack repo list``.

   $ spack repo list
   ==> 2 package repositories.
-   myrepo     ~/myrepo
-   builtin    ~/spack/var/spack/repos/builtin
+   myrepo    v2.0    ~/my_spack_repos/spack_repo/myrepo
+   builtin   v2.0    ~/spack/var/spack/repos/spack_repo/builtin

Each repository is listed with its associated namespace. To get the raw,
merged YAML from all configuration files, use ``spack config get repos``:
@@ -335,9 +336,9 @@ merged YAML from all configuration files, use ``spack config get repos``:

.. code-block:: console

   $ spack config get repos
   repos:
-     - ~/myrepo
-     - $spack/var/spack/repos/builtin
+     - ~/my_spack_repos/spack_repo/myrepo
+     - $spack/var/spack/repos/spack_repo/builtin

Note that, unlike ``spack repo list``, this does not include the
namespace, which is read from each repo's ``repo.yaml``.
@@ -351,66 +352,54 @@ yourself; you can use the ``spack repo create`` command.

.. code-block:: console

-   $ spack repo create myrepo
+   $ spack repo create ~/my_spack_repos myrepo
   ==> Created repo with namespace 'myrepo'.
   ==> To register it with spack, run this command:
-     spack repo add ~/myrepo
+     spack repo add ~/my_spack_repos/spack_repo/myrepo

-   $ ls myrepo
+   $ ls ~/my_spack_repos/spack_repo/myrepo
   packages/ repo.yaml

-   $ cat myrepo/repo.yaml
+   $ cat ~/my_spack_repos/spack_repo/myrepo/repo.yaml
   repo:
     namespace: 'myrepo'
+     api: v2.0

-By default, the namespace of a new repo matches its directory's name.
-You can supply a custom namespace with a second argument, e.g.:
+Namespaces can also be nested, which can be useful if you have
+multiple package repositories for an organization. Spack will
+create the corresponding directory structure for you:

.. code-block:: console

-   $ spack repo create myrepo llnl.comp
+   $ spack repo create ~/my_spack_repos llnl.comp
   ==> Created repo with namespace 'llnl.comp'.
   ==> To register it with spack, run this command:
-     spack repo add ~/myrepo
+     spack repo add ~/my_spack_repos/spack_repo/llnl/comp

-   $ cat myrepo/repo.yaml
+   $ cat ~/my_spack_repos/spack_repo/llnl/comp/repo.yaml
   repo:
     namespace: 'llnl.comp'

You can also create repositories with custom structure with the ``-d/--subdirectory``
argument, e.g.:

.. code-block:: console

   $ spack repo create -d applications myrepo apps
   ==> Created repo with namespace 'apps'.
   ==> To register it with Spack, run this command:
     spack repo add ~/myrepo

   $ ls myrepo
   applications/ repo.yaml

   $ cat myrepo/repo.yaml
   repo:
     namespace: apps
     subdirectory: applications
     api: v2.0
^^^^^^^^^^^^^^^^^^
``spack repo add``
^^^^^^^^^^^^^^^^^^

Once your repository is created, you can register it with Spack with
-``spack repo add``:
+``spack repo add``. You need to specify the path to the directory that
+contains the ``repo.yaml`` file.

.. code-block:: console

-   $ spack repo add ./myrepo
+   $ spack repo add ~/my_spack_repos/spack_repo/llnl/comp
   ==> Added repo with namespace 'llnl.comp'.

   $ spack repo list
   ==> 2 package repositories.
-   llnl.comp    ~/myrepo
-   builtin      ~/spack/var/spack/repos/builtin
+   llnl.comp    v2.0    ~/my_spack_repos/spack_repo/llnl/comp
+   builtin      v2.0    ~/spack/var/spack/repos/spack_repo/builtin

This simply adds the repo to your ``repos.yaml`` file.
@@ -432,46 +421,43 @@ By namespace:

.. code-block:: console

   $ spack repo rm llnl.comp
-   ==> Removed repository ~/myrepo with namespace 'llnl.comp'.
+   ==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp with namespace 'llnl.comp'.

   $ spack repo list
   ==> 1 package repository.
-   builtin    ~/spack/var/spack/repos/builtin
+   builtin    ~/spack/var/spack/repos/spack_repo/builtin

By path:

.. code-block:: console

-   $ spack repo rm ~/myrepo
-   ==> Removed repository ~/myrepo
+   $ spack repo rm ~/my_spack_repos/spack_repo/llnl/comp
+   ==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp

   $ spack repo list
   ==> 1 package repository.
-   builtin    ~/spack/var/spack/repos/builtin
+   builtin    ~/spack/var/spack/repos/spack_repo/builtin
--------------------------------
|
||||
Repo namespaces and Python
|
||||
--------------------------------
|
||||
|
||||
You may have noticed that namespace notation for repositories is similar
|
||||
to the notation for namespaces in Python. As it turns out, you *can*
|
||||
treat Spack repositories like Python packages; this is how they are
|
||||
implemented.
|
||||
Package repositories are implemented as Python packages. To be precise,
|
||||
they are `namespace packages
|
||||
<https://packaging.python.org/en/latest/guides/packaging-namespace-packages/>`_
|
||||
with ``spack_repo`` the top-level namespace, followed by the repository
|
||||
namespace as submodules. For example, the builtin repository corresponds
|
||||
to the Python module ``spack_repo.builtin.packages``.
|
||||
|
||||
You could, for example, extend a ``builtin`` package in your own
|
||||
This structure allows you to extend a ``builtin`` package in your own
|
||||
repository:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from spack.pkg.builtin.mpich import Mpich
|
||||
from spack_repo.builtin.packages.mpich.package import Mpich
|
||||
|
||||
class MyPackage(Mpich):
|
||||
...
|
||||
|
||||
Spack repo namespaces are actually Python namespaces tacked on under
|
||||
``spack.pkg``. The search semantics of ``repos.yaml`` are actually
|
||||
implemented using Python's built-in `sys.path
|
||||
<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
|
||||
:py:mod:`spack.repo` module implements a custom `Python importer
|
||||
<https://docs.python.org/2/library/imp.html>`_.
|
||||
|
||||
Spack populates ``sys.path`` at runtime with the path to the root of your
|
||||
package repository's ``spack_repo`` directory.
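
Under the hood this reduces to ordinary ``sys.path`` lookups. A minimal sketch
of the mechanism (the repository path and package name here are hypothetical,
not taken from this document):

.. code-block:: python

   import os
   import sys

   # The directory *containing* ``spack_repo`` goes on sys.path ...
   sys.path.insert(0, os.path.expanduser("~/my_spack_repos"))

   # ... after which package modules resolve like any other namespace package.
   from spack_repo.llnl.comp.packages.zlib.package import Zlib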

@@ -176,92 +176,72 @@ community without needing deep familiarity with GnuPG or Public Key
Infrastructure.


.. _build_cache_format:
.. _build_cache_signing:

------------------
Build Cache Format
------------------
-------------------
Build Cache Signing
-------------------

A binary package consists of a metadata file unambiguously defining the
built package (and including other details such as how to relocate it)
and the installation directory of the package stored as a compressed
archive file. The metadata files can either be unsigned, in which case
the contents are simply the json-serialized concrete spec plus metadata,
or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built as well as the compiler
used to build it and the package's name and version. For example::
For an in-depth description of the layout of a binary mirror, see
the :ref:`documentation<build_cache_layout>` covering binary caches. The
key takeaway from that discussion that applies here is that the entry point
to a binary package is its manifest. The manifest refers unambiguously to the
spec metadata and compressed archive, which are stored as content-addressed
blobs.

   linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

would contain the concrete spec and binary metadata for a binary package
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::
The manifest files can either be signed or unsigned, but are always given
a name ending with ``.spec.manifest.json`` regardless. The difference between
signed and unsigned manifests is simply that the signed version is wrapped in
a gpg cleartext signature, as illustrated below::

   -----BEGIN PGP SIGNED MESSAGE-----
   Hash: SHA512

   {
       "spec": {
           <concrete-spec-contents-omitted>
       },

       "buildcache_layout_version": 1,
       "binary_cache_checksum": {
           "hash_algorithm": "sha256",
           "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
       }
       "version": 3,
       "data": [
           {
               "contentLength": 10731083,
               "mediaType": "application/vnd.spack.install.v2.tar+gzip",
               "compression": "gzip",
               "checksumAlgorithm": "sha256",
               "checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210"
           },
           {
               "contentLength": 1000,
               "mediaType": "application/vnd.spack.spec.v5+json",
               "compression": "gzip",
               "checksumAlgorithm": "sha256",
               "checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041"
           }
       ]
   }

   -----BEGIN PGP SIGNATURE-----
   iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
   ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
   4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
   QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
   887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
   4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
   qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
   QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
   Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
   j3DXty57
   =3gvm

   iQGzBAEBCgAdFiEEdbwFKBFJCcB24mB0GAEP+tc8mwcFAmf2rr4ACgkQGAEP+tc8
   mwfefwv+KJs8MsQ5ovFaBdmyx5H/3k4rO4QHBzuSPOB6UaxErA9IyOB31iP6vNTU
   HzYpxz6F5dJCJWmmNEMN/0+vjhMHEOkqd7M1l5reVcxduTF2yc4tBZUO2gienEHL
   W0e+SnUznl1yc/aVpChUiahO2zToCsI8HZRNT4tu6iCnE/OpghqjsSdBOZHmSNDD
   5wuuCxfDUyWI6ZlLclaaB7RdbCUUJf/iqi711J+wubvnDFhc6Ynwm1xai5laJ1bD
   ev3NrSb2AAroeNFVo4iECA0fZC1OZQYzaRmAEhBXtCideGJ5Zf2Cp9hmCwNK8Hq6
   bNt94JP9LqC3FCCJJOMsPyOOhMSA5MU44zyyzloRwEQpHHLuFzVdbTHA3dmTc18n
   HxNLkZoEMYRc8zNr40g0yb2lCbc+P11TtL1E+5NlE34MX15mPewRCiIFTMwhCnE3
   gFSKtW1MKustZE35/RUwd2mpJRf+mSRVCl1f1RiFjktLjz7vWQq7imIUSam0fPDr
   XD4aDogm
   =RrFX
   -----END PGP SIGNATURE-----

If a user has trusted the public key associated with the private key
used to sign the above spec file, the signature can be verified with
used to sign the above manifest file, the signature can be verified with
gpg, as follows::

   $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
   $ gpg --verify gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json

The metadata (regardless whether signed or unsigned) contains the checksum
of the ``.spack`` file containing the actual installation. The checksum should
be compared to a checksum computed locally on the ``.spack`` file to ensure the
contents have not changed since the binary spec plus metadata were signed. The
``.spack`` files are actually tarballs containing the compressed archive of the
install tree. These files, along with the metadata files, live within the
``build_cache`` directory of the mirror, and together are organized as follows::

   build_cache/
     # unsigned metadata (for indexing, contains sha256 of .spack file)
     <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
     # clearsigned metadata (same as above, but signed)
     <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
     <arch>/
         <compiler>/
             <name>-<ver>/
                 # tar.gz-compressed prefix (may support more compression formats later)
                 <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack

Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file and a nested tarball
containing the install tree.
When attempting to install a binary package that has been signed, spack will
attempt to verify the signature with one of the trusted keys in its keyring,
and will fail if unable to do so. While not recommended, it is possible to
force installation of a signed package without verification by providing the
``--no-check-signature`` argument to ``spack install ...``.
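
Because the blobs are content-addressed, a client can independently check any
fetched blob against its manifest record. A minimal sketch of that check (file
names are hypothetical; the real logic lives in Spack's buildcache code):

.. code-block:: python

   import hashlib
   import json

   with open("gcc-runtime-12.3.0-s2nquje.spec.manifest.json") as f:
       manifest = json.load(f)

   # Pick the record describing the compressed install tree.
   record = next(r for r in manifest["data"] if "install" in r["mediaType"])

   # Re-hash the locally fetched blob and compare with the recorded checksum.
   h = hashlib.new(record["checksumAlgorithm"])
   with open("local_blob_path", "rb") as f:
       for chunk in iter(lambda: f.read(1 << 20), b""):
           h.update(chunk)
   assert h.hexdigest() == record["checksum"], "blob does not match manifest"

For a signed manifest, the gpg clearsign wrapper would have to be stripped
before parsing the JSON.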

.. _internal_implementation:

@@ -320,10 +300,10 @@ the following way:

   Reputational Public Key are imported into a keyring by the ``spack gpg …``
   sub-command. This is initiated by the job’s build script which is created by
   the generate job at the beginning of the pipeline.
4. Assuming the package has dependencies those specs are verified using
4. Assuming the package has dependencies those spec manifests are verified using
   the keyring.
5. The package is built and the spec.json is generated
6. The spec.json is signed by the keyring and uploaded to the mirror’s
5. The package is built and the spec manifest is generated
6. The spec manifest is signed by the keyring and uploaded to the mirror’s
   build cache.

**Reputational Key**

@@ -376,24 +356,24 @@ following way:

4. In addition to the secret, the runner creates a tmpfs memory mounted
   directory where the GnuPG keyring will be created to verify, and
   then resign the package specs.
5. The job script syncs all spec.json.sig files from the build cache to
5. The job script syncs all spec manifest files from the build cache to
   a working directory in the job’s execution environment.
6. The job script then runs the ``sign.sh`` script built into the
   notary Docker image.
7. The ``sign.sh`` script imports the public components of the
   Reputational and Intermediate CI Keys and uses them to verify good
   signatures on the spec.json.sig files. If any signed spec does not
   verify, the job immediately fails.
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
   the spec json data from the signed file in preparation for being
   signatures on the spec.manifest.json files. If any signed manifest
   does not verify, the job immediately fails.
8. Assuming all manifests are verified, the ``sign.sh`` script then unpacks
   the manifest json data from the signed file in preparation for being
   re-signed with the Reputational Key.
9. The private components of the Reputational Key are decrypted to
   standard out using ``aws-encryption-cli`` directly into a ``gpg
   --import …`` statement which imports the key into the
   keyring mounted in-memory.
10. The private key is then used to sign each of the json specs and the
10. The private key is then used to sign each of the manifests and the
    keyring is removed from disk.
11. The re-signed json specs are resynced to the AWS S3 Mirror and the
11. The re-signed manifests are resynced to the AWS S3 Mirror and the
    public signing of the packages for the develop or release pipeline
    that created them is complete.
@@ -21,6 +21,7 @@
    Dict,
    Generic,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
@@ -436,46 +437,39 @@ def add_func_to_class(name, func):
    return cls


K = TypeVar("K")
V = TypeVar("V")


@lazy_lexicographic_ordering
class HashableMap(collections.abc.MutableMapping):
class HashableMap(typing.MutableMapping[K, V]):
    """This is a hashable, comparable dictionary. Hash is performed on
    a tuple of the values in the dictionary."""

    __slots__ = ("dict",)

    def __init__(self):
        self.dict = {}
        self.dict: Dict[K, V] = {}

    def __getitem__(self, key):
    def __getitem__(self, key: K) -> V:
        return self.dict[key]

    def __setitem__(self, key, value):
    def __setitem__(self, key: K, value: V) -> None:
        self.dict[key] = value

    def __iter__(self):
    def __iter__(self) -> Iterator[K]:
        return iter(self.dict)

    def __len__(self):
    def __len__(self) -> int:
        return len(self.dict)

    def __delitem__(self, key):
    def __delitem__(self, key: K) -> None:
        del self.dict[key]

    def _cmp_iter(self):
        for _, v in sorted(self.items()):
            yield v

    def copy(self):
        """Type-agnostic clone method. Preserves subclass type."""
        # Construct a new dict of my type
        self_type = type(self)
        clone = self_type()

        # Copy everything from this dict into it.
        for key in self:
            clone[key] = self[key].copy()
        return clone
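
# A hypothetical usage sketch (not taken from this changeset): with the class
# now generic, subclasses can pin concrete key/value types for static checkers:
#
#     class VariantMap(HashableMap[str, "Variant"]):
#         pass
#
# after which type checkers can flag, e.g., integer keys at call sites.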


def match_predicate(*args):
    """Utility function for making string matching predicates.

@@ -18,7 +18,7 @@
#: version is incremented when the package API is extended in a backwards-compatible way. The major
#: version is incremented upon breaking changes. This version is changed independently from the
#: Spack version.
package_api_version = (1, 0)
package_api_version = (2, 0)

#: The minimum Package API version that this version of Spack is compatible with. This should
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
File diff suppressed because it is too large
@@ -36,7 +36,7 @@ class CompilerPackage(spack.package_base.PackageBase):

    #: Compiler argument(s) that produces version information
    #: If multiple arguments, the earlier arguments must produce errors when invalid
    compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
    compiler_version_argument: Union[str, Tuple[str, ...]] = "-dumpversion"
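    # Hypothetical multi-flag illustration (not from this changeset): list the
    # newer flag first; it must error out on compilers that lack it so the
    # fallback flag gets a chance:
    #
    #     compiler_version_argument = ("-dumpfullversion", "-dumpversion")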

    #: Regex used to extract version from compiler's output
    compiler_version_regex: str = "(.*)"

@@ -1055,8 +1055,8 @@ def setup_dependent_build_environment(
    ) -> None:
        # NB: This function is overwritten by 'mpi' provider packages:
        #
        # var/spack/repos/builtin/packages/intel-mpi/package.py
        # var/spack/repos/builtin/packages/intel-parallel-studio/package.py
        # var/spack/repos/spack_repo/builtin/packages/intel_mpi/package.py
        # var/spack/repos/spack_repo/builtin/packages/intel_parallel_studio/package.py
        #
        # They call _setup_dependent_env_callback() as well, but with the
        # dictionary kwarg compilers_of_client{} present and populated.
351 lib/spack/spack/buildcache_migrate.py Normal file
@@ -0,0 +1,351 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import codecs
import json
import os
import pathlib
import tempfile
from typing import NamedTuple

import llnl.util.tty as tty

import spack.binary_distribution as bindist
import spack.database as spack_db
import spack.error
import spack.mirrors.mirror
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util

from .enums import InstallRecordStatus
from .url_buildcache import (
    BlobRecord,
    BuildcacheComponent,
    compressed_json_from_dict,
    get_url_buildcache_class,
    sign_file,
    try_verify,
)


def v2_tarball_directory_name(spec):
    """
    Return name of the tarball directory according to the convention
    <os>-<architecture>/<compiler>/<package>-<version>/
    """
    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")


def v2_tarball_name(spec, ext):
    """
    Return the name of the tarfile according to the convention
    <os>-<architecture>-<package>-<dag_hash><ext>
    """
    spec_formatted = spec.format_path(
        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
    )
    return f"{spec_formatted}{ext}"


def v2_tarball_path_name(spec, ext):
    """
    Return the full path+name for a given spec according to the convention
    <tarball_directory_name>/<tarball_name>
    """
    return os.path.join(v2_tarball_directory_name(spec), v2_tarball_name(spec, ext))
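

# Illustration with invented values: for a spec like the zlib example in the
# docs above, v2_tarball_path_name(s, ".spack") reproduces the old layout, e.g.
#   linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12/
#       linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ys<...>.spack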


class MigrateSpecResult(NamedTuple):
    success: bool
    message: str


class MigrationException(spack.error.SpackError):
    """
    Raised when migration fails irrevocably
    """

    def __init__(self, msg):
        super().__init__(msg)


def _migrate_spec(
    s: spack.spec.Spec, mirror_url: str, tmpdir: str, unsigned: bool = False, signing_key: str = ""
) -> MigrateSpecResult:
    """Parallelizable function to migrate a single spec"""
    print_spec = f"{s.name}/{s.dag_hash()[:7]}"

    # Check if the spec file exists in the new location and exit early if so

    v3_cache_class = get_url_buildcache_class(layout_version=3)
    v3_cache_entry = v3_cache_class(mirror_url, s, allow_unsigned=unsigned)
    exists = v3_cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])
    v3_cache_entry.destroy()

    if exists:
        msg = f"No need to migrate {print_spec}"
        return MigrateSpecResult(True, msg)

    # Try to fetch the spec metadata
    v2_metadata_urls = [
        url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json.sig"))
    ]

    if unsigned:
        v2_metadata_urls.append(
            url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json"))
        )

    spec_contents = None

    for meta_url in v2_metadata_urls:
        try:
            _, _, meta_file = web_util.read_from_url(meta_url)
            spec_contents = codecs.getreader("utf-8")(meta_file).read()
            v2_spec_url = meta_url
            break
        except (web_util.SpackWebError, OSError):
            pass
    else:
        msg = f"Unable to read metadata for {print_spec}"
        return MigrateSpecResult(False, msg)

    spec_dict = {}

    if unsigned:
        # User asked for unsigned; if we found a signed specfile, just ignore
        # the signature
        if v2_spec_url.endswith(".sig"):
            spec_dict = spack.spec.Spec.extract_json_from_clearsig(spec_contents)
        else:
            spec_dict = json.loads(spec_contents)
    else:
        # User asked for signed, we must successfully verify the signature
        local_signed_pre_verify = os.path.join(
            tmpdir, f"{s.name}_{s.dag_hash()}_verify.spec.json.sig"
        )
        with open(local_signed_pre_verify, "w", encoding="utf-8") as fd:
            fd.write(spec_contents)
        if not try_verify(local_signed_pre_verify):
            return MigrateSpecResult(False, f"Failed to verify signature of {print_spec}")
        with open(local_signed_pre_verify, encoding="utf-8") as fd:
            spec_dict = spack.spec.Spec.extract_json_from_clearsig(fd.read())

    # Read out and remove the bits needed to rename and position the archive
    bcc = spec_dict.pop("binary_cache_checksum", None)
    if not bcc:
        msg = "Cannot migrate a spec that does not have 'binary_cache_checksum'"
        return MigrateSpecResult(False, msg)

    algorithm = bcc["hash_algorithm"]
    checksum = bcc["hash"]

    # TODO: Remove this key once oci buildcache no longer uses it
    spec_dict["buildcache_layout_version"] = 2

    v2_archive_url = url_util.join(mirror_url, "build_cache", v2_tarball_path_name(s, ".spack"))

    # Spack's web utilities do not include direct copying of s3 objects, so we
    # need to download the archive locally, and then push it back to the target
    # location
    archive_stage_path = os.path.join(tmpdir, f"archive_stage_{s.name}_{s.dag_hash()}")
    archive_stage = spack.stage.Stage(v2_archive_url, path=archive_stage_path)

    try:
        archive_stage.create()
        archive_stage.fetch()
    except spack.error.FetchError:
        return MigrateSpecResult(False, f"Unable to fetch archive for {print_spec}")

    local_tarfile_path = archive_stage.save_filename

    # As long as we have to download the tarball anyway, we might as well compute the
    # checksum locally and check it against the expected value
    local_checksum = spack.util.crypto.checksum(
        spack.util.crypto.hash_fun_for_algo(algorithm), local_tarfile_path
    )

    if local_checksum != checksum:
        return MigrateSpecResult(
            False, f"Checksum mismatch for {print_spec}: expected {checksum}, got {local_checksum}"
        )

    spec_dict["archive_size"] = os.stat(local_tarfile_path).st_size

    # Compress the spec dict and compute its checksum
    metadata_checksum_algo = "sha256"
    spec_json_path = os.path.join(tmpdir, f"{s.name}_{s.dag_hash()}.spec.json")
    metadata_checksum, metadata_size = compressed_json_from_dict(
        spec_json_path, spec_dict, metadata_checksum_algo
    )

    tarball_blob_record = BlobRecord(
        spec_dict["archive_size"], v3_cache_class.TARBALL_MEDIATYPE, "gzip", algorithm, checksum
    )

    metadata_blob_record = BlobRecord(
        metadata_size,
        v3_cache_class.SPEC_MEDIATYPE,
        "gzip",
        metadata_checksum_algo,
        metadata_checksum,
    )

    # Compute the urls to the new blobs
    v3_archive_url = v3_cache_class.get_blob_url(mirror_url, tarball_blob_record)
    v3_spec_url = v3_cache_class.get_blob_url(mirror_url, metadata_blob_record)

    # First push the tarball
    tty.debug(f"Pushing {local_tarfile_path} to {v3_archive_url}")

    try:
        web_util.push_to_url(local_tarfile_path, v3_archive_url, keep_original=True)
    except Exception:
        return MigrateSpecResult(False, f"Failed to push archive for {print_spec}")

    # Then push the spec file
    tty.debug(f"Pushing {spec_json_path} to {v3_spec_url}")

    try:
        web_util.push_to_url(spec_json_path, v3_spec_url, keep_original=True)
    except Exception:
        return MigrateSpecResult(False, f"Failed to push spec metadata for {print_spec}")

    # Generate the manifest and write it to a temporary location
    manifest = {
        "version": v3_cache_class.get_layout_version(),
        "data": [tarball_blob_record.to_dict(), metadata_blob_record.to_dict()],
    }

    manifest_path = os.path.join(tmpdir, f"{s.dag_hash()}.manifest.json")
    with open(manifest_path, "w", encoding="utf-8") as f:
        json.dump(manifest, f, indent=0, separators=(",", ":"))
        # Note: when using gpg clear sign, we need to avoid long lines (19995
        # chars). If lines are longer, they are truncated without error. So,
        # here we still add newlines, but no indent, to save on file size and
        # line length.

    # Possibly sign the manifest
    if not unsigned:
        manifest_path = sign_file(signing_key, manifest_path)

    v3_manifest_url = v3_cache_class.get_manifest_url(s, mirror_url)

    # Push the manifest
    try:
        web_util.push_to_url(manifest_path, v3_manifest_url, keep_original=True)
    except Exception:
        return MigrateSpecResult(False, f"Failed to push manifest for {print_spec}")

    return MigrateSpecResult(True, f"Successfully migrated {print_spec}")


def migrate(
    mirror: spack.mirrors.mirror.Mirror, unsigned: bool = False, delete_existing: bool = False
) -> None:
    """Perform migration of the given mirror

    If unsigned is True, signatures on signed specs will be ignored, and specs
    will not be re-signed before pushing to the new location. Otherwise, spack
    will attempt to verify signatures and re-sign specs, and will fail if not
    able to do so. If delete_existing is True, spack will delete the original
    contents of the mirror once the migration is complete."""
    signing_key = ""
    if not unsigned:
        try:
            signing_key = bindist.select_signing_key()
        except (bindist.NoKeyException, bindist.PickKeyException):
            raise MigrationException(
                "Signed migration requires exactly one secret key in keychain"
            )

    delete_action = "deleting" if delete_existing else "keeping"
    sign_action = "an unsigned" if unsigned else "a signed"
    mirror_url = mirror.fetch_url

    tty.msg(
        f"Performing {sign_action} migration of {mirror.push_url} "
        f"and {delete_action} existing contents"
    )

    index_url = url_util.join(mirror_url, "build_cache", spack_db.INDEX_JSON_FILE)
    contents = None

    try:
        _, _, index_file = web_util.read_from_url(index_url)
        contents = codecs.getreader("utf-8")(index_file).read()
    except (web_util.SpackWebError, OSError):
        raise MigrationException("Buildcache migration requires a buildcache index")

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        index_path = os.path.join(tmpdir, "_tmp_index.json")
        with open(index_path, "w", encoding="utf-8") as fd:
            fd.write(contents)

        db = bindist.BuildCacheDatabase(tmpdir)
        db._read_from_file(pathlib.Path(index_path))

        specs_to_migrate = [
            s
            for s in db.query_local(installed=InstallRecordStatus.ANY)
            if not s.external and db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
        ]

        # Run the tasks in parallel if possible
        executor = spack.util.parallel.make_concurrent_executor()
        migrate_futures = [
            executor.submit(_migrate_spec, spec, mirror_url, tmpdir, unsigned, signing_key)
            for spec in specs_to_migrate
        ]

        success_count = 0

        tty.msg("Migration summary:")
        for spec, migrate_future in zip(specs_to_migrate, migrate_futures):
            result = migrate_future.result()
            msg = f"    {spec.name}/{spec.dag_hash()[:7]}: {result.message}"
            if result.success:
                success_count += 1
                tty.msg(msg)
            else:
                tty.error(msg)
            # The migrated index should have the same specs as the original index,
            # modulo any specs that we failed to migrate for whatever reason. So
            # to avoid having to re-fetch all the spec files now, just mark them
            # appropriately in the existing database and push that.
            db.mark(spec, "in_buildcache", result.success)

        if success_count > 0:
            tty.msg("Updating index and pushing keys")

            # If the layout.json doesn't yet exist on this mirror, push it
            v3_cache_class = get_url_buildcache_class(layout_version=3)
            v3_cache_class.maybe_push_layout_json(mirror_url)

            # Push the migrated mirror index
            index_tmpdir = os.path.join(tmpdir, "rebuild_index")
            os.mkdir(index_tmpdir)
            bindist._push_index(db, index_tmpdir, mirror_url)

            # Push the public part of the signing key
            if not unsigned:
                keys_tmpdir = os.path.join(tmpdir, "keys")
                os.mkdir(keys_tmpdir)
                bindist._url_push_keys(
                    mirror_url, keys=[signing_key], update_index=True, tmpdir=keys_tmpdir
                )
        else:
            tty.warn("No specs migrated, did you mean to perform an unsigned migration instead?")

        # Delete the old layout if the user requested it
        if delete_existing:
            delete_prefix = url_util.join(mirror_url, "build_cache")
            tty.msg(f"Recursively deleting {delete_prefix}")
            web_util.remove_url(delete_prefix, recursive=True)

    tty.msg("Migration complete")
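
# A rough sketch of driving this module programmatically instead of via the
# CLI (the mirror URL below is hypothetical):
#
#     import spack.mirrors.mirror
#     from spack.buildcache_migrate import migrate
#
#     migrate(spack.mirrors.mirror.Mirror("s3://my-bucket/mirror"), unsigned=True)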
@@ -59,7 +59,7 @@ def __call__(self, spec, prefix):
def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
    """Return the builder class if a package module defines it."""
    cls = getattr(pkg.module, name, None)
    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
    if cls and spack.repo.is_package_module(cls.__module__):
        return cls
    return None

@@ -121,6 +121,7 @@ def __init__(self, wrapped_pkg_object, root_builder):
            new_cls_name,
            bases,
            {
                "__module__": package_cls.__module__,
                "run_tests": property(lambda x: x.wrapped_package_object.run_tests),
                "test_requires_compiler": property(
                    lambda x: x.wrapped_package_object.test_requires_compiler
@@ -129,7 +130,6 @@ def __init__(self, wrapped_pkg_object, root_builder):
                "tester": property(lambda x: x.wrapped_package_object.tester),
            },
        )
        new_cls.__module__ = package_cls.__module__
        self.__class__ = new_cls
        self.__dict__.update(wrapped_pkg_object.__dict__)
@@ -33,6 +33,7 @@
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.store
import spack.util.git
import spack.util.gpg as gpg_util
@@ -150,10 +151,10 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
    return False


def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
def compute_affected_packages(rev1: str = "HEAD^", rev2: str = "HEAD") -> Set[str]:
    """Determine which packages were added, removed or changed
    between rev1 and rev2, and return the names as a set"""
    return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
    return spack.repo.get_all_package_diffs("ARC", spack.repo.builtin_repo(), rev1=rev1, rev2=rev2)


def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
@@ -245,7 +246,9 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
    if not spec_locations:
        return RebuildDecision(True, "not found anywhere")

    urls = ",".join([loc["mirror_url"] for loc in spec_locations])
    urls = ",".join(
        [f"{loc.url_and_version.url}@v{loc.url_and_version.version}" for loc in spec_locations]
    )
    message = f"up-to-date [{urls}]"
    return RebuildDecision(False, message)
@@ -1242,33 +1245,31 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
    """Given a url to write to and the details of the failed job, write an entry
    in the broken specs list.
    """
    tmpdir = tempfile.mkdtemp()
    file_path = os.path.join(tmpdir, "broken.txt")
    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        file_path = os.path.join(tmpdir, "broken.txt")

    broken_spec_details = {
        "broken-spec": {
            "job-name": pkg_name,
            "job-stack": stack_name,
            "job-url": job_url,
            "pipeline-url": pipeline_url,
            "concrete-spec-dict": spec_dict,
        broken_spec_details = {
            "broken-spec": {
                "job-name": pkg_name,
                "job-stack": stack_name,
                "job-url": job_url,
                "pipeline-url": pipeline_url,
                "concrete-spec-dict": spec_dict,
            }
        }
    }

    try:
        with open(file_path, "w", encoding="utf-8") as fd:
            syaml.dump(broken_spec_details, fd)
        web_util.push_to_url(
            file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
        )
    except Exception as err:
        # If there is an S3 error (e.g., access denied or connection
        # error), the first non boto-specific class in the exception
        # hierarchy is Exception. Just print a warning and return
        msg = f"Error writing to broken specs list {url}: {err}"
        tty.warn(msg)
    finally:
        shutil.rmtree(tmpdir)
        try:
            with open(file_path, "w", encoding="utf-8") as fd:
                syaml.dump(broken_spec_details, fd)
            web_util.push_to_url(
                file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
            )
        except Exception as err:
            # If there is an S3 error (e.g., access denied or connection
            # error), the first non boto-specific class in the exception
            # hierarchy is Exception. Just print a warning and return
            msg = f"Error writing to broken specs list {url}: {err}"
            tty.warn(msg)


def read_broken_spec(broken_spec_url):
@@ -31,12 +31,12 @@
import spack.spec
import spack.util.compression as compression
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp
from spack.url_buildcache import get_url_buildcache_class

IS_WINDOWS = sys.platform == "win32"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
@@ -179,33 +179,13 @@ def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file):

    for release_spec in specs:
        release_spec_dag_hash = release_spec.dag_hash()
        # TODO: This assumes signed version of the spec
        buildcache_copies[release_spec_dag_hash] = [
            {
                "src": url_util.join(
                    src_prefix,
                    bindist.build_cache_relative_path(),
                    bindist.tarball_name(release_spec, ".spec.json.sig"),
                ),
                "dest": url_util.join(
                    dest_prefix,
                    bindist.build_cache_relative_path(),
                    bindist.tarball_name(release_spec, ".spec.json.sig"),
                ),
            },
            {
                "src": url_util.join(
                    src_prefix,
                    bindist.build_cache_relative_path(),
                    bindist.tarball_path_name(release_spec, ".spack"),
                ),
                "dest": url_util.join(
                    dest_prefix,
                    bindist.build_cache_relative_path(),
                    bindist.tarball_path_name(release_spec, ".spack"),
                ),
            },
        ]
        cache_class = get_url_buildcache_class(
            layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        )
        buildcache_copies[release_spec_dag_hash] = {
            "src": cache_class.get_manifest_url(release_spec, src_prefix),
            "dest": cache_class.get_manifest_url(release_spec, dest_prefix),
        }

    target_dir = os.path.dirname(output_file)
@@ -292,6 +292,9 @@ def main_script_replacements(cmd):
        )
        maybe_generate_manifest(pipeline, options, manifest_path)

    relative_specs_url = bindist.buildcache_relative_specs_url()
    relative_keys_url = bindist.buildcache_relative_keys_url()

    if options.pipeline_type == PipelineType.COPY_ONLY:
        stage_names.append("copy")
        sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
@@ -301,9 +304,12 @@ def main_script_replacements(cmd):
        if "variables" not in sync_job:
            sync_job["variables"] = {}

        sync_job["variables"][
            "SPACK_COPY_ONLY_DESTINATION"
        ] = options.buildcache_destination.fetch_url
        sync_job["variables"].update(
            {
                "SPACK_COPY_ONLY_DESTINATION": options.buildcache_destination.fetch_url,
                "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url,
            }
        )

        pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
        if "buildcache-source" not in pipeline_mirrors:
@@ -333,9 +339,13 @@ def main_script_replacements(cmd):
        signing_job["interruptible"] = True
        if "variables" not in signing_job:
            signing_job["variables"] = {}
        signing_job["variables"][
            "SPACK_BUILDCACHE_DESTINATION"
        ] = options.buildcache_destination.push_url
        signing_job["variables"].update(
            {
                "SPACK_BUILDCACHE_DESTINATION": options.buildcache_destination.push_url,
                "SPACK_BUILDCACHE_RELATIVE_SPECS_URL": relative_specs_url,
                "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url,
            }
        )
        signing_job["dependencies"] = []

        output_object["sign-pkgs"] = signing_job
@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pathlib
import shutil
import sys
import tempfile
@@ -28,7 +29,7 @@


# Tarball to be downloaded if binary packages are requested in a local mirror
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz"
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache-v3.tar.gz"

#: Subdirectory where to create the mirror
LOCAL_MIRROR_DIR = "bootstrap_cache"
@@ -410,8 +411,9 @@ def _mirror(args):
        stage.create()
        stage.fetch()
        stage.expand_archive()
        build_cache_dir = os.path.join(stage.source_path, "build_cache")
        shutil.move(build_cache_dir, mirror_dir)
        stage_dir = pathlib.Path(stage.source_path)
        for entry in stage_dir.iterdir():
            shutil.move(str(entry), mirror_dir)
        llnl.util.tty.set_msg_enabled(True)

    def write_metadata(subdir, metadata):
@@ -436,7 +438,6 @@ def write_metadata(subdir, metadata):
    shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
    shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
    instructions += cmd.format("local-binaries", rel_directory)
    instructions += "  % spack buildcache update-index <final-path>/bootstrap_cache\n"
    print(instructions)
@@ -4,11 +4,9 @@
import argparse
import glob
import json
import os
import shutil
import sys
import tempfile
from typing import List, Tuple
from typing import List, Optional, Tuple

import llnl.util.tty as tty
from llnl.string import plural
@@ -27,14 +25,21 @@
import spack.stage
import spack.store
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.spec import Spec, save_dependency_specfiles

from ..buildcache_migrate import migrate
from ..enums import InstallRecordStatus
from ..url_buildcache import (
    BuildcacheComponent,
    BuildcacheEntryError,
    URLBuildcacheEntry,
    check_mirror_for_layout,
    get_url_buildcache_class,
)

description = "create, download and install binary packages"
section = "packaging"
@@ -272,6 +277,27 @@ def setup_parser(subparser: argparse.ArgumentParser):
    )
    update_index.set_defaults(func=update_index_fn)

    # Migrate a buildcache from layout_version 2 to version 3
    migrate = subparsers.add_parser("migrate", help=migrate_fn.__doc__)
    migrate.add_argument("mirror", type=arguments.mirror_name, help="name of a configured mirror")
    migrate.add_argument(
        "-u",
        "--unsigned",
        default=False,
        action="store_true",
        help="Ignore signatures and do not resign, default is False",
    )
    migrate.add_argument(
        "-d",
        "--delete-existing",
        default=False,
        action="store_true",
        help="Delete the previous layout, the default is to keep it.",
    )
    arguments.add_common_arguments(migrate, ["yes_to_all"])
    # TODO: add -y argument to prompt if user really means to delete existing
    migrate.set_defaults(func=migrate_fn)
def _matching_specs(specs: List[Spec]) -> List[Spec]:
    """Disambiguate specs and return a list of matching specs"""
@@ -397,6 +423,10 @@ def push_fn(args):
            (s, PackageNotInstalledError("package not installed")) for s in not_installed
        )

    # Warn about possible old binary mirror layout
    if not mirror.push_url.startswith("oci://"):
        check_mirror_for_layout(mirror)

    with bindist.make_uploader(
        mirror=mirror,
        force=args.force,
@@ -527,8 +557,7 @@ def download_fn(args):
    if len(specs) != 1:
        tty.die("a single spec argument is required to download from a buildcache")

    if not bindist.download_single_spec(specs[0], args.path):
        sys.exit(1)
    bindist.download_single_spec(specs[0], args.path)


def save_specfile_fn(args):
@@ -553,29 +582,78 @@ def save_specfile_fn(args):
    )


def copy_buildcache_file(src_url, dest_url, local_path=None):
    """Copy from source url to destination url"""
    tmpdir = None
def copy_buildcache_entry(cache_entry: URLBuildcacheEntry, destination_url: str):
    """Download buildcache entry and copy it to the destination_url"""
    try:
        spec_dict = cache_entry.fetch_metadata()
        cache_entry.fetch_archive()
    except bindist.BuildcacheEntryError as e:
        tty.warn(f"Failed to retrieve buildcache for copying due to {e}")
        cache_entry.destroy()
        return

    if not local_path:
        tmpdir = tempfile.mkdtemp()
        local_path = os.path.join(tmpdir, os.path.basename(src_url))
    spec_blob_record = cache_entry.get_blob_record(BuildcacheComponent.SPEC)
    local_spec_path = cache_entry.get_local_spec_path()
    tarball_blob_record = cache_entry.get_blob_record(BuildcacheComponent.TARBALL)
    local_tarball_path = cache_entry.get_local_archive_path()

    target_spec = spack.spec.Spec.from_dict(spec_dict)
    spec_label = f"{target_spec.name}/{target_spec.dag_hash()[:7]}"

    if not tarball_blob_record:
        cache_entry.destroy()
        raise BuildcacheEntryError(f"No source tarball blob record, failed to sync {spec_label}")

    # Try to push the tarball
    tarball_dest_url = cache_entry.get_blob_url(destination_url, tarball_blob_record)

    try:
        temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path))
        try:
            temp_stage.create()
            temp_stage.fetch()
            web_util.push_to_url(local_path, dest_url, keep_original=True)
        except spack.error.FetchError as e:
            # Expected, since we have to try all the possible extensions
            tty.debug("no such file: {0}".format(src_url))
            tty.debug(e)
        finally:
            temp_stage.destroy()
    finally:
        if tmpdir and os.path.exists(tmpdir):
            shutil.rmtree(tmpdir)
        web_util.push_to_url(local_tarball_path, tarball_dest_url, keep_original=True)
    except Exception as e:
        tty.warn(f"Failed to push {local_tarball_path} to {tarball_dest_url} due to {e}")
        cache_entry.destroy()
        return

    if not spec_blob_record:
        cache_entry.destroy()
        raise BuildcacheEntryError(f"No source spec blob record, failed to sync {spec_label}")

    # Try to push the spec file
    spec_dest_url = cache_entry.get_blob_url(destination_url, spec_blob_record)

    try:
        web_util.push_to_url(local_spec_path, spec_dest_url, keep_original=True)
    except Exception as e:
        tty.warn(f"Failed to push {local_spec_path} to {spec_dest_url} due to {e}")
        cache_entry.destroy()
        return

    # Stage the manifest locally, since if it's signed, we don't want to try
    # to reproduce that here. Instead just push the locally staged manifest to
    # the expected path at the destination url.
    manifest_src_url = cache_entry.remote_manifest_url
    manifest_dest_url = cache_entry.get_manifest_url(target_spec, destination_url)

    manifest_stage = spack.stage.Stage(manifest_src_url)

    try:
        manifest_stage.create()
        manifest_stage.fetch()
    except Exception as e:
        tty.warn(f"Failed to fetch manifest from {manifest_src_url} due to {e}")
        manifest_stage.destroy()
        cache_entry.destroy()
        return

    local_manifest_path = manifest_stage.save_filename

    try:
        web_util.push_to_url(local_manifest_path, manifest_dest_url, keep_original=True)
    except Exception as e:
        tty.warn(f"Failed to push manifest to {manifest_dest_url} due to {e}")

    manifest_stage.destroy()
    cache_entry.destroy()
def sync_fn(args):
@@ -615,37 +693,21 @@ def sync_fn(args):
        )
    )

    build_cache_dir = bindist.build_cache_relative_path()
    buildcache_rel_paths = []

    tty.debug("Syncing the following specs:")
    for s in env.all_specs():
    specs_to_sync = [s for s in env.all_specs() if not s.external]
    for s in specs_to_sync:
        tty.debug("  {0}{1}: {2}".format("* " if s in env.roots() else "  ", s.name, s.dag_hash()))

        buildcache_rel_paths.extend(
            [
                os.path.join(build_cache_dir, bindist.tarball_path_name(s, ".spack")),
                os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json.sig")),
                os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json")),
                os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.yaml")),
            ]
        cache_class = get_url_buildcache_class(
            layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        )

    tmpdir = tempfile.mkdtemp()

    try:
        for rel_path in buildcache_rel_paths:
            src_url = url_util.join(src_mirror_url, rel_path)
            local_path = os.path.join(tmpdir, rel_path)
            dest_url = url_util.join(dest_mirror_url, rel_path)

            tty.debug("Copying {0} to {1} via {2}".format(src_url, dest_url, local_path))
            copy_buildcache_file(src_url, dest_url, local_path=local_path)
    finally:
        shutil.rmtree(tmpdir)
        src_cache_entry = cache_class(src_mirror_url, s, allow_unsigned=True)
        src_cache_entry.read_manifest()
        copy_buildcache_entry(src_cache_entry, dest_mirror_url)


def manifest_copy(manifest_file_list, dest_mirror=None):
def manifest_copy(
    manifest_file_list: List[str], dest_mirror: Optional[spack.mirrors.mirror.Mirror] = None
):
    """Read manifest files containing information about specific specs to copy
    from source to destination, remove duplicates since any binary package for
    a given hash should be the same as any other, and copy all files specified
@@ -655,21 +717,24 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
    for manifest_path in manifest_file_list:
        with open(manifest_path, encoding="utf-8") as fd:
            manifest = json.loads(fd.read())
            for spec_hash, copy_list in manifest.items():
            for spec_hash, copy_obj in manifest.items():
                # Last duplicate hash wins
                deduped_manifest[spec_hash] = copy_list
                deduped_manifest[spec_hash] = copy_obj

    build_cache_dir = bindist.build_cache_relative_path()
    for spec_hash, copy_list in deduped_manifest.items():
        for copy_file in copy_list:
            dest = copy_file["dest"]
            if dest_mirror:
                src_relative_path = os.path.join(
                    build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
                )
                dest = url_util.join(dest_mirror.push_url, src_relative_path)
            tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
            copy_buildcache_file(copy_file["src"], dest)
    for spec_hash, copy_obj in deduped_manifest.items():
        cache_class = get_url_buildcache_class(
            layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        )
        src_cache_entry = cache_class(
            cache_class.get_base_url(copy_obj["src"]), allow_unsigned=True
        )
        src_cache_entry.read_manifest(manifest_url=copy_obj["src"])
        if dest_mirror:
            destination_url = dest_mirror.push_url
        else:
            destination_url = cache_class.get_base_url(copy_obj["dest"])
        tty.debug("copying {0} to {1}".format(copy_obj["src"], destination_url))
        copy_buildcache_entry(src_cache_entry, destination_url)


def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
@@ -693,13 +758,9 @@ def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
        bindist._url_generate_package_index(url, tmpdir)

    if update_keys:
        keys_url = url_util.join(
            url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
        )

        try:
            with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
                bindist.generate_key_index(keys_url, tmpdir)
                bindist.generate_key_index(url, tmpdir)
        except bindist.CannotListKeys as e:
            # Do not error out if listing keys went wrong. This usually means that the _gpg path
            # does not exist. TODO: distinguish between this and other errors.
@@ -711,5 +772,53 @@ def update_index_fn(args):
    return update_index(args.mirror, update_keys=args.keys)


def migrate_fn(args):
    """perform in-place binary mirror migration (2 to 3)

    A mirror can contain both layout version 2 and version 3 simultaneously without
    interference. This command performs in-place migration of a binary mirror laid
    out according to version 2, to a binary mirror laid out according to layout
    version 3. Only indexed specs will be migrated, so consider updating the mirror
    index before running this command. Re-run the command to migrate any missing
    items.

    The default mode of operation is to perform a signed migration, that is, spack
    will attempt to verify the signatures on specs, and then re-sign them before
    migration, using whatever keys are already installed in your key ring. You can
    migrate a mirror of unsigned binaries (or convert a mirror of signed binaries
    to unsigned) by providing the --unsigned argument.

    By default spack will leave the original mirror contents (in the old layout) in
    place after migration. You can have spack remove the old contents by providing
    the --delete-existing argument. Because migrating a mostly-already-migrated
    mirror should be fast, consider a workflow where you perform a default migration
    (i.e. preserve the existing layout rather than deleting it), then evaluate the
    state of the migrated mirror by attempting to install from it, and finally
    running the migration again with --delete-existing."""
    target_mirror = args.mirror
    unsigned = args.unsigned
    assert isinstance(target_mirror, spack.mirrors.mirror.Mirror)
    delete_existing = args.delete_existing

    proceed = True
    if delete_existing and not args.yes_to_all:
        msg = (
            "Using --delete-existing will delete the entire contents \n"
            "    of the old layout within the mirror. Because migrating a mirror \n"
            "    that has already been migrated should be fast, consider a workflow \n"
            "    where you perform a default migration (i.e. preserve the existing \n"
            "    layout rather than deleting it), then evaluate the state of the \n"
            "    migrated mirror by attempting to install from it, and finally, \n"
            "    run the migration again with --delete-existing."
        )
        tty.warn(msg)
        proceed = tty.get_yes_or_no("Do you want to proceed?", default=False)

    if not proceed:
        tty.die("Migration aborted.")

    migrate(target_mirror, unsigned=unsigned, delete_existing=delete_existing)
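
    # Example invocations corresponding to the docstring above (the mirror
    # name "mymirror" is hypothetical):
    #
    #     spack buildcache migrate mymirror
    #     spack buildcache migrate --unsigned mymirror
    #     spack buildcache migrate --delete-existing --yes-to-all mymirror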


def buildcache(parser, args):
    return args.func(args)

@@ -423,7 +423,7 @@ def ci_rebuild(args):
        # jobs in subsequent stages.
        tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
        for match in matches:
            tty.msg("    {0}".format(match["mirror_url"]))
            tty.msg("    {0}".format(match.url_and_version.url))

        # Now we are done and successful
        return 0
@@ -791,7 +791,9 @@ def ci_verify_versions(args):
    """
    # Get a list of all packages that have been changed or added
    # between from_ref and to_ref
    pkgs = spack.repo.get_all_package_diffs("AC", args.from_ref, args.to_ref)
    pkgs = spack.repo.get_all_package_diffs(
        "AC", spack.repo.builtin_repo(), args.from_ref, args.to_ref
    )

    failed_version = False
    for pkg_name in pkgs:
@@ -23,7 +23,7 @@
from spack.util.editor import editor
from spack.util.executable import which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
from spack.util.naming import pkg_name_to_class_name, simplify_name

description = "create a new package file"
section = "packaging"
@@ -95,7 +95,7 @@ class BundlePackageTemplate:

    def __init__(self, name: str, versions, languages: List[str]):
        self.name = name
        self.class_name = mod_to_class(name)
        self.class_name = pkg_name_to_class_name(name)
        self.versions = versions
        self.languages = languages

@@ -874,7 +874,7 @@ def get_name(name, url):

    result = simplify_name(result)

    if not valid_fully_qualified_module_name(result):
    if not re.match(r"^[a-z0-9-]+$", result):
        tty.die("Package name can only contain a-z, 0-9, and '-'")

    return result
@@ -10,11 +10,13 @@
import re
import sys
from html import escape
from typing import Type

import llnl.util.tty as tty
from llnl.util.tty.colify import colify

import spack.deptypes as dt
import spack.package_base
import spack.repo
from spack.cmd.common import arguments
from spack.version import VersionList
@@ -139,10 +141,10 @@ def name_only(pkgs, out):
    tty.msg("%d packages" % len(pkgs))


def github_url(pkg):
def github_url(pkg: Type[spack.package_base.PackageBase]) -> str:
    """Link to a package file on github."""
    url = "https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py"
    return url.format(pkg.name)
    mod_path = pkg.__module__.replace(".", "/")
    return f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py"


def rows_for_ncols(elts, ncols):
@@ -134,6 +134,24 @@ def setup_parser(subparser):
        default=None,
        dest="signed",
    )
    add_parser.add_argument(
        "--include-file",
        help="specs which Spack should always try to add to a mirror"
        " (listed in a file, one per line)",
    )
    add_parser.add_argument(
        "--include-specs",
        help="specs which Spack should always try to add to a mirror (specified on command line)",
    )
    add_parser.add_argument(
        "--exclude-file",
        help="specs which Spack should not try to add to a mirror"
        " (listed in a file, one per line)",
    )
    add_parser.add_argument(
        "--exclude-specs",
        help="specs which Spack should not try to add to a mirror (specified on command line)",
    )
    arguments.add_connection_args(add_parser, False)
    # Remove
    remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
@@ -222,6 +240,24 @@ def setup_parser(subparser):
        default=lambda: spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
    set_parser.add_argument(
        "--include-file",
        help="specs which Spack should always try to add to a mirror"
        " (listed in a file, one per line)",
    )
    set_parser.add_argument(
        "--include-specs",
        help="specs which Spack should always try to add to a mirror (specified on command line)",
    )
    set_parser.add_argument(
        "--exclude-file",
        help="specs which Spack should not try to add to a mirror"
        " (listed in a file, one per line)",
    )
    set_parser.add_argument(
        "--exclude-specs",
        help="specs which Spack should not try to add to a mirror (specified on command line)",
    )
    arguments.add_connection_args(set_parser, False)

    # List
@@ -299,6 +335,30 @@ def _default_variable(id_):
     return None


+def _manage_filters(args, mirror) -> bool:
+    include_specs = []
+    if args.include_file:
+        include_specs.extend(specs_from_text_file(args.include_file, concretize=False))
+    if args.include_specs:
+        include_specs.extend(spack.cmd.parse_specs(str(args.include_specs).split()))
+    if include_specs:
+        # round-trip specs through str() to ensure they are valid
+        mirror.update({"include": [str(s) for s in include_specs]})
+
+    exclude_specs = []
+    if args.exclude_file:
+        exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
+    if args.exclude_specs:
+        exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
+    if exclude_specs:
+        # round-trip specs through str() to ensure they are valid
+        mirror.update({"exclude": [str(s) for s in exclude_specs]})
+
+    if include_specs or exclude_specs:
+        return True
+    else:
+        return False

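A sketch of the configuration this produces: after _manage_filters runs, the mirror entry carries plain lists of spec strings under "include"/"exclude". The mirror name, URL, and specs below are hypothetical:

# After e.g. `spack mirror add --include-specs "hdf5 zlib" --exclude-specs "mpich" ...`
# the named mirror's entry would carry spec-string lists like:
mirror_entry = {
    "url": "s3://example-bucket/mirror",  # hypothetical URL
    "include": ["hdf5", "zlib"],
    "exclude": ["mpich"],
}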
def mirror_add(args):
    """add a mirror to Spack"""
    if (
@@ -368,6 +428,9 @@ def mirror_add(args):
         mirror = spack.mirrors.mirror.Mirror(connection, name=args.name)
     else:
         mirror = spack.mirrors.mirror.Mirror(args.url, name=args.name)
+
+    _manage_filters(args, mirror)
+
     spack.mirrors.utils.add(mirror, args.scope)


@@ -428,6 +491,8 @@ def _configure_mirror(args):
         changes["source"] = "source" in args.type

     changed = entry.update(changes, direction)
+    if hasattr(args, "include_file"):
+        changed = changed | _manage_filters(args, entry)

     if changed:
         mirrors[args.name] = entry.to_dict()
@@ -470,7 +535,10 @@ def specs_from_text_file(filename, concretize=False):
     with open(filename, "r", encoding="utf-8") as f:
         specs_in_file = f.readlines()
         specs_in_file = [s.strip() for s in specs_in_file]
-    return spack.cmd.parse_specs(" ".join(specs_in_file), concretize=concretize)
+    if concretize:
+        return spack.cmd.parse_specs(" ".join(specs_in_file), concretize=True)
+    else:
+        return spack.cmd.parse_specs(specs_in_file, concretize=False)


def concrete_specs_from_user(args):
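The subtle behavioral change: with concretize=False the lines are now passed through as a list (one spec per line) instead of being joined into a single parse string. A minimal illustration with hypothetical file contents:

lines = ["hdf5 +mpi", "zlib@1.3"]  # hypothetical file contents, one spec per line
joined = " ".join(lines)           # old path: parsed as one combined string
print(joined)                      # "hdf5 +mpi zlib@1.3"
print(lines)                       # new path keeps each line as a separate spec to parse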
@@ -89,17 +89,17 @@ def setup_parser(subparser):

def pkg_add(args):
    """add a package to the git stage with `git add`"""
-    spack.repo.add_package_to_git_stage(args.packages)
+    spack.repo.add_package_to_git_stage(args.packages, spack.repo.builtin_repo())


def pkg_list(args):
    """list packages associated with a particular spack git revision"""
-    colify(spack.repo.list_packages(args.rev))
+    colify(spack.repo.list_packages(args.rev, spack.repo.builtin_repo()))


def pkg_diff(args):
    """compare packages available in two different git revisions"""
-    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
+    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())

    if u1:
        print("%s:" % args.rev1)
@@ -114,21 +114,23 @@ def pkg_diff(args):

def pkg_removed(args):
    """show packages removed since a commit"""
-    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
+    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
    if u1:
        colify(sorted(u1))


def pkg_added(args):
    """show packages added since a commit"""
-    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
+    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
    if u2:
        colify(sorted(u2))


def pkg_changed(args):
    """show packages changed since a commit"""
-    packages = spack.repo.get_all_package_diffs(args.type, args.rev1, args.rev2)
+    packages = spack.repo.get_all_package_diffs(
+        args.type, spack.repo.builtin_repo(), args.rev1, args.rev2
+    )

    if packages:
        colify(sorted(packages))
@@ -4,6 +4,7 @@

 import os
 import sys
+from typing import List

 import llnl.util.tty as tty

@@ -24,9 +25,7 @@ def setup_parser(subparser):
     create_parser = sp.add_parser("create", help=repo_create.__doc__)
     create_parser.add_argument("directory", help="directory to create the repo in")
     create_parser.add_argument(
-        "namespace",
-        help="namespace to identify packages in the repository (defaults to the directory name)",
-        nargs="?",
+        "namespace", help="name or namespace to identify packages in the repository"
     )
     create_parser.add_argument(
         "-d",
@@ -138,7 +137,7 @@ def repo_remove(args):
 def repo_list(args):
     """show registered repositories and their namespaces"""
     roots = spack.config.get("repos", scope=args.scope)
-    repos = []
+    repos: List[spack.repo.Repo] = []
     for r in roots:
         try:
             repos.append(spack.repo.from_path(r))
@@ -146,17 +145,14 @@ def repo_list(args):
             continue

     if sys.stdout.isatty():
-        msg = "%d package repositor" % len(repos)
-        msg += "y." if len(repos) == 1 else "ies."
-        tty.msg(msg)
+        tty.msg(f"{len(repos)} package repositor" + ("y." if len(repos) == 1 else "ies."))

     if not repos:
         return

     max_ns_len = max(len(r.namespace) for r in repos)
     for repo in repos:
-        fmt = "%%-%ds%%s" % (max_ns_len + 4)
-        print(fmt % (repo.namespace, repo.root))
+        print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}")


 def repo(parser, args):
@@ -136,20 +136,7 @@ def solve(parser, args):
     setup_only = set(show) == {"asp"}
     unify = spack.config.get("concretizer:unify")
     allow_deprecated = spack.config.get("config:deprecated", False)
-    if unify != "when_possible":
-        # set up solver parameters
-        # Note: reuse and other concretizer prefs are passed as configuration
-        result = solver.solve(
-            specs,
-            out=output,
-            timers=args.timers,
-            stats=args.stats,
-            setup_only=setup_only,
-            allow_deprecated=allow_deprecated,
-        )
-        if not setup_only:
-            _process_result(result, show, required_format, kwargs)
-    else:
+    if unify == "when_possible":
         for idx, result in enumerate(
             solver.solve_in_rounds(
                 specs,
@@ -166,3 +153,29 @@ def solve(parser, args):
             print("% END ROUND {0}\n".format(idx))
             if not setup_only:
                 _process_result(result, show, required_format, kwargs)
+    elif unify:
+        # set up solver parameters
+        # Note: reuse and other concretizer prefs are passed as configuration
+        result = solver.solve(
+            specs,
+            out=output,
+            timers=args.timers,
+            stats=args.stats,
+            setup_only=setup_only,
+            allow_deprecated=allow_deprecated,
+        )
+        if not setup_only:
+            _process_result(result, show, required_format, kwargs)
+    else:
+        for spec in specs:
+            tty.msg("SOLVING SPEC:", spec)
+            result = solver.solve(
+                [spec],
+                out=output,
+                timers=args.timers,
+                stats=args.stats,
+                setup_only=setup_only,
+                allow_deprecated=allow_deprecated,
+            )
+            if not setup_only:
+                _process_result(result, show, required_format, kwargs)
@@ -59,7 +59,7 @@ def is_package(f):
     packages, since we allow `from spack import *` and poking globals
     into packages.
     """
-    return f.startswith("var/spack/repos/") and f.endswith("package.py")
+    return f.startswith("var/spack/") and f.endswith("package.py")


 #: decorator for adding tools to the list
@@ -380,7 +380,7 @@ def run_black(black_cmd, file_list, args):
 def _module_part(root: str, expr: str):
     parts = expr.split(".")
     # spack.pkg is for repositories, don't try to resolve it here.
-    if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
+    if expr.startswith(spack.repo.PKG_MODULE_PREFIX_V1) or expr == "spack.pkg":
         return None
     while parts:
         f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
@@ -65,7 +65,7 @@ def __init__(cls: "DirectiveMeta", name: str, bases: tuple, attr_dict: dict):
     # The instance is being initialized: if it is a package we must ensure
     # that the directives are called to set it up.

-    if cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
+    if spack.repo.is_package_module(cls.__module__):
         # Ensure the presence of the dictionaries associated with the directives.
         # All dictionaries are defaultdicts that create lists for missing keys.
         for d in DirectiveMeta._directive_dict_names:
@@ -2312,8 +2312,12 @@ def update_environment_repository(self) -> None:

     def _add_to_environment_repository(self, spec_node: Spec) -> None:
         """Add the root node of the spec to the environment repository"""
-        repository_dir = os.path.join(self.repos_path, spec_node.namespace)
-        repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
+        namespace: str = spec_node.namespace
+        repository = spack.repo.create_or_construct(
+            root=os.path.join(self.repos_path, namespace),
+            namespace=namespace,
+            package_api=spack.repo.PATH.get_repo(namespace).package_api,
+        )
         pkg_dir = repository.dirname_for_package_name(spec_node.name)
         fs.mkdirp(pkg_dir)
         spack.repo.PATH.dump_provenance(spec_node, pkg_dir)
@@ -202,3 +202,16 @@ class MirrorError(SpackError):

     def __init__(self, msg, long_msg=None):
         super().__init__(msg, long_msg)
+
+
+class NoChecksumException(SpackError):
+    """Raised if a file fails checksum verification."""
+
+    def __init__(self, path, size, contents, algorithm, expected, computed):
+        super().__init__(
+            f"{algorithm} checksum failed for {path}",
+            f"Expected {expected} but got {computed}. "
+            f"File size = {size} bytes. Contents = {contents!r}",
+        )
@@ -65,6 +65,7 @@
 import spack.util.executable
 import spack.util.path
 import spack.util.timer as timer
+from spack.url_buildcache import BuildcacheEntryError
 from spack.util.environment import EnvironmentModifications, dump_environment
 from spack.util.executable import which

@@ -449,17 +450,17 @@ def _process_binary_cache_tarball(
         else ``False``
     """
     with timer.measure("fetch"):
-        download_result = binary_distribution.download_tarball(
+        tarball_stage = binary_distribution.download_tarball(
             pkg.spec.build_spec, unsigned, mirrors_for_spec
         )

-    if download_result is None:
+    if tarball_stage is None:
         return False

     tty.msg(f"Extracting {package_id(pkg.spec)} from binary cache")

     with timer.measure("install"), spack.util.path.filter_padding():
-        binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
+        binary_distribution.extract_tarball(pkg.spec, tarball_stage, force=False, timer=timer)

     if pkg.spec.spliced:  # overwrite old metadata with new
         spack.store.STORE.layout.write_spec(
@@ -566,10 +567,11 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
             tty.warn(f"Warning: Couldn't copy in provenance for {node.name}")

         # Create a destination repository
-        dest_repo_root = os.path.join(path, node.namespace)
-        if not os.path.exists(dest_repo_root):
-            spack.repo.create_repo(dest_repo_root)
-        repo = spack.repo.from_path(dest_repo_root)
+        pkg_api = spack.repo.PATH.get_repo(node.namespace).package_api
+        repo_root = os.path.join(path, node.namespace) if pkg_api < (2, 0) else path
+        repo = spack.repo.create_or_construct(
+            repo_root, namespace=node.namespace, package_api=pkg_api
+        )

         # Get the location of the package in the dest repo.
         dest_pkg_dir = repo.dirname_for_package_name(node.name)
@@ -2176,7 +2178,7 @@ def install(self) -> None:
             )
             raise

-        except binary_distribution.NoChecksumException as exc:
+        except BuildcacheEntryError as exc:
             if task.cache_only:
                 raise
lib/spack/spack/mirrors/filter.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from typing import List
+
+import spack.spec
+from spack.mirrors.mirror import Mirror
+
+
+class MirrorSpecFilter:
+    def __init__(self, mirror: Mirror):
+        self.exclude = [spack.spec.Spec(spec) for spec in mirror.exclusions]
+        self.include = [spack.spec.Spec(spec) for spec in mirror.inclusions]
+
+    def __call__(self, specs: List[spack.spec.Spec]):
+        """Determine the intersection of include/exclude filters.
+
+        A tie goes to keeping:
+
+            skip  | keep  | outcome
+            ------------------------
+            False | False | Keep
+            True  | True  | Keep
+            False | True  | Keep
+            True  | False | Skip
+        """
+        filter = []
+        filtrate = []
+        for spec in specs:
+            skip = any([spec.satisfies(test) for test in self.exclude])
+            keep = any([spec.satisfies(test) for test in self.include])
+
+            if skip and not keep:
+                filtrate.append(spec)
+            else:
+                filter.append(spec)
+
+        return filter, filtrate
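A minimal standalone sketch of the keep/skip table, where Stub.satisfies is a stand-in for spack.spec.Spec.satisfies and the spec strings are hypothetical:

class Stub:
    def __init__(self, name):
        self.name = name

    def satisfies(self, pattern):
        # Crude prefix match standing in for real spec-satisfaction semantics.
        return self.name.startswith(pattern)

exclude, include = ["mpich"], ["mpich@4"]
specs = [Stub("mpich@3.4"), Stub("mpich@4.2"), Stub("zlib@1.3")]

kept, skipped = [], []
for s in specs:
    skip = any(s.satisfies(t) for t in exclude)
    keep = any(s.satisfies(t) for t in include)
    (skipped if skip and not keep else kept).append(s.name)

print(kept)     # ['mpich@4.2', 'zlib@1.3'] -- include wins the tie; unmatched specs are kept
print(skipped)  # ['mpich@3.4']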
@@ -5,7 +5,7 @@
 import operator
 import os
 import urllib.parse
-from typing import Any, Dict, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Tuple, Union

 import llnl.util.tty as tty

@@ -99,6 +99,11 @@ def display(self, max_len=0):
         binary = "b" if self.binary else " "
         print(f"{self.name: <{max_len}} [{source}{binary}] {url}")

+    def _process_spec_filters(self, key: str) -> List[str]:
+        if isinstance(self._data, str):
+            return []
+        return self._data.get(key, [])
+
     @property
     def name(self):
         return self._name or "<unnamed>"
@@ -131,6 +136,14 @@ def push_url(self):
         """Get the valid, canonicalized fetch URL"""
         return self.get_url("push")

+    @property
+    def exclusions(self):
+        return self._process_spec_filters("exclude")
+
+    @property
+    def inclusions(self):
+        return self._process_spec_filters("include")
+
     def ensure_mirror_usable(self, direction: str = "push"):
         access_pair = self._get_value("access_pair", direction)
         access_token_variable = self._get_value("access_token_variable", direction)
@@ -192,7 +205,7 @@ def _update_connection_dict(self, current_data: dict, new_data: dict, top_level:
             "endpoint_url",
         ]
         if top_level:
-            keys += ["binary", "source", "signed", "autopush"]
+            keys += ["binary", "source", "signed", "autopush", "exclude", "include"]
         changed = False
         for key in keys:
             if key in new_data and current_data.get(key) != new_data[key]:
@@ -172,3 +172,5 @@ class tty:
 spack_cxx: str
 spack_f77: str
 spack_fc: str
+prefix: Prefix
+dso_suffix: str
@@ -47,6 +47,7 @@
 import spack.url
 import spack.util.environment
 import spack.util.executable
+import spack.util.naming
 import spack.util.path
 import spack.util.web
 import spack.variant
@@ -838,26 +839,36 @@ def fullname(cls):
     def fullnames(cls):
         """Fullnames for this package and any packages from which it inherits."""
         fullnames = []
-        for cls in cls.__mro__:
-            namespace = getattr(cls, "namespace", None)
-            if namespace:
-                fullnames.append("%s.%s" % (namespace, cls.name))
-            if namespace == "builtin":
-                # builtin packages cannot inherit from other repos
-                break
+        for base in cls.__mro__:
+            if not spack.repo.is_package_module(base.__module__):
+                break
+            fullnames.append(base.fullname)
         return fullnames

     @classproperty
     def name(cls):
-        """The name of this package.
-
-        The name of a package is the name of its Python module, without
-        the containing module names.
-        """
+        """The name of this package."""
         if cls._name is None:
-            cls._name = cls.module.__name__
-            if "." in cls._name:
-                cls._name = cls._name[cls._name.rindex(".") + 1 :]
+            # We cannot know the exact package API version, but we can distinguish between v1
+            # and v2 based on the module. We don't want to figure out the exact package API
+            # version since it requires parsing the repo.yaml.
+            module = cls.__module__
+
+            if module.startswith(spack.repo.PKG_MODULE_PREFIX_V1):
+                version = (1, 0)
+            elif module.startswith(spack.repo.PKG_MODULE_PREFIX_V2):
+                version = (2, 0)
+            else:
+                raise ValueError(f"Package {cls.__qualname__} is not a known Spack package")
+
+            if version < (2, 0):
+                # spack.pkg.builtin.package_name
+                _, _, pkg_module = module.rpartition(".")
+            else:
+                # spack_repo.builtin.packages.package_name.package
+                pkg_module = module.rsplit(".", 2)[-2]
+
+            cls._name = spack.util.naming.pkg_dir_to_pkg_name(pkg_module, version)
         return cls._name

     @classproperty
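A hedged sketch of the two module shapes this property distinguishes. The helper below is an illustrative stand-in for spack.util.naming.pkg_dir_to_pkg_name under the v2 convention (leading-digit names get a "num" prefix, '_' maps to '-'); the module names are hypothetical:

def pkg_dir_to_pkg_name_v2(pkg_dir: str) -> str:
    # Illustrative stand-in, not the real spack.util.naming implementation.
    name = pkg_dir[3:] if pkg_dir.startswith("num") and pkg_dir[3:4].isdigit() else pkg_dir
    return name.replace("_", "-")

v1_module = "spack.pkg.builtin.hdf5"                        # name is the last component
v2_module = "spack_repo.builtin.packages.py_numpy.package"  # name is the second-to-last
print(v1_module.rpartition(".")[2])                          # hdf5
print(pkg_dir_to_pkg_name_v2(v2_module.rsplit(".", 2)[-2]))  # py-numpy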
@@ -56,8 +56,9 @@

 # read-only things in $spack/var/spack
 repos_path = os.path.join(var_path, "repos")
-packages_path = os.path.join(repos_path, "builtin")
-mock_packages_path = os.path.join(repos_path, "builtin.mock")
+test_repos_path = os.path.join(var_path, "test_repos")
+packages_path = os.path.join(repos_path, "spack_repo", "builtin")
+mock_packages_path = os.path.join(test_repos_path, "builtin.mock")

 #
 # Writable things in $spack/var/spack
@@ -47,40 +47,34 @@
 import spack.util.path
 import spack.util.spack_yaml as syaml

-#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
-ROOT_PYTHON_NAMESPACE = "spack.pkg"
+PKG_MODULE_PREFIX_V1 = "spack.pkg."
+PKG_MODULE_PREFIX_V2 = "spack_repo."

 _API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")


-def python_package_for_repo(namespace):
-    """Returns the full namespace of a repository, given its relative one
-
-    For instance:
-
-        python_package_for_repo('builtin') == 'spack.pkg.builtin'
-
-    Args:
-        namespace (str): repo namespace
-    """
-    return "{0}.{1}".format(ROOT_PYTHON_NAMESPACE, namespace)
+def is_package_module(fullname: str) -> bool:
+    """Check if the given module is a package module."""
+    return fullname.startswith(PKG_MODULE_PREFIX_V1) or fullname.startswith(PKG_MODULE_PREFIX_V2)


-def namespace_from_fullname(fullname):
+def namespace_from_fullname(fullname: str) -> str:
     """Return the repository namespace only for the full module name.

     For instance:

-        namespace_from_fullname('spack.pkg.builtin.hdf5') == 'builtin'
+        namespace_from_fullname("spack.pkg.builtin.hdf5") == "builtin"
+        namespace_from_fullname("spack_repo.x.y.z.packages.pkg_name.package") == "x.y.z"

     Args:
-        fullname (str): full name for the Python module
+        fullname: full name for the Python module
     """
-    namespace, dot, module = fullname.rpartition(".")
-    prefix_and_dot = "{0}.".format(ROOT_PYTHON_NAMESPACE)
-    if namespace.startswith(prefix_and_dot):
-        namespace = namespace[len(prefix_and_dot) :]
-    return namespace
+    if fullname.startswith(PKG_MODULE_PREFIX_V1):
+        namespace, _, _ = fullname.rpartition(".")
+        return namespace[len(PKG_MODULE_PREFIX_V1) :]
+    elif fullname.startswith(PKG_MODULE_PREFIX_V2) and fullname.endswith(".package"):
+        return ".".join(fullname.split(".")[1:-3])
+    return fullname

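A doctest-style sketch of the two module-name shapes handled above, inlined as pure string manipulation so it runs standalone:

def namespace_from_fullname(fullname: str) -> str:
    # Same logic as the new function above, inlined for illustration.
    if fullname.startswith("spack.pkg."):
        return fullname.rpartition(".")[0][len("spack.pkg."):]
    if fullname.startswith("spack_repo.") and fullname.endswith(".package"):
        return ".".join(fullname.split(".")[1:-3])
    return fullname

assert namespace_from_fullname("spack.pkg.builtin.hdf5") == "builtin"
assert namespace_from_fullname("spack_repo.x.y.z.packages.pkg_name.package") == "x.y.z"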
class SpackNamespaceLoader:
@@ -92,14 +86,14 @@ def exec_module(self, module):


 class ReposFinder:
-    """MetaPathFinder class that loads a Python module corresponding to a Spack package.
+    """MetaPathFinder class that loads a Python module corresponding to an API v1 Spack package.

     Returns a loader based on the inspection of the current repository list.
     """

     def __init__(self):
         self._repo_init = _path
-        self._repo = None
+        self._repo: Optional[RepoType] = None

     @property
     def current_repository(self):
@@ -127,7 +121,7 @@ def find_spec(self, fullname, python_path, target=None):
             raise RuntimeError('cannot reload module "{0}"'.format(fullname))

         # Preferred API from https://peps.python.org/pep-0451/
-        if not fullname.startswith(ROOT_PYTHON_NAMESPACE):
+        if not fullname.startswith(PKG_MODULE_PREFIX_V1) and fullname != "spack.pkg":
             return None

         loader = self.compute_loader(fullname)
@@ -135,16 +129,17 @@ def find_spec(self, fullname, python_path, target=None):
             return None
         return importlib.util.spec_from_loader(fullname, loader)

-    def compute_loader(self, fullname):
+    def compute_loader(self, fullname: str):
         # namespaces are added to repo, and package modules are leaves.
         namespace, dot, module_name = fullname.rpartition(".")

         # If it's a module in some repo, or if it is the repo's namespace, let the repo handle it.
-        is_repo_path = isinstance(self.current_repository, RepoPath)
+        current_repo = self.current_repository
+        is_repo_path = isinstance(current_repo, RepoPath)
         if is_repo_path:
-            repos = self.current_repository.repos
+            repos = current_repo.repos
         else:
-            repos = [self.current_repository]
+            repos = [current_repo]

         for repo in repos:
             # We are using the namespace of the repo and the repo contains the package
@@ -161,7 +156,9 @@ def compute_loader(self, fullname):

         # No repo provides the namespace, but it is a valid prefix of
         # something in the RepoPath.
-        if is_repo_path and self.current_repository.by_namespace.is_prefix(fullname):
+        if is_repo_path and current_repo.by_namespace.is_prefix(
+            fullname[len(PKG_MODULE_PREFIX_V1) :]
+        ):
             return SpackNamespaceLoader()

         return None
@@ -179,12 +176,12 @@ def compute_loader(self, fullname):
 NOT_PROVIDED = object()


-def packages_path():
+def builtin_repo() -> "Repo":
     """Get the test repo if it is active, otherwise the builtin repo."""
     try:
-        return PATH.get_repo("builtin.mock").packages_path
+        return PATH.get_repo("builtin.mock")
     except UnknownNamespaceError:
-        return PATH.get_repo("builtin").packages_path
+        return PATH.get_repo("builtin")

class GitExe:
@@ -192,24 +189,25 @@ class GitExe:
     # invocations.
     #
     # Not using -C as that is not supported for git < 1.8.5.
-    def __init__(self):
+    def __init__(self, packages_path: str):
         self._git_cmd = spack.util.git.git(required=True)
+        self.packages_dir = packages_path

-    def __call__(self, *args, **kwargs):
-        with working_dir(packages_path()):
-            return self._git_cmd(*args, **kwargs)
+    def __call__(self, *args, **kwargs) -> str:
+        with working_dir(self.packages_dir):
+            return self._git_cmd(*args, **kwargs, output=str)


-def list_packages(rev):
+def list_packages(rev: str, repo: "Repo") -> List[str]:
     """List all packages associated with the given revision"""
-    git = GitExe()
+    git = GitExe(repo.packages_path)

     # git ls-tree does not support ... merge-base syntax, so do it manually
     if rev.endswith("..."):
         ref = rev.replace("...", "")
-        rev = git("merge-base", ref, "HEAD", output=str).strip()
+        rev = git("merge-base", ref, "HEAD").strip()

-    output = git("ls-tree", "-r", "--name-only", rev, output=str)
+    output = git("ls-tree", "-r", "--name-only", rev)

     # recursively list the packages directory
     package_paths = [
@@ -217,54 +215,54 @@ def list_packages(rev):
     ]

     # take the directory names with one-level-deep package files
-    package_names = sorted(set([line[0] for line in package_paths if len(line) == 2]))
-
-    return package_names
+    package_names = [
+        nm.pkg_dir_to_pkg_name(line[0], repo.package_api)
+        for line in package_paths
+        if len(line) == 2
+    ]
+
+    return sorted(set(package_names))


-def diff_packages(rev1, rev2):
+def diff_packages(rev1: str, rev2: str, repo: "Repo") -> Tuple[Set[str], Set[str]]:
     """Compute packages lists for the two revisions and return a tuple
     containing all the packages in rev1 but not in rev2 and all the
     packages in rev2 but not in rev1."""
-    p1 = set(list_packages(rev1))
-    p2 = set(list_packages(rev2))
+    p1 = set(list_packages(rev1, repo))
+    p2 = set(list_packages(rev2, repo))
     return p1.difference(p2), p2.difference(p1)


-def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
-    """Show packages changed, added, or removed (or any combination of those)
-    since a commit.
+def get_all_package_diffs(type: str, repo: "Repo", rev1="HEAD^1", rev2="HEAD") -> Set[str]:
+    """Get packages changed, added, or removed (or any combination of those) since a commit.

     Arguments:
-        type (str): String containing one or more of 'A', 'R', 'C'
-        rev1 (str): Revision to compare against, default is 'HEAD^'
-        rev2 (str): Revision to compare to rev1, default is 'HEAD'
-
-    Returns:
-        A set contain names of affected packages.
+        type: String containing one or more of 'A', 'R', 'C'
+        rev1: Revision to compare against, default is 'HEAD^'
+        rev2: Revision to compare to rev1, default is 'HEAD'
     """
     lower_type = type.lower()
     if not re.match("^[arc]*$", lower_type):
         tty.die(
-            "Invald change type: '%s'." % type,
-            "Can contain only A (added), R (removed), or C (changed)",
+            f"Invalid change type: '{type}'. "
+            "Can contain only A (added), R (removed), or C (changed)"
         )

-    removed, added = diff_packages(rev1, rev2)
+    removed, added = diff_packages(rev1, rev2, repo)

-    git = GitExe()
-    out = git("diff", "--relative", "--name-only", rev1, rev2, output=str).strip()
+    git = GitExe(repo.packages_path)
+    out = git("diff", "--relative", "--name-only", rev1, rev2).strip()

     lines = [] if not out else re.split(r"\s+", out)
-    changed = set()
+    changed: Set[str] = set()
     for path in lines:
-        pkg_name, _, _ = path.partition("/")
+        dir_name, _, _ = path.partition("/")
+        pkg_name = nm.pkg_dir_to_pkg_name(dir_name, repo.package_api)
         if pkg_name not in added and pkg_name not in removed:
             changed.add(pkg_name)

-    packages = set()
+    packages: Set[str] = set()
     if "a" in lower_type:
         packages |= added
     if "r" in lower_type:
@@ -275,14 +273,14 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
     return packages


-def add_package_to_git_stage(packages):
+def add_package_to_git_stage(packages: List[str], repo: "Repo") -> None:
     """add a package to the git stage with `git add`"""
-    git = GitExe()
+    git = GitExe(repo.packages_path)

     for pkg_name in packages:
         filename = PATH.filename_for_package_name(pkg_name)
         if not os.path.isfile(filename):
-            tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
+            tty.die(f"No such package: {pkg_name}. Path does not exist:", filename)

         git("add", filename)

@@ -352,9 +350,10 @@ class FastPackageChecker(collections.abc.Mapping):
     #: Global cache, reused by every instance
     _paths_cache: Dict[str, Dict[str, os.stat_result]] = {}

-    def __init__(self, packages_path):
+    def __init__(self, packages_path: str, package_api: Tuple[int, int]):
         # The path of the repository managed by this instance
         self.packages_path = packages_path
+        self.package_api = package_api

         # If the cache we need is not there yet, then build it appropriately
         if packages_path not in self._paths_cache:
@@ -379,41 +378,38 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
         # Create a dictionary that will store the mapping between a
         # package name and its stat info
         cache: Dict[str, os.stat_result] = {}
-        for pkg_name in os.listdir(self.packages_path):
-            # Skip non-directories in the package root.
-            pkg_dir = os.path.join(self.packages_path, pkg_name)
-
-            # Warn about invalid names that look like packages.
-            if not nm.valid_module_name(pkg_name):
-                if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
-                    tty.warn(
-                        'Skipping package at {0}. "{1}" is not '
-                        "a valid Spack module name.".format(pkg_dir, pkg_name)
-                    )
-                continue
-
-            # Construct the file name from the directory
-            pkg_file = os.path.join(self.packages_path, pkg_name, package_file_name)
-
-            # Use stat here to avoid lots of calls to the filesystem.
-            try:
-                sinfo = os.stat(pkg_file)
-            except OSError as e:
-                if e.errno == errno.ENOENT:
-                    # No package.py file here.
-                    continue
-                elif e.errno == errno.EACCES:
-                    tty.warn("Can't read package file %s." % pkg_file)
-                    continue
-                raise e
-
-            # If it's not a file, skip it.
-            if stat.S_ISDIR(sinfo.st_mode):
-                continue
-
-            # If it is a file, then save the stats under the
-            # appropriate key
-            cache[pkg_name] = sinfo
+        with os.scandir(self.packages_path) as entries:
+            for entry in entries:
+                # Construct the file name from the directory
+                pkg_file = os.path.join(entry.path, package_file_name)
+
+                # Use stat here to avoid lots of calls to the filesystem.
+                try:
+                    sinfo = os.stat(pkg_file)
+                except OSError as e:
+                    if e.errno in (errno.ENOENT, errno.ENOTDIR):
+                        # No package.py file here.
+                        continue
+                    elif e.errno == errno.EACCES:
+                        tty.warn(f"Can't read package file {pkg_file}.")
+                        continue
+                    raise e
+
+                # If it's not a file, skip it.
+                if not stat.S_ISREG(sinfo.st_mode):
+                    continue
+
+                # Only consider package.py files in directories that are valid module names
+                # under the current package API
+                if not nm.valid_module_name(entry.name, self.package_api):
+                    x, y = self.package_api
+                    tty.warn(
+                        f"Package {pkg_file} cannot be used because `{entry.name}` is not a valid "
+                        f"Spack package module name for Package API v{x}.{y}."
+                    )
+                    continue
+
+                # Store the stat info by package name.
+                cache[nm.pkg_dir_to_pkg_name(entry.name, self.package_api)] = sinfo

         return cache

@@ -688,7 +684,7 @@ def put_first(self, repo: "Repo") -> None:
             return

         self.repos.insert(0, repo)
-        self.by_namespace[repo.full_namespace] = repo
+        self.by_namespace[repo.namespace] = repo

     def put_last(self, repo):
         """Add repo last in the search path."""
@@ -700,8 +696,8 @@ def put_last(self, repo):
         self.repos.append(repo)

         # don't mask any higher-precedence repos with same namespace
-        if repo.full_namespace not in self.by_namespace:
-            self.by_namespace[repo.full_namespace] = repo
+        if repo.namespace not in self.by_namespace:
+            self.by_namespace[repo.namespace] = repo

     def remove(self, repo):
         """Remove a repo from the search path."""
@@ -710,10 +706,9 @@ def remove(self, repo):

     def get_repo(self, namespace: str) -> "Repo":
         """Get a repository by namespace."""
-        full_namespace = python_package_for_repo(namespace)
-        if full_namespace not in self.by_namespace:
+        if namespace not in self.by_namespace:
             raise UnknownNamespaceError(namespace)
-        return self.by_namespace[full_namespace]
+        return self.by_namespace[namespace]

     def first_repo(self) -> Optional["Repo"]:
         """Get the first repo in precedence order."""
@@ -821,10 +816,9 @@ def repo_for_pkg(self, spec: Union[str, "spack.spec.Spec"]) -> "Repo":
         # If the spec already has a namespace, then return the
         # corresponding repo if we know about it.
         if namespace:
-            fullspace = python_package_for_repo(namespace)
-            if fullspace not in self.by_namespace:
+            if namespace not in self.by_namespace:
                 raise UnknownNamespaceError(namespace, name=name)
-            return self.by_namespace[fullspace]
+            return self.by_namespace[namespace]

         # If there's no namespace, search in the RepoPath.
         for repo in self.repos:
@@ -845,8 +839,15 @@ def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
         assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
         return self.repo_for_pkg(spec).get(spec)

+    def python_paths(self) -> List[str]:
+        """Return a list of all the Python paths in the repos."""
+        return [repo.python_path for repo in self.repos if repo.python_path]
+
     def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]:
         """Find a class for the spec's package and return the class object."""
+        for p in self.python_paths():
+            if p not in sys.path:
+                sys.path.insert(0, p)
         return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name)

     @autospec
@@ -942,6 +943,30 @@ def _parse_package_api_version(
     )


+def _validate_and_normalize_subdir(subdir: Any, root: str, package_api: Tuple[int, int]) -> str:
+    if not isinstance(subdir, str):
+        raise BadRepoError(f"Invalid subdirectory '{subdir}' in '{root}'. Must be a string")
+
+    if package_api < (2, 0):
+        return subdir  # In v1.x we did not validate subdir names
+
+    if subdir in (".", ""):
+        raise BadRepoError(
+            f"Invalid subdirectory '{subdir}' in '{root}'. Use a symlink packages -> . instead"
+        )
+
+    # Otherwise we expect a directory name (not path) that can be used as a Python module.
+    if os.sep in subdir:
+        raise BadRepoError(
+            f"Invalid subdirectory '{subdir}' in '{root}'. Expected a directory name, not a path"
+        )
+    if not nm.valid_module_name(subdir, package_api):
+        raise BadRepoError(
+            f"Invalid subdirectory '{subdir}' in '{root}'. Must be a valid Python module name"
+        )
+    return subdir

class Repo:
    """Class representing a package repository in the filesystem.
@@ -962,6 +987,8 @@ class Repo:
         :py:data:`spack.package_api_version`.
     """

+    namespace: str
+
     def __init__(
         self,
         root: str,
@@ -991,32 +1018,79 @@ def check(condition, msg):

         # Read configuration and validate namespace
         config = self._read_config()

+        self.package_api = _parse_package_api_version(config)
+        self.subdirectory = _validate_and_normalize_subdir(
+            config.get("subdirectory", packages_dir_name), root, self.package_api
+        )
+        self.packages_path = os.path.join(self.root, self.subdirectory)
+
         check(
-            "namespace" in config,
-            f"{os.path.join(root, repo_config_name)} must define a namespace.",
+            os.path.isdir(self.packages_path),
+            f"No directory '{self.subdirectory}' found in '{root}'",
         )

-        self.namespace: str = config["namespace"]
-        check(
-            re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
-            f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
-            "Namespaces must be valid python identifiers separated by '.'",
-        )
+        # The parent dir of spack_repo/ which should be added to sys.path for api v2.x
+        self.python_path: Optional[str] = None
+
+        if self.package_api < (2, 0):
+            check(
+                "namespace" in config,
+                f"{os.path.join(root, repo_config_name)} must define a namespace.",
+            )
+            self.namespace = config["namespace"]
+            # Note: for Package API v1.x the namespace validation always had bugs, which won't be
+            # fixed for compatibility reasons. The regex is missing "$" at the end, and it claims
+            # to test for valid identifiers, but fails to split on `.` first.
+            check(
+                isinstance(self.namespace, str)
+                and re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
+                f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
+                "Namespaces must be valid python identifiers separated by '.'",
+            )
+        else:
+            # From Package API v2.0 the namespace follows from the directory structure.
+            check(
+                f"{os.sep}spack_repo{os.sep}" in self.root,
+                f"Invalid repository path '{self.root}'. "
+                f"Path must contain 'spack_repo{os.sep}'",
+            )
+            derived_namespace = self.root.rpartition(f"spack_repo{os.sep}")[2].replace(os.sep, ".")
+            if "namespace" in config:
+                self.namespace = config["namespace"]
+
+                check(
+                    isinstance(self.namespace, str) and self.namespace == derived_namespace,
+                    f"Namespace '{self.namespace}' should be {derived_namespace} or omitted in "
+                    f"{os.path.join(root, repo_config_name)}",
+                )
+            else:
+                self.namespace = derived_namespace
+
+            # strip the namespace directories from the root path to get the python path
+            # e.g. /my/pythonpath/spack_repo/x/y/z -> /my/pythonpath
+            python_path = self.root
+            for _ in self.namespace.split("."):
+                python_path = os.path.dirname(python_path)
+            self.python_path = os.path.dirname(python_path)
+
+        # check that all subdirectories are valid module names
+        check(
+            all(nm.valid_module_name(x, self.package_api) for x in self.namespace.split(".")),
+            f"Invalid namespace '{self.namespace}' in repo '{self.root}'",
+        )

         # Set up 'full_namespace' to include the super-namespace
-        self.full_namespace = python_package_for_repo(self.namespace)
+        if self.package_api < (2, 0):
+            self.full_namespace = f"{PKG_MODULE_PREFIX_V1}{self.namespace}"
+        elif self.subdirectory == ".":
+            self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}"
+        else:
+            self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}.{self.subdirectory}"

         # Keep name components around for checking prefixes.
         self._names = self.full_namespace.split(".")

-        packages_dir: str = config.get("subdirectory", packages_dir_name)
-        self.packages_path = os.path.join(self.root, packages_dir)
-        check(
-            os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
-        )
-
-        self.package_api = _parse_package_api_version(config)
-
         # Class attribute overrides by package name
         self.overrides = overrides or {}

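A standalone sketch of the v2 namespace and python-path derivation from a repo root, assuming a POSIX os.sep and a hypothetical path; only the string operations from the logic above are used:

import os

root = "/my/pythonpath/spack_repo/x/y/z"  # hypothetical API v2 repo root
derived_namespace = root.rpartition(f"spack_repo{os.sep}")[2].replace(os.sep, ".")

python_path = root
for _ in derived_namespace.split("."):
    python_path = os.path.dirname(python_path)
python_path = os.path.dirname(python_path)  # strip the trailing "spack_repo" too

print(derived_namespace)  # x.y.z
print(python_path)        # /my/pythonpath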
@@ -1030,27 +1104,36 @@ def check(condition, msg):
         self._repo_index: Optional[RepoIndex] = None
         self._cache = cache

+    @property
+    def package_api_str(self) -> str:
+        return f"v{self.package_api[0]}.{self.package_api[1]}"
+
     def finder(self, value: RepoPath) -> None:
         self._finder = value

     def real_name(self, import_name: str) -> Optional[str]:
         """Allow users to import Spack packages using Python identifiers.

-        A python identifier might map to many different Spack package
-        names due to hyphen/underscore ambiguity.
+        In Package API v1.x, there was no canonical module name for a package, and a package's
+        dir was not necessarily a valid Python module name. For that case we have to guess the
+        actual package directory. From Package API v2.0 there is a one-to-one mapping between
+        Spack package names and Python module names, so there is no guessing.

-        Easy example:
-            num3proxy -> 3proxy
-
-        Ambiguous:
+        For Package API v1.x we support the following one-to-many mappings:
+            num3proxy -> 3proxy
             foo_bar -> foo_bar, foo-bar

         More ambiguous:
             foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz
         """
+        if self.package_api >= (2, 0):
+            if nm.pkg_dir_to_pkg_name(import_name, package_api=self.package_api) in self:
+                return import_name
+            return None
+
         if import_name in self:
             return import_name

+        # For v1 generate the possible package names from a module name, and return the first
+        # package name that exists in this repo.
         options = nm.possible_spack_module_names(import_name)
         try:
             options.remove(import_name)
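A standalone sketch of the v1 one-to-many expansion. nm.possible_spack_module_names is the real helper; this illustrative version just enumerates the hyphen/underscore choices:

from itertools import product

def possible_names(import_name: str):
    # Each '_' in the import name may have been a '-' in the real package name.
    parts = import_name.split("_")
    if len(parts) == 1:
        return [import_name]
    return ["".join(p + sep for p, sep in zip(parts, seps)) + parts[-1]
            for seps in product("_-", repeat=len(parts) - 1)]

print(possible_names("foo_bar_baz"))
# ['foo_bar_baz', 'foo_bar-baz', 'foo-bar_baz', 'foo-bar-baz']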
@@ -1183,7 +1266,9 @@ def extensions_for(
     def dirname_for_package_name(self, pkg_name: str) -> str:
         """Given a package name, get the directory containing its package.py file."""
         _, unqualified_name = self.partition_package_name(pkg_name)
-        return os.path.join(self.packages_path, unqualified_name)
+        return os.path.join(
+            self.packages_path, nm.pkg_name_to_pkg_dir(unqualified_name, self.package_api)
+        )

     def filename_for_package_name(self, pkg_name: str) -> str:
         """Get the filename for the module we should load for a particular
@@ -1200,7 +1285,7 @@ def filename_for_package_name(self, pkg_name: str) -> str:
     @property
     def _pkg_checker(self) -> FastPackageChecker:
         if self._fast_package_checker is None:
-            self._fast_package_checker = FastPackageChecker(self.packages_path)
+            self._fast_package_checker = FastPackageChecker(self.packages_path, self.package_api)
         return self._fast_package_checker

     def all_package_names(self, include_virtuals: bool = False) -> List[str]:
@@ -1212,7 +1297,9 @@ def all_package_names(self, include_virtuals: bool = False) -> List[str]:

     def package_path(self, name: str) -> str:
         """Get path to package.py file for this repo."""
-        return os.path.join(self.packages_path, name, package_file_name)
+        return os.path.join(
+            self.packages_path, nm.pkg_name_to_pkg_dir(name, self.package_api), package_file_name
+        )

     def all_package_paths(self) -> Generator[str, None, None]:
         for name in self.all_package_names():
@@ -1270,15 +1357,19 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]:
         package. Then extracts the package class from the module
         according to Spack's naming convention.
         """
-        namespace, pkg_name = self.partition_package_name(pkg_name)
-        class_name = nm.mod_to_class(pkg_name)
-        fullname = f"{self.full_namespace}.{pkg_name}"
+        _, pkg_name = self.partition_package_name(pkg_name)
+        fullname = f"{self.full_namespace}.{nm.pkg_name_to_pkg_dir(pkg_name, self.package_api)}"
+        if self.package_api >= (2, 0):
+            fullname += ".package"

+        class_name = nm.pkg_name_to_class_name(pkg_name)
+        if self.python_path and self.python_path not in sys.path:
+            sys.path.insert(0, self.python_path)
         try:
             with REPOS_FINDER.switch_repo(self._finder or self):
                 module = importlib.import_module(fullname)
-        except ImportError:
-            raise UnknownPackageError(fullname)
+        except ImportError as e:
+            raise UnknownPackageError(fullname) from e
         except Exception as e:
             msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
             raise RepoError(msg) from e
@@ -1369,46 +1460,71 @@ def partition_package_name(pkg_name: str) -> Tuple[str, str]:
     return namespace, pkg_name


-def create_repo(root, namespace=None, subdir=packages_dir_name):
+def get_repo_yaml_dir(
+    root: str, namespace: Optional[str], package_api: Tuple[int, int]
+) -> Tuple[str, str]:
+    """Returns the directory where repo.yaml is located and the effective namespace."""
+    if package_api < (2, 0):
+        namespace = namespace or os.path.basename(root)
+        # This ad-hoc regex is left for historical reasons, and should not have a breaking change.
+        if not re.match(r"\w[\.\w-]*", namespace):
+            raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace.")
+        return root, namespace
+
+    # Package API v2 has <root>/spack_repo/<namespace>/<subdir> structure and requires a namespace
+    if namespace is None:
+        raise InvalidNamespaceError("Namespace must be provided.")
+
+    # if namespace has dots those translate to subdirs of further namespace packages.
+    namespace_components = namespace.split(".")
+
+    if not all(nm.valid_module_name(n, package_api=package_api) for n in namespace_components):
+        raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace.")
+
+    return os.path.join(root, "spack_repo", *namespace_components), namespace
+
+
+def create_repo(
+    root,
+    namespace: Optional[str] = None,
+    subdir: str = packages_dir_name,
+    package_api: Tuple[int, int] = spack.package_api_version,
+) -> Tuple[str, str]:
     """Create a new repository in root with the specified namespace.

     If the namespace is not provided, use basename of root.
     Return the canonicalized path and namespace of the created repository.
     """
     root = spack.util.path.canonicalize_path(root)
-    if not namespace:
-        namespace = os.path.basename(root)
+    repo_yaml_dir, namespace = get_repo_yaml_dir(os.path.abspath(root), namespace, package_api)

-    if not re.match(r"\w[\.\w-]*", namespace):
-        raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace)
+    existed = True
+    try:
+        dir_entry = next(os.scandir(repo_yaml_dir), None)
+    except OSError as e:
+        if e.errno == errno.ENOENT:
+            existed = False
+            dir_entry = None
+        else:
+            raise BadRepoError(f"Cannot create new repo in {root}: {e}")

-    existed = False
-    if os.path.exists(root):
-        if os.path.isfile(root):
-            raise BadRepoError("File %s already exists and is not a directory" % root)
-        elif os.path.isdir(root):
-            if not os.access(root, os.R_OK | os.W_OK):
-                raise BadRepoError("Cannot create new repo in %s: cannot access directory." % root)
-            if os.listdir(root):
-                raise BadRepoError("Cannot create new repo in %s: directory is not empty." % root)
-        existed = True
+    if dir_entry is not None:
+        raise BadRepoError(f"Cannot create new repo in {root}: directory is not empty.")

-    full_path = os.path.realpath(root)
-    parent = os.path.dirname(full_path)
-    if not os.access(parent, os.R_OK | os.W_OK):
-        raise BadRepoError("Cannot create repository in %s: can't access parent!" % root)
+    config_path = os.path.join(repo_yaml_dir, repo_config_name)
+
+    subdir = _validate_and_normalize_subdir(subdir, root, package_api)
+
+    packages_path = os.path.join(repo_yaml_dir, subdir)

     try:
-        config_path = os.path.join(root, repo_config_name)
-        packages_path = os.path.join(root, subdir)
-
         fs.mkdirp(packages_path)
         with open(config_path, "w", encoding="utf-8") as config:
             config.write("repo:\n")
             config.write(f"  namespace: '{namespace}'\n")
             if subdir != packages_dir_name:
                 config.write(f"  subdirectory: '{subdir}'\n")
-            x, y = spack.package_api_version
+            x, y = package_api
             config.write(f"  api: v{x}.{y}\n")

     except OSError as e:
@@ -1421,22 +1537,27 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
         raise BadRepoError(
             "Failed to create new repository in %s." % root, "Caused by %s: %s" % (type(e), e)
-        )
+        ) from e

-    return full_path, namespace
+    return repo_yaml_dir, namespace


-def from_path(path: str) -> "Repo":
+def from_path(path: str) -> Repo:
     """Returns a repository from the path passed as input. Injects the global misc cache."""
     return Repo(path, cache=spack.caches.MISC_CACHE)


-def create_or_construct(path, namespace=None):
+def create_or_construct(
+    root: str,
+    namespace: Optional[str] = None,
+    package_api: Tuple[int, int] = spack.package_api_version,
+) -> Repo:
     """Create a repository, or just return a Repo if it already exists."""
-    if not os.path.exists(path):
-        fs.mkdirp(path)
-        create_repo(path, namespace)
-    return from_path(path)
+    repo_yaml_dir, _ = get_repo_yaml_dir(root, namespace, package_api)
+    if not os.path.exists(repo_yaml_dir):
+        fs.mkdirp(root)
+        create_repo(root, namespace=namespace, package_api=package_api)
+    return from_path(repo_yaml_dir)


def _path(configuration=None):
@@ -1514,8 +1635,10 @@ class MockRepositoryBuilder:
     """Build a mock repository in a directory"""

     def __init__(self, root_directory, namespace=None):
-        namespace = namespace or "".join(random.choice(string.ascii_uppercase) for _ in range(10))
-        self.root, self.namespace = create_repo(str(root_directory), namespace)
+        namespace = namespace or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
+        repo_root = os.path.join(root_directory, namespace)
+        os.mkdir(repo_root)
+        self.root, self.namespace = create_repo(repo_root, namespace)

     def add_package(self, name, dependencies=None):
         """Create a mock package in the repository, using a Jinja2 template.
@@ -1527,7 +1650,7 @@ def add_package(self, name, dependencies=None):
         ``spack.dependency.default_deptype`` and ``spack.spec.Spec()`` are used.
         """
         dependencies = dependencies or []
-        context = {"cls_name": nm.mod_to_class(name), "dependencies": dependencies}
+        context = {"cls_name": nm.pkg_name_to_class_name(name), "dependencies": dependencies}
         template = spack.tengine.make_environment().get_template("mock-repository/package.pyt")
         text = template.render(context)
         package_py = self.recipe_filename(name)
@@ -1539,8 +1662,10 @@ def remove(self, name):
         package_py = self.recipe_filename(name)
         shutil.rmtree(os.path.dirname(package_py))

-    def recipe_filename(self, name):
-        return os.path.join(self.root, "packages", name, "package.py")
+    def recipe_filename(self, name: str):
+        return os.path.join(
+            self.root, "packages", nm.pkg_name_to_pkg_dir(name, package_api=(2, 0)), "package.py"
+        )

class RepoError(spack.error.SpackError):
@@ -1590,7 +1715,10 @@ def __init__(self, name, repo=None):

         # We need to compare the base package name
         pkg_name = name.rsplit(".", 1)[-1]
-        similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
+        try:
+            similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
+        except Exception:
+            similar = []

         if 1 <= len(similar) <= 5:
             long_msg += "\n\nDid you mean one of the following packages?\n  "
@@ -19,10 +19,6 @@
         "additionalProperties": True,
         "items": spack.schema.spec.properties,
     },
-    "binary_cache_checksum": {
-        "type": "object",
-        "properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
-    },
-    "buildcache_layout_version": {"type": "number"},
 }

@@ -30,6 +26,6 @@
     "$schema": "http://json-schema.org/draft-07/schema#",
     "title": "Spack buildcache specfile schema",
     "type": "object",
-    "additionalProperties": False,
+    "additionalProperties": True,
     "properties": properties,
 }
@@ -77,6 +77,8 @@
             "fetch": fetch_and_push,
             "push": fetch_and_push,
             "autopush": {"type": "boolean"},
+            "exclude": {"type": "array", "items": {"type": "string"}},
+            "include": {"type": "array", "items": {"type": "string"}},
             **connection,  # type: ignore
         },
         **connection_ext,  # type: ignore
lib/spack/spack/schema/url_buildcache_manifest.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""Schema for buildcache entry manifest file
+
+.. literalinclude:: _spack_root/lib/spack/spack/schema/url_buildcache_manifest.py
+   :lines: 11-
+"""
+from typing import Any, Dict
+
+properties: Dict[str, Any] = {
+    "version": {"type": "integer"},
+    "data": {
+        "type": "array",
+        "items": {
+            "type": "object",
+            "required": [
+                "contentLength",
+                "mediaType",
+                "compression",
+                "checksumAlgorithm",
+                "checksum",
+            ],
+            "properties": {
+                "contentLength": {"type": "integer"},
+                "mediaType": {"type": "string"},
+                "compression": {"type": "string"},
+                "checksumAlgorithm": {"type": "string"},
+                "checksum": {"type": "string"},
+            },
+            "additionalProperties": True,
+        },
+    },
+}
+
+#: Full schema with metadata
+schema = {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Buildcache manifest schema",
+    "type": "object",
+    "required": ["version", "data"],
+    "additionalProperties": True,
+    "properties": properties,
+}
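A sketch of a manifest accepted by this schema, validated with the third-party jsonschema package (assumed installed); the version, media type, sizes, and digest are made up:

import jsonschema  # third-party; assumed available

manifest = {
    "version": 3,  # hypothetical layout version
    "data": [
        {
            "contentLength": 10240,
            "mediaType": "application/vnd.spack.install.v2.tar+gzip",  # hypothetical media type
            "compression": "gzip",
            "checksumAlgorithm": "sha256",
            "checksum": "0" * 64,  # made-up digest
        }
    ],
}
jsonschema.validate(manifest, schema)  # `schema` as defined in the new module above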
@@ -2492,7 +2492,7 @@ def _spec_clauses(
         # TODO: variant="*" means 'variant is defined to something', which used to
         # be meaningless in concretization, as all variants had to be defined. But
         # now that variants can be conditional, it should force a variant to exist.
-        if variant.value == ("*",):
+        if not variant.values:
             continue

         for value in variant.values:
@@ -837,7 +837,7 @@ def _shared_subset_pair_iterate(container1, container2):
             b_idx += 1


-class FlagMap(lang.HashableMap):
+class FlagMap(lang.HashableMap[str, List[CompilerFlag]]):
     __slots__ = ("spec",)

     def __init__(self, spec):
@@ -1861,9 +1861,7 @@ def add_dependency_edge(
     @property
     def fullname(self):
         return (
-            ("%s.%s" % (self.namespace, self.name))
-            if self.namespace
-            else (self.name if self.name else "")
+            f"{self.namespace}.{self.name}" if self.namespace else (self.name if self.name else "")
         )

     @property
@@ -4490,7 +4488,7 @@ def has_virtual_dependency(self, virtual: str) -> bool:
         return bool(self.dependencies(virtuals=(virtual,)))


-class VariantMap(lang.HashableMap):
+class VariantMap(lang.HashableMap[str, vt.VariantValue]):
     """Map containing variant instances. New values can be added only
     if the key is not already present."""

@@ -93,6 +93,7 @@ def test_package_audits(packages, expected_errors, mock_packages):
     ]


+# TODO/RepoSplit: Should this not rely on mock packages post split?
 @pytest.mark.parametrize(
     "config_section,data,failing_check",
     [
@@ -113,7 +114,7 @@ def test_package_audits(packages, expected_errors, mock_packages):
         ),
     ],
 )
-def test_config_audits(config_section, data, failing_check):
+def test_config_audits(config_section, data, failing_check, mock_packages):
     with spack.config.override(config_section, data):
         reports = spack.audit.run_group("configs")
         assert any((check == failing_check) and errors for check, errors in reports)
@@ -17,11 +17,10 @@
 import urllib.request
 import urllib.response
 from pathlib import Path, PurePath
 from typing import Any, Callable, Dict, NamedTuple, Optional

 import pytest

 import archspec.cpu

 from llnl.util.filesystem import copy_tree, join_path
 from llnl.util.symlink import readlink

@@ -38,16 +37,27 @@
 import spack.paths
 import spack.repo
 import spack.spec
 import spack.stage
 import spack.store
 import spack.util.gpg
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util
-from spack.binary_distribution import INDEX_HASH_FILE, CannotListKeys, GenerateIndexError
+from spack.binary_distribution import CannotListKeys, GenerateIndexError
 from spack.database import INDEX_JSON_FILE
 from spack.installer import PackageInstaller
 from spack.paths import test_path
 from spack.spec import Spec
+from spack.url_buildcache import (
+    INDEX_MANIFEST_FILE,
+    BuildcacheComponent,
+    BuildcacheEntryError,
+    URLBuildcacheEntry,
+    URLBuildcacheEntryV2,
+    compression_writer,
+    get_url_buildcache_class,
+    get_valid_spec_file,
+)

 pytestmark = pytest.mark.not_on_windows("does not run on windows")
@@ -372,7 +382,7 @@ def test_built_spec_cache(temporary_mirror_dir):

     for s in [gspec, cspec]:
         results = bindist.get_mirrors_for_spec(s)
-        assert any([r["spec"] == s for r in results])
+        assert any([r.spec == s for r in results])


 def fake_dag_hash(spec, length=None):

@@ -435,7 +445,11 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
     assert "libelf" in cache_list

     # Remove dependency from cache
-    libelf_files = glob.glob(os.path.join(mirror_dir.join("build_cache").strpath, "*libelf*"))
+    libelf_files = glob.glob(
+        os.path.join(
+            mirror_dir.join(bindist.buildcache_relative_specs_path()).strpath, "libelf", "*libelf*"
+        )
+    )
     os.remove(*libelf_files)

     # Update index

@@ -480,8 +494,7 @@ def mock_list_url(url, recursive=False):

     assert (
         "Warning: Encountered problem listing packages at "
-        f"{test_url}/{bindist.BUILD_CACHE_RELATIVE_PATH}: Some HTTP error"
-        in capfd.readouterr().err
+        f"{test_url}: Some HTTP error" in capfd.readouterr().err
     )

@@ -538,29 +551,6 @@ def test_update_sbang(tmp_path, temporary_mirror, mock_fetch, install_mockery):
         assert f.read() == new_contents


-@pytest.mark.skipif(
-    str(archspec.cpu.host().family) != "x86_64",
-    reason="test data uses gcc 4.5.0 which does not support aarch64",
-)
-def test_install_legacy_buildcache_layout(mutable_config, compiler_factory, install_mockery):
-    """Legacy buildcache layout involved a nested archive structure
-    where the .spack file contained a repeated spec.json and another
-    compressed archive file containing the install tree. This test
-    makes sure we can still read that layout."""
-    legacy_layout_dir = os.path.join(test_path, "data", "mirrors", "legacy_layout")
-    mirror_url = f"file://{legacy_layout_dir}"
-    filename = (
-        "test-debian6-core2-gcc-4.5.0-archive-files-2.0-"
-        "l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json"
-    )
-    spec_json_path = os.path.join(legacy_layout_dir, "build_cache", filename)
-    mirror_cmd("add", "--scope", "site", "test-legacy-layout", mirror_url)
-    output = install_cmd("--no-check-signature", "--cache-only", "-f", spec_json_path, output=str)
-    mirror_cmd("rm", "--scope=site", "test-legacy-layout")
-    expect_line = "Extracting archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk from binary cache"
-    assert expect_line in output


 def test_FetchCacheError_only_accepts_lists_of_errors():
     with pytest.raises(TypeError, match="list"):
         bindist.FetchCacheError("error")
@@ -600,7 +590,60 @@ def test_text_relocate_if_needed(install_mockery, temporary_store, mock_fetch, t
     assert join_path("bin", "secretexe") not in manifest["relocate_textfiles"]


-def test_etag_fetching_304():
+def test_compression_writer(tmp_path):
     text = "This is some text. We might or might not like to compress it as we write."
     checksum_algo = "sha256"

     # Write the data using gzip compression
     compressed_output_path = str(tmp_path / "compressed_text")
     with compression_writer(compressed_output_path, "gzip", checksum_algo) as (
         compressor,
         checker,
     ):
         compressor.write(text.encode("utf-8"))

     compressed_size = checker.length
     compressed_checksum = checker.hexdigest()

     with open(compressed_output_path, "rb") as f:
         binary_content = f.read()

     assert bindist.compute_hash(binary_content) == compressed_checksum
     assert os.stat(compressed_output_path).st_size == compressed_size
     assert binary_content[:2] == b"\x1f\x8b"
     decompressed_content = gzip.decompress(binary_content).decode("utf-8")

     assert decompressed_content == text

     # Write the data without compression
     uncompressed_output_path = str(tmp_path / "uncompressed_text")
     with compression_writer(uncompressed_output_path, "none", checksum_algo) as (
         compressor,
         checker,
     ):
         compressor.write(text.encode("utf-8"))

     uncompressed_size = checker.length
     uncompressed_checksum = checker.hexdigest()

     with open(uncompressed_output_path, "r", encoding="utf-8") as f:
         content = f.read()

     assert bindist.compute_hash(content) == uncompressed_checksum
     assert os.stat(uncompressed_output_path).st_size == uncompressed_size
     assert content == text

     # Make sure we raise if requesting unknown compression type
     nocare_output_path = str(tmp_path / "wontwrite")
     with pytest.raises(BuildcacheEntryError, match="Unknown compression type"):
         with compression_writer(nocare_output_path, "gsip", checksum_algo) as (
             compressor,
             checker,
         ):
             compressor.write(text)
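The compression_writer interface exercised above — a context manager yielding a writable stream plus a checksum/length tracker for the bytes that actually hit disk — can be sketched in a few lines. This is an illustrative approximation, not Spack's implementation: the ChecksumWriter class is invented for the sketch, and it raises a plain ValueError where Spack raises BuildcacheEntryError.

import contextlib
import gzip
import hashlib


class ChecksumWriter:
    """Wrap a binary file object, hashing and counting bytes as they pass."""

    def __init__(self, fileobj, algorithm: str):
        self._fileobj = fileobj
        self._hasher = hashlib.new(algorithm)
        self.length = 0

    def write(self, data: bytes) -> int:
        self._hasher.update(data)
        self.length += len(data)
        return self._fileobj.write(data)

    def flush(self) -> None:
        self._fileobj.flush()

    def hexdigest(self) -> str:
        return self._hasher.hexdigest()


@contextlib.contextmanager
def compression_writer(path: str, compression: str, checksum_algorithm: str):
    if compression not in ("gzip", "none"):
        # stand-in for BuildcacheEntryError in the real code
        raise ValueError(f"Unknown compression type: {compression}")
    with open(path, "wb") as raw:
        checker = ChecksumWriter(raw, checksum_algorithm)
        if compression == "gzip":
            # gzip wraps the checker, so checksum/length reflect compressed bytes
            with gzip.GzipFile(fileobj=checker, mode="wb") as compressor:
                yield compressor, checker
        else:
            yield checker, checker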
+def test_v2_etag_fetching_304():
     # Test conditional fetch with etags. If the remote hasn't modified the file
     # it returns 304, which is an HTTPError in urllib-land. That should be
     # handled as success, since it means the local cache is up-to-date.
@@ -613,7 +656,7 @@ def response_304(request: urllib.request.Request):
         )
         assert False, "Should not fetch {}".format(url)

-    fetcher = bindist.EtagIndexFetcher(
+    fetcher = bindist.EtagIndexFetcherV2(
         url="https://www.example.com",
         etag="112a8bbc1b3f7f185621c1ee335f0502",
         urlopen=response_304,
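The 304 handling these tests encode is plain HTTP: send the cached ETag in an If-None-Match header, and treat a 304 reply as "local copy is still fresh". A standalone sketch using only the standard library (the URL and helper name are placeholders):

import urllib.error
import urllib.request


def fetch_if_modified(url: str, etag: str):
    """Return the response body, or None if the server answered 304 Not Modified."""
    request = urllib.request.Request(url, headers={"If-None-Match": f'"{etag}"'})
    try:
        with urllib.request.urlopen(request) as response:
            return response.read()
    except urllib.error.HTTPError as err:
        if err.code == 304:
            return None  # cache is up-to-date
        raise  # real errors (404, ...) propagate, like FetchIndexError above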
@@ -624,7 +667,7 @@ def response_304(request: urllib.request.Request):
     assert result.fresh


-def test_etag_fetching_200():
+def test_v2_etag_fetching_200():
     # Test conditional fetch with etags. The remote has modified the file.
     def response_200(request: urllib.request.Request):
         url = request.get_full_url()
@@ -638,7 +681,7 @@ def response_200(request: urllib.request.Request):
         )
         assert False, "Should not fetch {}".format(url)

-    fetcher = bindist.EtagIndexFetcher(
+    fetcher = bindist.EtagIndexFetcherV2(
         url="https://www.example.com",
         etag="112a8bbc1b3f7f185621c1ee335f0502",
         urlopen=response_200,
@@ -652,7 +695,7 @@ def response_200(request: urllib.request.Request):
     assert result.hash == bindist.compute_hash("Result")


-def test_etag_fetching_404():
+def test_v2_etag_fetching_404():
     # Test conditional fetch with etags. The remote has modified the file.
     def response_404(request: urllib.request.Request):
         raise urllib.error.HTTPError(
@@ -663,7 +706,7 @@ def response_404(request: urllib.request.Request):
             fp=None,
         )

-    fetcher = bindist.EtagIndexFetcher(
+    fetcher = bindist.EtagIndexFetcherV2(
         url="https://www.example.com",
         etag="112a8bbc1b3f7f185621c1ee335f0502",
         urlopen=response_404,
@@ -673,13 +716,13 @@ def response_404(request: urllib.request.Request):
         fetcher.conditional_fetch()


-def test_default_index_fetch_200():
+def test_v2_default_index_fetch_200():
     index_json = '{"Hello": "World"}'
     index_json_hash = bindist.compute_hash(index_json)

     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith(INDEX_HASH_FILE):
+        if url.endswith("index.json.hash"):
             return urllib.response.addinfourl(  # type: ignore[arg-type]
                 io.BytesIO(index_json_hash.encode()),
                 headers={},  # type: ignore[arg-type]
@@ -697,7 +740,7 @@ def urlopen(request: urllib.request.Request):

         assert False, "Unexpected request {}".format(url)

-    fetcher = bindist.DefaultIndexFetcher(
+    fetcher = bindist.DefaultIndexFetcherV2(
         url="https://www.example.com", local_hash="outdated", urlopen=urlopen
     )

@@ -710,7 +753,7 @@ def urlopen(request: urllib.request.Request):
     assert result.hash == index_json_hash


-def test_default_index_dont_fetch_index_json_hash_if_no_local_hash():
+def test_v2_default_index_dont_fetch_index_json_hash_if_no_local_hash():
     # When we don't have local hash, we should not be fetching the
     # remote index.json.hash file, but only index.json.
     index_json = '{"Hello": "World"}'
@@ -728,7 +771,7 @@ def urlopen(request: urllib.request.Request):

         assert False, "Unexpected request {}".format(url)

-    fetcher = bindist.DefaultIndexFetcher(
+    fetcher = bindist.DefaultIndexFetcherV2(
         url="https://www.example.com", local_hash=None, urlopen=urlopen
     )

@@ -741,13 +784,13 @@ def urlopen(request: urllib.request.Request):
     assert not result.fresh


-def test_default_index_not_modified():
+def test_v2_default_index_not_modified():
     index_json = '{"Hello": "World"}'
     index_json_hash = bindist.compute_hash(index_json)

     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith(INDEX_HASH_FILE):
+        if url.endswith("index.json.hash"):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json_hash.encode()),
                 headers={},  # type: ignore[arg-type]
@@ -758,7 +801,7 @@ def urlopen(request: urllib.request.Request):
         # No request to index.json should be made.
         assert False, "Unexpected request {}".format(url)

-    fetcher = bindist.DefaultIndexFetcher(
+    fetcher = bindist.DefaultIndexFetcherV2(
         url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen
     )

@@ -766,7 +809,7 @@ def urlopen(request: urllib.request.Request):


 @pytest.mark.parametrize("index_json", [b"\xa9", b"!#%^"])
-def test_default_index_invalid_hash_file(index_json):
+def test_v2_default_index_invalid_hash_file(index_json):
     # Test invalid unicode / invalid hash type
     index_json_hash = bindist.compute_hash(index_json)

@@ -778,21 +821,21 @@ def urlopen(request: urllib.request.Request):
             code=200,
         )

-    fetcher = bindist.DefaultIndexFetcher(
+    fetcher = bindist.DefaultIndexFetcherV2(
         url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen
     )

     assert fetcher.get_remote_hash() is None


-def test_default_index_json_404():
+def test_v2_default_index_json_404():
     # Test invalid unicode / invalid hash type
     index_json = '{"Hello": "World"}'
     index_json_hash = bindist.compute_hash(index_json)

     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
-        if url.endswith(INDEX_HASH_FILE):
+        if url.endswith("index.json.hash"):
             return urllib.response.addinfourl(
                 io.BytesIO(index_json_hash.encode()),
                 headers={},  # type: ignore[arg-type]
@@ -811,7 +854,7 @@ def urlopen(request: urllib.request.Request):

         assert False, "Unexpected fetch {}".format(url)

-    fetcher = bindist.DefaultIndexFetcher(
+    fetcher = bindist.DefaultIndexFetcherV2(
         url="https://www.example.com", local_hash="invalid", urlopen=urlopen
     )
@@ -1097,9 +1140,7 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success):
         json.dump(spec_dict, f)

     try:
-        spec_dict_disk, layout_disk = bindist._get_valid_spec_file(
-            str(path), max_supported_layout=1
-        )
+        spec_dict_disk, layout_disk = get_valid_spec_file(str(path), max_supported_layout=1)
         assert expect_success
         assert spec_dict_disk == spec_dict
         assert layout_disk == effective_layout
@@ -1109,51 +1150,66 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success):

 def test_get_valid_spec_file_doesnt_exist(tmp_path):
     with pytest.raises(bindist.InvalidMetadataFile, match="No such file"):
-        bindist._get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1)
-
-
-def test_get_valid_spec_file_gzipped(tmp_path):
-    # Create a gzipped file, contents don't matter
-    path = tmp_path / "spec.json.gz"
-    with gzip.open(path, "wb") as f:
-        f.write(b"hello")
-    with pytest.raises(
-        bindist.InvalidMetadataFile, match="Compressed spec files are not supported"
-    ):
-        bindist._get_valid_spec_file(str(path), max_supported_layout=1)
+        get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1)


 @pytest.mark.parametrize("filename", ["spec.json", "spec.json.sig"])
 def test_get_valid_spec_file_no_json(tmp_path, filename):
     tmp_path.joinpath(filename).write_text("not json")
     with pytest.raises(bindist.InvalidMetadataFile):
-        bindist._get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1)
+        get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1)


-def test_download_tarball_with_unsupported_layout_fails(
-    tmp_path, mock_packages, mutable_config, capsys
-):
-    layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1
-    spec = spack.concretize.concretize_one("pkg-c")
-    spec_dict = spec.to_dict()
-    spec_dict["buildcache_layout_version"] = layout_version
-
-    # Setup a basic local build cache structure
-    path = (
-        tmp_path / bindist.build_cache_relative_path() / bindist.tarball_name(spec, ".spec.json")
-    )
-    path.parent.mkdir(parents=True)
-    with open(path, "w", encoding="utf-8") as f:
-        json.dump(spec_dict, f)
-
-    # Configure as a mirror.
-    mirror_cmd("add", "test-mirror", str(tmp_path))
-
-    # Shouldn't be able "download" this.
-    assert bindist.download_tarball(spec, unsigned=True) is None
-
-    # And there should be a warning about an unsupported layout version.
-    assert f"Layout version {layout_version} is too new" in capsys.readouterr().err
+@pytest.mark.usefixtures("install_mockery", "mock_packages", "mock_fetch", "temporary_mirror")
+def test_url_buildcache_entry_v3(monkeypatch, tmpdir):
+    """Make sure URLBuildcacheEntry behaves as expected"""
+
+    # Create a temp mirror directory for buildcache usage
+    mirror_dir = tmpdir.join("mirror_dir")
+    mirror_url = url_util.path_to_file_url(mirror_dir.strpath)
+
+    s = Spec("libdwarf").concretized()
+
+    # Install libdwarf
+    install_cmd("--fake", s.name)
+
+    # Push libdwarf to buildcache
+    buildcache_cmd("push", "-u", mirror_dir.strpath, s.name)
+
+    cache_class = get_url_buildcache_class(bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION)
+    build_cache = cache_class(mirror_url, s, allow_unsigned=True)
+
+    manifest = build_cache.read_manifest()
+    spec_dict = build_cache.fetch_metadata()
+    local_tarball_path = build_cache.fetch_archive()
+
+    assert "spec" in spec_dict
+
+    for blob_record in manifest.data:
+        blob_path = build_cache.get_staged_blob_path(blob_record)
+        assert os.path.exists(blob_path)
+        actual_blob_size = os.stat(blob_path).st_size
+        assert blob_record.content_length == actual_blob_size
+
+    build_cache.destroy()
+
+    assert not os.path.exists(local_tarball_path)


 def test_relative_path_components():
     blobs_v3 = URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.BLOB)
     assert len(blobs_v3) == 1
     assert "blobs" in blobs_v3

     blobs_v2 = URLBuildcacheEntryV2.get_relative_path_components(BuildcacheComponent.BLOB)
     assert len(blobs_v2) == 1
     assert "build_cache" in blobs_v2

     v2_spec_url = "file:///home/me/mymirror/build_cache/linux-ubuntu22.04-sapphirerapids-gcc-12.3.0-gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.json.sig"
     assert URLBuildcacheEntryV2.get_base_url(v2_spec_url) == "file:///home/me/mymirror"

     v3_manifest_url = "file:///home/me/mymirror/v3/manifests/gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.manifest.json"
     assert URLBuildcacheEntry.get_base_url(v3_manifest_url) == "file:///home/me/mymirror"


 @pytest.mark.parametrize(
@@ -1170,3 +1226,244 @@ def test_download_tarball_with_unsupported_layout_fails(
 def test_default_tag(spec: str):
     """Make sure that computed image tags are valid."""
     assert re.fullmatch(spack.oci.image.tag, bindist._oci_default_tag(spack.spec.Spec(spec)))


 class IndexInformation(NamedTuple):
     manifest_contents: Dict[str, Any]
     index_contents: str
     index_hash: str
     manifest_path: str
     index_path: str
     manifest_etag: str
     fetched_blob: Callable[[], bool]


 @pytest.fixture
 def mock_index(tmp_path, monkeypatch) -> IndexInformation:
     mirror_root = tmp_path / "mymirror"
     index_json = '{"Hello": "World"}'
     index_json_hash = bindist.compute_hash(index_json)
     fetched = False

     cache_class = get_url_buildcache_class(
         layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
     )

     index_blob_path = os.path.join(
         str(mirror_root),
         *cache_class.get_relative_path_components(BuildcacheComponent.BLOB),
         "sha256",
         index_json_hash[:2],
         index_json_hash,
     )

     os.makedirs(os.path.dirname(index_blob_path))
     with open(index_blob_path, "w", encoding="utf-8") as fd:
         fd.write(index_json)

     index_blob_record = bindist.BlobRecord(
         os.stat(index_blob_path).st_size,
         cache_class.BUILDCACHE_INDEX_MEDIATYPE,
         "none",
         "sha256",
         index_json_hash,
     )

     index_manifest = {
         "version": cache_class.get_layout_version(),
         "data": [index_blob_record.to_dict()],
     }

     manifest_json_path = cache_class.get_index_url(str(mirror_root))

     os.makedirs(os.path.dirname(manifest_json_path))

     with open(manifest_json_path, "w", encoding="utf-8") as f:
         json.dump(index_manifest, f)

     def fetch_patch(stage, mirror_only: bool = False, err_msg: Optional[str] = None):
         nonlocal fetched
         fetched = True

     @property  # type: ignore
     def save_filename_patch(stage):
         return str(index_blob_path)

     monkeypatch.setattr(spack.stage.Stage, "fetch", fetch_patch)
     monkeypatch.setattr(spack.stage.Stage, "save_filename", save_filename_patch)

     def get_did_fetch():
         # nonlocal fetched
         return fetched

     return IndexInformation(
         index_manifest,
         index_json,
         index_json_hash,
         manifest_json_path,
         index_blob_path,
         "59bcc3ad6775562f845953cf01624225",
         get_did_fetch,
     )
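The fixture above spells out the v3 blob addressing scheme: a blob lives under the mirror at blobs/<algorithm>/<first two hex digits>/<full digest> (the "blobs" component matches test_relative_path_components earlier). A tiny illustrative helper, not Spack's code:

import hashlib
import os


def blob_relative_path(data: bytes, algorithm: str = "sha256") -> str:
    """Content-address a blob the way the fixture lays it out on disk."""
    digest = hashlib.new(algorithm, data).hexdigest()
    return os.path.join("blobs", algorithm, digest[:2], digest)


print(blob_relative_path(b'{"Hello": "World"}'))
# -> blobs/sha256/<2-char prefix>/<64-char digest>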
 def test_etag_fetching_304():
     # Test conditional fetch with etags. If the remote hasn't modified the file
     # it returns 304, which is an HTTPError in urllib-land. That should be
     # handled as success, since it means the local cache is up-to-date.
     def response_304(request: urllib.request.Request):
         url = request.get_full_url()
         if url.endswith(INDEX_MANIFEST_FILE):
             assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
             raise urllib.error.HTTPError(
                 url, 304, "Not Modified", hdrs={}, fp=None  # type: ignore[arg-type]
             )
         assert False, "Unexpected request {}".format(url)

     fetcher = bindist.EtagIndexFetcher(
         bindist.MirrorURLAndVersion(
             "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
         ),
         etag="112a8bbc1b3f7f185621c1ee335f0502",
         urlopen=response_304,
     )

     result = fetcher.conditional_fetch()
     assert isinstance(result, bindist.FetchIndexResult)
     assert result.fresh


 def test_etag_fetching_200(mock_index):
     # Test conditional fetch with etags. The remote has modified the file.
     def response_200(request: urllib.request.Request):
         url = request.get_full_url()
         if url.endswith(INDEX_MANIFEST_FILE):
             assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
             return urllib.response.addinfourl(
                 io.BytesIO(json.dumps(mock_index.manifest_contents).encode()),
                 headers={"Etag": f'"{mock_index.manifest_etag}"'},  # type: ignore[arg-type]
                 url=url,
                 code=200,
             )
         assert False, "Unexpected request {}".format(url)

     fetcher = bindist.EtagIndexFetcher(
         bindist.MirrorURLAndVersion(
             "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
         ),
         etag="112a8bbc1b3f7f185621c1ee335f0502",
         urlopen=response_200,
     )

     result = fetcher.conditional_fetch()
     assert isinstance(result, bindist.FetchIndexResult)
     assert not result.fresh
     assert mock_index.fetched_blob()
     assert result.etag == mock_index.manifest_etag
     assert result.data == mock_index.index_contents
     assert result.hash == mock_index.index_hash


 def test_etag_fetching_404():
     # Test conditional fetch with etags. The remote has modified the file.
     def response_404(request: urllib.request.Request):
         raise urllib.error.HTTPError(
             request.get_full_url(),
             404,
             "Not found",
             hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
             fp=None,
         )

     fetcher = bindist.EtagIndexFetcher(
         bindist.MirrorURLAndVersion(
             "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
         ),
         etag="112a8bbc1b3f7f185621c1ee335f0502",
         urlopen=response_404,
     )

     with pytest.raises(bindist.FetchIndexError):
         fetcher.conditional_fetch()


 def test_default_index_fetch_200(mock_index):
     # We fetch the manifest and then the index blob if the hash is outdated
     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
         if url.endswith(INDEX_MANIFEST_FILE):
             return urllib.response.addinfourl(  # type: ignore[arg-type]
                 io.BytesIO(json.dumps(mock_index.manifest_contents).encode()),
                 headers={"Etag": f'"{mock_index.manifest_etag}"'},  # type: ignore[arg-type]
                 url=url,
                 code=200,
             )

         assert False, "Unexpected request {}".format(url)

     fetcher = bindist.DefaultIndexFetcher(
         bindist.MirrorURLAndVersion(
             "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
         ),
         local_hash="outdated",
         urlopen=urlopen,
     )

     result = fetcher.conditional_fetch()

     assert isinstance(result, bindist.FetchIndexResult)
     assert not result.fresh
     assert mock_index.fetched_blob()
     assert result.etag == mock_index.manifest_etag
     assert result.data == mock_index.index_contents
     assert result.hash == mock_index.index_hash


 def test_default_index_404():
     # We get a fetch error if the index can't be fetched
     def urlopen(request: urllib.request.Request):
         raise urllib.error.HTTPError(
             request.get_full_url(),
             404,
             "Not found",
             hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
             fp=None,
         )

     fetcher = bindist.DefaultIndexFetcher(
         bindist.MirrorURLAndVersion(
             "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
         ),
         local_hash=None,
         urlopen=urlopen,
     )

     with pytest.raises(bindist.FetchIndexError):
         fetcher.conditional_fetch()


 def test_default_index_not_modified(mock_index):
     # We don't fetch the index blob if hash didn't change
     def urlopen(request: urllib.request.Request):
         url = request.get_full_url()
         if url.endswith(INDEX_MANIFEST_FILE):
             return urllib.response.addinfourl(
                 io.BytesIO(json.dumps(mock_index.manifest_contents).encode()),
                 headers={},  # type: ignore[arg-type]
                 url=url,
                 code=200,
             )

         # No other request should be made.
         assert False, "Unexpected request {}".format(url)

     fetcher = bindist.DefaultIndexFetcher(
         bindist.MirrorURLAndVersion(
             "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
         ),
         local_hash=mock_index.index_hash,
         urlopen=urlopen,
     )

     assert fetcher.conditional_fetch().fresh
     assert not mock_index.fetched_blob()
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os
+import shutil

 import pytest

@@ -37,12 +37,7 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
     assert not skipped

     # Remove the tarball, which should cause push to push.
-    os.remove(
-        tmp_path
-        / bd.BUILD_CACHE_RELATIVE_PATH
-        / bd.tarball_directory_name(spec)
-        / bd.tarball_name(spec, ".spack")
-    )
+    shutil.rmtree(tmp_path / bd.buildcache_relative_blobs_path())

     with bd.make_uploader(mirror) as uploader:
         skipped = uploader.push_or_raise(specs)
@@ -15,7 +15,7 @@

 @pytest.fixture()
 def builder_test_repository(config):
-    builder_test_path = os.path.join(spack.paths.repos_path, "builder.test")
+    builder_test_path = os.path.join(spack.paths.test_repos_path, "builder.test")
     with spack.repo.use_repositories(builder_test_path) as mock_repo:
         yield mock_repo
@@ -414,7 +414,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):


 @pytest.mark.regression("29947")
-def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_packages):
+def test_affected_specs_on_first_concretization(mutable_mock_env_path):
     e = ev.create("first_concretization")
     e.add("mpileaks~shared")
     e.add("mpileaks+shared")
@@ -444,7 +444,7 @@ def _fail(self, args):
     ci.process_command("help", [], str(repro_dir))


-def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
+def test_ci_create_buildcache(tmpdir, working_env, config, monkeypatch):
     """Test that create_buildcache returns a list of objects with the correct
     keys and types."""
     monkeypatch.setattr(ci, "push_to_build_cache", lambda a, b, c: True)
@@ -483,7 +483,7 @@ def test_ci_run_standalone_tests_missing_requirements(

 @pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
 def test_ci_run_standalone_tests_not_installed_junit(
-    tmp_path, repro_dir, working_env, mock_test_stage, capfd, mock_packages
+    tmp_path, repro_dir, working_env, mock_test_stage, capfd
 ):
     log_file = tmp_path / "junit.xml"
     args = {
@@ -501,7 +501,7 @@ def test_ci_run_standalone_tests_not_installed_junit(

 @pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
 def test_ci_run_standalone_tests_not_installed_cdash(
-    tmp_path, repro_dir, working_env, mock_test_stage, capfd, mock_packages
+    tmp_path, repro_dir, working_env, mock_test_stage, capfd
 ):
     """Test run_standalone_tests with cdash and related options."""
     log_file = tmp_path / "junit.xml"
@@ -537,7 +537,7 @@ def test_ci_run_standalone_tests_not_installed_cdash(
     assert "No such file or directory" in err


-def test_ci_skipped_report(tmpdir, mock_packages, config):
+def test_ci_skipped_report(tmpdir, config):
     """Test explicit skipping of report as well as CI's 'package' arg."""
     pkg = "trivial-smoke-test"
     spec = spack.concretize.concretize_one(pkg)
@@ -5,12 +5,16 @@
 import errno
 import json
 import os
 import pathlib
 import shutil
 from typing import List

 import pytest

 from llnl.util.filesystem import copy_tree, find

 import spack.binary_distribution
 import spack.buildcache_migrate as migrate
 import spack.cmd.buildcache
 import spack.concretize
 import spack.environment as ev
@@ -18,8 +22,16 @@
 import spack.main
 import spack.mirrors.mirror
 import spack.spec
-import spack.util.url
+import spack.util.url as url_util
 from spack.installer import PackageInstaller
 from spack.paths import test_path
 from spack.url_buildcache import (
     BuildcacheComponent,
     URLBuildcacheEntry,
     URLBuildcacheEntryV2,
     check_mirror_for_layout,
     get_url_buildcache_class,
 )

 buildcache = spack.main.SpackCommand("buildcache")
 install = spack.main.SpackCommand("install")
@@ -74,20 +86,6 @@ def test_buildcache_list_allarch(database, mock_get_specs_multiarch, capsys):
     assert output.count("mpileaks") == 2


-def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir):
-    """ "Ensure that buildcache create creates output files"""
-    pkg = "trivial-install-test-package"
-    install(pkg)
-
-    buildcache("push", "--unsigned", str(tmpdir), pkg)
-
-    spec = spack.concretize.concretize_one(pkg)
-    tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
-    tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
-    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
-    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))
-
-
 def tests_buildcache_create_env(
     install_mockery, mock_fetch, monkeypatch, tmpdir, mutable_mock_env_path
 ):
@@ -102,10 +100,15 @@ def tests_buildcache_create_env(
     buildcache("push", "--unsigned", str(tmpdir))

     spec = spack.concretize.concretize_one(pkg)
-    tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
-    tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
-    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
-    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))
+
+    mirror_url = f"file://{tmpdir.strpath}"
+
+    cache_class = get_url_buildcache_class(
+        layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+    )
+    cache_entry = cache_class(mirror_url, spec, allow_unsigned=True)
+    assert cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])
+    cache_entry.destroy()


 def test_buildcache_create_fails_on_noargs(tmpdir):
@@ -159,12 +162,14 @@ def test_update_key_index(
     # it causes the index to get update.
     buildcache("update-index", "--keys", mirror_dir.strpath)

-    key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp"))
+    key_dir_list = os.listdir(
+        os.path.join(mirror_dir.strpath, spack.binary_distribution.buildcache_relative_keys_path())
+    )

     uninstall("-y", s.name)
     mirror("rm", "test-mirror")

-    assert "index.json" in key_dir_list
+    assert "keys.manifest.json" in key_dir_list


 def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
@@ -180,10 +185,46 @@ def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
     # Install and generate build cache index
     PackageInstaller([s.package], fake=True, explicit=True).install()

-    metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")
+    assert s.name is not None
+    manifest_file = URLBuildcacheEntry.get_manifest_filename(s)
+    specs_dirs = os.path.join(
+        *URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.SPEC), s.name
+    )

-    assert not (mirror_dir / "build_cache" / metadata_file).exists()
-    assert (mirror_autopush_dir / "build_cache" / metadata_file).exists()
+    assert not (mirror_dir / specs_dirs / manifest_file).exists()
+    assert (mirror_autopush_dir / specs_dirs / manifest_file).exists()


+def test_buildcache_exclude(tmp_path, install_mockery, mock_fetch):
+    """Test buildcache with autopush can exclude"""
+    mirror_dir = tmp_path / "mirror_a"
+
+    mirror(
+        "add",
+        "--autopush",
+        "--exclude-specs",
+        "libelf",
+        "--unsigned",
+        "mirror-autopush",
+        mirror_dir.as_uri(),
+    )
+
+    s = spack.concretize.concretize_one("libdwarf")
+
+    # Install and generate build cache index
+    PackageInstaller([s.package], fake=True, explicit=True).install()
+    found_file = URLBuildcacheEntry.get_manifest_filename(s)
+    missing_file = URLBuildcacheEntry.get_manifest_filename(s["libelf"])
+    found_dirs = os.path.join(
+        *URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.SPEC), s.name
+    )
+    missing_dirs = os.path.join(
+        *URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.SPEC),
+        s["libelf"].name,
+    )
+
+    assert (mirror_dir / found_dirs / found_file).exists()
+    assert not (mirror_dir / missing_dirs / missing_file).exists()


 def test_buildcache_sync(
@@ -205,7 +246,11 @@ def test_buildcache_sync(
     out_env_pkg = "libdwarf"

     def verify_mirror_contents():
-        dest_list = os.listdir(os.path.join(dest_mirror_dir, "build_cache"))
+        dest_list = os.listdir(
+            os.path.join(
+                dest_mirror_dir, spack.binary_distribution.buildcache_relative_specs_path()
+            )
+        )

         found_pkg = False

@@ -252,33 +297,15 @@ def verify_mirror_contents():
     verify_mirror_contents()
     shutil.rmtree(dest_mirror_dir)

+    cache_class = get_url_buildcache_class(
+        layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+    )
+
     def manifest_insert(manifest, spec, dest_url):
-        manifest[spec.dag_hash()] = [
-            {
-                "src": spack.util.url.join(
-                    src_mirror_url,
-                    spack.binary_distribution.build_cache_relative_path(),
-                    spack.binary_distribution.tarball_name(spec, ".spec.json"),
-                ),
-                "dest": spack.util.url.join(
-                    dest_url,
-                    spack.binary_distribution.build_cache_relative_path(),
-                    spack.binary_distribution.tarball_name(spec, ".spec.json"),
-                ),
-            },
-            {
-                "src": spack.util.url.join(
-                    src_mirror_url,
-                    spack.binary_distribution.build_cache_relative_path(),
-                    spack.binary_distribution.tarball_path_name(spec, ".spack"),
-                ),
-                "dest": spack.util.url.join(
-                    dest_url,
-                    spack.binary_distribution.build_cache_relative_path(),
-                    spack.binary_distribution.tarball_path_name(spec, ".spack"),
-                ),
-            },
-        ]
+        manifest[spec.dag_hash()] = {
+            "src": cache_class.get_manifest_url(spec, src_mirror_url),
+            "dest": cache_class.get_manifest_url(spec, dest_url),
+        }

     manifest_file = os.path.join(tmpdir.strpath, "manifest_dest.json")
     with open(manifest_file, "w", encoding="utf-8") as fd:
@@ -298,9 +325,7 @@ def manifest_insert(manifest, spec, dest_url):
     with open(manifest_file, "w", encoding="utf-8") as fd:
         manifest = {}
         for spec in test_env.specs_by_hash.values():
-            manifest_insert(
-                manifest, spec, spack.util.url.join(dest_mirror_url, "invalid_path")
-            )
+            manifest_insert(manifest, spec, url_util.join(dest_mirror_url, "invalid_path"))
         json.dump(manifest, fd)

     # Trigger the warning
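After this change each DAG hash in a sync manifest maps to a single src/dest pair of spec manifest URLs instead of a list of spec-file and tarball entries. A hand-written illustration of the new shape (hash and mirror paths are invented; the URL layout follows the v3 example in test_relative_path_components above):

import json

manifest = {
    "5pddli3htvfe6svs7nbrqmwi5735agi3": {
        "src": "file:///src_mirror/v3/manifests/gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.manifest.json",
        "dest": "file:///dest_mirror/v3/manifests/gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.manifest.json",
    }
}

with open("manifest_dest.json", "w", encoding="utf-8") as fd:
    json.dump(manifest, fd)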
@@ -327,11 +352,37 @@ def test_buildcache_create_install(

     buildcache("push", "--unsigned", str(tmpdir), pkg)

+    mirror_url = f"file://{tmpdir.strpath}"
+
     spec = spack.concretize.concretize_one(pkg)
-    tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
-    tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
-    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
-    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))
+    cache_class = get_url_buildcache_class(
+        layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+    )
+    cache_entry = cache_class(mirror_url, spec, allow_unsigned=True)
+    assert spec.name is not None
+    manifest_path = os.path.join(
+        str(tmpdir),
+        *cache_class.get_relative_path_components(BuildcacheComponent.SPEC),
+        spec.name,
+        cache_class.get_manifest_filename(spec),
+    )
+
+    assert os.path.exists(manifest_path)
+    cache_entry.read_manifest()
+    spec_blob_record = cache_entry.get_blob_record(BuildcacheComponent.SPEC)
+    tarball_blob_record = cache_entry.get_blob_record(BuildcacheComponent.TARBALL)
+
+    spec_blob_path = os.path.join(
+        tmpdir.strpath, *cache_class.get_blob_path_components(spec_blob_record)
+    )
+    assert os.path.exists(spec_blob_path)
+
+    tarball_blob_path = os.path.join(
+        tmpdir.strpath, *cache_class.get_blob_path_components(tarball_blob_record)
+    )
+    assert os.path.exists(tarball_blob_path)
+
+    cache_entry.destroy()


 @pytest.mark.parametrize(
@@ -503,3 +554,230 @@ def test_push_without_build_deps(tmp_path, temporary_store, mock_packages, mutab
         "push", "--update-index", "--without-build-dependencies", "my-mirror", f"/{s.dag_hash()}"
     )
     assert spack.binary_distribution.update_cache_and_get_specs() == [s]


 @pytest.fixture(scope="function")
 def v2_buildcache_layout(tmp_path):
     def _layout(signedness: str = "signed"):
         source_path = str(pathlib.Path(test_path) / "data" / "mirrors" / "v2_layout" / signedness)
         test_mirror_path = tmp_path / "mirror"
         copy_tree(source_path, test_mirror_path)
         return test_mirror_path

     return _layout


 def test_check_mirror_for_layout(v2_buildcache_layout, mutable_config, capsys):
     """Check printed warning in the presence of v2 layout binary mirrors"""
     test_mirror_path = v2_buildcache_layout("unsigned")

     check_mirror_for_layout(spack.mirrors.mirror.Mirror.from_local_path(str(test_mirror_path)))
     err = str(capsys.readouterr()[1])
     assert all([word in err for word in ["Warning", "missing", "layout"]])


 def test_url_buildcache_entry_v2_exists(
     capsys, v2_buildcache_layout, mock_packages, mutable_config
 ):
     """Test existence check for v2 buildcache entries"""
     test_mirror_path = v2_buildcache_layout("unsigned")
     mirror_url = f"file://{test_mirror_path}"
     mirror("add", "v2mirror", mirror_url)

     with capsys.disabled():
         output = buildcache("list", "-a", "-l")

     assert "Fetching an index from a v2 binary mirror layout" in output
     assert "is deprecated" in output

     v2_cache_class = URLBuildcacheEntryV2

     # If you don't give it a spec, it returns False
     build_cache = v2_cache_class(mirror_url)
     assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])

     spec = spack.concretize.concretize_one("libdwarf")

     # In v2 we have to ask for both, because we need to have the spec to have the tarball
     build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True)
     assert not build_cache.exists([BuildcacheComponent.TARBALL])
     assert not build_cache.exists([BuildcacheComponent.SPEC])
     # But if we do ask for both, they should be there in this case
     assert build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])

     spec_path = build_cache._get_spec_url(spec, mirror_url, ext=".spec.json")[7:]
     tarball_path = build_cache._get_tarball_url(spec, mirror_url)[7:]

     os.remove(tarball_path)
     build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True)
     assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])

     os.remove(spec_path)
     build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True)
     assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])


 @pytest.mark.parametrize("signing", ["unsigned", "signed"])
 def test_install_v2_layout(
     signing,
     capsys,
     v2_buildcache_layout,
     mock_packages,
     mutable_config,
     mutable_mock_env_path,
     install_mockery,
     mock_gnupghome,
     monkeypatch,
 ):
     """Ensure we can still install from signed and unsigned v2 buildcache"""
     test_mirror_path = v2_buildcache_layout(signing)
     mirror("add", "my-mirror", str(test_mirror_path))

     # Trust original signing key (no-op if this is the unsigned pass)
     buildcache("keys", "--install", "--trust")

     with capsys.disabled():
         output = install("--fake", "--no-check-signature", "libdwarf")

     assert "Extracting libelf" in output
     assert "libelf: Successfully installed" in output
     assert "Extracting libdwarf" in output
     assert "libdwarf: Successfully installed" in output
     assert "Installing a spec from a v2 binary mirror layout" in output
     assert "is deprecated" in output


 def test_basic_migrate_unsigned(capsys, v2_buildcache_layout, mutable_config):
     """Make sure first unsigned migration results in usable buildcache,
     leaving the previous layout in place. Also test that a subsequent one
     doesn't need to migrate anything, and that using --delete-existing
     removes the previous layout"""

     test_mirror_path = v2_buildcache_layout("unsigned")
     mirror("add", "my-mirror", str(test_mirror_path))

     with capsys.disabled():
         output = buildcache("migrate", "--unsigned", "my-mirror")

     # The output indicates both specs were migrated
     assert output.count("Successfully migrated") == 6

     build_cache_path = str(test_mirror_path / "build_cache")

     # Without "--delete-existing" and "--yes-to-all", migration leaves the
     # previous layout in place
     assert os.path.exists(build_cache_path)
     assert os.path.isdir(build_cache_path)

     # Now list the specs available under the new layout
     with capsys.disabled():
         output = buildcache("list", "--allarch")

     assert "libdwarf" in output and "libelf" in output

     with capsys.disabled():
         output = buildcache(
             "migrate", "--unsigned", "--delete-existing", "--yes-to-all", "my-mirror"
         )

     # A second migration of the same mirror indicates neither spec
     # needs to be migrated
     assert output.count("No need to migrate") == 6

     # When we provide "--delete-existing" and "--yes-to-all", migration
     # removes the old layout
     assert not os.path.exists(build_cache_path)


 def test_basic_migrate_signed(
     capsys, v2_buildcache_layout, monkeypatch, mock_gnupghome, mutable_config
 ):
     """Test a signed migration requires a signing key, requires the public
     key originally used to sign the pkgs, fails and prints reasonable messages
     if those requirements are unmet, and eventually succeeds when they are met."""
     test_mirror_path = v2_buildcache_layout("signed")
     mirror("add", "my-mirror", str(test_mirror_path))

     with pytest.raises(migrate.MigrationException) as error:
         buildcache("migrate", "my-mirror")

     # Without a signing key spack fails and explains why
     assert error.value.message == "Signed migration requires exactly one secret key in keychain"

     # Create a signing key and trust the key used to sign the pkgs originally
     gpg("create", "New Test Signing Key", "noone@nowhere.org")

     with capsys.disabled():
         output = buildcache("migrate", "my-mirror")

     # Without trusting the original signing key, spack fails with an explanation
     assert "Failed to verify signature of libelf" in output
     assert "Failed to verify signature of libdwarf" in output
     assert "did you mean to perform an unsigned migration" in output

     # Trust original signing key (since it's in the original layout location,
     # this is where the monkeypatched attribute is used)
     with capsys.disabled():
         output = buildcache("keys", "--install", "--trust")

     with capsys.disabled():
         output = buildcache("migrate", "my-mirror")

     # Once we have the proper keys, migration should succeed
     assert "Successfully migrated libelf" in output
     assert "Successfully migrated libelf" in output

     # Now list the specs available under the new layout
     with capsys.disabled():
         output = buildcache("list", "--allarch")

     assert "libdwarf" in output and "libelf" in output


 def test_unsigned_migrate_of_signed_mirror(capsys, v2_buildcache_layout, mutable_config):
     """Test spack can do an unsigned migration of a signed buildcache by
     ignoring signatures and skipping re-signing."""

     test_mirror_path = v2_buildcache_layout("signed")
     mirror("add", "my-mirror", str(test_mirror_path))

     with capsys.disabled():
         output = buildcache(
             "migrate", "--unsigned", "--delete-existing", "--yes-to-all", "my-mirror"
         )

     # Now list the specs available under the new layout
     with capsys.disabled():
         output = buildcache("list", "--allarch")

     assert "libdwarf" in output and "libelf" in output

     # We should find two spec manifest files, one for each spec
     file_list = find(test_mirror_path, "*.spec.manifest.json")
     assert len(file_list) == 6
     assert any(["libdwarf" in file for file in file_list])
     assert any(["libelf" in file for file in file_list])

     # The two spec manifest files should be unsigned
     for file_path in file_list:
         with open(file_path, "r", encoding="utf-8") as fd:
             assert json.load(fd)


 def test_migrate_requires_index(capsys, v2_buildcache_layout, mutable_config):
     """Test spack fails with a reasonable error message when mirror does
     not have an index"""

     test_mirror_path = v2_buildcache_layout("unsigned")
     v2_index_path = test_mirror_path / "build_cache" / "index.json"
     v2_index_hash_path = test_mirror_path / "build_cache" / "index.json.hash"
     os.remove(str(v2_index_path))
     os.remove(str(v2_index_hash_path))

     mirror("add", "my-mirror", str(test_mirror_path))

     with pytest.raises(migrate.MigrationException) as error:
         buildcache("migrate", "--unsigned", "my-mirror")

     # If the buildcache has no index, spack fails and explains why
     assert error.value.message == "Buildcache migration requires a buildcache index"
@@ -31,11 +31,8 @@
 from spack.ci.common import PipelineDag, PipelineOptions, SpackCIConfig
 from spack.ci.generator_registry import generator
 from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
 from spack.database import INDEX_JSON_FILE
 from spack.error import SpackError
 from spack.schema.buildcache_spec import schema as specfile_schema
 from spack.schema.database_index import schema as db_idx_schema
 from spack.spec import Spec
 from spack.test.conftest import MockHTTPResponse

 config_cmd = spack.main.SpackCommand("config")
@@ -718,7 +715,7 @@ def test_ci_nothing_to_rebuild(
     )

     install_cmd("archive-files")
-    buildcache_cmd("push", "-f", "-u", mirror_url, "archive-files")
+    buildcache_cmd("push", "-f", "-u", "--update-index", mirror_url, "archive-files")

     with working_dir(tmp_path):
         env_cmd("create", "test", "./spack.yaml")
@@ -855,18 +852,18 @@ def test_push_to_build_cache(

     # Test generating buildcache index while we have bin mirror
     buildcache_cmd("update-index", mirror_url)
-    with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as idx_fd:
-        index_object = json.load(idx_fd)
-        jsonschema.validate(index_object, db_idx_schema)
+
+    # Validate resulting buildcache (database) index
+    layout_version = spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+    url_and_version = spack.binary_distribution.MirrorURLAndVersion(
+        mirror_url, layout_version
+    )
+    index_fetcher = spack.binary_distribution.DefaultIndexFetcher(url_and_version, None)
+    result = index_fetcher.conditional_fetch()
+    jsonschema.validate(json.loads(result.data), db_idx_schema)

     # Now that index is regenerated, validate "buildcache list" output
     assert "patchelf" in buildcache_cmd("list", output=str)
-    # Also test buildcache_spec schema
-    for file_name in os.listdir(mirror_dir / "build_cache"):
-        if file_name.endswith(".spec.json.sig"):
-            with open(mirror_dir / "build_cache" / file_name, encoding="utf-8") as f:
-                spec_dict = Spec.extract_json_from_clearsig(f.read())
-                jsonschema.validate(spec_dict, specfile_schema)

     logs_dir = scratch / "logs_dir"
     logs_dir.mkdir()
@@ -1032,7 +1029,7 @@ def test_ci_generate_override_runner_attrs(


 def test_ci_rebuild_index(
-    tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_fetch
+    tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_fetch, capsys
 ):
     scratch = tmp_path / "working_dir"
     mirror_dir = scratch / "mirror"
@@ -1069,8 +1066,9 @@ def test_ci_rebuild_index(
     buildcache_cmd("push", "-u", "-f", mirror_url, "callpath")
     ci_cmd("rebuild-index")

-    with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as f:
-        jsonschema.validate(json.load(f), db_idx_schema)
+    with capsys.disabled():
+        output = buildcache_cmd("list", "--allarch")
+    assert "callpath" in output


 def test_ci_get_stack_changed(mock_git_repo, monkeypatch):
@@ -2032,7 +2030,7 @@ def test_ci_verify_versions_valid(
     repo, _, commits = mock_git_package_changes
     spack.repo.PATH.put_first(repo)

-    monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
+    monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)

     out = ci_cmd("verify-versions", commits[-1], commits[-3])
     assert "Validated diff-test@2.1.5" in out
@@ -2049,7 +2047,7 @@ def test_ci_verify_versions_standard_invalid(
     repo, _, commits = mock_git_package_changes
     spack.repo.PATH.put_first(repo)

-    monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
+    monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)

     out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False)
     assert "Invalid checksum found diff-test@2.1.5" in out
@@ -2060,7 +2058,7 @@ def test_ci_verify_versions_manual_package(monkeypatch, mock_packages, mock_git_
     repo, _, commits = mock_git_package_changes
     spack.repo.PATH.put_first(repo)

-    monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
+    monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)

     pkg_class = spack.spec.Spec("diff-test").package_class
     monkeypatch.setattr(pkg_class, "manual_download", True)
@@ -20,6 +20,8 @@
 config = spack.main.SpackCommand("config")
 env = spack.main.SpackCommand("env")

+pytestmark = pytest.mark.usefixtures("mock_packages")
+

 def _create_config(scope=None, data={}, section="packages"):
     scope = scope or spack.config.default_modify_scope()
@@ -1829,7 +1829,7 @@ def test_indirect_build_dep(tmp_path):
     build-only dep. Make sure this concrete DAG is preserved when writing the
     environment out and reading it back.
     """
-    builder = spack.repo.MockRepositoryBuilder(tmp_path / "repo")
+    builder = spack.repo.MockRepositoryBuilder(tmp_path)
     builder.add_package("z")
     builder.add_package("y", dependencies=[("z", "build", None)])
     builder.add_package("x", dependencies=[("y", None, None)])
@@ -1862,7 +1862,7 @@ def test_store_different_build_deps(tmp_path):
             z1

     """
-    builder = spack.repo.MockRepositoryBuilder(tmp_path / "mirror")
+    builder = spack.repo.MockRepositoryBuilder(tmp_path)
     builder.add_package("z")
     builder.add_package("y", dependencies=[("z", "build", None)])
     builder.add_package("x", dependencies=[("y", None, None), ("z", "build", None)])
@@ -8,6 +8,7 @@

import llnl.util.filesystem as fs

import spack.binary_distribution as bindist
import spack.util.executable
import spack.util.gpg
from spack.main import SpackCommand
@@ -172,23 +173,25 @@ def test_gpg(tmpdir, mutable_config, mock_gnupghome):
# Verification should now succeed again.
gpg("verify", str(test_path))

relative_keys_path = bindist.buildcache_relative_keys_path()

# Publish the keys using a directory path
test_path = tmpdir.join("dir_cache")
os.makedirs("%s" % test_path)
os.makedirs(f"{test_path}")
gpg("publish", "--rebuild-index", "-d", str(test_path))
assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)
assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json")

# Publish the keys using a mirror url
test_path = tmpdir.join("url_cache")
os.makedirs("%s" % test_path)
test_url = "file://%s" % test_path
os.makedirs(f"{test_path}")
test_url = f"file://{test_path}"
gpg("publish", "--rebuild-index", "--mirror-url", test_url)
assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)
assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json")

# Publish the keys using a mirror name
test_path = tmpdir.join("named_cache")
os.makedirs("%s" % test_path)
mirror_url = "file://%s" % test_path
os.makedirs(f"{test_path}")
mirror_url = f"file://{test_path}"
mirror("add", "gpg", mirror_url)
gpg("publish", "--rebuild-index", "-m", "gpg")
assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)
assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json")

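The rewritten assertions above encode the buildcache layout change: signing keys move from the fixed build_cache/_pgp/ prefix to a relative path computed by the API. A small helper sketch, assuming bindist is spack.binary_distribution as imported in this file (the helper name is hypothetical):

import os

import spack.binary_distribution as bindist


def keys_manifest_path(mirror_root):
    # Where the key manifest lives under the new layout.
    return os.path.join(mirror_root, bindist.buildcache_relative_keys_path(), "keys.manifest.json")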
@@ -9,6 +9,8 @@
import spack.cmd.info
from spack.main import SpackCommand

pytestmark = [pytest.mark.usefixtures("mock_packages")]

info = SpackCommand("info")


@@ -31,15 +33,12 @@ def _print(*args, **kwargs):
return buffer


@pytest.mark.parametrize(
"pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk", "gasnet", "warpx"]
)
@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
def test_it_just_runs(pkg, extra_args):
info(pkg, *extra_args)
def test_it_just_runs(extra_args):
info("vtk-m", *extra_args)


def test_info_noversion(mock_packages, print_buffer):
def test_info_noversion(print_buffer):
"""Check that a mock package with no versions outputs None."""
info("noversion")

@@ -58,7 +57,7 @@ def test_info_noversion(mock_packages, print_buffer):
@pytest.mark.parametrize(
"pkg_query,expected", [("zlib", "False"), ("find-externals1", "True (version)")]
)
def test_is_externally_detectable(mock_packages, pkg_query, expected, parser, print_buffer):
def test_is_externally_detectable(pkg_query, expected, parser, print_buffer):
args = parser.parse_args(["--detectable", pkg_query])
spack.cmd.info.info(parser, args)

@@ -70,13 +69,7 @@ def test_is_externally_detectable(mock_packages, pkg_query, expected, parser, pr


@pytest.mark.parametrize(
"pkg_query",
[
"hdf5",
"cloverleaf3d",
"trilinos",
"gcc", # This should ensure --test's c_names processing loop is covered
],
"pkg_query", ["vtk-m", "gcc"] # This should ensure --test's c_names processing loop is covered
)
@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
def test_info_fields(pkg_query, extra_args, parser, print_buffer):

@@ -6,16 +6,20 @@
import sys
from textwrap import dedent

import pytest

import spack.paths
import spack.repo
from spack.main import SpackCommand

pytestmark = [pytest.mark.usefixtures("mock_packages")]

list = SpackCommand("list")


def test_list():
output = list()
assert "cloverleaf3d" in output
assert "bzip2" in output
assert "hdf5" in output


@@ -41,7 +45,7 @@ def test_list_cli_output_format(mock_tty_stdout):
assert out == out_str


def test_list_filter(mock_packages):
def test_list_filter():
output = list("py-*")
assert "py-extension1" in output
assert "py-extension2" in output
@@ -57,18 +61,18 @@ def test_list_filter(mock_packages):
assert "mpich" not in output


def test_list_search_description(mock_packages):
def test_list_search_description():
output = list("--search-description", "one build dependency")
assert "depb" in output


def test_list_format_name_only(mock_packages):
def test_list_format_name_only():
output = list("--format", "name_only")
assert "zmpi" in output
assert "hdf5" in output


def test_list_format_version_json(mock_packages):
def test_list_format_version_json():
output = list("--format", "version_json")
assert '{"name": "zmpi",' in output
assert '{"name": "dyninst",' in output
@@ -77,7 +81,7 @@ def test_list_format_version_json(mock_packages):
json.loads(output)


def test_list_format_html(mock_packages):
def test_list_format_html():
output = list("--format", "html")
assert '<div class="section" id="zmpi">' in output
assert "<h1>zmpi" in output
@@ -86,7 +90,7 @@ def test_list_format_html(mock_packages):
assert "<h1>hdf5" in output


def test_list_update(tmpdir, mock_packages):
def test_list_update(tmpdir):
update_file = tmpdir.join("output")

# not yet created when list is run
@@ -113,7 +117,7 @@ def test_list_update(tmpdir, mock_packages):
assert f.read() == "empty\n"


def test_list_tags(mock_packages):
def test_list_tags():
output = list("--tag", "tag1")
assert "mpich" in output
assert "mpich2" in output
@@ -127,7 +131,7 @@ def test_list_tags(mock_packages):
assert "mpich2" in output


def test_list_count(mock_packages):
def test_list_count():
output = list("--count")
assert int(output.strip()) == len(spack.repo.all_package_names())

@@ -137,11 +141,10 @@ def test_list_count(mock_packages):
)


# def test_list_repos(mock_packages, builder_test_repository):
def test_list_repos():
with spack.repo.use_repositories(
os.path.join(spack.paths.repos_path, "builtin.mock"),
os.path.join(spack.paths.repos_path, "builder.test"),
os.path.join(spack.paths.test_repos_path, "builtin.mock"),
os.path.join(spack.paths.test_repos_path, "builder.test"),
):
total_pkgs = len(list().strip().split())
mock_pkgs = len(list("-r", "builtin.mock").strip().split())

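test_list_repos now resolves the mock repositories through spack.paths.test_repos_path rather than spack.paths.repos_path. A minimal sketch of the new lookup, assuming the use_repositories context manager used above:

import os

import spack.paths
import spack.repo

mock_repo = os.path.join(spack.paths.test_repos_path, "builtin.mock")
with spack.repo.use_repositories(mock_repo):
    pass  # commands that need the mock packages run here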
@@ -9,6 +9,8 @@
import spack.main
import spack.repo

pytestmark = [pytest.mark.usefixtures("mock_packages")]

maintainers = spack.main.SpackCommand("maintainers")

MAINTAINED_PACKAGES = [
@@ -26,17 +28,17 @@ def split(output):
return re.split(r"\s+", output) if output else []


def test_maintained(mock_packages):
def test_maintained():
out = split(maintainers("--maintained"))
assert out == MAINTAINED_PACKAGES


def test_unmaintained(mock_packages):
def test_unmaintained():
out = split(maintainers("--unmaintained"))
assert out == sorted(set(spack.repo.all_package_names()) - set(MAINTAINED_PACKAGES))


def test_all(mock_packages, capfd):
def test_all(capfd):
with capfd.disabled():
out = split(maintainers("--all"))
assert out == [
@@ -63,7 +65,7 @@ def test_all(mock_packages, capfd):
assert out == ["maintainers-1:", "user1,", "user2"]


def test_all_by_user(mock_packages, capfd):
def test_all_by_user(capfd):
with capfd.disabled():
out = split(maintainers("--all", "--by-user"))
assert out == [
@@ -100,22 +102,22 @@ def test_all_by_user(mock_packages, capfd):
]


def test_no_args(mock_packages):
def test_no_args():
with pytest.raises(spack.main.SpackCommandError):
maintainers()


def test_no_args_by_user(mock_packages):
def test_no_args_by_user():
with pytest.raises(spack.main.SpackCommandError):
maintainers("--by-user")


def test_mutex_args_fail(mock_packages):
def test_mutex_args_fail():
with pytest.raises(SystemExit):
maintainers("--maintained", "--unmaintained")


def test_maintainers_list_packages(mock_packages, capfd):
def test_maintainers_list_packages(capfd):
with capfd.disabled():
out = split(maintainers("maintainers-1"))
assert out == ["user1", "user2"]
@@ -129,13 +131,13 @@ def test_maintainers_list_packages(mock_packages, capfd):
assert out == ["user2", "user3"]


def test_maintainers_list_fails(mock_packages, capfd):
def test_maintainers_list_fails(capfd):
out = maintainers("pkg-a", fail_on_error=False)
assert not out
assert maintainers.returncode == 1


def test_maintainers_list_by_user(mock_packages, capfd):
def test_maintainers_list_by_user(capfd):
with capfd.disabled():
out = split(maintainers("--by-user", "user1"))
assert out == ["maintainers-1", "maintainers-3", "py-extension1"]

@@ -6,6 +6,7 @@

import pytest

import spack.binary_distribution as bindist
import spack.cmd.mirror
import spack.concretize
import spack.config
@@ -365,8 +366,10 @@ def test_mirror_destroy(
install("--fake", "--no-cache", spec_name)
buildcache("push", "-u", "-f", mirror_dir.strpath, spec_name)

blobs_path = bindist.buildcache_relative_blobs_path()

contents = os.listdir(mirror_dir.strpath)
assert "build_cache" in contents
assert blobs_path in contents

# Destroy mirror by name
mirror("destroy", "-m", "atest")
@@ -376,7 +379,7 @@
buildcache("push", "-u", "-f", mirror_dir.strpath, spec_name)

contents = os.listdir(mirror_dir.strpath)
assert "build_cache" in contents
assert blobs_path in contents

# Destroy mirror by url
mirror("destroy", "--mirror-url", mirror_url)
@@ -558,3 +561,36 @@ def test_mirror_add_set_autopush(mutable_config):
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("remove", "example")


def test_mirror_add_filters(mutable_config, tmpdir):
exclude_path = os.path.join(str(tmpdir), "test-exclude.txt")
with open(exclude_path, "w", encoding="utf-8") as exclude_file:
exclude_file.write(
"""\
mpich@3.0.1:3.0.2
mpich@1.0
"""
)
include_path = os.path.join(str(tmpdir), "test-include.txt")
with open(include_path, "w", encoding="utf-8") as include_file:
include_file.write(
"""\
build_type=Debug
gcc-runtime
"""
)
mirror("add", "--exclude-specs", "foo", "example", "http://example.com")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "exclude": ["foo"]}
mirror("set", "--include-specs", "+shared", "example")
assert spack.config.get("mirrors:example") == {
"url": "http://example.com",
"exclude": ["foo"],
"include": ["+shared"],
}
mirror("set", "--include-file", include_path, "--exclude-file", exclude_path, "example")
assert spack.config.get("mirrors:example") == {
"url": "http://example.com",
"exclude": ["mpich@3.0.1:3.0.2", "mpich@1.0"],
"include": ["build_type=Debug", "gcc-runtime"],
}

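The new test_mirror_add_filters exercises the filter-file convention: one spec per line, read into the mirror's include/exclude lists. A usage sketch built from the same commands the test calls (file name and mirror name are placeholders):

from spack.main import SpackCommand

mirror = SpackCommand("mirror")

# exclude.txt holds one spec per line, e.g. "mpich@3.0.1:3.0.2"
mirror("add", "example", "http://example.com")
mirror("set", "--exclude-file", "exclude.txt", "example")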
@@ -111,12 +111,12 @@ def split(output):
pkg = spack.main.SpackCommand("pkg")


def test_packages_path():
assert spack.repo.packages_path() == spack.repo.PATH.get_repo("builtin").packages_path
def test_builtin_repo():
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin")


def test_mock_packages_path(mock_packages):
assert spack.repo.packages_path() == spack.repo.PATH.get_repo("builtin.mock").packages_path
def test_mock_builtin_repo(mock_packages):
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin.mock")


def test_pkg_add(git, mock_pkg_git_repo):

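The rename above swaps the packages_path() helper for a builtin_repo() accessor that returns the repo object itself; the path, when needed, comes off that object. A sketch of the equivalence the new tests assert:

import spack.repo

repo = spack.repo.builtin_repo()                    # new accessor
assert repo is spack.repo.PATH.get_repo("builtin")  # same object as the path lookup
packages_dir = repo.packages_path                   # path now read from the repo object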
@@ -7,6 +7,8 @@

from spack.main import SpackCommand

pytestmark = [pytest.mark.usefixtures("mock_packages")]

providers = SpackCommand("providers")


@@ -24,16 +26,28 @@ def test_it_just_runs(pkg):
(
("mpi",),
[
"mpich",
"mpilander",
"mvapich2",
"openmpi",
"openmpi@1.7.5:",
"openmpi@2.0.0:",
"spectrum-mpi",
"intel-parallel-studio",
"low-priority-provider",
"mpich@3:",
"mpich2",
"multi-provider-mpi@1.10.0",
"multi-provider-mpi@2.0.0",
"zmpi",
],
),
(("D", "awk"), ["ldc", "gawk", "mawk"]), # Call 2 virtual packages at once
(
("lapack", "something"),
[
"intel-parallel-studio",
"low-priority-provider",
"netlib-lapack",
"openblas-with-lapack",
"simple-inheritance",
"splice-a",
"splice-h",
"splice-vh",
],
), # Call 2 virtual packages at once
],
)
def test_provider_lists(vpkg, provider_list):

@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pathlib

import pytest

@@ -22,19 +23,19 @@ def test_help_option():
assert repo.returncode in (None, 0)


def test_create_add_list_remove(mutable_config, tmpdir):
def test_create_add_list_remove(mutable_config, tmp_path: pathlib.Path):
# Create a new repository and check that the expected
# files are there
repo("create", str(tmpdir), "mockrepo")
assert os.path.exists(os.path.join(str(tmpdir), "repo.yaml"))
repo("create", str(tmp_path), "mockrepo")
assert (tmp_path / "spack_repo" / "mockrepo" / "repo.yaml").exists()

# Add the new repository and check it appears in the list output
repo("add", "--scope=site", str(tmpdir))
repo("add", "--scope=site", str(tmp_path / "spack_repo" / "mockrepo"))
output = repo("list", "--scope=site", output=str)
assert "mockrepo" in output

# Then remove it and check it's not there
repo("remove", "--scope=site", str(tmpdir))
repo("remove", "--scope=site", str(tmp_path / "spack_repo" / "mockrepo"))
output = repo("list", "--scope=site", output=str)
assert "mockrepo" not in output


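Per the assertions above, spack repo create now nests the repository under a spack_repo/<name>/ prefix instead of putting repo.yaml at the destination root. A sketch of the check, written against pathlib as the rewritten test does (the helper name is hypothetical):

from pathlib import Path


def has_new_layout(prefix: Path) -> bool:
    # repo.yaml now sits under spack_repo/<namespace>/, not at the prefix root.
    return (prefix / "spack_repo" / "mockrepo" / "repo.yaml").exists()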
@@ -116,7 +116,7 @@ def test_changed_no_base(git, tmpdir, capfd):
assert "This repository does not have a 'foobar'" in err


def test_changed_files_all_files():
def test_changed_files_all_files(mock_packages):
# it's hard to guarantee "all files", so do some sanity checks.
files = set(
[
@@ -159,8 +159,12 @@ def test_bad_root(tmpdir):

def test_style_is_package(tmpdir):
"""Ensure the is_package() function works."""
assert spack.cmd.style.is_package("var/spack/repos/builtin/packages/hdf5/package.py")
assert spack.cmd.style.is_package("var/spack/repos/builtin/packages/zlib/package.py")
assert spack.cmd.style.is_package(
"var/spack/repos/spack_repo/builtin/packages/hdf5/package.py"
)
assert spack.cmd.style.is_package(
"var/spack/repos/spack_repo/builtin/packages/zlib/package.py"
)
assert not spack.cmd.style.is_package("lib/spack/spack/spec.py")
assert not spack.cmd.style.is_package("lib/spack/external/pytest.py")


@@ -19,7 +19,7 @@ def test_list():

def test_list_with_pytest_arg():
output = spack_test("--list", cmd_test_py)
assert output.strip() == cmd_test_py
assert cmd_test_py in output.strip()


def test_list_with_keywords():
@@ -27,7 +27,7 @@ def test_list_with_keywords():
# since the behavior is inconsistent across different pytest
# versions, see https://stackoverflow.com/a/48814787/771663
output = spack_test("--list", "-k", "unit_test.py")
assert output.strip() == cmd_test_py
assert cmd_test_py in output.strip()


def test_list_long(capsys):

@@ -10,6 +10,9 @@
versions = SpackCommand("versions")


pytestmark = [pytest.mark.usefixtures("mock_packages")]


def test_safe_versions():
"""Only test the safe versions of a package."""

@@ -70,11 +73,11 @@ def test_no_unchecksummed_versions():
def test_versions_no_url():
"""Test a package with versions but without a ``url`` attribute."""

versions("graphviz")
versions("attributes-foo-app")


@pytest.mark.maybeslow
def test_no_versions_no_url():
"""Test a package without versions or a ``url`` attribute."""

versions("opengl")
versions("no-url-or-version")

@@ -29,7 +29,7 @@ def _concretize_with_reuse(*, root_str, reused_str):

@pytest.fixture
def runtime_repo(mutable_config):
repo = os.path.join(spack.paths.repos_path, "compiler_runtime.test")
repo = os.path.join(spack.paths.test_repos_path, "compiler_runtime.test")
with spack.repo.use_repositories(repo) as mock_repo:
yield mock_repo


@@ -1719,7 +1719,7 @@ def test_reuse_with_unknown_namespace_dont_raise(

@pytest.mark.regression("45538")
def test_reuse_from_other_namespace_no_raise(self, tmpdir, temporary_store, monkeypatch):
myrepo = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"), namespace="myrepo")
myrepo = spack.repo.MockRepositoryBuilder(tmpdir, namespace="mock_repo")
myrepo.add_package("zlib")

builtin = spack.concretize.concretize_one("zlib")
@@ -1727,21 +1727,19 @@ def test_reuse_from_other_namespace_no_raise(self, tmpdir, temporary_store, monk

with spack.repo.use_repositories(myrepo.root, override=False):
with spack.config.override("concretizer:reuse", True):
myrepo = spack.concretize.concretize_one("myrepo.zlib")
myrepo = spack.concretize.concretize_one("mock_repo.zlib")

assert myrepo.namespace == "myrepo"
assert myrepo.namespace == "mock_repo"

@pytest.mark.regression("28259")
def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, monkeypatch):
builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"), namespace="myrepo")
builder = spack.repo.MockRepositoryBuilder(str(tmpdir), namespace="myrepo")
builder.add_package("pkg-c")
with spack.repo.use_repositories(builder.root, override=False):
s = spack.concretize.concretize_one("pkg-c")
assert s.namespace == "myrepo"
PackageInstaller([s.package], fake=True, explicit=True).install()

del sys.modules["spack.pkg.myrepo.pkg-c"]
del sys.modules["spack.pkg.myrepo"]
del sys.modules["spack_repo.myrepo.packages.pkg_c"]
builder.remove("pkg-c")
with spack.repo.use_repositories(builder.root, override=False) as repos:
# TODO (INJECT CONFIGURATION): unclear why the cache needs to be invalidated explicitly
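The del sys.modules lines above capture the package-module rename that comes with the spack_repo layout: dashes in package names become underscores, and modules live under a packages subpackage of the namespace. The mapping, with names taken from the test:

old_module = "spack.pkg.myrepo.pkg-c"            # legacy module path
new_module = "spack_repo.myrepo.packages.pkg_c"  # new path: dash -> underscore, under packages/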
@@ -2334,7 +2332,7 @@ def test_select_lower_priority_package_from_repository_stack(
from cli.
"""
# "builtin.mock" and "duplicates.test" share a 'gmake' package
additional_repo = os.path.join(spack.paths.repos_path, "duplicates.test")
additional_repo = os.path.join(spack.paths.test_repos_path, "duplicates.test")
with spack.repo.use_repositories(additional_repo, override=False):
s = spack.concretize.concretize_one(spec_str)

@@ -2578,7 +2576,7 @@ def test_correct_external_is_selected_from_packages_yaml(self, mutable_config):

@pytest.fixture()
def duplicates_test_repository():
repository_path = os.path.join(spack.paths.repos_path, "duplicates.test")
repository_path = os.path.join(spack.paths.test_repos_path, "duplicates.test")
with spack.repo.use_repositories(repository_path) as mock_repo:
yield mock_repo

@@ -2813,7 +2811,7 @@ def test_adding_specs(self, input_specs, default_mock_concretization):

@pytest.fixture()
def edges_test_repository():
repository_path = os.path.join(spack.paths.repos_path, "edges.test")
repository_path = os.path.join(spack.paths.test_repos_path, "edges.test")
with spack.repo.use_repositories(repository_path) as mock_repo:
yield mock_repo

@@ -3107,7 +3105,9 @@ def test_spec_unification(unify, mutable_config, mock_packages):
_ = spack.cmd.parse_specs([a_restricted, b], concretize=True)


def test_concretization_cache_roundtrip(use_concretization_cache, monkeypatch, mutable_config):
def test_concretization_cache_roundtrip(
mock_packages, use_concretization_cache, monkeypatch, mutable_config
):
"""Tests whether we can write the results of a clingo solve to the cache
and load the same spec request from the cache to produce identical specs"""
# Force determinism:

@@ -46,7 +46,7 @@

@pytest.fixture
def test_repo(mutable_config, monkeypatch, mock_stage):
repo_dir = pathlib.Path(spack.paths.repos_path) / "flags.test"
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "flags.test"
with spack.repo.use_repositories(str(repo_dir)) as mock_repo_path:
yield mock_repo_path


@@ -28,7 +28,7 @@ def update_packages_config(conf_str):

@pytest.fixture
def test_repo(mutable_config, monkeypatch, mock_stage):
repo_dir = pathlib.Path(spack.paths.repos_path) / "requirements.test"
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "requirements.test"
with spack.repo.use_repositories(str(repo_dir)) as mock_repo_path:
yield mock_repo_path


@@ -914,7 +914,7 @@ def test_single_file_scope(config, env_yaml):
assert spack.config.get("config:checksum") is True
assert spack.config.get("config:checksum") is True
assert spack.config.get("packages:externalmodule:buildable") is False
assert spack.config.get("repos") == ["/x/y/z", "$spack/var/spack/repos/builtin"]
assert spack.config.get("repos") == ["/x/y/z", "$spack/var/spack/repos/spack_repo/builtin"]


def test_single_file_scope_section_override(tmpdir, config):
@@ -950,7 +950,7 @@ def test_single_file_scope_section_override(tmpdir, config):
# from the lower config scopes
assert spack.config.get("config:checksum") is True
assert not spack.config.get("packages:externalmodule")
assert spack.config.get("repos") == ["/x/y/z", "$spack/var/spack/repos/builtin"]
assert spack.config.get("repos") == ["/x/y/z", "$spack/var/spack/repos/spack_repo/builtin"]


def test_write_empty_single_file_scope(tmpdir):

@@ -259,9 +259,9 @@ def mock_git_package_changes(git, tmpdir, override_git_repos_cache_path, monkeyp
Important attributes of the repo for test coverage: multiple package
versions are added, some coming from a tarball and some from git refs.
"""
filename = "diff-test/package.py"
filename = "diff_test/package.py"

repo_path, _ = spack.repo.create_repo(str(tmpdir.mkdir("myrepo")))
repo_path, _ = spack.repo.create_repo(str(tmpdir), namespace="myrepo")
repo_cache = spack.util.file_cache.FileCache(str(tmpdir.mkdir("cache")))

repo = spack.repo.Repo(repo_path, cache=repo_cache)
@@ -1068,9 +1068,7 @@ def install_mockery(temporary_store: spack.store.Store, mutable_config, mock_pac
@pytest.fixture(scope="module")
def temporary_mirror_dir(tmpdir_factory):
dir = tmpdir_factory.mktemp("mirror")
dir.ensure("build_cache", dir=True)
yield str(dir)
dir.join("build_cache").remove()


@pytest.fixture(scope="function")
@@ -1084,9 +1082,7 @@ def temporary_mirror(temporary_mirror_dir):
@pytest.fixture(scope="function")
def mutable_temporary_mirror_dir(tmpdir_factory):
dir = tmpdir_factory.mktemp("mirror")
dir.ensure("build_cache", dir=True)
yield str(dir)
dir.join("build_cache").remove()


@pytest.fixture(scope="function")

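Both mirror fixtures above stop pre-creating a build_cache directory, which the v2 layout no longer expects. The simplified fixture implied by the removed lines (a sketch, not the verbatim result of the diff):

import pytest


@pytest.fixture(scope="module")
def temporary_mirror_dir(tmpdir_factory):
    yield str(tmpdir_factory.mktemp("mirror"))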
@@ -1,2 +1,2 @@
repos:
- $spack/var/spack/repos/builtin
- $spack/var/spack/repos/spack_repo/builtin

@@ -1,54 +0,0 @@
|
||||
{
|
||||
"spec": {
|
||||
"_meta": {
|
||||
"version": 3
|
||||
},
|
||||
"nodes": [
|
||||
{
|
||||
"name": "archive-files",
|
||||
"version": "2.0",
|
||||
"arch": {
|
||||
"platform": "test",
|
||||
"platform_os": "debian6",
|
||||
"target": {
|
||||
"name": "core2",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"mmx",
|
||||
"sse",
|
||||
"sse2",
|
||||
"ssse3"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"nocona"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "gcc",
|
||||
"version": "4.5.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"package_hash": "ncv2pr4o2yemepsa4h7u4p4dsgieul5fxvh6s5am5fsb65ebugaa====",
|
||||
"hash": "l3vdiqvbobmspwyb4q2b62fz6nitd4hk"
|
||||
}
|
||||
]
|
||||
},
|
||||
"binary_cache_checksum": {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": "c226b51d88876746efd6f9737cc6dfdd349870b6c0b9c045d9bad0f2764a40b9"
|
||||
},
|
||||
"buildinfo": {
|
||||
"relative_prefix": "test-debian6-core2/gcc-4.5.0/archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk",
|
||||
"relative_rpaths": false
|
||||
}
|
||||
}
|
Binary file not shown.
@@ -0,0 +1,29 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGf23+EBEAC6UqaiE43cF9jFuVjA8xJ5j31BMhufpnk0cwoE5Iks/GgR/Hki
|
||||
LMYbzy36V7TZGObel+5DtFKipX+WCwWj2XsjbeqHeuCkxZhzHFwfi1UJl9FO2T28
|
||||
iNn6OsBiGeU6ULNmehSia2hx0uhj1re/FUwJExOAvuYv8nc7M+nozqi7Pp/WjP8v
|
||||
UTiqP2onzZJbidlSBvmZ2nheWk7G78e617gcV/ye+UyXZvciiF2UQBg9YV6D8JuD
|
||||
YhBbNAVOzJOiyOdTBmZmOkmYsGx58sEbFVqGeOMB0xoxZrqKjMm9NhvjqjJF/sWs
|
||||
hN/PD5ylW1UR05/fGxlG2GLKKfBInbdqnC101OFWXP5HenYHmKaBJoCKCAUfsoJ0
|
||||
r/t/GVh3z3w/99p0TRDONnTecKm5S9z3/5QjjE5RsWcd4ll7mRikUiVpe1WhKRwT
|
||||
4T76pQLq3XwNJqiOmuMQuSHoBE9OMufvRFiTYC0QHyLoCV2H5PCWtS2xSsIDN4PB
|
||||
0RNd0hnHKanVV7d2TkIrGOagoAo0wXqyW/Op6KUG1NdaFYYziDFEHeZxfGoPKytO
|
||||
iS5PEwZG2FqambAZhJU5OXwzgnCRIoE5DCZad4YS6U5YD/2zg+RrQ/5GUxl5Cc+W
|
||||
Zwesn9FV5jywx/oFePYbTSNQVPQ6jbUDvhmHvZ8c/OfGOVXQr0VpvfIwdwARAQAB
|
||||
tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv
|
||||
ZHlAbm93aGVyZS5jb20+iQJRBBMBCAA7FiEEqYoEuILhnYX9Nu4GlWXYCwVckv8F
|
||||
Amf23+ECGwMFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQlWXYCwVckv9i
|
||||
pg//eGjBR9ph9hUYRsekzKWM1xB5zFOFfNoqlpCut/W7LAfy0XXkFy/y6EvPdcgn
|
||||
lLWRWPsOFfsKGwZd7LgSovhEMQ2MRsAUUB/KNZx7s6vO/P773PmJspF3odQ/lcrM
|
||||
1fum2lShChWqimdBdNLrXxG+8duO9uWaMBIp28diBCyB25M/MqpHtKYu00FB/QJ6
|
||||
ZwQH4OsgXVQHRjyrtIGx/2FQoWt0ah3eJMJCEw46GgkgiojtoTfXQQc4fIJP324b
|
||||
O1sxz5lx3xVBG/EZYzyV3xnSoG9aZNJ1cJq8EKO7ZoNKc/8jwkVu5gewGaXYI0LK
|
||||
/WkOeiXcSHPMSdu7TpnitvLYFCjc9YAEKQnjooXdt7+BElwC3+5hZJNXEnoGPMzn
|
||||
3UL60sQE/ViCsGcW+l9rtzXPNTmLMjEg4rGRqOhX+UmwyhvGD2QYbZtXlayu5xn+
|
||||
5m/PfmdqgL1xsdvNsLo/BOo+6kizMdBk48Xfp0YM8AC4BzUEENypGzC4T0WYF0k1
|
||||
Jfc6/eSwiytIcIkJ42GlaVfEFE8UxfYc1/2zqTBN9EdzWJqy0Bh+mVOgOaeb0Dzi
|
||||
xWpUpChi1fBB3PXWJ5iAS/w0HSVn4G5/JAIEFAs7r6ju2YtKBfuk+u/K5Q28mo7W
|
||||
6LrZQywN44nBMTvSQUhhXpSNYG+juyotXJUJ3F2u9Cf/jVU=
|
||||
=TkbL
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
@@ -0,0 +1,29 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGfHlp4BEAC5wkZSHqF9z6GcymuHpk1m9aNXCJdt4ZWvE8ck8GcuVu1nbzlZ
|
||||
h959jqtwk7nFMki5YaNMz6jcQf0eeS75viL4CoPAqFiVyhyCCh5am75h9F7vTBq6
|
||||
190017lhu9IgkAkiklnjfDbyXH+BwqJ78nXp6e6R4ShFMHNGGvYLem1wmPKzqPlZ
|
||||
zN0yjc0+d5pw4hu+IEFrM63yqGp2BVX1X132IKUEcROCQt1QOma5oORhYEtSCieX
|
||||
PuhuHJOA7q6nJuFccPCs5OcDS4IbQgGAbWL4L1+LAGVLVGpK4IVtqEZ831Srclh8
|
||||
0ruyFFeV/hqOONThwwile0Jwh5Jz/2sYxT5c+nlumXWK+CXTm4OCfGt1UuGy6c6u
|
||||
Rz84PHfanbKnATp6RUjz4DMREkmA6qBnUFqGLLGaBKBsm42b7kbo7m5aeItuOwLE
|
||||
U7AcnBEqqHLfI7O1zrHKjQCxhEWP/iok0kgEdiJ4tlPhfDjQRG6thlmZnVdt/08V
|
||||
+bvVkbYZyWPzjbG3QHyFew1+uzPHb2UopgpByVKYEWhCgNfcFtE56lEI9c40Ba5o
|
||||
LaZl0VlgfSLP4c+LoFB6gZp1gcVQuPo1JKd1v5WP60f1iHhazL5LEeMYcW6kvujK
|
||||
58Q683gSH5DsVAnxaj1uU4nvtKDh8IF1CNKKXk8RVsltdpv9bGhV8b4qVQARAQAB
|
||||
tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv
|
||||
ZHlAbm93aGVyZS5jb20+iQJOBBMBCgA4FiEE6J1JcfAJex56PrVzcbSEgC54180F
|
||||
AmfHlp4CGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQcbSEgC54180aDg//
|
||||
f7GqIW5LzYqIqkey+IjdkSSfeD47tlWc2ukKYStHu0gTlHhrUp4rHNJ/s8XQ1o6o
|
||||
jwzWfNMYh68wt9sjuM2BEkkh3RUFEjVqqW+k562gS5ibfKTDtJb2Yj0n/CQKWvoi
|
||||
vUUzO88xW0AnZFieP+vD5iI5Zw4H2dY8cH4X1XlWAJufFdH4WBaZjujNwNOcCsnd
|
||||
w2nE050wKTR2wroWq0HKn1Ni3QNtKWPpLoHGAlhW6ACLa+EFqxHU6D3KhW6IV4Jc
|
||||
sdt36nHNiRiy6nT99asqtN6Z0Yw+EnQSuIDosIbmSgZoieINh0gU6AKwgydxLUxL
|
||||
Cu1w2fZHGuFR/ym0c/tTpM893DxHMc/EZ/SpU8fXkC9lYnQO3or/Y0mLHd0kSEv7
|
||||
XoonvcOu1tOQzmvrvUQUtTn4+6OKpGViyZG5C8Lbk8/yKWFv5b+Gpss/EiGTHSsk
|
||||
bPTHf5jMsWElv0GgFq2TpybtIcY52yJoZ1fBMEA9Nk76Y/MNFlN0d7HyS6tWGr6E
|
||||
8FWJB7RYG5XHMEDIKSheq+Q5cORwz92JPFI+sovZukp+20G7f7/gwos441KamJPc
|
||||
y1+M4uO21aKX2fA07bcgFtm25gNLoHyvjQLcmyDis6xogvciCV3iQ/mtunewgYp/
|
||||
lUX1dv0R5o8TteaAIkbJicbdLtur/iuAWN404E/QShc=
|
||||
=8P00
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
@@ -0,0 +1 @@
|
||||
{"keys":{"A98A04B882E19D85FD36EE069565D80B055C92FF":{},"E89D4971F0097B1E7A3EB57371B484802E78D7CD":{}}}
|
File diff suppressed because one or more lines are too long
@@ -0,0 +1 @@
|
||||
7f94d6038bb4e5e7fff817151da5b22d7dd6d1e2d9ad51bd55504676786c17bd
|
@@ -0,0 +1,124 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA512
|
||||
|
||||
{
|
||||
"spec":{
|
||||
"_meta":{
|
||||
"version":4
|
||||
},
|
||||
"nodes":[
|
||||
{
|
||||
"name":"libdwarf",
|
||||
"version":"20130729",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"core2",
|
||||
"vendor":"GenuineIntel",
|
||||
"features":[
|
||||
"mmx",
|
||||
"sse",
|
||||
"sse2",
|
||||
"ssse3"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"nocona"
|
||||
],
|
||||
"cpupart":""
|
||||
}
|
||||
},
|
||||
"compiler":{
|
||||
"name":"gcc",
|
||||
"version":"10.2.1"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====",
|
||||
"dependencies":[
|
||||
{
|
||||
"name":"libelf",
|
||||
"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build",
|
||||
"link"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
}
|
||||
],
|
||||
"hash":"sk2gqqz4n5njmvktycnd25wq25jxiqkr"
|
||||
},
|
||||
{
|
||||
"name":"libelf",
|
||||
"version":"0.8.13",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"core2",
|
||||
"vendor":"GenuineIntel",
|
||||
"features":[
|
||||
"mmx",
|
||||
"sse",
|
||||
"sse2",
|
||||
"ssse3"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"nocona"
|
||||
],
|
||||
"cpupart":""
|
||||
}
|
||||
},
|
||||
"compiler":{
|
||||
"name":"gcc",
|
||||
"version":"10.2.1"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====",
|
||||
"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez"
|
||||
}
|
||||
]
|
||||
},
|
||||
"buildcache_layout_version":2,
|
||||
"binary_cache_checksum":{
|
||||
"hash_algorithm":"sha256",
|
||||
"hash":"811f500a89ae7d2f61e2c0ef6f56e352dfbac245ae88275809088a1481489d5b"
|
||||
}
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCgAdFiEE6J1JcfAJex56PrVzcbSEgC54180FAmfHlp8ACgkQcbSEgC54
|
||||
180hlxAAisLofFhr/PQvLcQ79T3t3V0tqGgz9x6QnPKfbPCgvb66tTNlny+ML0fY
|
||||
y1H9xXQO53QOxfN9cdXcf2EVbRQ2eT6ltmwekI3ZZuCaTguflNu/i11UV6UnDy3x
|
||||
dXOYQhky5QjtPbhJ0NxG5XDKoRFoUPR/rgXsiNG5O0sk3M5H9ldpsj8af5W/6LCL
|
||||
gCTNM8fF0TVbd4MF9TiIECFBng2CrxhHwpl2gPHHxab1zxLRCF6t1lZvL6To0hmC
|
||||
e/Tqre+42PhRSCtXuwhK22r0rvreVUaiglYn8udjOJHwNVKdzLnTZ1OBAFeIq00U
|
||||
9uuroyaF841pq9+8PitwUORurv0lsnHUbfbi/+ou0HzMiaXzz+MPdOXt8nUuyScs
|
||||
oKOi8ExvpWJ7vn6klkvQtMK/Gakzd4YOxO/nk9K8BJgVN3qrODwHYSORk8RrdITS
|
||||
tkjiEJiIoklddiwCf3NUzlxiIYWbiqKqNbY+Pxh4B+OpVDnvRmpkJHgoSuVoCS8b
|
||||
coaOTIgqDpnIClHIj7ogxO+ureRjIIkGNNh6wVhlHDlgm1GzxNUOklMrzDkYMD01
|
||||
eTYxrbicw7ZVwqhFtR8olODKT9QAqXUJOkGHS9IA6FJctatkUkIOG1DSI52AZV1r
|
||||
PYzgdKtTxS60EkN8Igl6VMTkaC05anLygCTyOvGaV7sqVKmzHY8=
|
||||
=8OR5
|
||||
-----END PGP SIGNATURE-----
|
@@ -0,0 +1,72 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA512
|
||||
|
||||
{
|
||||
"spec":{
|
||||
"_meta":{
|
||||
"version":4
|
||||
},
|
||||
"nodes":[
|
||||
{
|
||||
"name":"libelf",
|
||||
"version":"0.8.13",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"core2",
|
||||
"vendor":"GenuineIntel",
|
||||
"features":[
|
||||
"mmx",
|
||||
"sse",
|
||||
"sse2",
|
||||
"ssse3"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"nocona"
|
||||
],
|
||||
"cpupart":""
|
||||
}
|
||||
},
|
||||
"compiler":{
|
||||
"name":"gcc",
|
||||
"version":"10.2.1"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====",
|
||||
"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez"
|
||||
}
|
||||
]
|
||||
},
|
||||
"buildcache_layout_version":2,
|
||||
"binary_cache_checksum":{
|
||||
"hash_algorithm":"sha256",
|
||||
"hash":"48c8aa769a62535f9d9f613722e3d3f5a48b91fde3c99a644b22f277a4502d75"
|
||||
}
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCgAdFiEE6J1JcfAJex56PrVzcbSEgC54180FAmfHlp8ACgkQcbSEgC54
|
||||
182ezg/7Bkil1mY6d4recJMkFhpBzzDs8aMD+WQOBPoy/bWHIGsPb1DyOOW7lTLa
|
||||
QC9jh9Rq02oMeX0LWvNg7k6iMTayWcrPzJwk1rgh3pg/ySgCTZ576/aP/UOZwA8h
|
||||
HT/3RzsDFlq7Wkh4yYaDgSEDVc5PgUevb1p2f126Z9HMFjG8siEWmuZQOcy4I9JG
|
||||
osQFtwWTLmx96sBMzweZTu2i3iGTPNz4Ae1hu+v5clmSFg43eW7EWChEVoob+3hb
|
||||
hLRxajZEPsIho4yR5yynoxduXeXrLLP7GH6XGnYt7Z2GJR0UamIrPfxYuWBK76V1
|
||||
03Ie2rRXwOKfsjDWw9Z8ziTVu25G0aZ274DX6eQyaWKfvzz69cBXO0fgw1lU8B9S
|
||||
K0j9k/xtnDCrIkPSh4QGQpFRlbzxkj20E+EnwgDCGIlK1rBzo2V5na4YNj+SbC91
|
||||
0BmWrj6dRkQZUMJHeb95kBMfFpKG5B6u7HQxZtIwHFAfF0nypbiB7xmdy/gAmUao
|
||||
ej3Cu34DvWtLVeSh7lRimeEc44WyBDk2YSPqYleAwYMZBn4WSozUS/KVLU2T/AhZ
|
||||
VlLaEBaFrVngmsw5PCdck0XRSNSAN9HUgPItpOzYig20NeT1/69wIlUZVNpLEYGT
|
||||
yvZsmqHFnkunAs6av3XmGl0i8rSA6DujunpNXML6hUciFEK5wg4=
|
||||
=Aq8h
|
||||
-----END PGP SIGNATURE-----
|
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,429 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
|
||||
{
|
||||
"spec":{
|
||||
"_meta":{
|
||||
"version":5
|
||||
},
|
||||
"nodes":[
|
||||
{
|
||||
"name":"libdwarf",
|
||||
"version":"20130729",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"m1",
|
||||
"vendor":"Apple",
|
||||
"features":[
|
||||
"aes",
|
||||
"asimd",
|
||||
"asimddp",
|
||||
"asimdfhm",
|
||||
"asimdhp",
|
||||
"asimdrdm",
|
||||
"atomics",
|
||||
"cpuid",
|
||||
"crc32",
|
||||
"dcpodp",
|
||||
"dcpop",
|
||||
"dit",
|
||||
"evtstrm",
|
||||
"fcma",
|
||||
"flagm",
|
||||
"flagm2",
|
||||
"fp",
|
||||
"fphp",
|
||||
"frint",
|
||||
"ilrcpc",
|
||||
"jscvt",
|
||||
"lrcpc",
|
||||
"paca",
|
||||
"pacg",
|
||||
"pmull",
|
||||
"sb",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"sha3",
|
||||
"sha512",
|
||||
"ssbs",
|
||||
"uscat"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"armv8.4a"
|
||||
],
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====",
|
||||
"dependencies":[
|
||||
{
|
||||
"name":"compiler-wrapper",
|
||||
"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name":"gcc",
|
||||
"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build"
|
||||
],
|
||||
"virtuals":[
|
||||
"c",
|
||||
"cxx"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name":"gcc-runtime",
|
||||
"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"link"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name":"libelf",
|
||||
"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build",
|
||||
"link"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
}
|
||||
],
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"u5uz3dcch5if4eve4sef67o2rf2lbfgh"
|
||||
},
|
||||
{
|
||||
"name":"compiler-wrapper",
|
||||
"version":"1.0",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"m1",
|
||||
"vendor":"Apple",
|
||||
"features":[
|
||||
"aes",
|
||||
"asimd",
|
||||
"asimddp",
|
||||
"asimdfhm",
|
||||
"asimdhp",
|
||||
"asimdrdm",
|
||||
"atomics",
|
||||
"cpuid",
|
||||
"crc32",
|
||||
"dcpodp",
|
||||
"dcpop",
|
||||
"dit",
|
||||
"evtstrm",
|
||||
"fcma",
|
||||
"flagm",
|
||||
"flagm2",
|
||||
"fp",
|
||||
"fphp",
|
||||
"frint",
|
||||
"ilrcpc",
|
||||
"jscvt",
|
||||
"lrcpc",
|
||||
"paca",
|
||||
"pacg",
|
||||
"pmull",
|
||||
"sb",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"sha3",
|
||||
"sha512",
|
||||
"ssbs",
|
||||
"uscat"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"armv8.4a"
|
||||
],
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====",
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc"
|
||||
},
|
||||
{
|
||||
"name":"gcc",
|
||||
"version":"10.2.1",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":"aarch64"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"languages":[
|
||||
"c",
|
||||
"c++",
|
||||
"fortran"
|
||||
],
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"external":{
|
||||
"path":"/path",
|
||||
"module":null,
|
||||
"extra_attributes":{
|
||||
"compilers":{
|
||||
"c":"/path/bin/gcc-10",
|
||||
"cxx":"/path/bin/g++-10",
|
||||
"fortran":"/path/bin/gfortran-10"
|
||||
}
|
||||
}
|
||||
},
|
||||
"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====",
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq"
|
||||
},
|
||||
{
|
||||
"name":"gcc-runtime",
|
||||
"version":"10.2.1",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"m1",
|
||||
"vendor":"Apple",
|
||||
"features":[
|
||||
"aes",
|
||||
"asimd",
|
||||
"asimddp",
|
||||
"asimdfhm",
|
||||
"asimdhp",
|
||||
"asimdrdm",
|
||||
"atomics",
|
||||
"cpuid",
|
||||
"crc32",
|
||||
"dcpodp",
|
||||
"dcpop",
|
||||
"dit",
|
||||
"evtstrm",
|
||||
"fcma",
|
||||
"flagm",
|
||||
"flagm2",
|
||||
"fp",
|
||||
"fphp",
|
||||
"frint",
|
||||
"ilrcpc",
|
||||
"jscvt",
|
||||
"lrcpc",
|
||||
"paca",
|
||||
"pacg",
|
||||
"pmull",
|
||||
"sb",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"sha3",
|
||||
"sha512",
|
||||
"ssbs",
|
||||
"uscat"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"armv8.4a"
|
||||
],
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====",
|
||||
"dependencies":[
|
||||
{
|
||||
"name":"gcc",
|
||||
"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
}
|
||||
],
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n"
|
||||
},
|
||||
{
|
||||
"name":"libelf",
|
||||
"version":"0.8.13",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"m1",
|
||||
"vendor":"Apple",
|
||||
"features":[
|
||||
"aes",
|
||||
"asimd",
|
||||
"asimddp",
|
||||
"asimdfhm",
|
||||
"asimdhp",
|
||||
"asimdrdm",
|
||||
"atomics",
|
||||
"cpuid",
|
||||
"crc32",
|
||||
"dcpodp",
|
||||
"dcpop",
|
||||
"dit",
|
||||
"evtstrm",
|
||||
"fcma",
|
||||
"flagm",
|
||||
"flagm2",
|
||||
"fp",
|
||||
"fphp",
|
||||
"frint",
|
||||
"ilrcpc",
|
||||
"jscvt",
|
||||
"lrcpc",
|
||||
"paca",
|
||||
"pacg",
|
||||
"pmull",
|
||||
"sb",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"sha3",
|
||||
"sha512",
|
||||
"ssbs",
|
||||
"uscat"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"armv8.4a"
|
||||
],
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====",
|
||||
"dependencies":[
|
||||
{
|
||||
"name":"compiler-wrapper",
|
||||
"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name":"gcc",
|
||||
"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build"
|
||||
],
|
||||
"virtuals":[
|
||||
"c"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name":"gcc-runtime",
|
||||
"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"link"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
}
|
||||
],
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa"
|
||||
}
|
||||
]
|
||||
},
|
||||
"buildcache_layout_version":2,
|
||||
"binary_cache_checksum":{
|
||||
"hash_algorithm":"sha256",
|
||||
"hash":"0898457b4cc4b18d71059ea254667fb6690f5933c82e1627f9fed3606488dbca"
|
||||
}
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc
|
||||
kv9Xlg//d7uWhVbHjujSXRpoN3hzH5sUvvTSZ9xzvXGAXCoAu2oEGg4hxZPIFQJ3
|
||||
pZzKysZMfeFg+UKwDzex5TlKZ3JtKgCTKYl64zZfUl2EQgo/d/Fjz5mSFHW/6sa1
|
||||
1uTe3+sVt+HlijN72t2412Qbp+/uGvU+KBvXPA7kgkp88Kd/PL9xe3jlT9ytH5Nw
|
||||
3LIghe++JiepjFAKXTfIA04EjLb8c50AAxsK5Xx37HOOVHHQ8L9anFnOVYM+DxAz
|
||||
gn4dBYUQ9Uu5k5uEu5CwtxsED2/Yar7YWIepEnyp6z4zQVbwjO4/w0vZ3wSJ9c4P
|
||||
UhZs8V2akuqIWyzlQuBOjywnEQc/nw9v0py+Dr/Qr3U4XWh/LARWABMxa4IqXMOK
|
||||
aVmd6weVjV4U929gaOT/FCtZPfaFNRbk97YP8yAxuLhSdiGS0Mp16Ygz21fVWB7C
|
||||
UjkGGsKK1cdiJQ0m1CffmydU/nbDjSuw4WZIoIgDzvN7SFm7YBtE+xY+RUPsHU22
|
||||
QMAXojF5abwn48HJeP47MYdfR7+nUJq6XJiJ7/80a7Ciy8SAVxinQWqvigf/hmTf
|
||||
kAiQaqOVSlRBJ2yry5fYBKHSIRvghCqS4t4es8o13R7n2wz68VqKu0JkNlT3Ijjc
|
||||
QjJYtI+844PCDNetPVV8iNWF6upnTJnPHcFmKAEO1663hOc3Dh8=
|
||||
=3fA5
|
||||
-----END PGP SIGNATURE-----
|
@@ -0,0 +1,317 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
|
||||
{
|
||||
"spec":{
|
||||
"_meta":{
|
||||
"version":5
|
||||
},
|
||||
"nodes":[
|
||||
{
|
||||
"name":"libelf",
|
||||
"version":"0.8.13",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"m1",
|
||||
"vendor":"Apple",
|
||||
"features":[
|
||||
"aes",
|
||||
"asimd",
|
||||
"asimddp",
|
||||
"asimdfhm",
|
||||
"asimdhp",
|
||||
"asimdrdm",
|
||||
"atomics",
|
||||
"cpuid",
|
||||
"crc32",
|
||||
"dcpodp",
|
||||
"dcpop",
|
||||
"dit",
|
||||
"evtstrm",
|
||||
"fcma",
|
||||
"flagm",
|
||||
"flagm2",
|
||||
"fp",
|
||||
"fphp",
|
||||
"frint",
|
||||
"ilrcpc",
|
||||
"jscvt",
|
||||
"lrcpc",
|
||||
"paca",
|
||||
"pacg",
|
||||
"pmull",
|
||||
"sb",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"sha3",
|
||||
"sha512",
|
||||
"ssbs",
|
||||
"uscat"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"armv8.4a"
|
||||
],
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====",
|
||||
"dependencies":[
|
||||
{
|
||||
"name":"compiler-wrapper",
|
||||
"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name":"gcc",
|
||||
"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build"
|
||||
],
|
||||
"virtuals":[
|
||||
"c"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name":"gcc-runtime",
|
||||
"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"link"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
}
|
||||
],
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa"
|
||||
},
|
||||
{
|
||||
"name":"compiler-wrapper",
|
||||
"version":"1.0",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"m1",
|
||||
"vendor":"Apple",
|
||||
"features":[
|
||||
"aes",
|
||||
"asimd",
|
||||
"asimddp",
|
||||
"asimdfhm",
|
||||
"asimdhp",
|
||||
"asimdrdm",
|
||||
"atomics",
|
||||
"cpuid",
|
||||
"crc32",
|
||||
"dcpodp",
|
||||
"dcpop",
|
||||
"dit",
|
||||
"evtstrm",
|
||||
"fcma",
|
||||
"flagm",
|
||||
"flagm2",
|
||||
"fp",
|
||||
"fphp",
|
||||
"frint",
|
||||
"ilrcpc",
|
||||
"jscvt",
|
||||
"lrcpc",
|
||||
"paca",
|
||||
"pacg",
|
||||
"pmull",
|
||||
"sb",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"sha3",
|
||||
"sha512",
|
||||
"ssbs",
|
||||
"uscat"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"armv8.4a"
|
||||
],
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====",
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc"
|
||||
},
|
||||
{
|
||||
"name":"gcc",
|
||||
"version":"10.2.1",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":"aarch64"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"languages":[
|
||||
"c",
|
||||
"c++",
|
||||
"fortran"
|
||||
],
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"external":{
|
||||
"path":"/path",
|
||||
"module":null,
|
||||
"extra_attributes":{
|
||||
"compilers":{
|
||||
"c":"/path/bin/gcc-10",
|
||||
"cxx":"/path/bin/g++-10",
|
||||
"fortran":"/path/bin/gfortran-10"
|
||||
}
|
||||
}
|
||||
},
|
||||
"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====",
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq"
|
||||
},
|
||||
{
|
||||
"name":"gcc-runtime",
|
||||
"version":"10.2.1",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"m1",
|
||||
"vendor":"Apple",
|
||||
"features":[
|
||||
"aes",
|
||||
"asimd",
|
||||
"asimddp",
|
||||
"asimdfhm",
|
||||
"asimdhp",
|
||||
"asimdrdm",
|
||||
"atomics",
|
||||
"cpuid",
|
||||
"crc32",
|
||||
"dcpodp",
|
||||
"dcpop",
|
||||
"dit",
|
||||
"evtstrm",
|
||||
"fcma",
|
||||
"flagm",
|
||||
"flagm2",
|
||||
"fp",
|
||||
"fphp",
|
||||
"frint",
|
||||
"ilrcpc",
|
||||
"jscvt",
|
||||
"lrcpc",
|
||||
"paca",
|
||||
"pacg",
|
||||
"pmull",
|
||||
"sb",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"sha3",
|
||||
"sha512",
|
||||
"ssbs",
|
||||
"uscat"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"armv8.4a"
|
||||
],
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====",
|
||||
"dependencies":[
|
||||
{
|
||||
"name":"gcc",
|
||||
"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
}
|
||||
],
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n"
|
||||
}
|
||||
]
|
||||
},
|
||||
"buildcache_layout_version":2,
|
||||
"binary_cache_checksum":{
|
||||
"hash_algorithm":"sha256",
|
||||
"hash":"c068bcd1a27a3081c07ba775d83e90228e340bb6a7f0d55deb18a462760c4bcf"
|
||||
}
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc
|
||||
kv/zSg/+NrS4JjT9TFSFR/q2vaN9aL7fSTunxp+M8eAzTmg0sgHc/D6ov2PMpUF7
|
||||
1E2mnZ2gL5a5dHtsSCf30ILFzQoD+m+I9yOwcJopcbEjr8pcnXBFe6TT8lkxlXtI
|
||||
EHNsYGMUHFbFvc+hFdWatQJicdDaIbdyEMGAC7Kobs/4KpdBF5VWV+sIrzD5+XzO
|
||||
ACiKRjBmcaJpa950nuEaFzBITgq1aDtZ0EEZdXYvjRnzj9Bm6gbqmWzlllW1wf4r
|
||||
5hSMTpAsRED4TxL433nuf0nKIvTD5Mywzs88kiLCtEABfDy1qccyBAnjyNypFF6B
|
||||
fPqSDnr33s+JQ35t7RcHKfrgowk69UablE25YOUrQP6LtH4QzLBLj4/Z0zuz33hO
|
||||
v+YYe51DgixsMQ2WCKWEO6sNcrcrLBJMFVwUP2FyTTdW3jCYRlFiTYLSfoDhTRJ/
|
||||
4o7f2eEp3sVoOe12jKI6dw/P+c70dl8K4+1ICcnZkwsb0pd0vt2z4J2kPs2+1/0g
|
||||
vpywJO1HL5Zy7/ZRlmeeSMHYEDX2eKhm7QRFbxw1IEbg3stQCA7a425JWztyJ05K
|
||||
sfhFQgPt7F/xanJVFYk/hdza+3+5pFr1K/ARcLFBdLBKGxAXTMMR+NkMp3J5NiOo
|
||||
SMZJ3jG6xA2ntvSkyx/GFawD0FpnlgEByU3E+R/WiQA4VojLpvo=
|
||||
=kfWI
|
||||
-----END PGP SIGNATURE-----
|
@@ -0,0 +1,99 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
|
||||
{
|
||||
"spec":{
|
||||
"_meta":{
|
||||
"version":5
|
||||
},
|
||||
"nodes":[
|
||||
{
|
||||
"name":"compiler-wrapper",
|
||||
"version":"1.0",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"m1",
|
||||
"vendor":"Apple",
|
||||
"features":[
|
||||
"aes",
|
||||
"asimd",
|
||||
"asimddp",
|
||||
"asimdfhm",
|
||||
"asimdhp",
|
||||
"asimdrdm",
|
||||
"atomics",
|
||||
"cpuid",
|
||||
"crc32",
|
||||
"dcpodp",
|
||||
"dcpop",
|
||||
"dit",
|
||||
"evtstrm",
|
||||
"fcma",
|
||||
"flagm",
|
||||
"flagm2",
|
||||
"fp",
|
||||
"fphp",
|
||||
"frint",
|
||||
"ilrcpc",
|
||||
"jscvt",
|
||||
"lrcpc",
|
||||
"paca",
|
||||
"pacg",
|
||||
"pmull",
|
||||
"sb",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"sha3",
|
||||
"sha512",
|
||||
"ssbs",
|
||||
"uscat"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"armv8.4a"
|
||||
],
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====",
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc"
|
||||
}
|
||||
]
|
||||
},
|
||||
"buildcache_layout_version":2,
|
||||
"binary_cache_checksum":{
|
||||
"hash_algorithm":"sha256",
|
||||
"hash":"2c1c5576e30b7063aa02a22111eb24b3f2a93c35ac0f64b4e491c7078706c0ea"
|
||||
}
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc
|
||||
kv/T8BAAhK/v7CP6lMIKILj35nEi+Gftjs7B7f6qvb4QNtqcGHum6z9t3JxkOOrd
|
||||
+q+Wd329kLYAFs/y9eaGe5X7wY1U7/f863i3XrxHbtmrnMci61D8qMjA1xnBGC+5
|
||||
yd746aVeV/VRbJxTeB9kGcKPMcIQYcearlDMgj5fKfpCKM8a+VyJfw7qHNUyrTnu
|
||||
d6LSGsEey6tGkJecgnJZTNSwryO3BZbg/4EviivMXm38AKGZrSib06qjkoHrPRvB
|
||||
8ftGSGlK4YmFs5/YjKFL7QzuNJeqPNJt4mD64tsk21urOfbQJe5AmdMLPGY0PbW/
|
||||
w++06c8lsd/6FmzUwlnTBUa39lKJjhkhoK7KFGVqZROcXZfhwAyqPZt7ReA5FDMV
|
||||
l5X7sytjQuSFaQPGi5g1xXQGEI394T2I55p5T5/RuQ2PXcFxxSOmIcEcD8o6Z7+x
|
||||
XWLq44KUWQyQP/StjaVhIz9YPogeBBJllA9hN+GzVrr2i+Esu1QO5uDgVuJP7pTA
|
||||
9wwCLV/t0hf2TZcpU2fwEu+DMniaHm6haVwqiu6QGkbkMBx49zkV9b5i9L441GoC
|
||||
Q86R2Gs9O0+QzHuN6egbQ0xKm/lfU8dmJSzV0snXawAeQ/vgCpdinx40EMc7Nz03
|
||||
rgZ3j88c/ADvCb1DVKmu1Phf6U7WqG6/AvB9tYl4Zl30VX7ETaw=
|
||||
=ifvQ
|
||||
-----END PGP SIGNATURE-----
|
@@ -0,0 +1,151 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
|
||||
{
|
||||
"spec":{
|
||||
"_meta":{
|
||||
"version":5
|
||||
},
|
||||
"nodes":[
|
||||
{
|
||||
"name":"gcc-runtime",
|
||||
"version":"10.2.1",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":{
|
||||
"name":"m1",
|
||||
"vendor":"Apple",
|
||||
"features":[
|
||||
"aes",
|
||||
"asimd",
|
||||
"asimddp",
|
||||
"asimdfhm",
|
||||
"asimdhp",
|
||||
"asimdrdm",
|
||||
"atomics",
|
||||
"cpuid",
|
||||
"crc32",
|
||||
"dcpodp",
|
||||
"dcpop",
|
||||
"dit",
|
||||
"evtstrm",
|
||||
"fcma",
|
||||
"flagm",
|
||||
"flagm2",
|
||||
"fp",
|
||||
"fphp",
|
||||
"frint",
|
||||
"ilrcpc",
|
||||
"jscvt",
|
||||
"lrcpc",
|
||||
"paca",
|
||||
"pacg",
|
||||
"pmull",
|
||||
"sb",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"sha3",
|
||||
"sha512",
|
||||
"ssbs",
|
||||
"uscat"
|
||||
],
|
||||
"generation":0,
|
||||
"parents":[
|
||||
"armv8.4a"
|
||||
],
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====",
|
||||
"dependencies":[
|
||||
{
|
||||
"name":"gcc",
|
||||
"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq",
|
||||
"parameters":{
|
||||
"deptypes":[
|
||||
"build"
|
||||
],
|
||||
"virtuals":[]
|
||||
}
|
||||
}
|
||||
],
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n"
|
||||
},
|
||||
{
|
||||
"name":"gcc",
|
||||
"version":"10.2.1",
|
||||
"arch":{
|
||||
"platform":"test",
|
||||
"platform_os":"debian6",
|
||||
"target":"aarch64"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"languages":[
|
||||
"c",
|
||||
"c++",
|
||||
"fortran"
|
||||
],
|
||||
"cflags":[],
|
||||
"cppflags":[],
|
||||
"cxxflags":[],
|
||||
"fflags":[],
|
||||
"ldflags":[],
|
||||
"ldlibs":[]
|
||||
},
|
||||
"external":{
|
||||
"path":"/path",
|
||||
"module":null,
|
||||
"extra_attributes":{
|
||||
"compilers":{
|
||||
"c":"/path/bin/gcc-10",
|
||||
"cxx":"/path/bin/g++-10",
|
||||
"fortran":"/path/bin/gfortran-10"
|
||||
}
|
||||
}
|
||||
},
|
||||
"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====",
|
||||
"annotations":{
|
||||
"original_specfile_version":5
|
||||
},
|
||||
"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq"
|
||||
}
|
||||
]
|
||||
},
|
||||
"buildcache_layout_version":2,
|
||||
"binary_cache_checksum":{
|
||||
"hash_algorithm":"sha256",
|
||||
"hash":"f33e7a6798a5fb2db6e538d3a530cc79b298e36d56a1df385d93889a9ba431d0"
|
||||
}
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc
|
||||
kv+MsRAAsaQjZbB9iW/Lq9b87H/E5Zmv6RrClvpjSnwvhLR4nhPL3p0G70k6tI/b
|
||||
NEdXctDyvBOJOEoLaEBrCODl/3GjV8B9Gj7OhT/BIKQjlOfJqVdwIrnHgav5ri+Q
|
||||
UUXLtejhJiUNoxeILI/xZx2CoKT9q/3EpQ5ysqdybJmYJCf/hv+lXEhnwUIv8vV/
|
||||
xdRYY//rfeMowCNIZtFPjSejMywXJfFKjl7h5dN5kwM63D6z/sh4zW7tqHq4kk+A
|
||||
2m0WcorVg93wAm+YoJaQJVx8bYeMGfV/TjmY/cSouCt8PM4Vi93vwieZCkzEpXbM
|
||||
BkVN4X3PTMZSOf0WTkEbnQD5v090/DoQPZyBrcDoJ/HmWDiz5Is2wUI0mLVkbg2L
|
||||
+rKNC3ZajJhsWElMGNNtZRLmGeTIe8hT+LNAejo221vrOJbnUmpIjKxVjStDbXmW
|
||||
nulgyEPSTfsJaXgbXmeJ8LOk0tWpBAGC16VzgXrPxoGD2XKxoiPCGLNrF/l1wyl+
|
||||
n+nw3TchNFrofpPrqJzT/vS71B6KDb0PVSTQZfM9+FahrQ+YbsIkzDAuxVZb5t3q
|
||||
HUME95RgoIBbccUGxAPwkaNme2OLaLzsJZ/Xhl5I8T1fraLYapsKNjQ5+CSKO8+t
|
||||
MlJYgSHuazWSetRbZ2H7g7QJWqeHUAWi9i1szpNDYxTFSs8wgDY=
|
||||
=edPy
|
||||
-----END PGP SIGNATURE-----
|
Some files were not shown because too many files have changed in this diff.