From f1c743e2350ef6827ed0f19ae3569f39cedd02ba Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 6 May 2025 17:23:40 +0200 Subject: [PATCH 01/57] gha: sync to spack/spack-packages (#50322) --- .github/workflows/sync-packages.yaml | 34 ++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 .github/workflows/sync-packages.yaml diff --git a/.github/workflows/sync-packages.yaml b/.github/workflows/sync-packages.yaml new file mode 100644 index 00000000000..3909c7da536 --- /dev/null +++ b/.github/workflows/sync-packages.yaml @@ -0,0 +1,34 @@ +name: sync with spack/spack-packages + +on: + push: + branches: + - develop + +jobs: + sync: + if: github.repository == 'spack/spack' + runs-on: ubuntu-latest + steps: + - name: Checkout spack/spack + run: git clone https://github.com/spack/spack.git + - name: Checkout spack/spack-packages + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + ssh-key: ${{ secrets.SYNC_PACKAGES_KEY }} + path: spack-packages + repository: spack/spack-packages + - name: Install git-filter-repo + run: | + curl -LfsO https://raw.githubusercontent.com/newren/git-filter-repo/refs/tags/v2.47.0/git-filter-repo + echo "67447413e273fc76809289111748870b6f6072f08b17efe94863a92d810b7d94 git-filter-repo" | sha256sum -c - + chmod +x git-filter-repo + sudo mv git-filter-repo /usr/local/bin/ + - name: Sync spack/spack-packages with spack/spack + run: | + cd spack-packages + git-filter-repo --quiet --source ../spack --subdirectory-filter var/spack/repos --refs develop + - name: Push + run: | + cd spack-packages + git push git@github.com:spack/spack-packages.git develop:develop --force From 6587b2a231448ddd60070289d1f33cfce2b886ac Mon Sep 17 00:00:00 2001 From: Simon Pintarelli <1237199+simonpintarelli@users.noreply.github.com> Date: Tue, 6 May 2025 17:30:45 +0200 Subject: [PATCH 02/57] costa v2.2.3, v2.2.4 (#50319) --- var/spack/repos/spack_repo/builtin/packages/costa/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/costa/package.py b/var/spack/repos/spack_repo/builtin/packages/costa/package.py index ad821aa2efe..649029df770 100644 --- a/var/spack/repos/spack_repo/builtin/packages/costa/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/costa/package.py @@ -14,13 +14,17 @@ class Costa(CMakePackage): maintainers("haampie", "kabicm", "RMeli", "mtaillefumier") homepage = "https://github.com/eth-cscs/COSTA" + url = "https://github.com/eth-cscs/COSTA/archive/refs/tags/v2.2.4.tar.gz" git = "https://github.com/eth-cscs/COSTA.git" + list_url = "https://github.com/eth-cscs/COSTA/releases" license("BSD-3-Clause") # note: The default archives produced with github do not have the archives # of the submodules. 
version("master", branch="master", submodules=True) + version("2.2.4", sha256="2155af3696cd0db1d18f9da7325de6fbcd87833c5b9e62445229e17151f7fd0b") + version("2.2.3", sha256="e0b74851603b9da1a104dfaf50504c8af748c73999610a37f9384ed0c23ae5df") version("2.2.2", sha256="e87bc37aad14ac0c5922237be5d5390145c9ac6aef0350ed17d86cb2d994e67c") version("2.2.1", sha256="aa8aa2a4a79de094f857c22293825de270ff72becd6bd736ff9f2dd8c192446d") version("2.2", sha256="3e7333f012af76ec3508276ea90800313f6136504667021fe229e710bf6acdc7") From 2c05ce3607bb346897c24a7d0486c08de1f7fea9 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Tue, 6 May 2025 12:32:15 -0600 Subject: [PATCH 03/57] binary_distribution: content addressable tarballs (#48713) binary_distribution: content addressable url buildcache Change how binary mirrors are laid out, adopting content addressing for every piece of data spack stores in a binary mirror. Items (e.g. tarballs, specfiles, public keys, indices, etc) are now discoverable via manifest files which give the size, checksum, compression type, etc of the the stored item. The information in the manifest, in turn, is used to find the actual data, which is stored by its content address in the blobs directory. Additionally, signing is now applied to the manifest files, rather than to the spec files themselves. --- lib/spack/docs/binary_caches.rst | 205 ++- lib/spack/docs/signing.rst | 140 +- lib/spack/spack/binary_distribution.py | 1549 ++++++++--------- lib/spack/spack/buildcache_migrate.py | 351 ++++ lib/spack/spack/ci/__init__.py | 51 +- lib/spack/spack/ci/common.py | 36 +- lib/spack/spack/ci/gitlab.py | 22 +- lib/spack/spack/cmd/bootstrap.py | 9 +- lib/spack/spack/cmd/buildcache.py | 249 ++- lib/spack/spack/cmd/ci.py | 2 +- lib/spack/spack/error.py | 13 + lib/spack/spack/installer.py | 9 +- lib/spack/spack/schema/buildcache_spec.py | 6 +- .../spack/schema/url_buildcache_manifest.py | 45 + lib/spack/spack/test/bindist.py | 467 ++++- lib/spack/spack/test/build_distribution.py | 9 +- lib/spack/spack/test/cmd/buildcache.py | 362 +++- lib/spack/spack/test/cmd/ci.py | 30 +- lib/spack/spack/test/cmd/gpg.py | 19 +- lib/spack/spack/test/cmd/mirror.py | 7 +- lib/spack/spack/test/conftest.py | 4 - ...l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json | 54 - ...2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spack | Bin 10240 -> 0 bytes ...8A04B882E19D85FD36EE069565D80B055C92FF.pub | 29 + ...9D4971F0097B1E7A3EB57371B484802E78D7CD.pub | 29 + .../signed/build_cache/_pgp/index.json | 1 + .../v2_layout/signed/build_cache/index.json | 1 + .../signed/build_cache/index.json.hash | 1 + ...qqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig | 124 ++ ...vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig | 72 + ...729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack | Bin 0 -> 4099 bytes ....13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack | Bin 0 -> 3633 bytes ...3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig | 429 +++++ ...ipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig | 317 ++++ ...cxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig | 99 ++ ...pzeljwairalfjm3k6fntbb64nt6n.spec.json.sig | 151 ++ ...729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack | Bin 0 -> 9063 bytes ....13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack | Bin 0 -> 8625 bytes ...1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack | Bin 0 -> 5589 bytes ...2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack | Bin 0 -> 5077 bytes .../v2_layout/unsigned/build_cache/index.json | 1 + .../unsigned/build_cache/index.json.hash | 1 + ...sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json | 105 ++ ...rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json | 53 + 
...729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack | Bin 0 -> 4092 bytes ....13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack | Bin 0 -> 3622 bytes ...u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json | 410 +++++ ...jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json | 298 ++++ ...qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json | 80 + ...izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json | 132 ++ ...729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack | Bin 0 -> 9058 bytes ....13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack | Bin 0 -> 8627 bytes ...1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack | Bin 0 -> 5596 bytes ...2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack | Bin 0 -> 5070 bytes lib/spack/spack/url_buildcache.py | 1239 +++++++++++++ .../gitlab/cloud_pipelines/configs/ci.yaml | 23 +- .../configs/multi-src-mirrors.yaml.in | 5 - .../aws-pcluster-neoverse_v1/spack.yaml | 5 +- .../stacks/aws-pcluster-x86_64_v4/spack.yaml | 5 +- share/spack/spack-completion.bash | 11 +- share/spack/spack-completion.fish | 13 + 61 files changed, 5940 insertions(+), 1333 deletions(-) create mode 100644 lib/spack/spack/buildcache_migrate.py create mode 100644 lib/spack/spack/schema/url_buildcache_manifest.py delete mode 100644 lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json delete mode 100644 lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2/gcc-4.5.0/archive-files-2.0/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/A98A04B882E19D85FD36EE069565D80B055C92FF.pub create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/E89D4971F0097B1E7A3EB57371B484802E78D7CD.pub create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/index.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json.hash create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libdwarf-20130729/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json.sig 
create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/compiler-wrapper-1.0/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/gcc-runtime-10.2.1/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/index.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/index.json.hash create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2/gcc-10.2.1/libdwarf-20130729/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/none-none/compiler-wrapper-1.0/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/none-none/gcc-runtime-10.2.1/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack create mode 100644 lib/spack/spack/url_buildcache.py diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst index 35f7441fe1f..f736fab75f0 100644 --- 
a/lib/spack/docs/binary_caches.rst +++ b/lib/spack/docs/binary_caches.rst @@ -45,10 +45,14 @@ provided binary cache, which can be a local directory or a remote URL. Here is an example where a build cache is created in a local directory named "spack-cache", to which we push the "ninja" spec: +ninja-1.12.1-vmvycib6vmiofkdqgrblo7zsvp7odwut + .. code-block:: console $ spack buildcache push ./spack-cache ninja - ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache + ==> Selected 30 specs to push to file:///home/spackuser/spack/spack-cache + ... + ==> [30/30] Pushed ninja@1.12.1/ngldn2k Note that ``ninja`` must be installed locally for this to work. @@ -98,9 +102,10 @@ Now you can use list: .. code-block:: console $ spack buildcache list - ==> 1 cached build. - -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------ - ninja@1.10.2 + ==> 24 cached builds. + -- linux-ubuntu22.04-sapphirerapids / gcc@12.3.0 ---------------- + [ ... ] + ninja@1.12.1 With ``mymirror`` configured and an index available, Spack will automatically use it during concretization and installation. That means that you can expect @@ -111,17 +116,17 @@ verify by re-installing ninja: $ spack uninstall ninja $ spack install ninja - ==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz - ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig - gpg: Signature made Do 12 Jan 2023 16:01:04 CET - gpg: using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6 + [ ... ] + ==> Installing ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh [24/24] + gpg: Signature made Thu 06 Mar 2025 10:03:38 AM MST + gpg: using RSA key 75BC0528114909C076E2607418010FFAD73C9B07 gpg: Good signature from "example (GPG created for Spack) " [ultimate] - ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack - ==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache - ==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz - Search: 0.00s. Fetch: 0.17s. Install: 0.12s. Total: 0.29s - [+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz - + ==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/f0/f08eb62661ad159d2d258890127fc6053f5302a2f490c1c7f7bd677721010ee0 + ==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/c7/c79ac6e40dfdd01ac499b020e52e57aa91151febaea3ad183f90c0f78b64a31a + ==> Extracting ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh from binary cache + ==> ninja: Successfully installed ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh + Search: 0.00s. Fetch: 0.11s. Install: 0.11s. Extract: 0.10s. Relocate: 0.00s. Total: 0.22s + [+] /home/spackuser/spack/opt/spack/linux-ubuntu22.04-sapphirerapids/gcc-12.3.0/ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh It worked! You've just completed a full example of creating a build cache with a spec of interest, adding it as a mirror, updating its index, listing the contents, @@ -344,19 +349,18 @@ which lets you get started quickly. See the following resources for more informa ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Create tarball of installed Spack package and all dependencies. -Tarballs are checksummed and signed if gpg2 is available. 
-Places them in a directory ``build_cache`` that can be copied to a mirror. -Commands like ``spack buildcache install`` will search Spack mirrors for build_cache to get the list of build caches. +Tarballs and specfiles are compressed and checksummed, manifests are signed if gpg2 is available. +Commands like ``spack buildcache install`` will search Spack mirrors to get the list of build caches. ============== ======================================================================================================================== Arguments Description ============== ======================================================================================================================== ```` list of partial specs or hashes with a leading ``/`` to match from installed packages and used for creating build caches -``-d `` directory in which ``build_cache`` directory is created, defaults to ``.`` -``-f`` overwrite ``.spack`` file in ``build_cache`` directory if it exists +``-d `` directory in which ``v3`` and ``blobs`` directories are created, defaults to ``.`` +``-f`` overwrite compressed tarball and spec metadata files if they already exist ``-k `` the key to sign package with. In the case where multiple keys exist, the package will be unsigned unless ``-k`` is used. ``-r`` make paths in binaries relative before creating tarball -``-y`` answer yes to all create unsigned ``build_cache`` questions +``-y`` answer yes to all questions about creating unsigned build caches ============== ======================================================================================================================== ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -397,6 +401,165 @@ List public keys available on Spack mirror. ========= ============================================== Arguments Description ========= ============================================== -``-i`` trust the keys downloaded with prompt for each +``-it`` trust the keys downloaded with prompt for each ``-y`` answer yes to all trust all keys downloaded ========= ============================================== + +.. _build_cache_layout: + +------------------ +Build Cache Layout +------------------ + +This section describes the structure and content of URL-style build caches, as +distinguished from OCI-style build caches. + +The entry point for a binary package is a manifest json file that points to at +least two other files stored as content-addressed blobs. These files include a spec +metadata file, as well as the installation directory of the package stored as +a compressed archive file. Binary package manifest files are named to indicate +the package name and version, as well as the hash of the concrete spec. For +example:: + + gcc-runtime-12.3.0-qyu2lvgt3nxh7izxycugdbgf5gsdpkjt.spec.manifest.json + +would contain the manifest for a binary package of ``gcc-runtime@12.3.0``. +The id of the built package is defined to be the DAG hash of the concrete spec, +and exists in the name of the file as well. The id distinguishes a particular +binary package from all other binary packages with the same package name and +version. Below is an example binary package manifest file. 
Such a file would
+live in the versioned spec manifests directory of a binary mirror, for example
+``v3/manifests/spec/``::
+
+    {
+      "version": 3,
+      "data": [
+        {
+          "contentLength": 10731083,
+          "mediaType": "application/vnd.spack.install.v2.tar+gzip",
+          "compression": "gzip",
+          "checksumAlgorithm": "sha256",
+          "checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210"
+        },
+        {
+          "contentLength": 1000,
+          "mediaType": "application/vnd.spack.spec.v5+json",
+          "compression": "gzip",
+          "checksumAlgorithm": "sha256",
+          "checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041"
+        }
+      ]
+    }
+
+The manifest points to both the compressed tar file and the compressed
+spec metadata file, and contains the checksum of each. This checksum
+is also used as the address of the associated file and hence must be
+known in order to locate the tarball or spec file within the mirror. Once the
+tarball or spec metadata file is downloaded, the checksum should be computed locally
+and compared to the checksum in the manifest to ensure the contents have not changed
+since the binary package was pushed. Spack stores all data files (including compressed
+tar files, spec metadata, indices, public keys, etc) within a ``blobs/<checksum-algorithm>/``
+directory, using the first two characters of the checksum as a sub-directory
+to reduce the number of files in a single folder. Here is a depiction of the
+organization of binary mirror contents::
+
+    mirror_directory/
+      v3/
+        layout.json
+        manifests/
+          spec/
+            gcc-runtime/
+              gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json
+            gmake/
+              gmake-4.4.1-lpr4j77rcgkg5536tmiuzwzlcjsiomph.spec.manifest.json
+            compiler-wrapper/
+              compiler-wrapper-1.0-s7ieuyievp57vwhthczhaq2ogowf3ohe.spec.manifest.json
+          index/
+            index.manifest.json
+          key/
+            75BC0528114909C076E2607418010FFAD73C9B07.key.manifest.json
+            keys.manifest.json
+      blobs/
+        sha256/
+          0f/
+            0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210
+          fb/
+            fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041
+          2a/
+            2a21836d206ccf0df780ab0be63fdf76d24501375306a35daa6683c409b7922f
+          ...
+
+Files within the ``manifests`` directory are organized into subdirectories by
+the type of entity they represent. Binary package manifests live in the ``spec/``
+directory, binary cache index manifests live in the ``index/`` directory, and
+manifests for public keys and their indices live in the ``key/`` subdirectory.
+Regardless of the type of entity they represent, all manifest files are named
+with an extension ``.manifest.json``.
+
+Every manifest contains a ``data`` array, each element of which refers to an
+associated file stored as a content-addressed blob. Considering the example spec
+manifest shown above, the compressed installation archive can be found by
+picking out the data blob with the appropriate ``mediaType``, which in this
+case would be ``application/vnd.spack.install.v2.tar+gzip``. The associated
+file is found by looking in the blobs directory under ``blobs/sha256/0f/`` for
+the file named with the complete checksum value.
+
+As mentioned above, every entity in a binary mirror (aka build cache) is stored
+as a content-addressed blob pointed to by a manifest.
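(Editorial aside, not part of this patch: the addressing rules above are simple
to exercise outside of Spack. The sketch below assumes a manifest already
downloaded from a filesystem mirror and assumes the ``checksumAlgorithm`` is
sha256; the helper name ``find_blob`` and its arguments are hypothetical, not
Spack API.)

.. code-block:: python

   import hashlib
   import json
   import os


   def find_blob(mirror_root: str, manifest_path: str, media_type: str) -> str:
       """Locate the blob described by the manifest entry with ``media_type``
       and verify that its sha256 digest matches the recorded checksum."""
       with open(manifest_path, encoding="utf-8") as f:
           manifest = json.load(f)

       # Pick out the data record of interest, e.g. the compressed install
       # tree ("application/vnd.spack.install.v2.tar+gzip").
       record = next(d for d in manifest["data"] if d["mediaType"] == media_type)
       checksum = record["checksum"]

       # Content addressing: blobs/<algorithm>/<first two checksum chars>/<checksum>
       blob_path = os.path.join(
           mirror_root, "blobs", record["checksumAlgorithm"], checksum[:2], checksum
       )
       # Recompute the digest locally; a mismatch means the blob changed after
       # the manifest was written (the sketch assumes sha256).
       with open(blob_path, "rb") as f:
           if hashlib.sha256(f.read()).hexdigest() != checksum:
               raise ValueError(f"checksum mismatch for {blob_path}")
       return blob_path

For the spec manifest above, requesting ``application/vnd.spack.install.v2.tar+gzip``
would resolve to the blob under ``blobs/sha256/0f/``.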
+While an example spec manifest (i.e. a manifest for a binary package) is shown
+above, here is what the manifest of a build cache index looks like::
+
+    {
+      "version": 3,
+      "data": [
+        {
+          "contentLength": 6411,
+          "mediaType": "application/vnd.spack.db.v8+json",
+          "compression": "none",
+          "checksumAlgorithm": "sha256",
+          "checksum": "225a3e9da24d201fdf9d8247d66217f5b3f4d0fc160db1498afd998bfd115234"
+        }
+      ]
+    }
+
+Some things to note about this manifest are that it points to a blob that is not
+compressed (``compression: "none"``), and that the ``mediaType`` is one we have
+not seen yet, ``application/vnd.spack.db.v8+json``. The decision not to compress
+build cache indices stems from the fact that spack does not yet sign build cache
+index manifests. Once that changes, you may start to see these indices stored as
+compressed blobs.
+
+For completeness, here are examples of manifests for the other two types of entities
+you might find in a spack build cache. First, a public key manifest::
+
+    {
+      "version": 3,
+      "data": [
+        {
+          "contentLength": 2472,
+          "mediaType": "application/pgp-keys",
+          "compression": "none",
+          "checksumAlgorithm": "sha256",
+          "checksum": "9fc18374aebc84deb2f27898da77d4d4410e5fb44c60c6238cb57fb36147e5c7"
+        }
+      ]
+    }
+
+Note the ``mediaType`` of ``application/pgp-keys``. Finally, a public key index manifest::
+
+    {
+      "version": 3,
+      "data": [
+        {
+          "contentLength": 56,
+          "mediaType": "application/vnd.spack.keyindex.v1+json",
+          "compression": "none",
+          "checksumAlgorithm": "sha256",
+          "checksum": "29b3a0eb6064fd588543bc43ac7d42d708a69058dafe4be0859e3200091a9a1c"
+        }
+      ]
+    }
+
+Again note the ``mediaType`` of ``application/vnd.spack.keyindex.v1+json``. Also note
+that both of the above manifest examples refer to uncompressed blobs; this is for the same
+reason spack does not yet compress build cache index blobs.
diff --git a/lib/spack/docs/signing.rst b/lib/spack/docs/signing.rst
index 5a34305cbdd..2ef0429711b 100644
--- a/lib/spack/docs/signing.rst
+++ b/lib/spack/docs/signing.rst
@@ -176,92 +176,72 @@ community without needing deep familiarity with GnuPG or Public Key
 Infrastructure.
 
 
-.. _build_cache_format:
+.. _build_cache_signing:
 
-------------------
-Build Cache Format
-------------------
+-------------------
+Build Cache Signing
+-------------------
 
-A binary package consists of a metadata file unambiguously defining the
-built package (and including other details such as how to relocate it)
-and the installation directory of the package stored as a compressed
-archive file. The metadata files can either be unsigned, in which case
-the contents are simply the json-serialized concrete spec plus metadata,
-or they can be signed, in which case the json-serialized concrete spec
-plus metadata is wrapped in a gpg cleartext signature. Built package
-metadata files are named to indicate the operating system and
-architecture for which the package was built as well as the compiler
-used to build it and the packages name and version. For example::
+For an in-depth description of the layout of a binary mirror, see
+the :ref:`documentation <build_cache_layout>` covering binary caches. The
+key takeaway from that discussion is that the entry point
+to a binary package is its manifest. The manifest refers unambiguously to the
+spec metadata and compressed archive, which are stored as content-addressed
+blobs.
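(Editorial aside, not part of this patch: a manifest may be bare JSON or wrapped
in a cleartext signature, as shown just below, so a reader has to handle both
forms. The minimal sketch that follows only extracts the JSON payload; the
``load_manifest_payload`` name is hypothetical, not Spack API, and actually
verifying the signature still requires gpg and a trusted keyring. Spack ships
its own equivalent machinery, e.g. ``Spec.extract_json_from_clearsig``.)

.. code-block:: python

   import json


   def load_manifest_payload(text: str) -> dict:
       """Return the manifest JSON, unwrapping a gpg cleartext signature if
       present. Does NOT verify the signature."""
       if text.startswith("-----BEGIN PGP SIGNED MESSAGE-----"):
           # Clearsign layout: hash header, blank line, payload, signature block.
           body = text.split("-----BEGIN PGP SIGNATURE-----", 1)[0]
           return json.loads(body.split("\n\n", 1)[1])
       return json.loads(text)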
- linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig - -would contain the concrete spec and binary metadata for a binary package -of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell`` -architecture. The id of the built package exists in the name of the file -as well (after the package name and version) and in this case begins -with ``llv2ys``. The id distinguishes a particular built package from all -other built packages with the same os/arch, compiler, name, and version. -Below is an example of a signed binary package metadata file. Such a -file would live in the ``build_cache`` directory of a binary mirror:: +The manifest files can either be signed or unsigned, but are always given +a name ending with ``.spec.manifest.json`` regardless. The difference between +signed and unsigned manifests is simply that the signed version is wrapped in +a gpg cleartext signature, as illustrated below:: -----BEGIN PGP SIGNED MESSAGE----- Hash: SHA512 { - "spec": { - - }, - - "buildcache_layout_version": 1, - "binary_cache_checksum": { - "hash_algorithm": "sha256", - "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423" - } + "version": 3, + "data": [ + { + "contentLength": 10731083, + "mediaType": "application/vnd.spack.install.v2.tar+gzip", + "compression": "gzip", + "checksumAlgorithm": "sha256", + "checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210" + }, + { + "contentLength": 1000, + "mediaType": "application/vnd.spack.spec.v5+json", + "compression": "gzip", + "checksumAlgorithm": "sha256", + "checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041" + } + ] } - -----BEGIN PGP SIGNATURE----- - iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc - ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn - 4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+ - QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b - 887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6 - 4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH - qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7 - QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi - Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx - j3DXty57 - =3gvm + + iQGzBAEBCgAdFiEEdbwFKBFJCcB24mB0GAEP+tc8mwcFAmf2rr4ACgkQGAEP+tc8 + mwfefwv+KJs8MsQ5ovFaBdmyx5H/3k4rO4QHBzuSPOB6UaxErA9IyOB31iP6vNTU + HzYpxz6F5dJCJWmmNEMN/0+vjhMHEOkqd7M1l5reVcxduTF2yc4tBZUO2gienEHL + W0e+SnUznl1yc/aVpChUiahO2zToCsI8HZRNT4tu6iCnE/OpghqjsSdBOZHmSNDD + 5wuuCxfDUyWI6ZlLclaaB7RdbCUUJf/iqi711J+wubvnDFhc6Ynwm1xai5laJ1bD + ev3NrSb2AAroeNFVo4iECA0fZC1OZQYzaRmAEhBXtCideGJ5Zf2Cp9hmCwNK8Hq6 + bNt94JP9LqC3FCCJJOMsPyOOhMSA5MU44zyyzloRwEQpHHLuFzVdbTHA3dmTc18n + HxNLkZoEMYRc8zNr40g0yb2lCbc+P11TtL1E+5NlE34MX15mPewRCiIFTMwhCnE3 + gFSKtW1MKustZE35/RUwd2mpJRf+mSRVCl1f1RiFjktLjz7vWQq7imIUSam0fPDr + XD4aDogm + =RrFX -----END PGP SIGNATURE----- If a user has trusted the public key associated with the private key -used to sign the above spec file, the signature can be verified with +used to sign the above manifest file, the signature can be verified with gpg, as follows:: - $ gpg –verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig + $ gpg --verify gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json -The metadata (regardless whether signed or unsigned) contains the checksum -of the ``.spack`` file 
containing the actual installation. The checksum should -be compared to a checksum computed locally on the ``.spack`` file to ensure the -contents have not changed since the binary spec plus metadata were signed. The -``.spack`` files are actually tarballs containing the compressed archive of the -install tree. These files, along with the metadata files, live within the -``build_cache`` directory of the mirror, and together are organized as follows:: - - build_cache/ - # unsigned metadata (for indexing, contains sha256 of .spack file) - ----24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json - # clearsigned metadata (same as above, but signed) - ----24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig - / - / - -/ - # tar.gz-compressed prefix (may support more compression formats later) - ----24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack - -Uncompressing and extracting the ``.spack`` file results in the install tree. -This is in contrast to previous versions of spack, where the ``.spack`` file -contained a (duplicated) metadata file, a signature file and a nested tarball -containing the install tree. +When attempting to install a binary package that has been signed, spack will +attempt to verify the signature with one of the trusted keys in its keyring, +and will fail if unable to do so. While not recommended, it is possible to +force installation of a signed package without verification by providing the +``--no-check-signature`` argument to ``spack install ...``. .. _internal_implementation: @@ -320,10 +300,10 @@ the following way: Reputational Public Key are imported into a keyring by the ``spack gpg …`` sub-command. This is initiated by the job’s build script which is created by the generate job at the beginning of the pipeline. -4. Assuming the package has dependencies those specs are verified using +4. Assuming the package has dependencies those spec manifests are verified using the keyring. -5. The package is built and the spec.json is generated -6. The spec.json is signed by the keyring and uploaded to the mirror’s +5. The package is built and the spec manifest is generated +6. The spec manifest is signed by the keyring and uploaded to the mirror’s build cache. **Reputational Key** @@ -376,24 +356,24 @@ following way: 4. In addition to the secret, the runner creates a tmpfs memory mounted directory where the GnuPG keyring will be created to verify, and then resign the package specs. -5. The job script syncs all spec.json.sig files from the build cache to +5. The job script syncs all spec manifest files from the build cache to a working directory in the job’s execution environment. 6. The job script then runs the ``sign.sh`` script built into the notary Docker image. 7. The ``sign.sh`` script imports the public components of the Reputational and Intermediate CI Keys and uses them to verify good - signatures on the spec.json.sig files. If any signed spec does not - verify the job immediately fails. -8. Assuming all specs are verified, the ``sign.sh`` script then unpacks - the spec json data from the signed file in preparation for being + signatures on the spec.manifest.json files. If any signed manifest + does not verify, the job immediately fails. +8. Assuming all manifests are verified, the ``sign.sh`` script then unpacks + the manifest json data from the signed file in preparation for being re-signed with the Reputational Key. 9. 
The private components of the Reputational Key are decrypted to standard out using ``aws-encryption-cli`` directly into a ``gpg –import …`` statement which imports the key into the keyring mounted in-memory. -10. The private key is then used to sign each of the json specs and the +10. The private key is then used to sign each of the manifests and the keyring is removed from disk. -11. The re-signed json specs are resynced to the AWS S3 Mirror and the +11. The re-signed manifests are resynced to the AWS S3 Mirror and the public signing of the packages for the develop or release pipeline that created them is complete. diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 167e2129a97..a46b2fcb938 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -7,6 +7,7 @@ import concurrent.futures import contextlib import copy +import datetime import hashlib import io import itertools @@ -24,7 +25,7 @@ import urllib.request import warnings from contextlib import closing -from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union +from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Union import llnl.util.filesystem as fsys import llnl.util.lang @@ -32,7 +33,7 @@ from llnl.util.filesystem import mkdirp import spack.caches -import spack.config as config +import spack.config import spack.database as spack_db import spack.deptypes as dt import spack.error @@ -83,16 +84,20 @@ from spack.util.executable import which from .enums import InstallRecordStatus - -BUILD_CACHE_RELATIVE_PATH = "build_cache" -BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp" - -#: The build cache layout version that this version of Spack creates. -#: Version 2: includes parent directories of the package prefix in the tarball -CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2 - - -INDEX_HASH_FILE = "index.json.hash" +from .url_buildcache import ( + CURRENT_BUILD_CACHE_LAYOUT_VERSION, + SUPPORTED_LAYOUT_VERSIONS, + BlobRecord, + BuildcacheComponent, + BuildcacheEntryError, + BuildcacheManifest, + InvalidMetadataFile, + MirrorForSpec, + MirrorURLAndVersion, + URLBuildcacheEntry, + get_url_buildcache_class, + get_valid_spec_file, +) class BuildCacheDatabase(spack_db.Database): @@ -166,10 +171,11 @@ def __init__(self, cache_root: Optional[str] = None): self._index_contents_key = "contents.json" # a FileCache instance storing copies of remote binary cache indices - self._index_file_cache: Optional[file_cache.FileCache] = None + self._index_file_cache: file_cache.FileCache = file_cache.FileCache(self._index_cache_root) + self._index_file_cache_initialized = False - # stores a map of mirror URL to index hash and cache key (index path) - self._local_index_cache: Optional[dict] = None + # stores a map of mirror URL and version layout to index hash and cache key (index path) + self._local_index_cache: dict[str, dict] = {} # hashes of remote indices already ingested into the concrete spec # cache (_mirrors_for_spec) @@ -177,22 +183,15 @@ def __init__(self, cache_root: Optional[str] = None): # mapping from mirror urls to the time.time() of the last index fetch and a bool indicating # whether the fetch succeeded or not. - self._last_fetch_times: Dict[str, float] = {} + self._last_fetch_times: Dict[MirrorURLAndVersion, float] = {} # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of # entries indicating mirrors where that concrete spec can be found. 
- # Each entry is a dictionary consisting of: - # - # - the mirror where the spec is, keyed by ``mirror_url`` - # - the concrete spec itself, keyed by ``spec`` (including the - # full hash, since the dag hash may match but we want to - # use the updated source if available) - self._mirrors_for_spec: Dict[str, dict] = {} + # Each entry is a MirrorURLAndVersion. + self._mirrors_for_spec: Dict[str, List[MirrorForSpec]] = {} def _init_local_index_cache(self): - if not self._index_file_cache: - self._index_file_cache = file_cache.FileCache(self._index_cache_root) - + if not self._index_file_cache_initialized: cache_key = self._index_contents_key self._index_file_cache.init_entry(cache_key) @@ -203,13 +202,15 @@ def _init_local_index_cache(self): with self._index_file_cache.read_transaction(cache_key) as cache_file: self._local_index_cache = json.load(cache_file) + self._index_file_cache_initialized = True + def clear(self): """For testing purposes we need to be able to empty the cache and clear associated data structures.""" if self._index_file_cache: self._index_file_cache.destroy() - self._index_file_cache = None - self._local_index_cache = None + self._index_file_cache = file_cache.FileCache(self._index_cache_root) + self._local_index_cache = {} self._specs_already_associated = set() self._last_fetch_times = {} self._mirrors_for_spec = {} @@ -231,18 +232,21 @@ def regenerate_spec_cache(self, clear_existing=False): self._specs_already_associated = set() self._mirrors_for_spec = {} - for mirror_url in self._local_index_cache: - cache_entry = self._local_index_cache[mirror_url] + for url_and_version in self._local_index_cache: + cache_entry = self._local_index_cache[url_and_version] cached_index_path = cache_entry["index_path"] cached_index_hash = cache_entry["index_hash"] if cached_index_hash not in self._specs_already_associated: - self._associate_built_specs_with_mirror(cached_index_path, mirror_url) + self._associate_built_specs_with_mirror( + cached_index_path, MirrorURLAndVersion.from_string(url_and_version) + ) self._specs_already_associated.add(cached_index_hash) - def _associate_built_specs_with_mirror(self, cache_key, mirror_url): - tmpdir = tempfile.mkdtemp() + def _associate_built_specs_with_mirror(self, cache_key, url_and_version: MirrorURLAndVersion): + mirror_url = url_and_version.url + layout_version = url_and_version.version - try: + with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: db = BuildCacheDatabase(tmpdir) try: @@ -252,8 +256,9 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url): db._read_from_file(pathlib.Path(cache_path)) except spack_db.InvalidDatabaseVersionError as e: tty.warn( - f"you need a newer Spack version to read the buildcache index for the " - f"following mirror: '{mirror_url}'. {e.database_version_message}" + "you need a newer Spack version to read the buildcache index " + f"for the following v{layout_version} mirror: '{mirror_url}'. " + f"{e.database_version_message}" ) return @@ -272,15 +277,16 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url): for entry in self._mirrors_for_spec[dag_hash]: # A binary mirror can only have one spec per DAG hash, so # if we already have an entry under this DAG hash for this - # mirror url, we're done. - if entry["mirror_url"] == mirror_url: + # mirror url/layout version, we're done. 
+ if ( + entry.url_and_version.url == mirror_url + and entry.url_and_version.version == layout_version + ): break else: self._mirrors_for_spec[dag_hash].append( - {"mirror_url": mirror_url, "spec": indexed_spec} + MirrorForSpec(url_and_version, indexed_spec) ) - finally: - shutil.rmtree(tmpdir) def get_all_built_specs(self): spec_list = [] @@ -289,7 +295,7 @@ def get_all_built_specs(self): # with the same DAG hash are equivalent, so we can just # return the first one in the list. if len(self._mirrors_for_spec[dag_hash]) > 0: - spec_list.append(self._mirrors_for_spec[dag_hash][0]["spec"]) + spec_list.append(self._mirrors_for_spec[dag_hash][0].spec) return spec_list @@ -340,9 +346,9 @@ def find_by_hash(self, find_hash, mirrors_to_check=None): if not mirrors_to_check: return results mirror_urls = mirrors_to_check.values() - return [r for r in results if r["mirror_url"] in mirror_urls] + return [r for r in results if r.url_and_version.url in mirror_urls] - def update_spec(self, spec, found_list): + def update_spec(self, spec: spack.spec.Spec, found_list: List[MirrorForSpec]): """ Take list of {'mirror_url': m, 'spec': s} objects and update the local built_spec_cache @@ -355,13 +361,11 @@ def update_spec(self, spec, found_list): current_list = self._mirrors_for_spec[spec_dag_hash] for new_entry in found_list: for cur_entry in current_list: - if new_entry["mirror_url"] == cur_entry["mirror_url"]: - cur_entry["spec"] = new_entry["spec"] + if new_entry.url_and_version == cur_entry.url_and_version: + cur_entry.spec = new_entry.spec break else: - current_list.append( - {"mirror_url": new_entry["mirror_url"], "spec": new_entry["spec"]} - ) + current_list.append(MirrorForSpec(new_entry.url_and_version, new_entry.spec)) def update(self, with_cooldown=False): """Make sure local cache of buildcache index files is up to date. 
@@ -373,8 +377,10 @@ def update(self, with_cooldown=False): from each configured mirror and stored locally (both in memory and on disk under ``_index_cache_root``).""" self._init_local_index_cache() - configured_mirror_urls = [ - m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values() + configured_mirrors = [ + MirrorURLAndVersion(m.fetch_url, layout_version) + for layout_version in SUPPORTED_LAYOUT_VERSIONS + for m in spack.mirrors.mirror.MirrorCollection(binary=True).values() ] items_to_remove = [] spec_cache_clear_needed = False @@ -408,34 +414,36 @@ def update(self, with_cooldown=False): ttl = spack.config.get("config:binary_index_ttl", 600) now = time.time() - for cached_mirror_url in self._local_index_cache: - cache_entry = self._local_index_cache[cached_mirror_url] + for local_index_cache_key in self._local_index_cache: + urlAndVersion = MirrorURLAndVersion.from_string(local_index_cache_key) + cached_mirror_url = urlAndVersion.url + cache_entry = self._local_index_cache[local_index_cache_key] cached_index_path = cache_entry["index_path"] - if cached_mirror_url in configured_mirror_urls: + if urlAndVersion in configured_mirrors: # Only do a fetch if the last fetch was longer than TTL ago if ( with_cooldown and ttl > 0 and cached_mirror_url in self._last_fetch_times - and now - self._last_fetch_times[cached_mirror_url][0] < ttl + and now - self._last_fetch_times[urlAndVersion][0] < ttl ): # We're in the cooldown period, don't try to fetch again # If the fetch succeeded last time, consider this update a success, otherwise # re-report the error here - if self._last_fetch_times[cached_mirror_url][1]: + if self._last_fetch_times[urlAndVersion][1]: all_methods_failed = False else: # May need to fetch the index and update the local caches try: needs_regen = self._fetch_and_cache_index( - cached_mirror_url, cache_entry=cache_entry + urlAndVersion, cache_entry=cache_entry ) - self._last_fetch_times[cached_mirror_url] = (now, True) + self._last_fetch_times[urlAndVersion] = (now, True) all_methods_failed = False except FetchIndexError as e: needs_regen = False fetch_errors.append(e) - self._last_fetch_times[cached_mirror_url] = (now, False) + self._last_fetch_times[urlAndVersion] = (now, False) # The need to regenerate implies a need to clear as well. spec_cache_clear_needed |= needs_regen spec_cache_regenerate_needed |= needs_regen @@ -443,12 +451,12 @@ def update(self, with_cooldown=False): # No longer have this mirror, cached index should be removed items_to_remove.append( { - "url": cached_mirror_url, + "url": local_index_cache_key, "cache_key": os.path.join(self._index_cache_root, cached_index_path), } ) - if cached_mirror_url in self._last_fetch_times: - del self._last_fetch_times[cached_mirror_url] + if urlAndVersion in self._last_fetch_times: + del self._last_fetch_times[urlAndVersion] spec_cache_clear_needed = True spec_cache_regenerate_needed = True @@ -462,19 +470,19 @@ def update(self, with_cooldown=False): # Iterate the configured mirrors now. Any mirror urls we do not # already have in our cache must be fetched, stored, and represented # locally. 
- for mirror_url in configured_mirror_urls: - if mirror_url in self._local_index_cache: + for urlAndVersion in configured_mirrors: + if str(urlAndVersion) in self._local_index_cache: continue # Need to fetch the index and update the local caches try: - needs_regen = self._fetch_and_cache_index(mirror_url) - self._last_fetch_times[mirror_url] = (now, True) + needs_regen = self._fetch_and_cache_index(urlAndVersion) + self._last_fetch_times[urlAndVersion] = (now, True) all_methods_failed = False except FetchIndexError as e: fetch_errors.append(e) needs_regen = False - self._last_fetch_times[mirror_url] = (now, False) + self._last_fetch_times[urlAndVersion] = (now, False) # Generally speaking, a new mirror wouldn't imply the need to # clear the spec cache, so leave it as is. if needs_regen: @@ -482,7 +490,7 @@ def update(self, with_cooldown=False): self._write_local_index_cache() - if configured_mirror_urls and all_methods_failed: + if configured_mirrors and all_methods_failed: raise FetchCacheError(fetch_errors) if fetch_errors: tty.warn( @@ -492,14 +500,14 @@ def update(self, with_cooldown=False): if spec_cache_regenerate_needed: self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed) - def _fetch_and_cache_index(self, mirror_url, cache_entry={}): + def _fetch_and_cache_index(self, url_and_version: MirrorURLAndVersion, cache_entry={}): """Fetch a buildcache index file from a remote mirror and cache it. If we already have a cached index from this mirror, then we first check if the hash has changed, and we avoid fetching it if not. Args: - mirror_url (str): Base url of mirror + url_and_version: Contains mirror base url and target binary cache layout version cache_entry (dict): Old cache metadata with keys ``index_hash``, ``index_path``, ``etag`` @@ -509,24 +517,18 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}): Throws: FetchIndexError """ + mirror_url = url_and_version.url + layout_version = url_and_version.version + # TODO: get rid of this request, handle 404 better scheme = urllib.parse.urlparse(mirror_url).scheme - if scheme != "oci" and not web_util.url_exists( - url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE) - ): - return False - - if scheme == "oci": - # TODO: Actually etag and OCI are not mutually exclusive... 
- fetcher = OCIIndexFetcher(mirror_url, cache_entry.get("index_hash", None)) - elif cache_entry.get("etag"): - fetcher = EtagIndexFetcher(mirror_url, cache_entry["etag"]) - else: - fetcher = DefaultIndexFetcher( - mirror_url, local_hash=cache_entry.get("index_hash", None) - ) + if scheme != "oci": + cache_class = get_url_buildcache_class(layout_version=layout_version) + if not web_util.url_exists(cache_class.get_index_url(mirror_url)): + return False + fetcher: IndexFetcher = get_index_fetcher(scheme, url_and_version, cache_entry) result = fetcher.conditional_fetch() # Nothing to do @@ -534,13 +536,13 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}): return False # Persist new index.json - url_hash = compute_hash(mirror_url) + url_hash = compute_hash(f"{mirror_url}/v{layout_version}") cache_key = "{}_{}.json".format(url_hash[:10], result.hash[:10]) self._index_file_cache.init_entry(cache_key) with self._index_file_cache.write_transaction(cache_key) as (old, new): new.write(result.data) - self._local_index_cache[mirror_url] = { + self._local_index_cache[str(url_and_version)] = { "index_hash": result.hash, "index_path": cache_key, "etag": result.etag, @@ -572,18 +574,6 @@ def compute_hash(data): return hashlib.sha256(data).hexdigest() -def build_cache_relative_path(): - return BUILD_CACHE_RELATIVE_PATH - - -def build_cache_keys_relative_path(): - return BUILD_CACHE_KEYS_RELATIVE_PATH - - -def build_cache_prefix(prefix): - return os.path.join(prefix, build_cache_relative_path()) - - def buildinfo_file_name(prefix): """Filename of the binary package meta-data file""" return os.path.join(prefix, ".spack", "binary_distribution") @@ -631,31 +621,55 @@ def get_buildinfo_dict(spec): } -def tarball_directory_name(spec): - """ - Return name of the tarball directory according to the convention - -//-/ - """ - return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}") +def buildcache_relative_keys_path(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return os.path.join(*cache_class.get_relative_path_components(BuildcacheComponent.KEY)) -def tarball_name(spec, ext): - """ - Return the name of the tarfile according to the convention - --- - """ - spec_formatted = spec.format_path( - "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}" +def buildcache_relative_keys_url(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return url_util.join(*cache_class.get_relative_path_components(BuildcacheComponent.KEY)) + + +def buildcache_relative_specs_path(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return os.path.join(*cache_class.get_relative_path_components(BuildcacheComponent.SPEC)) + + +def buildcache_relative_specs_url(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return url_util.join(*cache_class.get_relative_path_components(BuildcacheComponent.SPEC)) + + +def buildcache_relative_blobs_path(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return os.path.join(*cache_class.get_relative_path_components(BuildcacheComponent.BLOB)) + + +def buildcache_relative_blobs_url(layout_version: int = 
CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return url_util.join(*cache_class.get_relative_path_components(BuildcacheComponent.BLOB)) + + +def buildcache_relative_index_path(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return os.path.join(*cache_class.get_relative_path_components(BuildcacheComponent.INDEX)) + + +def buildcache_relative_index_url(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return url_util.join(*cache_class.get_relative_path_components(BuildcacheComponent.INDEX)) + + +@llnl.util.lang.memoized +def warn_v2_layout(mirror_url: str, action: str) -> bool: + tty.warn( + f"{action} from a v2 binary mirror layout, located at \n" + f" {mirror_url} is deprecated. Support for this will be \n" + " removed in a future version of spack. Please consider running `spack \n" + " buildcache migrate' or rebuilding the specs in this mirror." ) - return f"{spec_formatted}{ext}" - - -def tarball_path_name(spec, ext): - """ - Return the full path+name for a given spec according to the convention - / - """ - return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext)) + return True def select_signing_key() -> str: @@ -672,11 +686,17 @@ def select_signing_key() -> str: return keys[0] -def sign_specfile(key: str, specfile_path: str) -> str: - """sign and return the path to the signed specfile""" - signed_specfile_path = f"{specfile_path}.sig" - spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True) - return signed_specfile_path +def _push_index(db: BuildCacheDatabase, temp_dir: str, cache_prefix: str): + """Generate the index, compute its hash, and push the files to the mirror""" + index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE) + with open(index_json_path, "w", encoding="utf-8") as f: + db._write_to_file(f) + + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_class.push_local_file_as_blob( + index_json_path, cache_prefix, "index", BuildcacheComponent.INDEX, compression="none" + ) + cache_class.maybe_push_layout_json(cache_prefix) def _read_specs_and_push_index( @@ -685,10 +705,8 @@ def _read_specs_and_push_index( cache_prefix: str, db: BuildCacheDatabase, temp_dir: str, - concurrency: int, ): - """Read all the specs listed in the provided list, using thread given thread parallelism, - generate the index, and push it to the mirror. + """Read listed specs, generate the index, and push it to the mirror. Args: file_list: List of urls or file paths pointing at spec files to read @@ -697,60 +715,21 @@ def _read_specs_and_push_index( cache_prefix: prefix of the build cache on s3 where index should be pushed. db: A spack database used for adding specs and then writing the index. temp_dir: Location to write index.json and hash for pushing - concurrency: Number of parallel processes to use when fetching """ for file in file_list: - contents = read_method(file) - # Need full spec.json name or this gets confused with index.json. 
- if file.endswith(".json.sig"): - specfile_json = spack.spec.Spec.extract_json_from_clearsig(contents) - fetched_spec = spack.spec.Spec.from_dict(specfile_json) - elif file.endswith(".json"): - fetched_spec = spack.spec.Spec.from_json(contents) - else: - continue - + fetched_spec = spack.spec.Spec.from_dict(read_method(file)) db.add(fetched_spec) db.mark(fetched_spec, "in_buildcache", True) - # Now generate the index, compute its hash, and push the two files to - # the mirror. - index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE) - with open(index_json_path, "w", encoding="utf-8") as f: - db._write_to_file(f) - - # Read the index back in and compute its hash - with open(index_json_path, encoding="utf-8") as f: - index_string = f.read() - index_hash = compute_hash(index_string) - - # Write the hash out to a local file - index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE) - with open(index_hash_path, "w", encoding="utf-8") as f: - f.write(index_hash) - - # Push the index itself - web_util.push_to_url( - index_json_path, - url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE), - keep_original=False, - extra_args={"ContentType": "application/json", "CacheControl": "no-cache"}, - ) - - # Push the hash - web_util.push_to_url( - index_hash_path, - url_util.join(cache_prefix, INDEX_HASH_FILE), - keep_original=False, - extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"}, - ) + _push_index(db, temp_dir, cache_prefix) -def _specs_from_cache_aws_cli(cache_prefix): +def _specs_from_cache_aws_cli(url: str, tmpspecsdir: str): """Use aws cli to sync all the specs into a local temporary directory. Args: - cache_prefix (str): prefix of the build cache on s3 + url: prefix of the build cache on s3 + tmpspecsdir: path to temporary directory to use for writing files Return: List of the local file paths and a function that can read each one from the file system. 
@@ -759,39 +738,42 @@ def _specs_from_cache_aws_cli(cache_prefix): file_list = None aws = which("aws") - def file_read_method(file_path): - with open(file_path, encoding="utf-8") as fd: - return fd.read() + if not aws: + tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch") + return file_list, read_fn + + def file_read_method(manifest_path): + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_entry = cache_class(url, allow_unsigned=True) + cache_entry.read_manifest(manifest_url=f"file://{manifest_path}") + spec_dict = cache_entry.fetch_metadata() + cache_entry.destroy() + return spec_dict - tmpspecsdir = tempfile.mkdtemp() sync_command_args = [ "s3", "sync", "--exclude", "*", "--include", - "*.spec.json.sig", - "--include", - "*.spec.json", - cache_prefix, + "*.spec.manifest.json", + url, tmpspecsdir, ] + tty.debug(f"Using aws s3 sync to download manifests from {url} to {tmpspecsdir}") + try: - tty.debug( - "Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir) - ) aws(*sync_command_args, output=os.devnull, error=os.devnull) - file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json"]) + file_list = fsys.find(tmpspecsdir, ["*.spec.manifest.json"]) read_fn = file_read_method except Exception: tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch") - shutil.rmtree(tmpspecsdir) return file_list, read_fn -def _specs_from_cache_fallback(url: str): +def _specs_from_cache_fallback(url: str, tmpspecsdir: str): """Use spack.util.web module to get a list of all the specs at the remote url. Args: @@ -804,20 +786,20 @@ def _specs_from_cache_fallback(url: str): read_fn = None file_list = None - def url_read_method(url): - contents = None - try: - _, _, spec_file = web_util.read_from_url(url) - contents = codecs.getreader("utf-8")(spec_file).read() - except (web_util.SpackWebError, OSError) as e: - tty.error(f"Error reading specfile: {url}: {e}") - return contents + def url_read_method(manifest_url): + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_entry = cache_class(url, allow_unsigned=True) + cache_entry.read_manifest(manifest_url) + spec_dict = cache_entry.fetch_metadata() + cache_entry.destroy() + return spec_dict try: + url_to_list = url_util.join(url, buildcache_relative_specs_url()) file_list = [ - url_util.join(url, entry) - for entry in web_util.list_url(url) - if entry.endswith("spec.json") or entry.endswith("spec.json.sig") + url_util.join(url_to_list, entry) + for entry in web_util.list_url(url_to_list, recursive=True) + if entry.endswith("spec.manifest.json") ] read_fn = url_read_method except Exception as err: @@ -828,12 +810,13 @@ def url_read_method(url): return file_list, read_fn -def _spec_files_from_cache(url: str): +def _spec_files_from_cache(url: str, tmpspecsdir: str): """Get a list of all the spec files in the mirror and a function to read them. 
Args: url: Base url of mirror (location of spec files) + tmpspecsdir: Temporary location for writing files Return: A tuple where the first item is a list of absolute file paths or @@ -848,59 +831,59 @@ def _spec_files_from_cache(url: str): callbacks.append(_specs_from_cache_fallback) for specs_from_cache_fn in callbacks: - file_list, read_fn = specs_from_cache_fn(url) + file_list, read_fn = specs_from_cache_fn(url, tmpspecsdir) if file_list: return file_list, read_fn raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(url)) -def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32): +def _url_generate_package_index(url: str, tmpdir: str): """Create or replace the build cache index on the given mirror. The buildcache index contains an entry for each binary package under the cache_prefix. Args: url: Base url of binary mirror. - concurrency: The desired threading concurrency to use when fetching the spec files from - the mirror. Return: None """ - url = url_util.join(url, build_cache_relative_path()) - try: - file_list, read_fn = _spec_files_from_cache(url) - except ListMirrorSpecsError as e: - raise GenerateIndexError(f"Unable to generate package index: {e}") from e + with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpspecsdir: + try: + file_list, read_fn = _spec_files_from_cache(url, tmpspecsdir) + except ListMirrorSpecsError as e: + raise GenerateIndexError(f"Unable to generate package index: {e}") from e - tty.debug(f"Retrieving spec descriptor files from {url} to build index") + tty.debug(f"Retrieving spec descriptor files from {url} to build index") - db = BuildCacheDatabase(tmpdir) - db._write() + db = BuildCacheDatabase(tmpdir) + db._write() - try: - _read_specs_and_push_index( - file_list, read_fn, url, db, str(db.database_directory), concurrency - ) - except Exception as e: - raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e + try: + _read_specs_and_push_index(file_list, read_fn, url, db, str(db.database_directory)) + except Exception as e: + raise GenerateIndexError( + f"Encountered problem pushing package index to {url}: {e}" + ) from e -def generate_key_index(key_prefix: str, tmpdir: str) -> None: +def generate_key_index(mirror_url: str, tmpdir: str) -> None: """Create the key index page. - Creates (or replaces) the "index.json" page at the location given in key_prefix. This page - contains an entry for each key (.pub) under key_prefix. + Creates (or replaces) the "index.json" page at the location given in mirror_url. This page + contains an entry for each key under mirror_url. 
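+
+    For illustration, the index written here is a JSON mapping of key
+    fingerprints to (currently empty) attribute dictionaries, roughly of the
+    following form, where the fingerprint shown is hypothetical:
+
+    .. code-block:: JSON
+
+        {"keys": {"0123456789ABCDEF0123456789ABCDEF01234567": {}}}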
""" - tty.debug(f"Retrieving key.pub files from {url_util.format(key_prefix)} to build key index") + tty.debug(f"Retrieving key.pub files from {url_util.format(mirror_url)} to build key index") + + key_prefix = url_util.join(mirror_url, buildcache_relative_keys_url()) try: fingerprints = ( - entry[:-4] + entry[:-18] for entry in web_util.list_url(key_prefix, recursive=False) - if entry.endswith(".pub") + if entry.endswith(".key.manifest.json") ) except Exception as e: raise CannotListKeys(f"Encountered problem listing keys at {key_prefix}: {e}") from e @@ -911,13 +894,17 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None: with open(target, "w", encoding="utf-8") as f: sjson.dump(index, f) + cache_class = get_url_buildcache_class() + try: - web_util.push_to_url( - target, - url_util.join(key_prefix, "index.json"), - keep_original=False, - extra_args={"ContentType": "application/json"}, + cache_class.push_local_file_as_blob( + local_file_path=target, + mirror_url=mirror_url, + manifest_name="keys", + component_type=BuildcacheComponent.KEY_INDEX, + compression="none", ) + cache_class.maybe_push_layout_json(mirror_url) except Exception as e: raise GenerateIndexError( f"Encountered problem pushing key index to {key_prefix}: {e}" @@ -1069,51 +1056,13 @@ def _do_create_tarball( return tar_gz_checksum.hexdigest(), tar_checksum.hexdigest() -class ExistsInBuildcache(NamedTuple): - signed: bool - unsigned: bool - tarball: bool - - -class BuildcacheFiles: - def __init__(self, spec: spack.spec.Spec, local: str, remote: str): - """ - Args: - spec: The spec whose tarball and specfile are being managed. - local: The local path to the buildcache. - remote: The remote URL to the buildcache. - """ - self.local = local - self.remote = remote - self.spec = spec - - def remote_specfile(self, signed: bool) -> str: - return url_util.join( - self.remote, - build_cache_relative_path(), - tarball_name(self.spec, ".spec.json.sig" if signed else ".spec.json"), - ) - - def remote_tarball(self) -> str: - return url_util.join( - self.remote, build_cache_relative_path(), tarball_path_name(self.spec, ".spack") - ) - - def local_specfile(self) -> str: - return os.path.join(self.local, f"{self.spec.dag_hash()}.spec.json") - - def local_tarball(self) -> str: - return os.path.join(self.local, f"{self.spec.dag_hash()}.tar.gz") - - -def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache: - """returns a tuple of bools (signed, unsigned, tarball) indicating whether specfiles/tarballs - exist in the buildcache""" - files = BuildcacheFiles(spec, tmpdir, out_url) - signed = web_util.url_exists(files.remote_specfile(signed=True)) - unsigned = web_util.url_exists(files.remote_specfile(signed=False)) - tarball = web_util.url_exists(files.remote_tarball()) - return ExistsInBuildcache(signed, unsigned, tarball) +def _exists_in_buildcache( + spec: spack.spec.Spec, out_url: str, allow_unsigned: bool = False +) -> URLBuildcacheEntry: + """creates and returns (after checking existence) a URLBuildcacheEntry""" + cache_type = get_url_buildcache_class(CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_entry = cache_type(out_url, spec, allow_unsigned=allow_unsigned) + return cache_entry def prefixes_to_relocate(spec): @@ -1124,42 +1073,12 @@ def prefixes_to_relocate(spec): def _url_upload_tarball_and_specfile( - spec: spack.spec.Spec, - tmpdir: str, - out_url: str, - exists: ExistsInBuildcache, - signing_key: Optional[str], + spec: spack.spec.Spec, tmpdir: str, cache_entry: URLBuildcacheEntry, 
signing_key: Optional[str] ): - files = BuildcacheFiles(spec, tmpdir, out_url) - tarball = files.local_tarball() + tarball = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz") checksum, _ = create_tarball(spec, tarball) - spec_dict = spec.to_dict(hash=ht.dag_hash) - spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION - spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum} - if exists.tarball: - web_util.remove_url(files.remote_tarball()) - if exists.signed: - web_util.remove_url(files.remote_specfile(signed=True)) - if exists.unsigned: - web_util.remove_url(files.remote_specfile(signed=False)) - web_util.push_to_url(tarball, files.remote_tarball(), keep_original=False) - - specfile = files.local_specfile() - with open(specfile, "w", encoding="utf-8") as f: - # Note: when using gpg clear sign, we need to avoid long lines (19995 chars). - # If lines are longer, they are truncated without error. Thanks GPG! - # So, here we still add newlines, but no indent, so save on file size and - # line length. - json.dump(spec_dict, f, indent=0, separators=(",", ":")) - - # sign the tarball and spec file with gpg - if signing_key: - specfile = sign_specfile(signing_key, specfile) - - web_util.push_to_url( - specfile, files.remote_specfile(signed=bool(signing_key)), keep_original=False - ) + cache_entry.push_binary_package(spec, tarball, "sha256", checksum, tmpdir, signing_key) class Uploader: @@ -1357,10 +1276,13 @@ def _url_push( errors: List[Tuple[spack.spec.Spec, BaseException]] = [] exists_futures = [ - executor.submit(_exists_in_buildcache, spec, tmpdir, out_url) for spec in specs + executor.submit( + _exists_in_buildcache, spec, out_url, allow_unsigned=False if signing_key else True + ) + for spec in specs ] - exists = { + cache_entries = { spec.dag_hash(): exists_future.result() for spec, exists_future in zip(specs, exists_futures) } @@ -1369,8 +1291,9 @@ def _url_push( specs_to_upload = [] for spec in specs: - signed, unsigned, tarball = exists[spec.dag_hash()] - if (signed or unsigned) and tarball: + if cache_entries[spec.dag_hash()].exists( + [BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL] + ): skipped.append(spec) else: specs_to_upload.append(spec) @@ -1390,8 +1313,7 @@ def _url_push( _url_upload_tarball_and_specfile, spec, tmpdir, - out_url, - exists[spec.dag_hash()], + cache_entries[spec.dag_hash()], signing_key, ) for spec in specs_to_upload @@ -1414,6 +1336,10 @@ def _url_push( if not uploaded_any: return skipped, errors + # If the layout.json doesn't yet exist on this mirror, push it + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_class.maybe_push_layout_json(out_url) + if signing_key: keys_tmpdir = os.path.join(tmpdir, "keys") os.mkdir(keys_tmpdir) @@ -1730,6 +1656,9 @@ def extra_config(spec: spack.spec.Spec): "hash_algorithm": "sha256", "hash": checksums[spec.dag_hash()].compressed_digest.digest, } + spec_dict["archive_size"] = checksums[spec.dag_hash()].size + spec_dict["archive_timestamp"] = datetime.datetime.now().astimezone().isoformat() + spec_dict["archive_compression"] = "gzip" return spec_dict # Upload manifests @@ -1842,26 +1771,6 @@ def _oci_update_index( upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest) -def try_verify(specfile_path): - """Utility function to attempt to verify a local file. Assumes the - file is a clearsigned signature file. - - Args: - specfile_path (str): Path to file to be verified. 
-
-    Returns:
-        ``True`` if the signature could be verified, ``False`` otherwise.
-    """
-    suppress = config.get("config:suppress_gpg_warnings", False)
-
-    try:
-        spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
-    except Exception:
-        return False
-
-    return True
-
-
 def try_fetch(url_to_fetch):
     """Utility function to try and fetch a file from a url, stage it
     locally, and return the path to the staged file.
@@ -1884,55 +1793,13 @@ def try_fetch(url_to_fetch):
     return stage
 
 
-def _delete_staged_downloads(download_result):
-    """Clean up stages used to download tarball and specfile"""
-    download_result["tarball_stage"].destroy()
-    download_result["specfile_stage"].destroy()
-
-
-def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, int]:
-    """Read and validate a spec file, returning the spec dict with its layout version, or raising
-    InvalidMetadataFile if invalid."""
-    try:
-        with open(path, "rb") as f:
-            binary_content = f.read()
-    except OSError:
-        raise InvalidMetadataFile(f"No such file: {path}")
-
-    # In the future we may support transparently decompressing compressed spec files.
-    if binary_content[:2] == b"\x1f\x8b":
-        raise InvalidMetadataFile("Compressed spec files are not supported")
-
-    try:
-        as_string = binary_content.decode("utf-8")
-        if path.endswith(".json.sig"):
-            spec_dict = spack.spec.Spec.extract_json_from_clearsig(as_string)
-        else:
-            spec_dict = json.loads(as_string)
-    except Exception as e:
-        raise InvalidMetadataFile(f"Could not parse {path} due to: {e}") from e
-
-    # Ensure this version is not too new.
-    try:
-        layout_version = int(spec_dict.get("buildcache_layout_version", 0))
-    except ValueError as e:
-        raise InvalidMetadataFile("Could not parse layout version") from e
-
-    if layout_version > max_supported_layout:
-        raise InvalidMetadataFile(
-            f"Layout version {layout_version} is too new for this version of Spack"
-        )
-
-    return spec_dict, layout_version
-
-
-def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=None):
-    """
-    Download binary tarball for given package into stage area, returning
-    path to downloaded tarball if successful, None otherwise.
+def download_tarball(
+    spec: spack.spec.Spec, unsigned: Optional[bool] = False, mirrors_for_spec=None
+) -> Optional[spack.stage.Stage]:
+    """Download binary tarball for given package
 
     Args:
-        spec (spack.spec.Spec): Concrete spec
+        spec: a concrete spec
         unsigned: if ``True`` or ``False`` override the mirror signature
             verification defaults
         mirrors_for_spec (list): Optional list of concrete specs and mirrors
            obtained by calling binary_distribution.get_mirrors_for_spec().
@@ -1940,19 +1807,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
            configured mirrors.
 
     Returns:
-        ``None`` if the tarball could not be downloaded (maybe also verified,
-        depending on whether new-style signed binary packages were found).
-        Otherwise, return an object indicating the path to the downloaded
-        tarball, the path to the downloaded specfile (in the case of new-style
-        buildcache), and whether or not the tarball is already verified.
-
-    .. code-block:: JSON
-
-        {
-            "tarball_path": "path-to-locally-saved-tarfile",
-            "specfile_path": "none-or-path-to-locally-saved-specfile",
-            "signature_verified": "true-if-binary-pkg-was-already-verified"
-        }
+        ``None`` if the tarball could not be downloaded, its signature could not
+        be verified (when required), or its checksum could not be validated.
+        Otherwise, return the stage containing the downloaded tarball.
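+
+    A minimal calling sketch (``spec`` is assumed concrete and error handling
+    is elided; see ``install_root_node`` below for the real call site):
+
+    .. code-block:: python
+
+        stage = download_tarball(spec.build_spec, unsigned=False)
+        if stage is None:
+            raise RuntimeError("binary package download failed")
+        extract_tarball(spec, stage, force=False)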
""" configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = ( spack.mirrors.mirror.MirrorCollection(binary=True).values() @@ -1960,9 +1817,6 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No if not configured_mirrors: tty.die("Please add a spack mirror to allow download of pre-compiled packages.") - tarball = tarball_path_name(spec, ".spack") - specfile_prefix = tarball_name(spec, ".spec") - # Note on try_first and try_next: # mirrors_for_spec mostly likely came from spack caching remote # mirror indices locally and adding their specs to a local data @@ -1972,113 +1826,67 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No # look in all configured mirrors if needed, as maybe the spec # we need was in an un-indexed mirror. No need to check any # mirror for the spec twice though. - try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else [] - try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first] - mirror_urls = try_first + try_next + try_first = [i.url_and_version for i in mirrors_for_spec] if mirrors_for_spec else [] + + try_next = [] + for try_layout in SUPPORTED_LAYOUT_VERSIONS: + try_next.extend([MirrorURLAndVersion(i.fetch_url, try_layout) for i in configured_mirrors]) + urls_and_versions = try_first + [uv for uv in try_next if uv not in try_first] # TODO: turn `mirrors_for_spec` into a list of Mirror instances, instead of doing that here. - def fetch_url_to_mirror(url): + def fetch_url_to_mirror(url_and_version): + url = url_and_version.url + layout_version = url_and_version.version for mirror in configured_mirrors: if mirror.fetch_url == url: - return mirror - return spack.mirrors.mirror.Mirror(url) + return mirror, layout_version + return spack.mirrors.mirror.Mirror(url), layout_version - mirrors = [fetch_url_to_mirror(url) for url in mirror_urls] + mirrors = [fetch_url_to_mirror(url_and_version) for url_and_version in urls_and_versions] - tried_to_verify_sigs = [] + for mirror, layout_version in mirrors: + # Override mirror's default if + currently_unsigned = unsigned if unsigned is not None else not mirror.signed - # Assumes we care more about finding a spec file by preferred ext - # than by mirrory priority. This can be made less complicated as - # we remove support for deprecated spec formats and buildcache layouts. - for try_signed in (True, False): - for mirror in mirrors: - # Override mirror's default if - currently_unsigned = unsigned if unsigned is not None else not mirror.signed + # If it's an OCI index, do things differently, since we cannot compose URLs. + fetch_url = mirror.fetch_url - # If it's an OCI index, do things differently, since we cannot compose URLs. - fetch_url = mirror.fetch_url + # TODO: refactor this to some "nice" place. + if fetch_url.startswith("oci://"): + ref = spack.oci.image.ImageReference.from_string(fetch_url[len("oci://") :]).with_tag( + _oci_default_tag(spec) + ) - # TODO: refactor this to some "nice" place. 
- if fetch_url.startswith("oci://"): - ref = spack.oci.image.ImageReference.from_string( - fetch_url[len("oci://") :] - ).with_tag(_oci_default_tag(spec)) - - # Fetch the manifest - try: - response = spack.oci.opener.urlopen( - urllib.request.Request( - url=ref.manifest_url(), - headers={"Accept": ", ".join(spack.oci.oci.manifest_content_type)}, - ) + # Fetch the manifest + try: + response = spack.oci.opener.urlopen( + urllib.request.Request( + url=ref.manifest_url(), + headers={"Accept": ", ".join(spack.oci.oci.manifest_content_type)}, ) - except Exception: - continue - - # Download the config = spec.json and the relevant tarball - try: - manifest = json.load(response) - spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"]) - tarball_digest = spack.oci.image.Digest.from_string( - manifest["layers"][-1]["digest"] - ) - except Exception: - continue - - with spack.oci.oci.make_stage( - ref.blob_url(spec_digest), spec_digest, keep=True - ) as local_specfile_stage: - try: - local_specfile_stage.fetch() - local_specfile_stage.check() - try: - _get_valid_spec_file( - local_specfile_stage.save_filename, - CURRENT_BUILD_CACHE_LAYOUT_VERSION, - ) - except InvalidMetadataFile as e: - tty.warn( - f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} " - f"from {fetch_url} due to invalid metadata file: {e}" - ) - local_specfile_stage.destroy() - continue - except Exception: - continue - local_specfile_stage.cache_local() - - with spack.oci.oci.make_stage( - ref.blob_url(tarball_digest), tarball_digest, keep=True - ) as tarball_stage: - try: - tarball_stage.fetch() - tarball_stage.check() - except Exception: - continue - tarball_stage.cache_local() - - return { - "tarball_stage": tarball_stage, - "specfile_stage": local_specfile_stage, - "signature_verified": False, - "signature_required": not currently_unsigned, - } - - else: - ext = "json.sig" if try_signed else "json" - specfile_path = url_util.join( - fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_prefix ) - specfile_url = f"{specfile_path}.{ext}" - spackfile_url = url_util.join(fetch_url, BUILD_CACHE_RELATIVE_PATH, tarball) - local_specfile_stage = try_fetch(specfile_url) - if local_specfile_stage: - local_specfile_path = local_specfile_stage.save_filename - signature_verified = False + except Exception: + continue + # Download the config = spec.json and the relevant tarball + try: + manifest = json.load(response) + spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"]) + tarball_digest = spack.oci.image.Digest.from_string( + manifest["layers"][-1]["digest"] + ) + except Exception: + continue + + with spack.oci.oci.make_stage( + ref.blob_url(spec_digest), spec_digest, keep=True + ) as local_specfile_stage: + try: + local_specfile_stage.fetch() + local_specfile_stage.check() try: - _get_valid_spec_file( - local_specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION + get_valid_spec_file( + local_specfile_stage.save_filename, CURRENT_BUILD_CACHE_LAYOUT_VERSION ) except InvalidMetadataFile as e: tty.warn( @@ -2087,59 +1895,48 @@ def fetch_url_to_mirror(url): ) local_specfile_stage.destroy() continue + except Exception: + continue + local_specfile_stage.cache_local() - if try_signed and not currently_unsigned: - # If we found a signed specfile at the root, try to verify - # the signature immediately. We will not download the - # tarball if we could not verify the signature. 
-                    tried_to_verify_sigs.append(specfile_url)
-                    signature_verified = try_verify(local_specfile_path)
-                    if not signature_verified:
-                        tty.warn(f"Failed to verify: {specfile_url}")
+        local_specfile_stage.destroy()
 
-                if currently_unsigned or signature_verified or not try_signed:
-                    # We will download the tarball in one of three cases:
-                    # 1. user asked for --no-check-signature
-                    # 2. user didn't ask for --no-check-signature, but we
-                    #    found a spec.json.sig and verified the signature already
-                    # 3. neither of the first two cases are true, but this file
-                    #    is *not* a signed json (not a spec.json.sig file). That
-                    #    means we already looked at all the mirrors and either didn't
-                    #    find any .sig files or couldn't verify any of them. But it
-                    #    is still possible to find an old style binary package where
-                    #    the signature is a detached .asc file in the outer archive
-                    #    of the tarball, and in that case, the only way to know is to
-                    #    download the tarball.  This is a deprecated use case, so if
-                    #    something goes wrong during the extraction process (can't
-                    #    verify signature, checksum doesn't match) we will fail at
-                    #    that point instead of trying to download more tarballs from
-                    #    the remaining mirrors, looking for one we can use.
-                    tarball_stage = try_fetch(spackfile_url)
-                    if tarball_stage:
-                        return {
-                            "tarball_stage": tarball_stage,
-                            "specfile_stage": local_specfile_stage,
-                            "signature_verified": signature_verified,
-                            "signature_required": not currently_unsigned,
-                        }
+        with spack.oci.oci.make_stage(
+            ref.blob_url(tarball_digest), tarball_digest, keep=True
+        ) as tarball_stage:
+            try:
+                tarball_stage.fetch()
+                tarball_stage.check()
+            except Exception:
+                continue
+            tarball_stage.cache_local()
 
-                local_specfile_stage.destroy()
+            return tarball_stage
+        else:
+            cache_type = get_url_buildcache_class(layout_version=layout_version)
+            cache_entry = cache_type(fetch_url, spec, allow_unsigned=currently_unsigned)
+
+            try:
+                cache_entry.fetch_archive()
+            except Exception as e:
+                tty.debug(
+                    f"Encountered error attempting to fetch archive for "
+                    f"{spec.name}/{spec.dag_hash()[:7]} from {fetch_url} "
+                    f"(v{layout_version}) due to {e}"
+                )
+                cache_entry.destroy()
+                continue
+
+            if layout_version == 2:
+                warn_v2_layout(fetch_url, "Installing a spec")
+
+            return cache_entry.get_archive_stage()
 
     # Falling through the nested loops means we exhaustively searched
     # for all known kinds of spec files on all mirrors and did not find
-    # an acceptable one for which we could download a tarball.
-
-    if tried_to_verify_sigs:
-        raise NoVerifyException(
-            (
-                "Spack found new style signed binary packages, "
-                "but was unable to verify any of them. Please "
-                "obtain and trust the correct public key. If "
-                "these are public spack binaries, please see the "
-                "spack docs for locations where keys can be found."
-            )
-        )
-
+    # an acceptable one for which we could download a tarball and (if
+    # needed) verify a signature. So at this point, we will proceed to
+    # install from source.
return None @@ -2297,54 +2094,6 @@ def relocate_package(spec: spack.spec.Spec) -> None: os.unlink(install_manifest) -def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum): - stagepath = os.path.dirname(filename) - spackfile_name = tarball_name(spec, ".spack") - spackfile_path = os.path.join(stagepath, spackfile_name) - tarfile_name = tarball_name(spec, ".tar.gz") - tarfile_path = os.path.join(extract_to, tarfile_name) - json_name = tarball_name(spec, ".spec.json") - json_path = os.path.join(extract_to, json_name) - with closing(tarfile.open(spackfile_path, "r")) as tar: - tar.extractall(extract_to) - # some buildcache tarfiles use bzip2 compression - if not os.path.exists(tarfile_path): - tarfile_name = tarball_name(spec, ".tar.bz2") - tarfile_path = os.path.join(extract_to, tarfile_name) - - if os.path.exists(json_path): - specfile_path = json_path - else: - raise ValueError("Cannot find spec file for {0}.".format(extract_to)) - - if signature_required: - if os.path.exists("%s.asc" % specfile_path): - suppress = config.get("config:suppress_gpg_warnings", False) - try: - spack.util.gpg.verify("%s.asc" % specfile_path, specfile_path, suppress) - except Exception: - raise NoVerifyException( - "Spack was unable to verify package " - "signature, please obtain and trust the " - "correct public key." - ) - else: - raise UnsignedPackageException( - "To install unsigned packages, use the --no-check-signature option." - ) - - # compute the sha256 checksum of the tarball - local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path) - expected = remote_checksum["hash"] - - # if the checksums don't match don't install - if local_checksum != expected: - size, contents = fsys.filesummary(tarfile_path) - raise NoChecksumException(tarfile_path, size, contents, "sha256", expected, local_checksum) - - return tarfile_path - - def _tar_strip_component(tar: tarfile.TarFile, prefix: str): """Yield all members of tarfile that start with given prefix, and strip that prefix (including symlinks)""" @@ -2377,11 +2126,12 @@ def extract_buildcache_tarball(tarfile_path: str, destination: str) -> None: ) -def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER): +def extract_tarball(spec, tarball_stage: spack.stage.Stage, force=False, timer=timer.NULL_TIMER): """ extract binary tarball for given package into install area """ timer.start("extract") + if os.path.exists(spec.prefix): if force: shutil.rmtree(spec.prefix) @@ -2396,78 +2146,26 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER): default_perms="parents", ) - specfile_path = download_result["specfile_stage"].save_filename - spec_dict, layout_version = _get_valid_spec_file( - specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION - ) - bchecksum = spec_dict["binary_cache_checksum"] + tarfile_path = tarball_stage.save_filename - filename = download_result["tarball_stage"].save_filename - signature_verified: bool = download_result["signature_verified"] - signature_required: bool = download_result["signature_required"] - tmpdir = None - - if layout_version == 0: - # Handle the older buildcache layout where the .spack file - # contains a spec json, maybe an .asc file (signature), - # and another tarball containing the actual install tree. 
- tmpdir = tempfile.mkdtemp() - try: - tarfile_path = _extract_inner_tarball( - spec, filename, tmpdir, signature_required, bchecksum - ) - except Exception as e: - _delete_staged_downloads(download_result) - shutil.rmtree(tmpdir) - raise e - elif 1 <= layout_version <= 2: - # Newer buildcache layout: the .spack file contains just - # in the install tree, the signature, if it exists, is - # wrapped around the spec.json at the root. If sig verify - # was required, it was already done before downloading - # the tarball. - tarfile_path = filename - - if signature_required and not signature_verified: - raise UnsignedPackageException( - "To install unsigned packages, use the --no-check-signature option, " - "or configure the mirror with signed: false." - ) - - # compute the sha256 checksum of the tarball - local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path) - expected = bchecksum["hash"] - - # if the checksums don't match don't install - if local_checksum != expected: - size, contents = fsys.filesummary(tarfile_path) - _delete_staged_downloads(download_result) - raise NoChecksumException( - tarfile_path, size, contents, "sha256", expected, local_checksum - ) try: extract_buildcache_tarball(tarfile_path, destination=spec.prefix) except Exception: shutil.rmtree(spec.prefix, ignore_errors=True) - _delete_staged_downloads(download_result) + tarball_stage.destroy() raise - os.remove(tarfile_path) - os.remove(specfile_path) timer.stop("extract") - timer.start("relocate") + try: relocate_package(spec) except Exception as e: shutil.rmtree(spec.prefix, ignore_errors=True) raise e finally: - if tmpdir: - shutil.rmtree(tmpdir, ignore_errors=True) - if os.path.exists(filename): - os.remove(filename) - _delete_staged_downloads(download_result) + tarball_stage.destroy() + timer.stop("relocate") @@ -2543,28 +2241,15 @@ def install_root_node( warnings.warn("Package for spec {0} already installed.".format(spec.format())) return - download_result = download_tarball(spec.build_spec, unsigned) - if not download_result: + tarball_stage = download_tarball(spec.build_spec, unsigned) + if not tarball_stage: msg = 'download of binary cache file for spec "{0}" failed' raise RuntimeError(msg.format(spec.build_spec.format())) - if sha256: - checker = spack.util.crypto.Checker(sha256) - msg = 'cannot verify checksum for "{0}" [expected={1}]' - tarball_path = download_result["tarball_stage"].save_filename - msg = msg.format(tarball_path, sha256) - if not checker.check(tarball_path): - size, contents = fsys.filesummary(tarball_path) - _delete_staged_downloads(download_result) - raise NoChecksumException( - tarball_path, size, contents, checker.hash_name, sha256, checker.sum - ) - tty.debug("Verified SHA256 checksum of the build cache") - # don't print long padded paths while extracting/relocating binaries with spack.util.path.filter_padding(): tty.msg('Installing "{0}" from a buildcache'.format(spec.format())) - extract_tarball(spec, download_result, force) + extract_tarball(spec, tarball_stage, force) spec.package.windows_establish_runtime_linkage() spack.hooks.post_install(spec, False) spack.store.STORE.db.add(spec, allow_missing=allow_missing) @@ -2587,51 +2272,30 @@ def try_direct_fetch(spec, mirrors=None): """ Try to find the spec directly on the configured mirrors """ - specfile_name = tarball_name(spec, ".spec.json") - signed_specfile_name = tarball_name(spec, ".spec.json.sig") - specfile_is_signed = False - found_specs = [] - + found_specs: List[MirrorForSpec] = [] binary_mirrors = 
spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values() - for mirror in binary_mirrors: - buildcache_fetch_url_json = url_util.join( - mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_name - ) - buildcache_fetch_url_signed_json = url_util.join( - mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, signed_specfile_name - ) - try: - _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json) - specfile_contents = codecs.getreader("utf-8")(fs).read() - specfile_is_signed = True - except (web_util.SpackWebError, OSError) as e1: + for layout_version in SUPPORTED_LAYOUT_VERSIONS: + for mirror in binary_mirrors: + # layout_version could eventually come from the mirror config + cache_class = get_url_buildcache_class(layout_version=layout_version) + cache_entry = cache_class(mirror.fetch_url, spec) + try: - _, _, fs = web_util.read_from_url(buildcache_fetch_url_json) - specfile_contents = codecs.getreader("utf-8")(fs).read() - specfile_is_signed = False - except (web_util.SpackWebError, OSError) as e2: - tty.debug( - f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}", - e1, - level=2, - ) - tty.debug( - f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2 - ) + spec_dict = cache_entry.fetch_metadata() + except BuildcacheEntryError: continue + finally: + cache_entry.destroy() - # read the spec from the build cache file. All specs in build caches - # are concrete (as they are built) so we need to mark this spec - # concrete on read-in. - if specfile_is_signed: - specfile_json = spack.spec.Spec.extract_json_from_clearsig(specfile_contents) - fetched_spec = spack.spec.Spec.from_dict(specfile_json) - else: - fetched_spec = spack.spec.Spec.from_json(specfile_contents) - fetched_spec._mark_concrete() + # All specs in build caches are concrete (as they are built) so we need + # to mark this spec concrete on read-in. + fetched_spec = spack.spec.Spec.from_dict(spec_dict) + fetched_spec._mark_concrete() - found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec}) + found_specs.append( + MirrorForSpec(MirrorURLAndVersion(mirror.fetch_url, layout_version), fetched_spec) + ) return found_specs @@ -2692,7 +2356,12 @@ def clear_spec_cache(): BINARY_INDEX.clear() -def get_keys(install=False, trust=False, force=False, mirrors=None): +def get_keys( + install: bool = False, + trust: bool = False, + force: bool = False, + mirrors: Optional[Dict[Any, spack.mirrors.mirror.Mirror]] = None, +): """Get pgp public keys available on mirror with suffix .pub""" mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True) @@ -2700,53 +2369,117 @@ def get_keys(install=False, trust=False, force=False, mirrors=None): tty.die("Please add a spack mirror to allow " + "download of build caches.") for mirror in mirror_collection.values(): - fetch_url = mirror.fetch_url - # TODO: oci:// does not support signing. - if fetch_url.startswith("oci://"): - continue - keys_url = url_util.join( - fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH - ) - keys_index = url_util.join(keys_url, "index.json") + for layout_version in SUPPORTED_LAYOUT_VERSIONS: + fetch_url = mirror.fetch_url + # TODO: oci:// does not support signing. 
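+            # (For reference: the v2 branch below expects keys as individual
+            # <fingerprint>.pub files plus an index.json under the legacy keys
+            # prefix, while the current layout resolves each key through a
+            # <fingerprint>.key.manifest.json manifest and a keys.manifest.json
+            # index; see _get_keys and _get_keys_v2 below.)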
+            if fetch_url.startswith("oci://"):
+                continue
 
-        tty.debug("Finding public keys in {0}".format(url_util.format(fetch_url)))
+            if layout_version == 2:
+                _get_keys_v2(fetch_url, install, trust, force)
+            else:
+                _get_keys(fetch_url, layout_version, install, trust, force)
 
+
+def _get_keys(
+    mirror_url: str,
+    layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION,
+    install: bool = False,
+    trust: bool = False,
+    force: bool = False,
+) -> None:
+    cache_class = get_url_buildcache_class(layout_version=layout_version)
+
+    tty.debug("Finding public keys in {0}".format(url_util.format(mirror_url)))
+
+    keys_prefix = url_util.join(
+        mirror_url, *cache_class.get_relative_path_components(BuildcacheComponent.KEY)
+    )
+    key_index_manifest_url = url_util.join(keys_prefix, "keys.manifest.json")
+    index_entry = cache_class(mirror_url, allow_unsigned=True)
+
+    try:
+        index_manifest = index_entry.read_manifest(manifest_url=key_index_manifest_url)
+        index_blob_path = index_entry.fetch_blob(index_manifest.data[0])
+    except BuildcacheEntryError as e:
+        tty.debug(f"Failed to fetch key index due to: {e}")
+        index_entry.destroy()
+        return
+
+    with open(index_blob_path, encoding="utf-8") as fd:
+        json_index = json.load(fd)
+    index_entry.destroy()
+
+    for fingerprint, _ in json_index["keys"].items():
+        key_manifest_url = url_util.join(keys_prefix, f"{fingerprint}.key.manifest.json")
+        key_entry = cache_class(mirror_url, allow_unsigned=True)
         try:
-            _, _, json_file = web_util.read_from_url(keys_index)
-            json_index = sjson.load(json_file)
-        except (web_util.SpackWebError, OSError, ValueError) as url_err:
-            # TODO: avoid repeated request
-            if web_util.url_exists(keys_index):
-                tty.error(
-                    f"Unable to find public keys in {url_util.format(fetch_url)},"
-                    f" caught exception attempting to read from {url_util.format(keys_index)}."
-                )
-                tty.debug(url_err)
-
+            key_manifest = key_entry.read_manifest(manifest_url=key_manifest_url)
+            key_blob_path = key_entry.fetch_blob(key_manifest.data[0])
+        except BuildcacheEntryError as e:
+            tty.debug(f"Failed to fetch key {fingerprint} due to: {e}")
+            key_entry.destroy()
            continue
 
-        for fingerprint, key_attributes in json_index["keys"].items():
-            link = os.path.join(keys_url, fingerprint + ".pub")
+        tty.debug("Found key {0}".format(fingerprint))
+        if install:
+            if trust:
+                spack.util.gpg.trust(key_blob_path)
+                tty.debug(f"Added {fingerprint} to trusted keys.")
+            else:
+                tty.debug(
+                    "Will not add this key to trusted keys. "
+                    "Use -t to install all downloaded keys"
+                )
 
-            with Stage(link, name="build_cache", keep=True) as stage:
-                if os.path.exists(stage.save_filename) and force:
-                    os.remove(stage.save_filename)
-                if not os.path.exists(stage.save_filename):
-                    try:
-                        stage.fetch()
-                    except spack.error.FetchError:
-                        continue
+        key_entry.destroy()
 
-                tty.debug("Found key {0}".format(fingerprint))
-                if install:
-                    if trust:
-                        spack.util.gpg.trust(stage.save_filename)
-                        tty.debug("Added this key to trusted keys.")
-                    else:
-                        tty.debug(
-                            "Will not add this key to trusted keys."
-                            "Use -t to install all downloaded keys"
-                        )
+
+
+def _get_keys_v2(mirror_url, install=False, trust=False, force=False):
+    cache_class = get_url_buildcache_class(layout_version=2)
+
+    keys_url = url_util.join(
+        mirror_url, *cache_class.get_relative_path_components(BuildcacheComponent.KEY)
+    )
+    keys_index = url_util.join(keys_url, "index.json")
+
+    tty.debug("Finding public keys in {0}".format(url_util.format(mirror_url)))
+
+    try:
+        _, _, json_file = web_util.read_from_url(keys_index)
+        json_index = sjson.load(json_file)
+    except (web_util.SpackWebError, OSError, ValueError) as url_err:
+        # TODO: avoid repeated request
+        if web_util.url_exists(keys_index):
+            tty.error(
+                f"Unable to find public keys in {url_util.format(mirror_url)},"
+                f" caught exception attempting to read from {url_util.format(keys_index)}."
+            )
+            tty.error(url_err)
+        return
+
+    for fingerprint, key_attributes in json_index["keys"].items():
+        link = os.path.join(keys_url, fingerprint + ".pub")
+
+        with Stage(link, name="build_cache", keep=True) as stage:
+            if os.path.exists(stage.save_filename) and force:
+                os.remove(stage.save_filename)
+            if not os.path.exists(stage.save_filename):
+                try:
+                    stage.fetch()
+                except spack.error.FetchError:
+                    continue
+
+            tty.debug("Found key {0}".format(fingerprint))
+            if install:
+                if trust:
+                    spack.util.gpg.trust(stage.save_filename)
+                    tty.debug("Added this key to trusted keys.")
+                else:
+                    tty.debug(
+                        "Will not add this key to trusted keys. "
+                        "Use -t to install all downloaded keys"
+                    )
 
 
 def _url_push_keys(
@@ -2762,19 +2495,29 @@ def _url_push_keys(
     for key, file in zip(keys, files):
         spack.util.gpg.export_keys(file, [key])
 
+    cache_class = get_url_buildcache_class()
+
     for mirror in mirrors:
         push_url = mirror if isinstance(mirror, str) else mirror.push_url
-        keys_url = url_util.join(
-            push_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
-        )
 
         tty.debug(f"Pushing public keys to {url_util.format(push_url)}")
 
+        pushed_a_key = False
         for key, file in zip(keys, files):
-            web_util.push_to_url(file, url_util.join(keys_url, os.path.basename(file)))
+            cache_class.push_local_file_as_blob(
+                local_file_path=file,
+                mirror_url=push_url,
+                manifest_name=f"{key}.key",
+                component_type=BuildcacheComponent.KEY,
+                compression="none",
+            )
+            pushed_a_key = True
 
         if update_index:
-            generate_key_index(keys_url, tmpdir=tmpdir)
+            generate_key_index(push_url, tmpdir=tmpdir)
+
+        if pushed_a_key or update_index:
+            cache_class.maybe_push_layout_json(push_url)
 
 
 def needs_rebuild(spec, mirror_url):
@@ -2791,14 +2534,10 @@ def needs_rebuild(spec, mirror_url):
     # Try to retrieve the specfile directly, based on the known
     # format of the name, in order to determine if the package
     # needs to be rebuilt.
-    cache_prefix = build_cache_prefix(mirror_url)
-    specfile_name = tarball_name(spec, ".spec.json")
-    specfile_path = os.path.join(cache_prefix, specfile_name)
-
-    # Only check for the presence of the json version of the spec. If the
-    # mirror only has the json version, or doesn't have the spec at all, we
-    # need to rebuild.
- return not web_util.url_exists(specfile_path) + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_entry = cache_class(mirror_url, spec, allow_unsigned=True) + exists = cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + return not exists def check_specs_against_mirrors(mirrors, specs, output_file=None): @@ -2840,48 +2579,12 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None): return 1 if rebuilds else 0 -def _download_buildcache_entry(mirror_root, descriptions): - for description in descriptions: - path = description["path"] - mkdirp(path) - fail_if_missing = description["required"] - for url in description["url"]: - description_url = os.path.join(mirror_root, url) - stage = Stage(description_url, name="build_cache", path=path, keep=True) - try: - stage.fetch() - break - except spack.error.FetchError as e: - tty.debug(e) - else: - if fail_if_missing: - tty.error("Failed to download required url {0}".format(description_url)) - return False - return True - - -def download_buildcache_entry(file_descriptions, mirror_url=None): - if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True): - tty.die( - "Please provide or add a spack mirror to allow " + "download of buildcache entries." - ) - - if mirror_url: - mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH) - return _download_buildcache_entry(mirror_root, file_descriptions) - - for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values(): - mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH) - - if _download_buildcache_entry(mirror_root, file_descriptions): - return True - else: - continue - - return False - - -def download_single_spec(concrete_spec, destination, mirror_url=None): +def download_single_spec( + concrete_spec, + destination, + mirror_url=None, + layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION, +): """Download the buildcache files for a single concrete spec. Args: @@ -2889,24 +2592,39 @@ def download_single_spec(concrete_spec, destination, mirror_url=None): destination (str): path where to put the downloaded buildcache mirror_url (str): url of the mirror from which to download """ - tarfile_name = tarball_name(concrete_spec, ".spack") - tarball_dir_name = tarball_directory_name(concrete_spec) - tarball_path_name = os.path.join(tarball_dir_name, tarfile_name) - local_tarball_path = os.path.join(destination, tarball_dir_name) + if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True): + tty.die( + "Please provide or add a spack mirror to allow " + "download of buildcache entries." 
+ ) - files_to_fetch = [ - {"url": [tarball_path_name], "path": local_tarball_path, "required": True}, - { - "url": [ - tarball_name(concrete_spec, ".spec.json.sig"), - tarball_name(concrete_spec, ".spec.json"), - ], - "path": destination, - "required": True, - }, - ] + urls = ( + [mirror_url] + if mirror_url + else [ + mirror.fetch_url + for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values() + ] + ) - return download_buildcache_entry(files_to_fetch, mirror_url) + mkdirp(destination) + + for url in urls: + cache_class = get_url_buildcache_class(layout_version=layout_version) + cache_entry = cache_class(url, concrete_spec, allow_unsigned=True) + + try: + cache_entry.fetch_metadata() + cache_entry.fetch_archive() + except BuildcacheEntryError as e: + tty.warn(f"Error downloading {concrete_spec.name}/{concrete_spec.dag_hash()[:7]}: {e}") + cache_entry.destroy() + continue + + shutil.move(cache_entry.get_local_spec_path(), destination) + shutil.move(cache_entry.get_local_archive_path(), destination) + return True + + return False class BinaryCacheQuery: @@ -2951,7 +2669,53 @@ class BuildcacheIndexError(spack.error.SpackError): FetchIndexResult = collections.namedtuple("FetchIndexResult", "etag hash data fresh") -class DefaultIndexFetcher: +class IndexFetcher: + def conditional_fetch(self) -> FetchIndexResult: + raise NotImplementedError(f"{self.__class__.__name__} is abstract") + + def get_index_manifest(self, manifest_response) -> BlobRecord: + """Read the response of the manifest request and return a BlobRecord""" + cache_class = get_url_buildcache_class(CURRENT_BUILD_CACHE_LAYOUT_VERSION) + try: + result = codecs.getreader("utf-8")(manifest_response).read() + except (ValueError, OSError) as e: + raise FetchIndexError(f"Remote index {manifest_response.url} is invalid", e) from e + + manifest = BuildcacheManifest.from_dict( + # Currently we do not sign buildcache index, but we could + cache_class.verify_and_extract_manifest(result, verify=False) + ) + blob_record = manifest.get_blob_records( + cache_class.component_to_media_type(BuildcacheComponent.INDEX) + )[0] + return blob_record + + def fetch_index_blob( + self, cache_entry: URLBuildcacheEntry, blob_record: BlobRecord + ) -> Tuple[str, str]: + """Fetch the index blob indicated by the BlobRecord, and return the + (checksum, contents) of the blob""" + try: + staged_blob_path = cache_entry.fetch_blob(blob_record) + except BuildcacheEntryError as e: + cache_entry.destroy() + raise FetchIndexError( + f"Could not fetch index blob from {cache_entry.mirror_url}" + ) from e + + with open(staged_blob_path, encoding="utf-8") as fd: + blob_result = fd.read() + + computed_hash = compute_hash(blob_result) + + if computed_hash != blob_record.checksum: + cache_entry.destroy() + raise FetchIndexError(f"Remote index at {cache_entry.mirror_url} is invalid") + + return (computed_hash, blob_result) + + +class DefaultIndexFetcherV2(IndexFetcher): """Fetcher for index.json, using separate index.json.hash as cache invalidation strategy""" def __init__(self, url, local_hash, urlopen=web_util.urlopen): @@ -2962,7 +2726,7 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen): def get_remote_hash(self): # Failure to fetch index.json.hash is not fatal - url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE) + url_index_hash = url_util.join(self.url, "build_cache", "index.json.hash") try: response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers)) remote_hash = response.read(64) @@ 
-2983,7 +2747,7 @@ def conditional_fetch(self) -> FetchIndexResult: return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) # Otherwise, download index.json - url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE) + url_index = url_util.join(self.url, "build_cache", spack_db.INDEX_JSON_FILE) try: response = self.urlopen(urllib.request.Request(url_index, headers=self.headers)) @@ -3014,10 +2778,12 @@ def conditional_fetch(self) -> FetchIndexResult: response.headers.get("Etag", None) or response.headers.get("etag", None) ) + warn_v2_layout(self.url, "Fetching an index") + return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False) -class EtagIndexFetcher: +class EtagIndexFetcherV2(IndexFetcher): """Fetcher for index.json, using ETags headers as cache invalidation strategy""" def __init__(self, url, etag, urlopen=web_util.urlopen): @@ -3027,7 +2793,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen): def conditional_fetch(self) -> FetchIndexResult: # Just do a conditional fetch immediately - url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE) + url = url_util.join(self.url, "build_cache", spack_db.INDEX_JSON_FILE) headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'} try: @@ -3045,6 +2811,8 @@ def conditional_fetch(self) -> FetchIndexResult: except (ValueError, OSError) as e: raise FetchIndexError(f"Remote index {url} is invalid", e) from e + warn_v2_layout(self.url, "Fetching an index") + headers = response.headers etag_header_value = headers.get("Etag", None) or headers.get("etag", None) return FetchIndexResult( @@ -3055,10 +2823,12 @@ def conditional_fetch(self) -> FetchIndexResult: ) -class OCIIndexFetcher: - def __init__(self, url: str, local_hash, urlopen=None) -> None: +class OCIIndexFetcher(IndexFetcher): + def __init__(self, url_and_version: MirrorURLAndVersion, local_hash, urlopen=None) -> None: self.local_hash = local_hash + url = url_and_version.url + # Remove oci:// prefix assert url.startswith("oci://") self.ref = spack.oci.image.ImageReference.from_string(url[6:]) @@ -3111,6 +2881,130 @@ def conditional_fetch(self) -> FetchIndexResult: return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False) +class DefaultIndexFetcher(IndexFetcher): + """Fetcher for buildcache index, cache invalidation via manifest contents""" + + def __init__(self, url_and_version: MirrorURLAndVersion, local_hash, urlopen=web_util.urlopen): + self.url = url_and_version.url + self.layout_version = url_and_version.version + self.local_hash = local_hash + self.urlopen = urlopen + self.headers = {"User-Agent": web_util.SPACK_USER_AGENT} + + def conditional_fetch(self) -> FetchIndexResult: + cache_class = get_url_buildcache_class(layout_version=self.layout_version) + url_index_manifest = cache_class.get_index_url(self.url) + + try: + response = self.urlopen( + urllib.request.Request(url_index_manifest, headers=self.headers) + ) + except OSError as e: + raise FetchIndexError( + f"Could not read index manifest from {url_index_manifest}" + ) from e + + index_blob_record = self.get_index_manifest(response) + + # Early exit if our cache is up to date. 
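+        # (For reference: the manifest fetched above is expected to be JSON of
+        # roughly the form {"version": ..., "data": [<blob record>, ...]}, and
+        # each blob record carries the size, media type, compression and
+        # checksum used to locate the index blob by its content address.)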
+        if self.local_hash and self.local_hash == index_blob_record.checksum:
+            return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
+
+        # Otherwise, download the index blob
+        cache_entry = cache_class(self.url, allow_unsigned=True)
+        computed_hash, result = self.fetch_index_blob(cache_entry, index_blob_record)
+        cache_entry.destroy()
+
+        # For now we only handle etags on http(s), since 304 error handling
+        # in s3:// is not there yet.
+        if urllib.parse.urlparse(self.url).scheme not in ("http", "https"):
+            etag = None
+        else:
+            etag = web_util.parse_etag(
+                response.headers.get("Etag", None) or response.headers.get("etag", None)
+            )
+
+        return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False)
+
+
+class EtagIndexFetcher(IndexFetcher):
+    """Fetcher for buildcache index, cache invalidation via ETags headers
+
+    This class differs from the DefaultIndexFetcher in the following ways: 1) It
+    is provided with an etag value on creation, rather than an index checksum
+    value. Note that since we never start out with an etag, the default fetcher
+    must have been used initially and determined that the etag approach is valid.
+    2) It provides this etag value in the 'If-None-Match' request header for the
+    index manifest. 3) It checks for a special exception type and response code
+    indicating the index manifest is not modified, exiting early and returning
+    'Fresh', if encountered. 4) If it needs to actually read the manifest, it
+    does not need to do any checks of the url scheme to determine whether an
+    etag should be included in the return value."""
+
+    def __init__(self, url_and_version: MirrorURLAndVersion, etag, urlopen=web_util.urlopen):
+        self.url = url_and_version.url
+        self.layout_version = url_and_version.version
+        self.etag = etag
+        self.urlopen = urlopen
+
+    def conditional_fetch(self) -> FetchIndexResult:
+        # Do a conditional fetch of the index manifest (i.e. using If-None-Match header)
+        cache_class = get_url_buildcache_class(layout_version=self.layout_version)
+        manifest_url = cache_class.get_index_url(self.url)
+        headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
+
+        try:
+            response = self.urlopen(urllib.request.Request(manifest_url, headers=headers))
+        except urllib.error.HTTPError as e:
+            if e.getcode() == 304:
+                # The remote manifest has not been modified, i.e. the index we
+                # already have is the freshest there is.
+                return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
+            raise FetchIndexError(f"Could not fetch index manifest {manifest_url}", e) from e
+        except OSError as e:  # URLError, socket.timeout, etc.
+            raise FetchIndexError(f"Could not fetch index manifest {manifest_url}", e) from e
+
+        # We need to read the index manifest and fetch the associated blob
+        cache_entry = cache_class(self.url, allow_unsigned=True)
+        computed_hash, result = self.fetch_index_blob(
+            cache_entry, self.get_index_manifest(response)
+        )
+        cache_entry.destroy()
+
+        headers = response.headers
+        etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
+
+        return FetchIndexResult(
+            etag=web_util.parse_etag(etag_header_value),
+            hash=computed_hash,
+            data=result,
+            fresh=False,
+        )
+
+
+def get_index_fetcher(
+    scheme: str, url_and_version: MirrorURLAndVersion, cache_entry: Dict[str, str]
+) -> IndexFetcher:
+    if scheme == "oci":
+        # TODO: Actually etag and OCI are not mutually exclusive...
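+        # (Selection sketch: oci:// mirrors always get the OCI fetcher; for other
+        # schemes a cached etag selects the Etag variant, and the mirror's layout
+        # version selects between the V2 and current fetchers below.)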
+ return OCIIndexFetcher(url_and_version, cache_entry.get("index_hash", None)) + elif cache_entry.get("etag"): + if url_and_version.version < 3: + return EtagIndexFetcherV2(url_and_version.url, cache_entry["etag"]) + else: + return EtagIndexFetcher(url_and_version, cache_entry["etag"]) + + else: + if url_and_version.version < 3: + return DefaultIndexFetcherV2( + url_and_version.url, local_hash=cache_entry.get("index_hash", None) + ) + else: + return DefaultIndexFetcher( + url_and_version, local_hash=cache_entry.get("index_hash", None) + ) + + class NoOverwriteException(spack.error.SpackError): """Raised when a file would be overwritten""" @@ -3147,27 +3041,6 @@ def __init__(self, keys): super().__init__(err_msg) -class NoVerifyException(spack.error.SpackError): - """ - Raised if file fails signature verification. - """ - - pass - - -class NoChecksumException(spack.error.SpackError): - """ - Raised if file fails checksum verification. - """ - - def __init__(self, path, size, contents, algorithm, expected, computed): - super().__init__( - f"{algorithm} checksum failed for {path}", - f"Expected {expected} but got {computed}. " - f"File size = {size} bytes. Contents = {contents!r}", - ) - - class NewLayoutException(spack.error.SpackError): """ Raised if directory layout is different from buildcache. @@ -3177,10 +3050,6 @@ def __init__(self, msg): super().__init__(msg) -class InvalidMetadataFile(spack.error.SpackError): - pass - - class UnsignedPackageException(spack.error.SpackError): """ Raised if installation of unsigned package is attempted without diff --git a/lib/spack/spack/buildcache_migrate.py b/lib/spack/spack/buildcache_migrate.py new file mode 100644 index 00000000000..f5c6b0cf577 --- /dev/null +++ b/lib/spack/spack/buildcache_migrate.py @@ -0,0 +1,351 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. 
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import codecs
+import json
+import os
+import pathlib
+import tempfile
+from typing import NamedTuple
+
+import llnl.util.tty as tty
+
+import spack.binary_distribution as bindist
+import spack.database as spack_db
+import spack.error
+import spack.mirrors.mirror
+import spack.spec
+import spack.stage
+import spack.util.crypto
+import spack.util.parallel
+import spack.util.url as url_util
+import spack.util.web as web_util
+
+from .enums import InstallRecordStatus
+from .url_buildcache import (
+    BlobRecord,
+    BuildcacheComponent,
+    compressed_json_from_dict,
+    get_url_buildcache_class,
+    sign_file,
+    try_verify,
+)
+
+
+def v2_tarball_directory_name(spec):
+    """
+    Return name of the tarball directory according to the convention
+    <os>-<architecture>/<compiler>/<package>-<version>/
+    """
+    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")
+
+
+def v2_tarball_name(spec, ext):
+    """
+    Return the name of the tarfile according to the convention
+    <os>-<architecture>-<package>-<dag_hash><ext>
+    """
+    spec_formatted = spec.format_path(
+        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
+    )
+    return f"{spec_formatted}{ext}"
+
+
+def v2_tarball_path_name(spec, ext):
+    """
+    Return the full path+name for a given spec according to the convention
+    <tarball_directory_name>/<tarball_name>
+    """
+    return os.path.join(v2_tarball_directory_name(spec), v2_tarball_name(spec, ext))
+
+
+class MigrateSpecResult(NamedTuple):
+    success: bool
+    message: str
+
+
+class MigrationException(spack.error.SpackError):
+    """
+    Raised when migration fails irrevocably
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+def _migrate_spec(
+    s: spack.spec.Spec, mirror_url: str, tmpdir: str, unsigned: bool = False, signing_key: str = ""
+) -> MigrateSpecResult:
+    """Parallelizable function to migrate a single spec"""
+    print_spec = f"{s.name}/{s.dag_hash()[:7]}"
+
+    # Check if the spec file exists in the new location and exit early if so
+
+    v3_cache_class = get_url_buildcache_class(layout_version=3)
+    v3_cache_entry = v3_cache_class(mirror_url, s, allow_unsigned=unsigned)
+    exists = v3_cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])
+    v3_cache_entry.destroy()
+
+    if exists:
+        msg = f"No need to migrate {print_spec}"
+        return MigrateSpecResult(True, msg)
+
+    # Try to fetch the spec metadata
+    v2_metadata_urls = [
+        url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json.sig"))
+    ]
+
+    if unsigned:
+        v2_metadata_urls.append(
+            url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json"))
+        )
+
+    spec_contents = None
+
+    for meta_url in v2_metadata_urls:
+        try:
+            _, _, meta_file = web_util.read_from_url(meta_url)
+            spec_contents = codecs.getreader("utf-8")(meta_file).read()
+            v2_spec_url = meta_url
+            break
+        except (web_util.SpackWebError, OSError):
+            pass
+    else:
+        msg = f"Unable to read metadata for {print_spec}"
+        return MigrateSpecResult(False, msg)
+
+    spec_dict = {}
+
+    if unsigned:
+        # User asked for unsigned; if we found a signed specfile, just ignore
+        # the signature
+        if v2_spec_url.endswith(".sig"):
+            spec_dict = spack.spec.Spec.extract_json_from_clearsig(spec_contents)
+        else:
+            spec_dict = json.loads(spec_contents)
+    else:
+        # User asked for signed, we must successfully verify the signature
+        local_signed_pre_verify = os.path.join(
+            tmpdir, f"{s.name}_{s.dag_hash()}_verify.spec.json.sig"
+        )
+        with open(local_signed_pre_verify, "w", encoding="utf-8") as fd:
+            fd.write(spec_contents)
+        if not try_verify(local_signed_pre_verify):
+            return
MigrateSpecResult(False, f"Failed to verify signature of {print_spec}") + with open(local_signed_pre_verify, encoding="utf-8") as fd: + spec_dict = spack.spec.Spec.extract_json_from_clearsig(fd.read()) + + # Read out and remove the bits needed to rename and position the archive + bcc = spec_dict.pop("binary_cache_checksum", None) + if not bcc: + msg = "Cannot migrate a spec that does not have 'binary_cache_checksum'" + return MigrateSpecResult(False, msg) + + algorithm = bcc["hash_algorithm"] + checksum = bcc["hash"] + + # TODO: Remove this key once oci buildcache no longer uses it + spec_dict["buildcache_layout_version"] = 2 + + v2_archive_url = url_util.join(mirror_url, "build_cache", v2_tarball_path_name(s, ".spack")) + + # spacks web utilities do not include direct copying of s3 objects, so we + # need to download the archive locally, and then push it back to the target + # location + archive_stage_path = os.path.join(tmpdir, f"archive_stage_{s.name}_{s.dag_hash()}") + archive_stage = spack.stage.Stage(v2_archive_url, path=archive_stage_path) + + try: + archive_stage.create() + archive_stage.fetch() + except spack.error.FetchError: + return MigrateSpecResult(False, f"Unable to fetch archive for {print_spec}") + + local_tarfile_path = archive_stage.save_filename + + # As long as we have to download the tarball anyway, we might as well compute the + # checksum locally and check it against the expected value + local_checksum = spack.util.crypto.checksum( + spack.util.crypto.hash_fun_for_algo(algorithm), local_tarfile_path + ) + + if local_checksum != checksum: + return MigrateSpecResult( + False, f"Checksum mismatch for {print_spec}: expected {checksum}, got {local_checksum}" + ) + + spec_dict["archive_size"] = os.stat(local_tarfile_path).st_size + + # Compress the spec dict and compute its checksum + metadata_checksum_algo = "sha256" + spec_json_path = os.path.join(tmpdir, f"{s.name}_{s.dag_hash()}.spec.json") + metadata_checksum, metadata_size = compressed_json_from_dict( + spec_json_path, spec_dict, metadata_checksum_algo + ) + + tarball_blob_record = BlobRecord( + spec_dict["archive_size"], v3_cache_class.TARBALL_MEDIATYPE, "gzip", algorithm, checksum + ) + + metadata_blob_record = BlobRecord( + metadata_size, + v3_cache_class.SPEC_MEDIATYPE, + "gzip", + metadata_checksum_algo, + metadata_checksum, + ) + + # Compute the urls to the new blobs + v3_archive_url = v3_cache_class.get_blob_url(mirror_url, tarball_blob_record) + v3_spec_url = v3_cache_class.get_blob_url(mirror_url, metadata_blob_record) + + # First push the tarball + tty.debug(f"Pushing {local_tarfile_path} to {v3_archive_url}") + + try: + web_util.push_to_url(local_tarfile_path, v3_archive_url, keep_original=True) + except Exception: + return MigrateSpecResult(False, f"Failed to push archive for {print_spec}") + + # Then push the spec file + tty.debug(f"Pushing {spec_json_path} to {v3_spec_url}") + + try: + web_util.push_to_url(spec_json_path, v3_spec_url, keep_original=True) + except Exception: + return MigrateSpecResult(False, f"Failed to push spec metadata for {print_spec}") + + # Generate the manifest and write it to a temporary location + manifest = { + "version": v3_cache_class.get_layout_version(), + "data": [tarball_blob_record.to_dict(), metadata_blob_record.to_dict()], + } + + manifest_path = os.path.join(tmpdir, f"{s.dag_hash()}.manifest.json") + with open(manifest_path, "w", encoding="utf-8") as f: + json.dump(manifest, f, indent=0, separators=(",", ":")) + # Note: when using gpg clear sign, we need to avoid 
+ # chars). If lines are longer, they are truncated without error. So,
+ # here we still add newlines, but no indent, to save on file size and
+ # line length.
+
+ # Possibly sign the manifest
+ if not unsigned:
+ manifest_path = sign_file(signing_key, manifest_path)
+
+ v3_manifest_url = v3_cache_class.get_manifest_url(s, mirror_url)
+
+ # Push the manifest
+ try:
+ web_util.push_to_url(manifest_path, v3_manifest_url, keep_original=True)
+ except Exception:
+ return MigrateSpecResult(False, f"Failed to push manifest for {print_spec}")
+
+ return MigrateSpecResult(True, f"Successfully migrated {print_spec}")
+
+
+def migrate(
+ mirror: spack.mirrors.mirror.Mirror, unsigned: bool = False, delete_existing: bool = False
+) -> None:
+ """Perform migration of the given mirror
+
+ If unsigned is True, signatures on signed specs will be ignored, and specs
+ will not be re-signed before pushing to the new location. Otherwise, spack
+ will attempt to verify signatures and re-sign specs, and will fail if not
+ able to do so. If delete_existing is True, spack will delete the original
+ contents of the mirror once the migration is complete."""
+ signing_key = ""
+ if not unsigned:
+ try:
+ signing_key = bindist.select_signing_key()
+ except (bindist.NoKeyException, bindist.PickKeyException):
+ raise MigrationException(
+ "Signed migration requires exactly one secret key in keychain"
+ )
+
+ delete_action = "deleting" if delete_existing else "keeping"
+ sign_action = "an unsigned" if unsigned else "a signed"
+ mirror_url = mirror.fetch_url
+
+ tty.msg(
+ f"Performing {sign_action} migration of {mirror.push_url} "
+ f"and {delete_action} existing contents"
+ )
+
+ index_url = url_util.join(mirror_url, "build_cache", spack_db.INDEX_JSON_FILE)
+ contents = None
+
+ try:
+ _, _, index_file = web_util.read_from_url(index_url)
+ contents = codecs.getreader("utf-8")(index_file).read()
+ except (web_util.SpackWebError, OSError):
+ raise MigrationException("Buildcache migration requires a buildcache index")
+
+ with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
+ index_path = os.path.join(tmpdir, "_tmp_index.json")
+ with open(index_path, "w", encoding="utf-8") as fd:
+ fd.write(contents)
+
+ db = bindist.BuildCacheDatabase(tmpdir)
+ db._read_from_file(pathlib.Path(index_path))
+
+ specs_to_migrate = [
+ s
+ for s in db.query_local(installed=InstallRecordStatus.ANY)
+ if not s.external and db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
+ ]
+
+ # Run the tasks in parallel if possible
+ executor = spack.util.parallel.make_concurrent_executor()
+ migrate_futures = [
+ executor.submit(_migrate_spec, spec, mirror_url, tmpdir, unsigned, signing_key)
+ for spec in specs_to_migrate
+ ]
+
+ success_count = 0
+
+ tty.msg("Migration summary:")
+ for spec, migrate_future in zip(specs_to_migrate, migrate_futures):
+ result = migrate_future.result()
+ msg = f" {spec.name}/{spec.dag_hash()[:7]}: {result.message}"
+ if result.success:
+ success_count += 1
+ tty.msg(msg)
+ else:
+ tty.error(msg)
+ # The migrated index should have the same specs as the original index,
+ # modulo any specs that we failed to migrate for whatever reason. So
+ # to avoid having to re-fetch all the spec files now, just mark them
+ # appropriately in the existing database and push that.
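+ # For example, a spec whose v2 archive failed checksum verification
+ # above comes back with success=False, so it is marked as not in the
+ # buildcache here and omitted from the rebuilt index; a later re-run
+ # of the migration can pick it up again.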
+ db.mark(spec, "in_buildcache", result.success) + + if success_count > 0: + tty.msg("Updating index and pushing keys") + + # If the layout.json doesn't yet exist on this mirror, push it + v3_cache_class = get_url_buildcache_class(layout_version=3) + v3_cache_class.maybe_push_layout_json(mirror_url) + + # Push the migrated mirror index + index_tmpdir = os.path.join(tmpdir, "rebuild_index") + os.mkdir(index_tmpdir) + bindist._push_index(db, index_tmpdir, mirror_url) + + # Push the public part of the signing key + if not unsigned: + keys_tmpdir = os.path.join(tmpdir, "keys") + os.mkdir(keys_tmpdir) + bindist._url_push_keys( + mirror_url, keys=[signing_key], update_index=True, tmpdir=keys_tmpdir + ) + else: + tty.warn("No specs migrated, did you mean to perform an unsigned migration instead?") + + # Delete the old layout if the user requested it + if delete_existing: + delete_prefix = url_util.join(mirror_url, "build_cache") + tty.msg(f"Recursively deleting {delete_prefix}") + web_util.remove_url(delete_prefix, recursive=True) + + tty.msg("Migration complete") diff --git a/lib/spack/spack/ci/__init__.py b/lib/spack/spack/ci/__init__.py index 9f3a1ca0d9c..7f786094b4a 100644 --- a/lib/spack/spack/ci/__init__.py +++ b/lib/spack/spack/ci/__init__.py @@ -33,6 +33,7 @@ import spack.paths import spack.repo import spack.spec +import spack.stage import spack.store import spack.util.git import spack.util.gpg as gpg_util @@ -245,7 +246,9 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision: if not spec_locations: return RebuildDecision(True, "not found anywhere") - urls = ",".join([loc["mirror_url"] for loc in spec_locations]) + urls = ",".join( + [f"{loc.url_and_version.url}@v{loc.url_and_version.version}" for loc in spec_locations] + ) message = f"up-to-date [{urls}]" return RebuildDecision(False, message) @@ -1242,33 +1245,31 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic """Given a url to write to and the details of the failed job, write an entry in the broken specs list. """ - tmpdir = tempfile.mkdtemp() - file_path = os.path.join(tmpdir, "broken.txt") + with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: + file_path = os.path.join(tmpdir, "broken.txt") - broken_spec_details = { - "broken-spec": { - "job-name": pkg_name, - "job-stack": stack_name, - "job-url": job_url, - "pipeline-url": pipeline_url, - "concrete-spec-dict": spec_dict, + broken_spec_details = { + "broken-spec": { + "job-name": pkg_name, + "job-stack": stack_name, + "job-url": job_url, + "pipeline-url": pipeline_url, + "concrete-spec-dict": spec_dict, + } } - } - try: - with open(file_path, "w", encoding="utf-8") as fd: - syaml.dump(broken_spec_details, fd) - web_util.push_to_url( - file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"} - ) - except Exception as err: - # If there is an S3 error (e.g., access denied or connection - # error), the first non boto-specific class in the exception - # hierarchy is Exception. Just print a warning and return - msg = f"Error writing to broken specs list {url}: {err}" - tty.warn(msg) - finally: - shutil.rmtree(tmpdir) + try: + with open(file_path, "w", encoding="utf-8") as fd: + syaml.dump(broken_spec_details, fd) + web_util.push_to_url( + file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"} + ) + except Exception as err: + # If there is an S3 error (e.g., access denied or connection + # error), the first non boto-specific class in the exception + # hierarchy is Exception. 
Just print a warning and return + msg = f"Error writing to broken specs list {url}: {err}" + tty.warn(msg) def read_broken_spec(broken_spec_url): diff --git a/lib/spack/spack/ci/common.py b/lib/spack/spack/ci/common.py index 78caafcb2ab..ecef706ad19 100644 --- a/lib/spack/spack/ci/common.py +++ b/lib/spack/spack/ci/common.py @@ -31,12 +31,12 @@ import spack.spec import spack.util.compression as compression import spack.util.spack_yaml as syaml -import spack.util.url as url_util import spack.util.web as web_util from spack import traverse from spack.reporters import CDash, CDashConfiguration from spack.reporters.cdash import SPACK_CDASH_TIMEOUT from spack.reporters.cdash import build_stamp as cdash_build_stamp +from spack.url_buildcache import get_url_buildcache_class IS_WINDOWS = sys.platform == "win32" SPACK_RESERVED_TAGS = ["public", "protected", "notary"] @@ -179,33 +179,13 @@ def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file): for release_spec in specs: release_spec_dag_hash = release_spec.dag_hash() - # TODO: This assumes signed version of the spec - buildcache_copies[release_spec_dag_hash] = [ - { - "src": url_util.join( - src_prefix, - bindist.build_cache_relative_path(), - bindist.tarball_name(release_spec, ".spec.json.sig"), - ), - "dest": url_util.join( - dest_prefix, - bindist.build_cache_relative_path(), - bindist.tarball_name(release_spec, ".spec.json.sig"), - ), - }, - { - "src": url_util.join( - src_prefix, - bindist.build_cache_relative_path(), - bindist.tarball_path_name(release_spec, ".spack"), - ), - "dest": url_util.join( - dest_prefix, - bindist.build_cache_relative_path(), - bindist.tarball_path_name(release_spec, ".spack"), - ), - }, - ] + cache_class = get_url_buildcache_class( + layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + buildcache_copies[release_spec_dag_hash] = { + "src": cache_class.get_manifest_url(release_spec, src_prefix), + "dest": cache_class.get_manifest_url(release_spec, dest_prefix), + } target_dir = os.path.dirname(output_file) diff --git a/lib/spack/spack/ci/gitlab.py b/lib/spack/spack/ci/gitlab.py index a69149dee8a..15d46ada01a 100644 --- a/lib/spack/spack/ci/gitlab.py +++ b/lib/spack/spack/ci/gitlab.py @@ -292,6 +292,9 @@ def main_script_replacements(cmd): ) maybe_generate_manifest(pipeline, options, manifest_path) + relative_specs_url = bindist.buildcache_relative_specs_url() + relative_keys_url = bindist.buildcache_relative_keys_url() + if options.pipeline_type == PipelineType.COPY_ONLY: stage_names.append("copy") sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"]) @@ -301,9 +304,12 @@ def main_script_replacements(cmd): if "variables" not in sync_job: sync_job["variables"] = {} - sync_job["variables"][ - "SPACK_COPY_ONLY_DESTINATION" - ] = options.buildcache_destination.fetch_url + sync_job["variables"].update( + { + "SPACK_COPY_ONLY_DESTINATION": options.buildcache_destination.fetch_url, + "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url, + } + ) pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True) if "buildcache-source" not in pipeline_mirrors: @@ -333,9 +339,13 @@ def main_script_replacements(cmd): signing_job["interruptible"] = True if "variables" not in signing_job: signing_job["variables"] = {} - signing_job["variables"][ - "SPACK_BUILDCACHE_DESTINATION" - ] = options.buildcache_destination.push_url + signing_job["variables"].update( + { + "SPACK_BUILDCACHE_DESTINATION": options.buildcache_destination.push_url, + "SPACK_BUILDCACHE_RELATIVE_SPECS_URL": 
relative_specs_url, + "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url, + } + ) signing_job["dependencies"] = [] output_object["sign-pkgs"] = signing_job diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py index 84aec74328f..d20750fa58e 100644 --- a/lib/spack/spack/cmd/bootstrap.py +++ b/lib/spack/spack/cmd/bootstrap.py @@ -2,6 +2,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +import pathlib import shutil import sys import tempfile @@ -28,7 +29,7 @@ # Tarball to be downloaded if binary packages are requested in a local mirror -BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz" +BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache-v3.tar.gz" #: Subdirectory where to create the mirror LOCAL_MIRROR_DIR = "bootstrap_cache" @@ -410,8 +411,9 @@ def _mirror(args): stage.create() stage.fetch() stage.expand_archive() - build_cache_dir = os.path.join(stage.source_path, "build_cache") - shutil.move(build_cache_dir, mirror_dir) + stage_dir = pathlib.Path(stage.source_path) + for entry in stage_dir.iterdir(): + shutil.move(str(entry), mirror_dir) llnl.util.tty.set_msg_enabled(True) def write_metadata(subdir, metadata): @@ -436,7 +438,6 @@ def write_metadata(subdir, metadata): shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory) shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory) instructions += cmd.format("local-binaries", rel_directory) - instructions += " % spack buildcache update-index /bootstrap_cache\n" print(instructions) diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index 80a579a3b71..7d1433f33c0 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -4,11 +4,9 @@ import argparse import glob import json -import os -import shutil import sys import tempfile -from typing import List, Tuple +from typing import List, Optional, Tuple import llnl.util.tty as tty from llnl.string import plural @@ -27,14 +25,21 @@ import spack.stage import spack.store import spack.util.parallel -import spack.util.url as url_util import spack.util.web as web_util from spack import traverse from spack.cmd import display_specs from spack.cmd.common import arguments from spack.spec import Spec, save_dependency_specfiles +from ..buildcache_migrate import migrate from ..enums import InstallRecordStatus +from ..url_buildcache import ( + BuildcacheComponent, + BuildcacheEntryError, + URLBuildcacheEntry, + check_mirror_for_layout, + get_url_buildcache_class, +) description = "create, download and install binary packages" section = "packaging" @@ -272,6 +277,27 @@ def setup_parser(subparser: argparse.ArgumentParser): ) update_index.set_defaults(func=update_index_fn) + # Migrate a buildcache from layout_version 2 to version 3 + migrate = subparsers.add_parser("migrate", help=migrate_fn.__doc__) + migrate.add_argument("mirror", type=arguments.mirror_name, help="name of a configured mirror") + migrate.add_argument( + "-u", + "--unsigned", + default=False, + action="store_true", + help="Ignore signatures and do not resign, default is False", + ) + migrate.add_argument( + "-d", + "--delete-existing", + default=False, + action="store_true", + help="Delete the previous layout, the default is to keep it.", + ) + arguments.add_common_arguments(migrate, ["yes_to_all"]) + # TODO: add -y argument to prompt if user really means to delete existing + 
migrate.set_defaults(func=migrate_fn)
+
 
 def _matching_specs(specs: List[Spec]) -> List[Spec]:
 """Disambiguate specs and return a list of matching specs"""
@@ -397,6 +423,10 @@ def push_fn(args):
 (s, PackageNotInstalledError("package not installed")) for s in not_installed
 )
+ # Warn about possible old binary mirror layout
+ if not mirror.push_url.startswith("oci://"):
+ check_mirror_for_layout(mirror)
+
 with bindist.make_uploader(
 mirror=mirror,
 force=args.force,
@@ -527,8 +557,7 @@ def download_fn(args):
 if len(specs) != 1:
 tty.die("a single spec argument is required to download from a buildcache")
- if not bindist.download_single_spec(specs[0], args.path):
- sys.exit(1)
+ bindist.download_single_spec(specs[0], args.path)
 
 def save_specfile_fn(args):
@@ -553,29 +582,78 @@ def save_specfile_fn(args):
 )
-def copy_buildcache_file(src_url, dest_url, local_path=None):
- """Copy from source url to destination url"""
- tmpdir = None
+def copy_buildcache_entry(cache_entry: URLBuildcacheEntry, destination_url: str):
+ """Download buildcache entry and copy it to the destination_url"""
+ try:
+ spec_dict = cache_entry.fetch_metadata()
+ cache_entry.fetch_archive()
+ except bindist.BuildcacheEntryError as e:
+ tty.warn(f"Failed to retrieve buildcache for copying due to {e}")
+ cache_entry.destroy()
+ return
- if not local_path:
- tmpdir = tempfile.mkdtemp()
- local_path = os.path.join(tmpdir, os.path.basename(src_url))
+ spec_blob_record = cache_entry.get_blob_record(BuildcacheComponent.SPEC)
+ local_spec_path = cache_entry.get_local_spec_path()
+ tarball_blob_record = cache_entry.get_blob_record(BuildcacheComponent.TARBALL)
+ local_tarball_path = cache_entry.get_local_archive_path()
+
+ target_spec = spack.spec.Spec.from_dict(spec_dict)
+ spec_label = f"{target_spec.name}/{target_spec.dag_hash()[:7]}"
+
+ if not tarball_blob_record:
+ cache_entry.destroy()
+ raise BuildcacheEntryError(f"No source tarball blob record, failed to sync {spec_label}")
+
+ # Try to push the tarball
+ tarball_dest_url = cache_entry.get_blob_url(destination_url, tarball_blob_record)
 try:
- temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path))
- try:
- temp_stage.create()
- temp_stage.fetch()
- web_util.push_to_url(local_path, dest_url, keep_original=True)
- except spack.error.FetchError as e:
- # Expected, since we have to try all the possible extensions
- tty.debug("no such file: {0}".format(src_url))
- tty.debug(e)
- finally:
- temp_stage.destroy()
- finally:
- if tmpdir and os.path.exists(tmpdir):
- shutil.rmtree(tmpdir)
+ web_util.push_to_url(local_tarball_path, tarball_dest_url, keep_original=True)
+ except Exception as e:
+ tty.warn(f"Failed to push {local_tarball_path} to {tarball_dest_url} due to {e}")
+ cache_entry.destroy()
+ return
+
+ if not spec_blob_record:
+ cache_entry.destroy()
+ raise BuildcacheEntryError(f"No source spec blob record, failed to sync {spec_label}")
+
+ # Try to push the spec file
+ spec_dest_url = cache_entry.get_blob_url(destination_url, spec_blob_record)
+
+ try:
+ web_util.push_to_url(local_spec_path, spec_dest_url, keep_original=True)
+ except Exception as e:
+ tty.warn(f"Failed to push {local_spec_path} to {spec_dest_url} due to {e}")
+ cache_entry.destroy()
+ return
+
+ # Stage the manifest locally, since if it's signed, we don't want to try
+ # to reproduce that here. Instead just push the locally staged manifest to
+ # the expected path at the destination url.
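+ # Copying the staged manifest file byte-for-byte also preserves any
+ # clearsig signature it carries, so the destination copy stays
+ # verifiable without re-signing.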
+ manifest_src_url = cache_entry.remote_manifest_url
+ manifest_dest_url = cache_entry.get_manifest_url(target_spec, destination_url)
+
+ manifest_stage = spack.stage.Stage(manifest_src_url)
+
+ try:
+ manifest_stage.create()
+ manifest_stage.fetch()
+ except Exception as e:
+ tty.warn(f"Failed to fetch manifest from {manifest_src_url} due to {e}")
+ manifest_stage.destroy()
+ cache_entry.destroy()
+ return
+
+ local_manifest_path = manifest_stage.save_filename
+
+ try:
+ web_util.push_to_url(local_manifest_path, manifest_dest_url, keep_original=True)
+ except Exception as e:
+ tty.warn(f"Failed to push manifest to {manifest_dest_url} due to {e}")
+
+ manifest_stage.destroy()
+ cache_entry.destroy()
 
 def sync_fn(args):
@@ -615,37 +693,21 @@ def sync_fn(args):
 )
 )
- build_cache_dir = bindist.build_cache_relative_path()
- buildcache_rel_paths = []
- tty.debug("Syncing the following specs:")
- for s in env.all_specs():
+ specs_to_sync = [s for s in env.all_specs() if not s.external]
+ for s in specs_to_sync:
 tty.debug(" {0}{1}: {2}".format("* " if s in env.roots() else " ", s.name, s.dag_hash()))
-
- buildcache_rel_paths.extend(
- [
- os.path.join(build_cache_dir, bindist.tarball_path_name(s, ".spack")),
- os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json.sig")),
- os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json")),
- os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.yaml")),
- ]
+ cache_class = get_url_buildcache_class(
+ layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
 )
-
- tmpdir = tempfile.mkdtemp()
-
- try:
- for rel_path in buildcache_rel_paths:
- src_url = url_util.join(src_mirror_url, rel_path)
- local_path = os.path.join(tmpdir, rel_path)
- dest_url = url_util.join(dest_mirror_url, rel_path)
-
- tty.debug("Copying {0} to {1} via {2}".format(src_url, dest_url, local_path))
- copy_buildcache_file(src_url, dest_url, local_path=local_path)
- finally:
- shutil.rmtree(tmpdir)
+ src_cache_entry = cache_class(src_mirror_url, s, allow_unsigned=True)
+ src_cache_entry.read_manifest()
+ copy_buildcache_entry(src_cache_entry, dest_mirror_url)
 
-def manifest_copy(manifest_file_list, dest_mirror=None):
+def manifest_copy(
+ manifest_file_list: List[str], dest_mirror: Optional[spack.mirrors.mirror.Mirror] = None
+):
 """Read manifest files containing information about specific specs to copy
 from source to destination, remove duplicates since any binary package
 for a given hash should be the same as any other, and copy all files specified
@@ -655,21 +717,24 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
 for manifest_path in manifest_file_list:
 with open(manifest_path, encoding="utf-8") as fd:
 manifest = json.loads(fd.read())
- for spec_hash, copy_list in manifest.items():
+ for spec_hash, copy_obj in manifest.items():
 # Last duplicate hash wins
- deduped_manifest[spec_hash] = copy_list
+ deduped_manifest[spec_hash] = copy_obj
 
- build_cache_dir = bindist.build_cache_relative_path()
- for spec_hash, copy_list in deduped_manifest.items():
- for copy_file in copy_list:
- dest = copy_file["dest"]
- if dest_mirror:
- src_relative_path = os.path.join(
- build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
- )
- dest = url_util.join(dest_mirror.push_url, src_relative_path)
- tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
- copy_buildcache_file(copy_file["src"], dest)
+ for spec_hash, copy_obj in deduped_manifest.items():
+ cache_class = get_url_buildcache_class(
layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+ )
+ src_cache_entry = cache_class(
+ cache_class.get_base_url(copy_obj["src"]), allow_unsigned=True
+ )
+ src_cache_entry.read_manifest(manifest_url=copy_obj["src"])
+ if dest_mirror:
+ destination_url = dest_mirror.push_url
+ else:
+ destination_url = cache_class.get_base_url(copy_obj["dest"])
+ tty.debug("copying {0} to {1}".format(copy_obj["src"], destination_url))
+ copy_buildcache_entry(src_cache_entry, destination_url)
 
 def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
@@ -693,13 +758,9 @@ def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
 bindist._url_generate_package_index(url, tmpdir)
 
 if update_keys:
- keys_url = url_util.join(
- url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
- )
-
 try:
 with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
- bindist.generate_key_index(keys_url, tmpdir)
+ bindist.generate_key_index(url, tmpdir)
 except bindist.CannotListKeys as e:
 # Do not error out if listing keys went wrong. This usually means that the _gpg path
 # does not exist. TODO: distinguish between this and other errors.
@@ -711,5 +772,53 @@ def update_index_fn(args):
 return update_index(args.mirror, update_keys=args.keys)
 
+def migrate_fn(args):
+ """perform in-place binary mirror migration (2 to 3)
+
+ A mirror can contain both layout version 2 and version 3 simultaneously without
+ interference. This command performs in-place migration of a binary mirror laid
+ out according to version 2, to a binary mirror laid out according to layout
+ version 3. Only indexed specs will be migrated, so consider updating the mirror
+ index before running this command. Re-run the command to migrate any missing
+ items.
+
+ The default mode of operation is to perform a signed migration, that is, spack
+ will attempt to verify the signatures on specs, and then re-sign them before
+ migration, using whatever keys are already installed in your key ring. You can
+ migrate a mirror of unsigned binaries (or convert a mirror of signed binaries
+ to unsigned) by providing the --unsigned argument.
+
+ By default spack will leave the original mirror contents (in the old layout) in
+ place after migration. You can have spack remove the old contents by providing
+ the --delete-existing argument. Because migrating a mostly-already-migrated
+ mirror should be fast, consider a workflow where you perform a default migration
+ (i.e. preserve the existing layout rather than deleting it), then evaluate the
+ state of the migrated mirror by attempting to install from it, and finally
+ running the migration again with --delete-existing."""
+ target_mirror = args.mirror
+ unsigned = args.unsigned
+ assert isinstance(target_mirror, spack.mirrors.mirror.Mirror)
+ delete_existing = args.delete_existing
+
+ proceed = True
+ if delete_existing and not args.yes_to_all:
+ msg = (
+ "Using --delete-existing will delete the entire contents \n"
+ " of the old layout within the mirror. Because migrating a mirror \n"
+ " that has already been migrated should be fast, consider a workflow \n"
+ " where you perform a default migration (i.e. preserve the existing \n"
+ " layout rather than deleting it), then evaluate the state of the \n"
+ " migrated mirror by attempting to install from it, and finally, \n"
+ " run the migration again with --delete-existing."
+ ) + tty.warn(msg) + proceed = tty.get_yes_or_no("Do you want to proceed?", default=False) + + if not proceed: + tty.die("Migration aborted.") + + migrate(target_mirror, unsigned=unsigned, delete_existing=delete_existing) + + def buildcache(parser, args): return args.func(args) diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py index 0a3b68af674..31bdfb04485 100644 --- a/lib/spack/spack/cmd/ci.py +++ b/lib/spack/spack/cmd/ci.py @@ -423,7 +423,7 @@ def ci_rebuild(args): # jobs in subsequent stages. tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name)) for match in matches: - tty.msg(" {0}".format(match["mirror_url"])) + tty.msg(" {0}".format(match.url_and_version.url)) # Now we are done and successful return 0 diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py index 92eb5f951ab..b04af6315b3 100644 --- a/lib/spack/spack/error.py +++ b/lib/spack/spack/error.py @@ -202,3 +202,16 @@ class MirrorError(SpackError): def __init__(self, msg, long_msg=None): super().__init__(msg, long_msg) + + +class NoChecksumException(SpackError): + """ + Raised if file fails checksum verification. + """ + + def __init__(self, path, size, contents, algorithm, expected, computed): + super().__init__( + f"{algorithm} checksum failed for {path}", + f"Expected {expected} but got {computed}. " + f"File size = {size} bytes. Contents = {contents!r}", + ) diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index c51ea278a05..5abd9ac5c56 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -65,6 +65,7 @@ import spack.util.executable import spack.util.path import spack.util.timer as timer +from spack.url_buildcache import BuildcacheEntryError from spack.util.environment import EnvironmentModifications, dump_environment from spack.util.executable import which @@ -449,17 +450,17 @@ def _process_binary_cache_tarball( else ``False`` """ with timer.measure("fetch"): - download_result = binary_distribution.download_tarball( + tarball_stage = binary_distribution.download_tarball( pkg.spec.build_spec, unsigned, mirrors_for_spec ) - if download_result is None: + if tarball_stage is None: return False tty.msg(f"Extracting {package_id(pkg.spec)} from binary cache") with timer.measure("install"), spack.util.path.filter_padding(): - binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer) + binary_distribution.extract_tarball(pkg.spec, tarball_stage, force=False, timer=timer) if pkg.spec.spliced: # overwrite old metadata with new spack.store.STORE.layout.write_spec( @@ -2177,7 +2178,7 @@ def install(self) -> None: ) raise - except binary_distribution.NoChecksumException as exc: + except BuildcacheEntryError as exc: if task.cache_only: raise diff --git a/lib/spack/spack/schema/buildcache_spec.py b/lib/spack/spack/schema/buildcache_spec.py index 6f560b4a5df..81cde38901a 100644 --- a/lib/spack/spack/schema/buildcache_spec.py +++ b/lib/spack/spack/schema/buildcache_spec.py @@ -19,10 +19,6 @@ "additionalProperties": True, "items": spack.schema.spec.properties, }, - "binary_cache_checksum": { - "type": "object", - "properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}}, - }, "buildcache_layout_version": {"type": "number"}, } @@ -30,6 +26,6 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "Spack buildcache specfile schema", "type": "object", - "additionalProperties": False, + "additionalProperties": True, "properties": properties, } diff --git 
a/lib/spack/spack/schema/url_buildcache_manifest.py b/lib/spack/spack/schema/url_buildcache_manifest.py new file mode 100644 index 00000000000..e3dc4340fcb --- /dev/null +++ b/lib/spack/spack/schema/url_buildcache_manifest.py @@ -0,0 +1,45 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +"""Schema for buildcache entry manifest file + +.. literalinclude:: _spack_root/lib/spack/spack/schema/url_buildcache_manifest.py + :lines: 11- +""" +from typing import Any, Dict + +properties: Dict[str, Any] = { + "version": {"type": "integer"}, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "contentLength", + "mediaType", + "compression", + "checksumAlgorithm", + "checksum", + ], + "properties": { + "contentLength": {"type": "integer"}, + "mediaType": {"type": "string"}, + "compression": {"type": "string"}, + "checksumAlgorithm": {"type": "string"}, + "checksum": {"type": "string"}, + }, + "additionalProperties": True, + }, + }, +} + +#: Full schema with metadata +schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Buildcache manifest schema", + "type": "object", + "required": ["version", "data"], + "additionalProperties": True, + "properties": properties, +} diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index 9278d9bf1d8..0a27c18781d 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -17,11 +17,10 @@ import urllib.request import urllib.response from pathlib import Path, PurePath +from typing import Any, Callable, Dict, NamedTuple, Optional import pytest -import archspec.cpu - from llnl.util.filesystem import copy_tree, join_path from llnl.util.symlink import readlink @@ -38,16 +37,27 @@ import spack.paths import spack.repo import spack.spec +import spack.stage import spack.store import spack.util.gpg import spack.util.spack_yaml as syaml import spack.util.url as url_util import spack.util.web as web_util -from spack.binary_distribution import INDEX_HASH_FILE, CannotListKeys, GenerateIndexError +from spack.binary_distribution import CannotListKeys, GenerateIndexError from spack.database import INDEX_JSON_FILE from spack.installer import PackageInstaller from spack.paths import test_path from spack.spec import Spec +from spack.url_buildcache import ( + INDEX_MANIFEST_FILE, + BuildcacheComponent, + BuildcacheEntryError, + URLBuildcacheEntry, + URLBuildcacheEntryV2, + compression_writer, + get_url_buildcache_class, + get_valid_spec_file, +) pytestmark = pytest.mark.not_on_windows("does not run on windows") @@ -372,7 +382,7 @@ def test_built_spec_cache(temporary_mirror_dir): for s in [gspec, cspec]: results = bindist.get_mirrors_for_spec(s) - assert any([r["spec"] == s for r in results]) + assert any([r.spec == s for r in results]) def fake_dag_hash(spec, length=None): @@ -435,7 +445,11 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config): assert "libelf" in cache_list # Remove dependency from cache - libelf_files = glob.glob(os.path.join(mirror_dir.join("build_cache").strpath, "*libelf*")) + libelf_files = glob.glob( + os.path.join( + mirror_dir.join(bindist.buildcache_relative_specs_path()).strpath, "libelf", "*libelf*" + ) + ) os.remove(*libelf_files) # Update index @@ -480,8 +494,7 @@ def mock_list_url(url, recursive=False): assert ( "Warning: Encountered problem listing packages at " - f"{test_url}/{bindist.BUILD_CACHE_RELATIVE_PATH}: Some HTTP error" - in capfd.readouterr().err 
+ f"{test_url}: Some HTTP error" in capfd.readouterr().err ) @@ -538,29 +551,6 @@ def test_update_sbang(tmp_path, temporary_mirror, mock_fetch, install_mockery): assert f.read() == new_contents -@pytest.mark.skipif( - str(archspec.cpu.host().family) != "x86_64", - reason="test data uses gcc 4.5.0 which does not support aarch64", -) -def test_install_legacy_buildcache_layout(mutable_config, compiler_factory, install_mockery): - """Legacy buildcache layout involved a nested archive structure - where the .spack file contained a repeated spec.json and another - compressed archive file containing the install tree. This test - makes sure we can still read that layout.""" - legacy_layout_dir = os.path.join(test_path, "data", "mirrors", "legacy_layout") - mirror_url = f"file://{legacy_layout_dir}" - filename = ( - "test-debian6-core2-gcc-4.5.0-archive-files-2.0-" - "l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json" - ) - spec_json_path = os.path.join(legacy_layout_dir, "build_cache", filename) - mirror_cmd("add", "--scope", "site", "test-legacy-layout", mirror_url) - output = install_cmd("--no-check-signature", "--cache-only", "-f", spec_json_path, output=str) - mirror_cmd("rm", "--scope=site", "test-legacy-layout") - expect_line = "Extracting archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk from binary cache" - assert expect_line in output - - def test_FetchCacheError_only_accepts_lists_of_errors(): with pytest.raises(TypeError, match="list"): bindist.FetchCacheError("error") @@ -600,7 +590,60 @@ def test_text_relocate_if_needed(install_mockery, temporary_store, mock_fetch, t assert join_path("bin", "secretexe") not in manifest["relocate_textfiles"] -def test_etag_fetching_304(): +def test_compression_writer(tmp_path): + text = "This is some text. We might or might not like to compress it as we write." + checksum_algo = "sha256" + + # Write the data using gzip compression + compressed_output_path = str(tmp_path / "compressed_text") + with compression_writer(compressed_output_path, "gzip", checksum_algo) as ( + compressor, + checker, + ): + compressor.write(text.encode("utf-8")) + + compressed_size = checker.length + compressed_checksum = checker.hexdigest() + + with open(compressed_output_path, "rb") as f: + binary_content = f.read() + + assert bindist.compute_hash(binary_content) == compressed_checksum + assert os.stat(compressed_output_path).st_size == compressed_size + assert binary_content[:2] == b"\x1f\x8b" + decompressed_content = gzip.decompress(binary_content).decode("utf-8") + + assert decompressed_content == text + + # Write the data without compression + uncompressed_output_path = str(tmp_path / "uncompressed_text") + with compression_writer(uncompressed_output_path, "none", checksum_algo) as ( + compressor, + checker, + ): + compressor.write(text.encode("utf-8")) + + uncompressed_size = checker.length + uncompressed_checksum = checker.hexdigest() + + with open(uncompressed_output_path, "r", encoding="utf-8") as f: + content = f.read() + + assert bindist.compute_hash(content) == uncompressed_checksum + assert os.stat(uncompressed_output_path).st_size == uncompressed_size + assert content == text + + # Make sure we raise if requesting unknown compression type + nocare_output_path = str(tmp_path / "wontwrite") + with pytest.raises(BuildcacheEntryError, match="Unknown compression type"): + with compression_writer(nocare_output_path, "gsip", checksum_algo) as ( + compressor, + checker, + ): + compressor.write(text) + + +def test_v2_etag_fetching_304(): # Test conditional fetch with etags. 
If the remote hasn't modified the file # it returns 304, which is an HTTPError in urllib-land. That should be # handled as success, since it means the local cache is up-to-date. @@ -613,7 +656,7 @@ def response_304(request: urllib.request.Request): ) assert False, "Should not fetch {}".format(url) - fetcher = bindist.EtagIndexFetcher( + fetcher = bindist.EtagIndexFetcherV2( url="https://www.example.com", etag="112a8bbc1b3f7f185621c1ee335f0502", urlopen=response_304, @@ -624,7 +667,7 @@ def response_304(request: urllib.request.Request): assert result.fresh -def test_etag_fetching_200(): +def test_v2_etag_fetching_200(): # Test conditional fetch with etags. The remote has modified the file. def response_200(request: urllib.request.Request): url = request.get_full_url() @@ -638,7 +681,7 @@ def response_200(request: urllib.request.Request): ) assert False, "Should not fetch {}".format(url) - fetcher = bindist.EtagIndexFetcher( + fetcher = bindist.EtagIndexFetcherV2( url="https://www.example.com", etag="112a8bbc1b3f7f185621c1ee335f0502", urlopen=response_200, @@ -652,7 +695,7 @@ def response_200(request: urllib.request.Request): assert result.hash == bindist.compute_hash("Result") -def test_etag_fetching_404(): +def test_v2_etag_fetching_404(): # Test conditional fetch with etags. The remote has modified the file. def response_404(request: urllib.request.Request): raise urllib.error.HTTPError( @@ -663,7 +706,7 @@ def response_404(request: urllib.request.Request): fp=None, ) - fetcher = bindist.EtagIndexFetcher( + fetcher = bindist.EtagIndexFetcherV2( url="https://www.example.com", etag="112a8bbc1b3f7f185621c1ee335f0502", urlopen=response_404, @@ -673,13 +716,13 @@ def response_404(request: urllib.request.Request): fetcher.conditional_fetch() -def test_default_index_fetch_200(): +def test_v2_default_index_fetch_200(): index_json = '{"Hello": "World"}' index_json_hash = bindist.compute_hash(index_json) def urlopen(request: urllib.request.Request): url = request.get_full_url() - if url.endswith(INDEX_HASH_FILE): + if url.endswith("index.json.hash"): return urllib.response.addinfourl( # type: ignore[arg-type] io.BytesIO(index_json_hash.encode()), headers={}, # type: ignore[arg-type] @@ -697,7 +740,7 @@ def urlopen(request: urllib.request.Request): assert False, "Unexpected request {}".format(url) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash="outdated", urlopen=urlopen ) @@ -710,7 +753,7 @@ def urlopen(request: urllib.request.Request): assert result.hash == index_json_hash -def test_default_index_dont_fetch_index_json_hash_if_no_local_hash(): +def test_v2_default_index_dont_fetch_index_json_hash_if_no_local_hash(): # When we don't have local hash, we should not be fetching the # remote index.json.hash file, but only index.json. 
index_json = '{"Hello": "World"}' @@ -728,7 +771,7 @@ def urlopen(request: urllib.request.Request): assert False, "Unexpected request {}".format(url) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash=None, urlopen=urlopen ) @@ -741,13 +784,13 @@ def urlopen(request: urllib.request.Request): assert not result.fresh -def test_default_index_not_modified(): +def test_v2_default_index_not_modified(): index_json = '{"Hello": "World"}' index_json_hash = bindist.compute_hash(index_json) def urlopen(request: urllib.request.Request): url = request.get_full_url() - if url.endswith(INDEX_HASH_FILE): + if url.endswith("index.json.hash"): return urllib.response.addinfourl( io.BytesIO(index_json_hash.encode()), headers={}, # type: ignore[arg-type] @@ -758,7 +801,7 @@ def urlopen(request: urllib.request.Request): # No request to index.json should be made. assert False, "Unexpected request {}".format(url) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen ) @@ -766,7 +809,7 @@ def urlopen(request: urllib.request.Request): @pytest.mark.parametrize("index_json", [b"\xa9", b"!#%^"]) -def test_default_index_invalid_hash_file(index_json): +def test_v2_default_index_invalid_hash_file(index_json): # Test invalid unicode / invalid hash type index_json_hash = bindist.compute_hash(index_json) @@ -778,21 +821,21 @@ def urlopen(request: urllib.request.Request): code=200, ) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen ) assert fetcher.get_remote_hash() is None -def test_default_index_json_404(): +def test_v2_default_index_json_404(): # Test invalid unicode / invalid hash type index_json = '{"Hello": "World"}' index_json_hash = bindist.compute_hash(index_json) def urlopen(request: urllib.request.Request): url = request.get_full_url() - if url.endswith(INDEX_HASH_FILE): + if url.endswith("index.json.hash"): return urllib.response.addinfourl( io.BytesIO(index_json_hash.encode()), headers={}, # type: ignore[arg-type] @@ -811,7 +854,7 @@ def urlopen(request: urllib.request.Request): assert False, "Unexpected fetch {}".format(url) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash="invalid", urlopen=urlopen ) @@ -1097,9 +1140,7 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success): json.dump(spec_dict, f) try: - spec_dict_disk, layout_disk = bindist._get_valid_spec_file( - str(path), max_supported_layout=1 - ) + spec_dict_disk, layout_disk = get_valid_spec_file(str(path), max_supported_layout=1) assert expect_success assert spec_dict_disk == spec_dict assert layout_disk == effective_layout @@ -1109,51 +1150,66 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success): def test_get_valid_spec_file_doesnt_exist(tmp_path): with pytest.raises(bindist.InvalidMetadataFile, match="No such file"): - bindist._get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1) - - -def test_get_valid_spec_file_gzipped(tmp_path): - # Create a gzipped file, contents don't matter - path = tmp_path / "spec.json.gz" - with gzip.open(path, "wb") as f: - f.write(b"hello") - with pytest.raises( - bindist.InvalidMetadataFile, match="Compressed spec files are not supported" - ): - bindist._get_valid_spec_file(str(path), max_supported_layout=1) + 
get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1) @pytest.mark.parametrize("filename", ["spec.json", "spec.json.sig"]) def test_get_valid_spec_file_no_json(tmp_path, filename): tmp_path.joinpath(filename).write_text("not json") with pytest.raises(bindist.InvalidMetadataFile): - bindist._get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1) + get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1) -def test_download_tarball_with_unsupported_layout_fails( - tmp_path, mock_packages, mutable_config, capsys -): - layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1 - spec = spack.concretize.concretize_one("pkg-c") - spec_dict = spec.to_dict() - spec_dict["buildcache_layout_version"] = layout_version +@pytest.mark.usefixtures("install_mockery", "mock_packages", "mock_fetch", "temporary_mirror") +def test_url_buildcache_entry_v3(monkeypatch, tmpdir): + """Make sure URLBuildcacheEntry behaves as expected""" - # Setup a basic local build cache structure - path = ( - tmp_path / bindist.build_cache_relative_path() / bindist.tarball_name(spec, ".spec.json") - ) - path.parent.mkdir(parents=True) - with open(path, "w", encoding="utf-8") as f: - json.dump(spec_dict, f) + # Create a temp mirror directory for buildcache usage + mirror_dir = tmpdir.join("mirror_dir") + mirror_url = url_util.path_to_file_url(mirror_dir.strpath) - # Configure as a mirror. - mirror_cmd("add", "test-mirror", str(tmp_path)) + s = Spec("libdwarf").concretized() - # Shouldn't be able "download" this. - assert bindist.download_tarball(spec, unsigned=True) is None + # Install libdwarf + install_cmd("--fake", s.name) - # And there should be a warning about an unsupported layout version. - assert f"Layout version {layout_version} is too new" in capsys.readouterr().err + # Push libdwarf to buildcache + buildcache_cmd("push", "-u", mirror_dir.strpath, s.name) + + cache_class = get_url_buildcache_class(bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION) + build_cache = cache_class(mirror_url, s, allow_unsigned=True) + + manifest = build_cache.read_manifest() + spec_dict = build_cache.fetch_metadata() + local_tarball_path = build_cache.fetch_archive() + + assert "spec" in spec_dict + + for blob_record in manifest.data: + blob_path = build_cache.get_staged_blob_path(blob_record) + assert os.path.exists(blob_path) + actual_blob_size = os.stat(blob_path).st_size + assert blob_record.content_length == actual_blob_size + + build_cache.destroy() + + assert not os.path.exists(local_tarball_path) + + +def test_relative_path_components(): + blobs_v3 = URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.BLOB) + assert len(blobs_v3) == 1 + assert "blobs" in blobs_v3 + + blobs_v2 = URLBuildcacheEntryV2.get_relative_path_components(BuildcacheComponent.BLOB) + assert len(blobs_v2) == 1 + assert "build_cache" in blobs_v2 + + v2_spec_url = "file:///home/me/mymirror/build_cache/linux-ubuntu22.04-sapphirerapids-gcc-12.3.0-gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.json.sig" + assert URLBuildcacheEntryV2.get_base_url(v2_spec_url) == "file:///home/me/mymirror" + + v3_manifest_url = "file:///home/me/mymirror/v3/manifests/gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.manifest.json" + assert URLBuildcacheEntry.get_base_url(v3_manifest_url) == "file:///home/me/mymirror" @pytest.mark.parametrize( @@ -1170,3 +1226,244 @@ def test_download_tarball_with_unsupported_layout_fails( def test_default_tag(spec: str): """Make sure that computed image tags are valid.""" assert 
re.fullmatch(spack.oci.image.tag, bindist._oci_default_tag(spack.spec.Spec(spec))) + + +class IndexInformation(NamedTuple): + manifest_contents: Dict[str, Any] + index_contents: str + index_hash: str + manifest_path: str + index_path: str + manifest_etag: str + fetched_blob: Callable[[], bool] + + +@pytest.fixture +def mock_index(tmp_path, monkeypatch) -> IndexInformation: + mirror_root = tmp_path / "mymirror" + index_json = '{"Hello": "World"}' + index_json_hash = bindist.compute_hash(index_json) + fetched = False + + cache_class = get_url_buildcache_class( + layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + + index_blob_path = os.path.join( + str(mirror_root), + *cache_class.get_relative_path_components(BuildcacheComponent.BLOB), + "sha256", + index_json_hash[:2], + index_json_hash, + ) + + os.makedirs(os.path.dirname(index_blob_path)) + with open(index_blob_path, "w", encoding="utf-8") as fd: + fd.write(index_json) + + index_blob_record = bindist.BlobRecord( + os.stat(index_blob_path).st_size, + cache_class.BUILDCACHE_INDEX_MEDIATYPE, + "none", + "sha256", + index_json_hash, + ) + + index_manifest = { + "version": cache_class.get_layout_version(), + "data": [index_blob_record.to_dict()], + } + + manifest_json_path = cache_class.get_index_url(str(mirror_root)) + + os.makedirs(os.path.dirname(manifest_json_path)) + + with open(manifest_json_path, "w", encoding="utf-8") as f: + json.dump(index_manifest, f) + + def fetch_patch(stage, mirror_only: bool = False, err_msg: Optional[str] = None): + nonlocal fetched + fetched = True + + @property # type: ignore + def save_filename_patch(stage): + return str(index_blob_path) + + monkeypatch.setattr(spack.stage.Stage, "fetch", fetch_patch) + monkeypatch.setattr(spack.stage.Stage, "save_filename", save_filename_patch) + + def get_did_fetch(): + # nonlocal fetched + return fetched + + return IndexInformation( + index_manifest, + index_json, + index_json_hash, + manifest_json_path, + index_blob_path, + "59bcc3ad6775562f845953cf01624225", + get_did_fetch, + ) + + +def test_etag_fetching_304(): + # Test conditional fetch with etags. If the remote hasn't modified the file + # it returns 304, which is an HTTPError in urllib-land. That should be + # handled as success, since it means the local cache is up-to-date. + def response_304(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith(INDEX_MANIFEST_FILE): + assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"' + raise urllib.error.HTTPError( + url, 304, "Not Modified", hdrs={}, fp=None # type: ignore[arg-type] + ) + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.EtagIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + etag="112a8bbc1b3f7f185621c1ee335f0502", + urlopen=response_304, + ) + + result = fetcher.conditional_fetch() + assert isinstance(result, bindist.FetchIndexResult) + assert result.fresh + + +def test_etag_fetching_200(mock_index): + # Test conditional fetch with etags. The remote has modified the file. 
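+ # With a 200 response carrying a new Etag, the fetcher is expected to
+ # follow the manifest to the index blob, so the result should not be
+ # fresh and the blob fetch should have happened.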
+ def response_200(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith(INDEX_MANIFEST_FILE): + assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"' + return urllib.response.addinfourl( + io.BytesIO(json.dumps(mock_index.manifest_contents).encode()), + headers={"Etag": f'"{mock_index.manifest_etag}"'}, # type: ignore[arg-type] + url=url, + code=200, + ) + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.EtagIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + etag="112a8bbc1b3f7f185621c1ee335f0502", + urlopen=response_200, + ) + + result = fetcher.conditional_fetch() + assert isinstance(result, bindist.FetchIndexResult) + assert not result.fresh + assert mock_index.fetched_blob() + assert result.etag == mock_index.manifest_etag + assert result.data == mock_index.index_contents + assert result.hash == mock_index.index_hash + + +def test_etag_fetching_404(): + # Test conditional fetch with etags. The remote has modified the file. + def response_404(request: urllib.request.Request): + raise urllib.error.HTTPError( + request.get_full_url(), + 404, + "Not found", + hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'}, # type: ignore[arg-type] + fp=None, + ) + + fetcher = bindist.EtagIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + etag="112a8bbc1b3f7f185621c1ee335f0502", + urlopen=response_404, + ) + + with pytest.raises(bindist.FetchIndexError): + fetcher.conditional_fetch() + + +def test_default_index_fetch_200(mock_index): + # We fetch the manifest and then the index blob if the hash is outdated + def urlopen(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith(INDEX_MANIFEST_FILE): + return urllib.response.addinfourl( # type: ignore[arg-type] + io.BytesIO(json.dumps(mock_index.manifest_contents).encode()), + headers={"Etag": f'"{mock_index.manifest_etag}"'}, # type: ignore[arg-type] + url=url, + code=200, + ) + + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.DefaultIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + local_hash="outdated", + urlopen=urlopen, + ) + + result = fetcher.conditional_fetch() + + assert isinstance(result, bindist.FetchIndexResult) + assert not result.fresh + assert mock_index.fetched_blob() + assert result.etag == mock_index.manifest_etag + assert result.data == mock_index.index_contents + assert result.hash == mock_index.index_hash + + +def test_default_index_404(): + # We get a fetch error if the index can't be fetched + def urlopen(request: urllib.request.Request): + raise urllib.error.HTTPError( + request.get_full_url(), + 404, + "Not found", + hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'}, # type: ignore[arg-type] + fp=None, + ) + + fetcher = bindist.DefaultIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + local_hash=None, + urlopen=urlopen, + ) + + with pytest.raises(bindist.FetchIndexError): + fetcher.conditional_fetch() + + +def test_default_index_not_modified(mock_index): + # We don't fetch the index blob if hash didn't change + def urlopen(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith(INDEX_MANIFEST_FILE): + return urllib.response.addinfourl( + 
io.BytesIO(json.dumps(mock_index.manifest_contents).encode()), + headers={}, # type: ignore[arg-type] + url=url, + code=200, + ) + + # No other request should be made. + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.DefaultIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + local_hash=mock_index.index_hash, + urlopen=urlopen, + ) + + assert fetcher.conditional_fetch().fresh + assert not mock_index.fetched_blob() diff --git a/lib/spack/spack/test/build_distribution.py b/lib/spack/spack/test/build_distribution.py index 647c794c645..8db7058a1d2 100644 --- a/lib/spack/spack/test/build_distribution.py +++ b/lib/spack/spack/test/build_distribution.py @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import os +import shutil import pytest @@ -37,12 +37,7 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p assert not skipped # Remove the tarball, which should cause push to push. - os.remove( - tmp_path - / bd.BUILD_CACHE_RELATIVE_PATH - / bd.tarball_directory_name(spec) - / bd.tarball_name(spec, ".spack") - ) + shutil.rmtree(tmp_path / bd.buildcache_relative_blobs_path()) with bd.make_uploader(mirror) as uploader: skipped = uploader.push_or_raise(specs) diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py index f350874485d..daeca3a4011 100644 --- a/lib/spack/spack/test/cmd/buildcache.py +++ b/lib/spack/spack/test/cmd/buildcache.py @@ -5,12 +5,16 @@ import errno import json import os +import pathlib import shutil from typing import List import pytest +from llnl.util.filesystem import copy_tree, find + import spack.binary_distribution +import spack.buildcache_migrate as migrate import spack.cmd.buildcache import spack.concretize import spack.environment as ev @@ -18,8 +22,16 @@ import spack.main import spack.mirrors.mirror import spack.spec -import spack.util.url +import spack.util.url as url_util from spack.installer import PackageInstaller +from spack.paths import test_path +from spack.url_buildcache import ( + BuildcacheComponent, + URLBuildcacheEntry, + URLBuildcacheEntryV2, + check_mirror_for_layout, + get_url_buildcache_class, +) buildcache = spack.main.SpackCommand("buildcache") install = spack.main.SpackCommand("install") @@ -74,20 +86,6 @@ def test_buildcache_list_allarch(database, mock_get_specs_multiarch, capsys): assert output.count("mpileaks") == 2 -def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir): - """ "Ensure that buildcache create creates output files""" - pkg = "trivial-install-test-package" - install(pkg) - - buildcache("push", "--unsigned", str(tmpdir), pkg) - - spec = spack.concretize.concretize_one(pkg) - tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack") - tarball = spack.binary_distribution.tarball_name(spec, ".spec.json") - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path)) - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball)) - - def tests_buildcache_create_env( install_mockery, mock_fetch, monkeypatch, tmpdir, mutable_mock_env_path ): @@ -102,10 +100,15 @@ def tests_buildcache_create_env( buildcache("push", "--unsigned", str(tmpdir)) spec = spack.concretize.concretize_one(pkg) - tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack") - tarball = spack.binary_distribution.tarball_name(spec, ".spec.json") - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", 
tarball_path)) - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball)) + + mirror_url = f"file://{tmpdir.strpath}" + + cache_class = get_url_buildcache_class( + layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + cache_entry = cache_class(mirror_url, spec, allow_unsigned=True) + assert cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + cache_entry.destroy() def test_buildcache_create_fails_on_noargs(tmpdir): @@ -159,12 +162,14 @@ def test_update_key_index( # it causes the index to get update. buildcache("update-index", "--keys", mirror_dir.strpath) - key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp")) + key_dir_list = os.listdir( + os.path.join(mirror_dir.strpath, spack.binary_distribution.buildcache_relative_keys_path()) + ) uninstall("-y", s.name) mirror("rm", "test-mirror") - assert "index.json" in key_dir_list + assert "keys.manifest.json" in key_dir_list def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch): @@ -180,10 +185,14 @@ def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch): # Install and generate build cache index PackageInstaller([s.package], fake=True, explicit=True).install() - metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json") + assert s.name is not None + manifest_file = URLBuildcacheEntry.get_manifest_filename(s) + specs_dirs = os.path.join( + *URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.SPEC), s.name + ) - assert not (mirror_dir / "build_cache" / metadata_file).exists() - assert (mirror_autopush_dir / "build_cache" / metadata_file).exists() + assert not (mirror_dir / specs_dirs / manifest_file).exists() + assert (mirror_autopush_dir / specs_dirs / manifest_file).exists() def test_buildcache_sync( @@ -205,7 +214,11 @@ def test_buildcache_sync( out_env_pkg = "libdwarf" def verify_mirror_contents(): - dest_list = os.listdir(os.path.join(dest_mirror_dir, "build_cache")) + dest_list = os.listdir( + os.path.join( + dest_mirror_dir, spack.binary_distribution.buildcache_relative_specs_path() + ) + ) found_pkg = False @@ -252,33 +265,15 @@ def verify_mirror_contents(): verify_mirror_contents() shutil.rmtree(dest_mirror_dir) + cache_class = get_url_buildcache_class( + layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + def manifest_insert(manifest, spec, dest_url): - manifest[spec.dag_hash()] = [ - { - "src": spack.util.url.join( - src_mirror_url, - spack.binary_distribution.build_cache_relative_path(), - spack.binary_distribution.tarball_name(spec, ".spec.json"), - ), - "dest": spack.util.url.join( - dest_url, - spack.binary_distribution.build_cache_relative_path(), - spack.binary_distribution.tarball_name(spec, ".spec.json"), - ), - }, - { - "src": spack.util.url.join( - src_mirror_url, - spack.binary_distribution.build_cache_relative_path(), - spack.binary_distribution.tarball_path_name(spec, ".spack"), - ), - "dest": spack.util.url.join( - dest_url, - spack.binary_distribution.build_cache_relative_path(), - spack.binary_distribution.tarball_path_name(spec, ".spack"), - ), - }, - ] + manifest[spec.dag_hash()] = { + "src": cache_class.get_manifest_url(spec, src_mirror_url), + "dest": cache_class.get_manifest_url(spec, dest_url), + } manifest_file = os.path.join(tmpdir.strpath, "manifest_dest.json") with open(manifest_file, "w", encoding="utf-8") as fd: @@ -298,9 +293,7 @@ def manifest_insert(manifest, spec, dest_url): with open(manifest_file, "w", encoding="utf-8") as 
fd: manifest = {} for spec in test_env.specs_by_hash.values(): - manifest_insert( - manifest, spec, spack.util.url.join(dest_mirror_url, "invalid_path") - ) + manifest_insert(manifest, spec, url_util.join(dest_mirror_url, "invalid_path")) json.dump(manifest, fd) # Trigger the warning @@ -327,11 +320,37 @@ def test_buildcache_create_install( buildcache("push", "--unsigned", str(tmpdir), pkg) + mirror_url = f"file://{tmpdir.strpath}" + spec = spack.concretize.concretize_one(pkg) - tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack") - tarball = spack.binary_distribution.tarball_name(spec, ".spec.json") - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path)) - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball)) + cache_class = get_url_buildcache_class( + layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + cache_entry = cache_class(mirror_url, spec, allow_unsigned=True) + assert spec.name is not None + manifest_path = os.path.join( + str(tmpdir), + *cache_class.get_relative_path_components(BuildcacheComponent.SPEC), + spec.name, + cache_class.get_manifest_filename(spec), + ) + + assert os.path.exists(manifest_path) + cache_entry.read_manifest() + spec_blob_record = cache_entry.get_blob_record(BuildcacheComponent.SPEC) + tarball_blob_record = cache_entry.get_blob_record(BuildcacheComponent.TARBALL) + + spec_blob_path = os.path.join( + tmpdir.strpath, *cache_class.get_blob_path_components(spec_blob_record) + ) + assert os.path.exists(spec_blob_path) + + tarball_blob_path = os.path.join( + tmpdir.strpath, *cache_class.get_blob_path_components(tarball_blob_record) + ) + assert os.path.exists(tarball_blob_path) + + cache_entry.destroy() @pytest.mark.parametrize( @@ -503,3 +522,230 @@ def test_push_without_build_deps(tmp_path, temporary_store, mock_packages, mutab "push", "--update-index", "--without-build-dependencies", "my-mirror", f"/{s.dag_hash()}" ) assert spack.binary_distribution.update_cache_and_get_specs() == [s] + + +@pytest.fixture(scope="function") +def v2_buildcache_layout(tmp_path): + def _layout(signedness: str = "signed"): + source_path = str(pathlib.Path(test_path) / "data" / "mirrors" / "v2_layout" / signedness) + test_mirror_path = tmp_path / "mirror" + copy_tree(source_path, test_mirror_path) + return test_mirror_path + + return _layout + + +def test_check_mirror_for_layout(v2_buildcache_layout, mutable_config, capsys): + """Check printed warning in the presence of v2 layout binary mirrors""" + test_mirror_path = v2_buildcache_layout("unsigned") + + check_mirror_for_layout(spack.mirrors.mirror.Mirror.from_local_path(str(test_mirror_path))) + err = str(capsys.readouterr()[1]) + assert all([word in err for word in ["Warning", "missing", "layout"]]) + + +def test_url_buildcache_entry_v2_exists( + capsys, v2_buildcache_layout, mock_packages, mutable_config +): + """Test existence check for v2 buildcache entries""" + test_mirror_path = v2_buildcache_layout("unsigned") + mirror_url = f"file://{test_mirror_path}" + mirror("add", "v2mirror", mirror_url) + + with capsys.disabled(): + output = buildcache("list", "-a", "-l") + + assert "Fetching an index from a v2 binary mirror layout" in output + assert "is deprecated" in output + + v2_cache_class = URLBuildcacheEntryV2 + + # If you don't give it a spec, it returns False + build_cache = v2_cache_class(mirror_url) + assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + + spec = 
spack.concretize.concretize_one("libdwarf")
+
+    # In v2 we have to ask for both components, since the spec is needed to locate the tarball
+    build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True)
+    assert not build_cache.exists([BuildcacheComponent.TARBALL])
+    assert not build_cache.exists([BuildcacheComponent.SPEC])
+    # But if we do ask for both, they should be there in this case
+    assert build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])
+
+    # Strip the leading "file://" to get filesystem paths
+    spec_path = build_cache._get_spec_url(spec, mirror_url, ext=".spec.json")[7:]
+    tarball_path = build_cache._get_tarball_url(spec, mirror_url)[7:]
+
+    os.remove(tarball_path)
+    build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True)
+    assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])
+
+    os.remove(spec_path)
+    build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True)
+    assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])
+
+
+@pytest.mark.parametrize("signing", ["unsigned", "signed"])
+def test_install_v2_layout(
+    signing,
+    capsys,
+    v2_buildcache_layout,
+    mock_packages,
+    mutable_config,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_gnupghome,
+    monkeypatch,
+):
+    """Ensure we can still install from signed and unsigned v2 buildcaches"""
+    test_mirror_path = v2_buildcache_layout(signing)
+    mirror("add", "my-mirror", str(test_mirror_path))
+
+    # Trust original signing key (no-op if this is the unsigned pass)
+    buildcache("keys", "--install", "--trust")
+
+    with capsys.disabled():
+        output = install("--fake", "--no-check-signature", "libdwarf")
+
+    assert "Extracting libelf" in output
+    assert "libelf: Successfully installed" in output
+    assert "Extracting libdwarf" in output
+    assert "libdwarf: Successfully installed" in output
+    assert "Installing a spec from a v2 binary mirror layout" in output
+    assert "is deprecated" in output
+
+
+def test_basic_migrate_unsigned(capsys, v2_buildcache_layout, mutable_config):
+    """Make sure the first unsigned migration results in a usable buildcache,
+    leaving the previous layout in place. Also test that a subsequent one
+    doesn't need to migrate anything, and that using --delete-existing
+    removes the previous layout"""
+
+    test_mirror_path = v2_buildcache_layout("unsigned")
+    mirror("add", "my-mirror", str(test_mirror_path))
+
+    with capsys.disabled():
+        output = buildcache("migrate", "--unsigned", "my-mirror")
+
+    # The output indicates all six specs were migrated
+    assert output.count("Successfully migrated") == 6
+
+    build_cache_path = str(test_mirror_path / "build_cache")
+
+    # Without "--delete-existing" and "--yes-to-all", migration leaves the
+    # previous layout in place
+    assert os.path.exists(build_cache_path)
+    assert os.path.isdir(build_cache_path)
+
+    # Now list the specs available under the new layout
+    with capsys.disabled():
+        output = buildcache("list", "--allarch")
+
+    assert "libdwarf" in output and "libelf" in output
+
+    with capsys.disabled():
+        output = buildcache(
+            "migrate", "--unsigned", "--delete-existing", "--yes-to-all", "my-mirror"
+        )
+
+    # A second migration of the same mirror indicates none of the specs
+    # need to be migrated
+    assert output.count("No need to migrate") == 6
+
+    # When we provide "--delete-existing" and "--yes-to-all", migration
+    # removes the old layout
+    assert not os.path.exists(build_cache_path)
+
+
+def test_basic_migrate_signed(
+    capsys, v2_buildcache_layout, monkeypatch, mock_gnupghome, mutable_config
+):
+    """Test that a signed migration requires a signing key, requires the public
+    key originally used to sign the pkgs, fails and prints reasonable messages
+    if those requirements are unmet, and eventually succeeds when they are met."""
+    test_mirror_path = v2_buildcache_layout("signed")
+    mirror("add", "my-mirror", str(test_mirror_path))
+
+    with pytest.raises(migrate.MigrationException) as error:
+        buildcache("migrate", "my-mirror")
+
+    # Without a signing key, spack fails and explains why
+    assert error.value.message == "Signed migration requires exactly one secret key in keychain"
+
+    # Create a signing key; the key originally used to sign the pkgs is not yet trusted
+    gpg("create", "New Test Signing Key", "noone@nowhere.org")
+
+    with capsys.disabled():
+        output = buildcache("migrate", "my-mirror")
+
+    # Without trusting the original signing key, spack fails with an explanation
+    assert "Failed to verify signature of libelf" in output
+    assert "Failed to verify signature of libdwarf" in output
+    assert "did you mean to perform an unsigned migration" in output
+
+    # Trust the original signing key (it is still published in the original
+    # v2 layout location)
+    with capsys.disabled():
+        output = buildcache("keys", "--install", "--trust")
+
+    with capsys.disabled():
+        output = buildcache("migrate", "my-mirror")
+
+    # Once we have the proper keys, migration should succeed
+    assert "Successfully migrated libelf" in output
+    assert "Successfully migrated libdwarf" in output
+
+    # Now list the specs available under the new layout
+    with capsys.disabled():
+        output = buildcache("list", "--allarch")
+
+    assert "libdwarf" in output and "libelf" in output
+
+
+def test_unsigned_migrate_of_signed_mirror(capsys, v2_buildcache_layout, mutable_config):
+    """Test that spack can do an unsigned migration of a signed buildcache by
+    ignoring signatures and skipping re-signing."""
+
+    test_mirror_path = v2_buildcache_layout("signed")
+    mirror("add", "my-mirror", str(test_mirror_path))
+
+    with capsys.disabled():
+        output = buildcache(
+            "migrate", "--unsigned", "--delete-existing", "--yes-to-all", "my-mirror"
+        )
+
+    # Now list the specs available under the new layout
+    with capsys.disabled():
+        output = buildcache("list", "--allarch")
+
+    assert "libdwarf" in output and "libelf" in output
+
+    # We should find six spec manifest files, one for each migrated spec
+    file_list = find(test_mirror_path, "*.spec.manifest.json")
+    assert len(file_list) == 6
+    assert any(["libdwarf" in file for file in file_list])
+    assert any(["libelf" in file for file in file_list])
+
+    # The spec manifest files should be unsigned, i.e. plain JSON (a
+    # clearsigned manifest would fail to parse here)
+    for file_path in file_list:
+        with open(file_path, "r", encoding="utf-8") as fd:
+            assert json.load(fd)
+
+
+def test_migrate_requires_index(capsys, v2_buildcache_layout, mutable_config):
+    """Test that spack fails with a reasonable error message when the mirror
+    does not have an index"""
+
+    test_mirror_path = v2_buildcache_layout("unsigned")
+    v2_index_path = test_mirror_path / "build_cache" / "index.json"
+    v2_index_hash_path = test_mirror_path / "build_cache" / "index.json.hash"
+    os.remove(str(v2_index_path))
+    os.remove(str(v2_index_hash_path))
+
+    mirror("add", "my-mirror", str(test_mirror_path))
+
+    with pytest.raises(migrate.MigrationException) as error:
+        buildcache("migrate", "--unsigned", "my-mirror")
+
+    # If the buildcache has no index, spack fails and explains why
+    assert error.value.message == "Buildcache migration requires a buildcache index"
diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py
index 9f4cf680aca..07c3d7bdf04 100644
--- a/lib/spack/spack/test/cmd/ci.py
+++ b/lib/spack/spack/test/cmd/ci.py
@@ -31,11 +31,8 @@
 from spack.ci.common import PipelineDag, PipelineOptions, SpackCIConfig
 from spack.ci.generator_registry import generator
 from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
-from spack.database import INDEX_JSON_FILE
 from spack.error import SpackError
-from spack.schema.buildcache_spec import schema as specfile_schema
 from spack.schema.database_index import schema as db_idx_schema
-from spack.spec import Spec
 from spack.test.conftest import MockHTTPResponse
 
 config_cmd = spack.main.SpackCommand("config")
@@ -718,7 +715,7 @@
     )
 
     install_cmd("archive-files")
-    buildcache_cmd("push", "-f", "-u", mirror_url, "archive-files")
+    buildcache_cmd("push", "-f", "-u", "--update-index", mirror_url, "archive-files")
 
     with working_dir(tmp_path):
         env_cmd("create", "test", "./spack.yaml")
@@ -855,18 +852,18 @@
 
     # Test generating buildcache index while we have bin mirror
     buildcache_cmd("update-index", mirror_url)
-    with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as idx_fd:
-        index_object = json.load(idx_fd)
-        jsonschema.validate(index_object, db_idx_schema)
+
+    # Validate resulting buildcache (database) index
+    layout_version = spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION
+    url_and_version = spack.binary_distribution.MirrorURLAndVersion(
+        mirror_url, layout_version
+    )
+    index_fetcher = spack.binary_distribution.DefaultIndexFetcher(url_and_version, None)
+    result = index_fetcher.conditional_fetch()
+    jsonschema.validate(json.loads(result.data), db_idx_schema)
 
     # Now that index is regenerated, validate "buildcache list" output
     assert "patchelf" in buildcache_cmd("list", output=str)
-    # Also test buildcache_spec schema
-    for file_name in os.listdir(mirror_dir / "build_cache"):
-        if file_name.endswith(".spec.json.sig"):
-            with open(mirror_dir / "build_cache" / file_name, encoding="utf-8") as f:
-                spec_dict = Spec.extract_json_from_clearsig(f.read())
-                
jsonschema.validate(spec_dict, specfile_schema) logs_dir = scratch / "logs_dir" logs_dir.mkdir() @@ -1032,7 +1029,7 @@ def test_ci_generate_override_runner_attrs( def test_ci_rebuild_index( - tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_fetch + tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_fetch, capsys ): scratch = tmp_path / "working_dir" mirror_dir = scratch / "mirror" @@ -1069,8 +1066,9 @@ def test_ci_rebuild_index( buildcache_cmd("push", "-u", "-f", mirror_url, "callpath") ci_cmd("rebuild-index") - with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as f: - jsonschema.validate(json.load(f), db_idx_schema) + with capsys.disabled(): + output = buildcache_cmd("list", "--allarch") + assert "callpath" in output def test_ci_get_stack_changed(mock_git_repo, monkeypatch): diff --git a/lib/spack/spack/test/cmd/gpg.py b/lib/spack/spack/test/cmd/gpg.py index e83602d2753..dd86a56e8b6 100644 --- a/lib/spack/spack/test/cmd/gpg.py +++ b/lib/spack/spack/test/cmd/gpg.py @@ -8,6 +8,7 @@ import llnl.util.filesystem as fs +import spack.binary_distribution as bindist import spack.util.executable import spack.util.gpg from spack.main import SpackCommand @@ -172,23 +173,25 @@ def test_gpg(tmpdir, mutable_config, mock_gnupghome): # Verification should now succeed again. gpg("verify", str(test_path)) + relative_keys_path = bindist.buildcache_relative_keys_path() + # Publish the keys using a directory path test_path = tmpdir.join("dir_cache") - os.makedirs("%s" % test_path) + os.makedirs(f"{test_path}") gpg("publish", "--rebuild-index", "-d", str(test_path)) - assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path) + assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json") # Publish the keys using a mirror url test_path = tmpdir.join("url_cache") - os.makedirs("%s" % test_path) - test_url = "file://%s" % test_path + os.makedirs(f"{test_path}") + test_url = f"file://{test_path}" gpg("publish", "--rebuild-index", "--mirror-url", test_url) - assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path) + assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json") # Publish the keys using a mirror name test_path = tmpdir.join("named_cache") - os.makedirs("%s" % test_path) - mirror_url = "file://%s" % test_path + os.makedirs(f"{test_path}") + mirror_url = f"file://{test_path}" mirror("add", "gpg", mirror_url) gpg("publish", "--rebuild-index", "-m", "gpg") - assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path) + assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json") diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py index a9ede4df73d..cd1fe699986 100644 --- a/lib/spack/spack/test/cmd/mirror.py +++ b/lib/spack/spack/test/cmd/mirror.py @@ -6,6 +6,7 @@ import pytest +import spack.binary_distribution as bindist import spack.cmd.mirror import spack.concretize import spack.config @@ -365,8 +366,10 @@ def test_mirror_destroy( install("--fake", "--no-cache", spec_name) buildcache("push", "-u", "-f", mirror_dir.strpath, spec_name) + blobs_path = bindist.buildcache_relative_blobs_path() + contents = os.listdir(mirror_dir.strpath) - assert "build_cache" in contents + assert blobs_path in contents # Destroy mirror by name mirror("destroy", "-m", "atest") @@ -376,7 +379,7 @@ def test_mirror_destroy( buildcache("push", "-u", "-f", mirror_dir.strpath, spec_name) contents = os.listdir(mirror_dir.strpath) - 
assert "build_cache" in contents + assert blobs_path in contents # Destroy mirror by url mirror("destroy", "--mirror-url", mirror_url) diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index cb7d3f082b1..b5b8e4d9825 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -1068,9 +1068,7 @@ def install_mockery(temporary_store: spack.store.Store, mutable_config, mock_pac @pytest.fixture(scope="module") def temporary_mirror_dir(tmpdir_factory): dir = tmpdir_factory.mktemp("mirror") - dir.ensure("build_cache", dir=True) yield str(dir) - dir.join("build_cache").remove() @pytest.fixture(scope="function") @@ -1084,9 +1082,7 @@ def temporary_mirror(temporary_mirror_dir): @pytest.fixture(scope="function") def mutable_temporary_mirror_dir(tmpdir_factory): dir = tmpdir_factory.mktemp("mirror") - dir.ensure("build_cache", dir=True) yield str(dir) - dir.join("build_cache").remove() @pytest.fixture(scope="function") diff --git a/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json b/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json deleted file mode 100644 index 8aae45be93f..00000000000 --- a/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "spec": { - "_meta": { - "version": 3 - }, - "nodes": [ - { - "name": "archive-files", - "version": "2.0", - "arch": { - "platform": "test", - "platform_os": "debian6", - "target": { - "name": "core2", - "vendor": "GenuineIntel", - "features": [ - "mmx", - "sse", - "sse2", - "ssse3" - ], - "generation": 0, - "parents": [ - "nocona" - ] - } - }, - "compiler": { - "name": "gcc", - "version": "4.5.0" - }, - "namespace": "builtin.mock", - "parameters": { - "cflags": [], - "cppflags": [], - "cxxflags": [], - "fflags": [], - "ldflags": [], - "ldlibs": [] - }, - "package_hash": "ncv2pr4o2yemepsa4h7u4p4dsgieul5fxvh6s5am5fsb65ebugaa====", - "hash": "l3vdiqvbobmspwyb4q2b62fz6nitd4hk" - } - ] - }, - "binary_cache_checksum": { - "hash_algorithm": "sha256", - "hash": "c226b51d88876746efd6f9737cc6dfdd349870b6c0b9c045d9bad0f2764a40b9" - }, - "buildinfo": { - "relative_prefix": "test-debian6-core2/gcc-4.5.0/archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk", - "relative_rpaths": false - } -} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2/gcc-4.5.0/archive-files-2.0/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spack b/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2/gcc-4.5.0/archive-files-2.0/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spack deleted file mode 100644 index 047d9b2cb7fcdfc01451c8dd63213b9246c3e4c0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10240 zcmeHM2Ut_tw#LH9*auJu#n2U{r4T?+RFEb}6A?r(sGl)6!o@~;P^8r z5`l_CIyfq@i%G%$%(o$MN&|&La%vo=qRg)E-@iF56&0BBxAl);?+EPr|8Ljiw{HCL z)93o9V98YKAB!we{~4bX3444F-qv?_e6gsXwW3vn7tf0_v^LjDIXcvJ zbf}SN723}qy^xxwVIvM)8s7AXS5VVwp;AdB|G5k`bc^=Ps_Aj`CUoyJzw61gfk=&8 z1+U(|txxQa3eg@7wpSYs-rHkrWva2j)z!DjFSUa(yF;yk+AXCPp+CrA>Mc%rJ8Vq4 zF+J_FgYjyEG>H+0a~RcowivXK7AL-_FfsI|7Bgg!5{2@)Epr`(3E^r0=n!N$O 
z1O=>y!FA`s^WgDu&2qKLj6_?0P&&|_!{=P$BCA=FttjGEcW7ItmR8FZP(oM~)WDaq zG&ATE;Nb~ywuP8%Rn_wVKav$#Z9Z){G&lu0h|S%XR24qtAgMjnySm_A-};kaA6DZaJ#NIDn}X7LQr6RH9OLJ~%^TN)7IlJ-e3Lcf?Q`@A-c@uOI}Ej>HYK!e z<22|6z_Y@vEqBV2Yo)M}ks-9dqTrCkZ7toC&fe|}yMdYQec;jY*f0oW9Lk^-fO(4` zju}=^yr5VBZXc)Jhxjp71qnAC9gD-0!%IWs#Py407-jK$vRJXVrPEmg;xPQv*&cO)%k-ht#3mICt)D|4vlomD-u)w5T8@(<{ zyqw}#8g{<-De+x|k9tWV@4_NYv38fDSJGR^G=Jq&@=2Wna1}UqtCv>yyxdPjJ?z6vDMRW)TbnyWY@LhoTCSVn9#d<5pmC@j@(fj;6~W@(l50=;Ur+#Liz-*ZA@s zc=|XVi5ijB7fbUoo__YEMy6c)=NRE9hOl5+iEeSTUWmpyNSkD9A)QC+dR#NIx<-;p zH-TPFx~8iqbY0L29}Y_zPY*ce%cmpfy;e{*=>ATEt)Tbhy2Xo5s|LE>c%$pgCf!vL zZIJquw+|ZH11gT8O4jn1MY%8_lIBlSwdqz)>FPv87X*{HlRsY3)zej&^h)ZNpVBBZ=ya=0n9<%R znW1;7RDgTbk8DtXs_RNJvh_rEtKu{~51+NG!{%dJALFHr$K;KXwY*a5Ep}4jo|U{C zoGWxf`&wQI9JF3MGsA4p$BNe@^c{cV-{q=^x+4|Waq3WN^6uDIE*SMz2fzB5U+H>Y zS?zf{lu?`RBk4he(Ee)}NT@o`K+sdBEueqp(88e?c$7f|B1~?|1BkOUP-%92(8)KS z+~St0SntT(wYQjJ35hE?)HAu2pAQ#+u{qLzV4+4^LCQyGSpVHclT$eExL8Mx~0r*2~h-UM*EPpE+=}lJ5h6NE?4?rPmZj#g*B2<^P?kag7PAp-Cx>0$xFZA8f}nl zLlp2k$V$JJ+F4O;HjrfKc^I9wE9~5GXWwF;Q=(b$>9;j?<}>dSkA&Gh ze?I%Fr=~;mQHpzyK~i1tQ0Amjp8TuE1Bpk6{SPF!biqqryT!Jej$G1e$jOXt%xlkn z;5KQ$(TY(?kLn!Vm8-?KuuDuvW%d5q9oH=-K%L3ggPxRQA7@4DuBbESxBkRZM`$R!_j`*S0l2 zy**tbZ~K6>c?w8f))-*{Wi&1Uo&X3M;O{?Sgt6VS(Thhv>3adJ`<8|J0IybAI%g6N_`xg(%r zIXntnIk$;(bs2r2b*H}d@5-;L6nUq)d|mP>1WO)Xg1)3pqSvol7G#T#k89@aB%WsU zNkd-Kyj14sE-QyY)+X*ytTz>$9kYsA+ggy1V02EhVnaMV-N4fxV1C&6x)9}5-Q$sM z+5BaXy@G~=bV5QNq2NcfLtGSF%t?x=Czq0?5^SL#%A4o z6Qeiak@ikfSVi2~gvhPwqC$Q9`SEvc>I?=i9R?hyZR0&W>+fE>VS=qo)G4<>w#VJ{ z76;%?nZ_&i#(*`0EN8g^aU{ZVY(~PAeR3%AzcZ%LxR_DbVt};#B(l*Ou zRbH^c6bhRS3>$eJCE@QJYwoMwnEBf5!6Uo;H~ss?f!c>^Mjl>>A|1j~vxYj8umbhmMej~$k-aEtTjp=B- zK(X8rrSr_oB~a0Xo2&E#PfDJ~3kM^LJ@;NIIeWk}iYlcty=!`LqQdiTS?tZw5c$wB z59PqrsKDKwE7guXx|~Z1cYNZ93p4CE6*vM(w9| zQGI5UIpTSAMkBt6`PgQs*K@fdW&6SQGGKpfXzKc-cb&*du{5iv^z)99V^UPcKwVuT ze+<|)4`nEggSIn^<@Iq-rWsK27og%%t*wmwbF1F~FB5Mayp>>A3vm+}t;6oS5WD+4 z`}asb24rqPob2{tf`LZ4F=S90MFIhKI<+5C)dUr{)LuaB1kb;PWPyAYiwdjNS69B^ zKFu|(*+hE`Zf3uco^X$Eq=RoN78HFiOwl@SFzr6d|H|fZl3a^xF`uZ|#P+40<$r*| zfQH{LVC6#=$EM|*yuXNmA3c$|%`{mSgN{%FLHA)=&C_=)IKFL~fv#)*0+o^^(J+%?aYWKtenDG=^NFj25Y!moGi)P2%YrDDDR46HLiDy`DuR!<4w z?zx3vD(+8Fz}EieY^Ell-)bT)^I@-SBRqTaJ(uIx;i;8^4_`me(W14J5-#?YY( zfnb^Xvr)JY)HREZh!f&aGkIS=#H%-Qy8d&g^rNkX<{x$3^RT`%AJVclfR-a0h_lZ# zYFZPt86!O~$gffi8cib0YHiFK1Ddra#pP37g?o8CHD8^zk?FY28&c7(=>eadh|+< z?u_tY$Ayzy4~Zi6689&%c*YF(Z`Y4hihbF9>R4RmeCZu4x;@laUz-LH_%ar$8Y-lBt1);irj12+^O7l)MZ@B8buKKYe^Fr**kcc6|5j;GUtydE9%}{ze-VTX zb6yLe<^_$)vjJea9!1vA61+;u|rGVbU4(OS6^E@g>(q=#`c zdtO2fM1Q?;dNM;30qrS_*4R% z(3p0^4S3%UXz%{HJ{@&WL&HfALcTK;fBoki)W-qGG5IDls*)zX-DFIya;xF!FGhhq zJBp$;_~g*thDc;x$wVtlZ(L* zmP}3d{+TyNn~dnAvG_GXpT#w-PHT|-B7gX>7Pf3k^9kxAxMs!5)jXTU_MPWiQFQ1k zdylGb3+5XCx6Tz5ew_dQIRE`|{`+s$&i{u=|I?>_6lVadaEd}EeFYBZT=XBD|H6?- z_~&EO{s$^|2}i=$Q*AZK6Nv`m0@r4_5k%ez3jQ49bIaDd^2S!a1xH}%F(F{ zklY9)K%YbfoW7UB18Az7>o-VnoaPJ(BGKbJaW;`c0sfE(|05~|KuCPTcUk9&-%(sL zVGoc1S2T5bft0>uI-^|y67}mvT2{h|oQx%t(BJ9)%F?+n_n+2#$r_eSbY^#hIG`-2 z%d);NII^2Z-$e0M@<=Q-m3}w)wOY-uN?xX)-WFXjohe@m!KU%0{d$KEm>abLG z6bCe18TH8piO=S-a5xI148y6as;Zz=kSG9;L*dm_5Gq(K3Wvwx5J)vu6(tM`tAtU* zDj}6|Y8W(52@h96A<;-SjpN=YhuCY5Baraq-<@y;*j3MNKiWCF0(gQ42fF|Kwo?4k ztyKJGulX-^D?f+x#geNtn(Dw=N<7+$0&tvLd+smWdVeVXfxr(0ejxAzfgcF`Z3z4a D{oFJ8 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/A98A04B882E19D85FD36EE069565D80B055C92FF.pub b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/A98A04B882E19D85FD36EE069565D80B055C92FF.pub new file mode 100644 index 00000000000..fc85a4b3113 --- 
/dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/A98A04B882E19D85FD36EE069565D80B055C92FF.pub @@ -0,0 +1,29 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBGf23+EBEAC6UqaiE43cF9jFuVjA8xJ5j31BMhufpnk0cwoE5Iks/GgR/Hki +LMYbzy36V7TZGObel+5DtFKipX+WCwWj2XsjbeqHeuCkxZhzHFwfi1UJl9FO2T28 +iNn6OsBiGeU6ULNmehSia2hx0uhj1re/FUwJExOAvuYv8nc7M+nozqi7Pp/WjP8v +UTiqP2onzZJbidlSBvmZ2nheWk7G78e617gcV/ye+UyXZvciiF2UQBg9YV6D8JuD +YhBbNAVOzJOiyOdTBmZmOkmYsGx58sEbFVqGeOMB0xoxZrqKjMm9NhvjqjJF/sWs +hN/PD5ylW1UR05/fGxlG2GLKKfBInbdqnC101OFWXP5HenYHmKaBJoCKCAUfsoJ0 +r/t/GVh3z3w/99p0TRDONnTecKm5S9z3/5QjjE5RsWcd4ll7mRikUiVpe1WhKRwT +4T76pQLq3XwNJqiOmuMQuSHoBE9OMufvRFiTYC0QHyLoCV2H5PCWtS2xSsIDN4PB +0RNd0hnHKanVV7d2TkIrGOagoAo0wXqyW/Op6KUG1NdaFYYziDFEHeZxfGoPKytO +iS5PEwZG2FqambAZhJU5OXwzgnCRIoE5DCZad4YS6U5YD/2zg+RrQ/5GUxl5Cc+W +Zwesn9FV5jywx/oFePYbTSNQVPQ6jbUDvhmHvZ8c/OfGOVXQr0VpvfIwdwARAQAB +tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv +ZHlAbm93aGVyZS5jb20+iQJRBBMBCAA7FiEEqYoEuILhnYX9Nu4GlWXYCwVckv8F +Amf23+ECGwMFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQlWXYCwVckv9i +pg//eGjBR9ph9hUYRsekzKWM1xB5zFOFfNoqlpCut/W7LAfy0XXkFy/y6EvPdcgn +lLWRWPsOFfsKGwZd7LgSovhEMQ2MRsAUUB/KNZx7s6vO/P773PmJspF3odQ/lcrM +1fum2lShChWqimdBdNLrXxG+8duO9uWaMBIp28diBCyB25M/MqpHtKYu00FB/QJ6 +ZwQH4OsgXVQHRjyrtIGx/2FQoWt0ah3eJMJCEw46GgkgiojtoTfXQQc4fIJP324b +O1sxz5lx3xVBG/EZYzyV3xnSoG9aZNJ1cJq8EKO7ZoNKc/8jwkVu5gewGaXYI0LK +/WkOeiXcSHPMSdu7TpnitvLYFCjc9YAEKQnjooXdt7+BElwC3+5hZJNXEnoGPMzn +3UL60sQE/ViCsGcW+l9rtzXPNTmLMjEg4rGRqOhX+UmwyhvGD2QYbZtXlayu5xn+ +5m/PfmdqgL1xsdvNsLo/BOo+6kizMdBk48Xfp0YM8AC4BzUEENypGzC4T0WYF0k1 +Jfc6/eSwiytIcIkJ42GlaVfEFE8UxfYc1/2zqTBN9EdzWJqy0Bh+mVOgOaeb0Dzi +xWpUpChi1fBB3PXWJ5iAS/w0HSVn4G5/JAIEFAs7r6ju2YtKBfuk+u/K5Q28mo7W +6LrZQywN44nBMTvSQUhhXpSNYG+juyotXJUJ3F2u9Cf/jVU= +=TkbL +-----END PGP PUBLIC KEY BLOCK----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/E89D4971F0097B1E7A3EB57371B484802E78D7CD.pub b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/E89D4971F0097B1E7A3EB57371B484802E78D7CD.pub new file mode 100644 index 00000000000..46726ccbc8f --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/E89D4971F0097B1E7A3EB57371B484802E78D7CD.pub @@ -0,0 +1,29 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBGfHlp4BEAC5wkZSHqF9z6GcymuHpk1m9aNXCJdt4ZWvE8ck8GcuVu1nbzlZ +h959jqtwk7nFMki5YaNMz6jcQf0eeS75viL4CoPAqFiVyhyCCh5am75h9F7vTBq6 +190017lhu9IgkAkiklnjfDbyXH+BwqJ78nXp6e6R4ShFMHNGGvYLem1wmPKzqPlZ +zN0yjc0+d5pw4hu+IEFrM63yqGp2BVX1X132IKUEcROCQt1QOma5oORhYEtSCieX +PuhuHJOA7q6nJuFccPCs5OcDS4IbQgGAbWL4L1+LAGVLVGpK4IVtqEZ831Srclh8 +0ruyFFeV/hqOONThwwile0Jwh5Jz/2sYxT5c+nlumXWK+CXTm4OCfGt1UuGy6c6u +Rz84PHfanbKnATp6RUjz4DMREkmA6qBnUFqGLLGaBKBsm42b7kbo7m5aeItuOwLE +U7AcnBEqqHLfI7O1zrHKjQCxhEWP/iok0kgEdiJ4tlPhfDjQRG6thlmZnVdt/08V ++bvVkbYZyWPzjbG3QHyFew1+uzPHb2UopgpByVKYEWhCgNfcFtE56lEI9c40Ba5o +LaZl0VlgfSLP4c+LoFB6gZp1gcVQuPo1JKd1v5WP60f1iHhazL5LEeMYcW6kvujK +58Q683gSH5DsVAnxaj1uU4nvtKDh8IF1CNKKXk8RVsltdpv9bGhV8b4qVQARAQAB +tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv +ZHlAbm93aGVyZS5jb20+iQJOBBMBCgA4FiEE6J1JcfAJex56PrVzcbSEgC54180F +AmfHlp4CGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQcbSEgC54180aDg// +f7GqIW5LzYqIqkey+IjdkSSfeD47tlWc2ukKYStHu0gTlHhrUp4rHNJ/s8XQ1o6o +jwzWfNMYh68wt9sjuM2BEkkh3RUFEjVqqW+k562gS5ibfKTDtJb2Yj0n/CQKWvoi +vUUzO88xW0AnZFieP+vD5iI5Zw4H2dY8cH4X1XlWAJufFdH4WBaZjujNwNOcCsnd +w2nE050wKTR2wroWq0HKn1Ni3QNtKWPpLoHGAlhW6ACLa+EFqxHU6D3KhW6IV4Jc 
+sdt36nHNiRiy6nT99asqtN6Z0Yw+EnQSuIDosIbmSgZoieINh0gU6AKwgydxLUxL +Cu1w2fZHGuFR/ym0c/tTpM893DxHMc/EZ/SpU8fXkC9lYnQO3or/Y0mLHd0kSEv7 +XoonvcOu1tOQzmvrvUQUtTn4+6OKpGViyZG5C8Lbk8/yKWFv5b+Gpss/EiGTHSsk +bPTHf5jMsWElv0GgFq2TpybtIcY52yJoZ1fBMEA9Nk76Y/MNFlN0d7HyS6tWGr6E +8FWJB7RYG5XHMEDIKSheq+Q5cORwz92JPFI+sovZukp+20G7f7/gwos441KamJPc +y1+M4uO21aKX2fA07bcgFtm25gNLoHyvjQLcmyDis6xogvciCV3iQ/mtunewgYp/ +lUX1dv0R5o8TteaAIkbJicbdLtur/iuAWN404E/QShc= +=8P00 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/index.json b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/index.json new file mode 100644 index 00000000000..4e0cf4995e7 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/index.json @@ -0,0 +1 @@ +{"keys":{"A98A04B882E19D85FD36EE069565D80B055C92FF":{},"E89D4971F0097B1E7A3EB57371B484802E78D7CD":{}}} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json new file mode 100644 index 00000000000..3f64de63c54 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json @@ -0,0 +1 @@ +{"database":{"version":"8","installs":{"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez":{"spec":{"name":"libelf","version":"0.8.13","arch":{"platform":"test","platform_os":"debian6","target":{"name":"core2","vendor":"GenuineIntel","features":["mmx","sse","sse2","ssse3"],"generation":0,"parents":["nocona"],"cpupart":""}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====","annotations":{"original_specfile_version":4,"compiler":"gcc@=10.2.1"},"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez"},"ref_count":1,"in_buildcache":true},"sk2gqqz4n5njmvktycnd25wq25jxiqkr":{"spec":{"name":"libdwarf","version":"20130729","arch":{"platform":"test","platform_os":"debian6","target":{"name":"core2","vendor":"GenuineIntel","features":["mmx","sse","sse2","ssse3"],"generation":0,"parents":["nocona"],"cpupart":""}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====","dependencies":[{"name":"libelf","hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez","parameters":{"deptypes":["build","link"],"virtuals":[]}}],"annotations":{"original_specfile_version":4,"compiler":"gcc@=10.2.1"},"hash":"sk2gqqz4n5njmvktycnd25wq25jxiqkr"},"ref_count":0,"in_buildcache":true},"qeehcxyvluwnihsc2qxstmpomtxo3lrc":{"spec":{"name":"compiler-wrapper","version":"1.0","arch":{"platform":"test","platform_os":"debian6","target":{"name":"m1","vendor":"Apple","features":["aes","asimd","asimddp","asimdfhm","asimdhp","asimdrdm","atomics","cpuid","crc32","dcpodp","dcpop","dit","evtstrm","fcma","flagm","flagm2","fp","fphp","frint","ilrcpc","jscvt","lrcpc","paca","pacg","pmull","sb","sha1","sha2","sha3","sha512","ssbs","uscat"],"generation":0,"parents":["armv8.4a"],"cpupart":"0x022"}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====","annotations":{"original_specfile_version":5},"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc"},"ref_count":2,"i
n_buildcache":true},"vd7v4ssgnoqdplgxyig3orum67n4vmhq":{"spec":{"name":"gcc","version":"10.2.1","arch":{"platform":"test","platform_os":"debian6","target":"aarch64"},"namespace":"builtin.mock","parameters":{"build_system":"generic","languages":["c","c++","fortran"],"cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"external":{"path":"/path","module":null,"extra_attributes":{"compilers":{"c":"/path/bin/gcc-10","cxx":"/path/bin/g++-10","fortran":"/path/bin/gfortran-10"}}},"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====","annotations":{"original_specfile_version":5},"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq"},"ref_count":3,"in_buildcache":false},"izgzpzeljwairalfjm3k6fntbb64nt6n":{"spec":{"name":"gcc-runtime","version":"10.2.1","arch":{"platform":"test","platform_os":"debian6","target":{"name":"m1","vendor":"Apple","features":["aes","asimd","asimddp","asimdfhm","asimdhp","asimdrdm","atomics","cpuid","crc32","dcpodp","dcpop","dit","evtstrm","fcma","flagm","flagm2","fp","fphp","frint","ilrcpc","jscvt","lrcpc","paca","pacg","pmull","sb","sha1","sha2","sha3","sha512","ssbs","uscat"],"generation":0,"parents":["armv8.4a"],"cpupart":"0x022"}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====","dependencies":[{"name":"gcc","hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq","parameters":{"deptypes":["build"],"virtuals":[]}}],"annotations":{"original_specfile_version":5},"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n"},"ref_count":2,"in_buildcache":true},"jr3yipyxyjulcdvckwwwjrrumis7glpa":{"spec":{"name":"libelf","version":"0.8.13","arch":{"platform":"test","platform_os":"debian6","target":{"name":"m1","vendor":"Apple","features":["aes","asimd","asimddp","asimdfhm","asimdhp","asimdrdm","atomics","cpuid","crc32","dcpodp","dcpop","dit","evtstrm","fcma","flagm","flagm2","fp","fphp","frint","ilrcpc","jscvt","lrcpc","paca","pacg","pmull","sb","sha1","sha2","sha3","sha512","ssbs","uscat"],"generation":0,"parents":["armv8.4a"],"cpupart":"0x022"}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====","dependencies":[{"name":"compiler-wrapper","hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc","parameters":{"deptypes":["build"],"virtuals":[]}},{"name":"gcc","hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq","parameters":{"deptypes":["build"],"virtuals":["c"]}},{"name":"gcc-runtime","hash":"izgzpzeljwairalfjm3k6fntbb64nt6n","parameters":{"deptypes":["link"],"virtuals":[]}}],"annotations":{"original_specfile_version":5},"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa"},"ref_count":1,"in_buildcache":true},"u5uz3dcch5if4eve4sef67o2rf2lbfgh":{"spec":{"name":"libdwarf","version":"20130729","arch":{"platform":"test","platform_os":"debian6","target":{"name":"m1","vendor":"Apple","features":["aes","asimd","asimddp","asimdfhm","asimdhp","asimdrdm","atomics","cpuid","crc32","dcpodp","dcpop","dit","evtstrm","fcma","flagm","flagm2","fp","fphp","frint","ilrcpc","jscvt","lrcpc","paca","pacg","pmull","sb","sha1","sha2","sha3","sha512","ssbs","uscat"],"generation":0,"parents":["armv8.4a"],"cpupart":"0x022"}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"n7axrpelzl5kjuctt4yoaaf33gv
gnik6cx7fjudwhc6hvywdrr4q====","dependencies":[{"name":"compiler-wrapper","hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc","parameters":{"deptypes":["build"],"virtuals":[]}},{"name":"gcc","hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq","parameters":{"deptypes":["build"],"virtuals":["c","cxx"]}},{"name":"gcc-runtime","hash":"izgzpzeljwairalfjm3k6fntbb64nt6n","parameters":{"deptypes":["link"],"virtuals":[]}},{"name":"libelf","hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa","parameters":{"deptypes":["build","link"],"virtuals":[]}}],"annotations":{"original_specfile_version":5},"hash":"u5uz3dcch5if4eve4sef67o2rf2lbfgh"},"ref_count":0,"in_buildcache":true}}}} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json.hash b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json.hash new file mode 100644 index 00000000000..7738b6bddf1 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json.hash @@ -0,0 +1 @@ +7f94d6038bb4e5e7fff817151da5b22d7dd6d1e2d9ad51bd55504676786c17bd \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig new file mode 100644 index 00000000000..8a63bf498a6 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig @@ -0,0 +1,124 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + +{ +"spec":{ +"_meta":{ +"version":4 +}, +"nodes":[ +{ +"name":"libdwarf", +"version":"20130729", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"core2", +"vendor":"GenuineIntel", +"features":[ +"mmx", +"sse", +"sse2", +"ssse3" +], +"generation":0, +"parents":[ +"nocona" +], +"cpupart":"" +} +}, +"compiler":{ +"name":"gcc", +"version":"10.2.1" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====", +"dependencies":[ +{ +"name":"libelf", +"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez", +"parameters":{ +"deptypes":[ +"build", +"link" +], +"virtuals":[] +} +} +], +"hash":"sk2gqqz4n5njmvktycnd25wq25jxiqkr" +}, +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"core2", +"vendor":"GenuineIntel", +"features":[ +"mmx", +"sse", +"sse2", +"ssse3" +], +"generation":0, +"parents":[ +"nocona" +], +"cpupart":"" +} +}, +"compiler":{ +"name":"gcc", +"version":"10.2.1" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"811f500a89ae7d2f61e2c0ef6f56e352dfbac245ae88275809088a1481489d5b" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCgAdFiEE6J1JcfAJex56PrVzcbSEgC54180FAmfHlp8ACgkQcbSEgC54 +180hlxAAisLofFhr/PQvLcQ79T3t3V0tqGgz9x6QnPKfbPCgvb66tTNlny+ML0fY 
+y1H9xXQO53QOxfN9cdXcf2EVbRQ2eT6ltmwekI3ZZuCaTguflNu/i11UV6UnDy3x +dXOYQhky5QjtPbhJ0NxG5XDKoRFoUPR/rgXsiNG5O0sk3M5H9ldpsj8af5W/6LCL +gCTNM8fF0TVbd4MF9TiIECFBng2CrxhHwpl2gPHHxab1zxLRCF6t1lZvL6To0hmC +e/Tqre+42PhRSCtXuwhK22r0rvreVUaiglYn8udjOJHwNVKdzLnTZ1OBAFeIq00U +9uuroyaF841pq9+8PitwUORurv0lsnHUbfbi/+ou0HzMiaXzz+MPdOXt8nUuyScs +oKOi8ExvpWJ7vn6klkvQtMK/Gakzd4YOxO/nk9K8BJgVN3qrODwHYSORk8RrdITS +tkjiEJiIoklddiwCf3NUzlxiIYWbiqKqNbY+Pxh4B+OpVDnvRmpkJHgoSuVoCS8b +coaOTIgqDpnIClHIj7ogxO+ureRjIIkGNNh6wVhlHDlgm1GzxNUOklMrzDkYMD01 +eTYxrbicw7ZVwqhFtR8olODKT9QAqXUJOkGHS9IA6FJctatkUkIOG1DSI52AZV1r +PYzgdKtTxS60EkN8Igl6VMTkaC05anLygCTyOvGaV7sqVKmzHY8= +=8OR5 +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig new file mode 100644 index 00000000000..81fd33bf7fd --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig @@ -0,0 +1,72 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + +{ +"spec":{ +"_meta":{ +"version":4 +}, +"nodes":[ +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"core2", +"vendor":"GenuineIntel", +"features":[ +"mmx", +"sse", +"sse2", +"ssse3" +], +"generation":0, +"parents":[ +"nocona" +], +"cpupart":"" +} +}, +"compiler":{ +"name":"gcc", +"version":"10.2.1" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"48c8aa769a62535f9d9f613722e3d3f5a48b91fde3c99a644b22f277a4502d75" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCgAdFiEE6J1JcfAJex56PrVzcbSEgC54180FAmfHlp8ACgkQcbSEgC54 +182ezg/7Bkil1mY6d4recJMkFhpBzzDs8aMD+WQOBPoy/bWHIGsPb1DyOOW7lTLa +QC9jh9Rq02oMeX0LWvNg7k6iMTayWcrPzJwk1rgh3pg/ySgCTZ576/aP/UOZwA8h +HT/3RzsDFlq7Wkh4yYaDgSEDVc5PgUevb1p2f126Z9HMFjG8siEWmuZQOcy4I9JG +osQFtwWTLmx96sBMzweZTu2i3iGTPNz4Ae1hu+v5clmSFg43eW7EWChEVoob+3hb +hLRxajZEPsIho4yR5yynoxduXeXrLLP7GH6XGnYt7Z2GJR0UamIrPfxYuWBK76V1 +03Ie2rRXwOKfsjDWw9Z8ziTVu25G0aZ274DX6eQyaWKfvzz69cBXO0fgw1lU8B9S +K0j9k/xtnDCrIkPSh4QGQpFRlbzxkj20E+EnwgDCGIlK1rBzo2V5na4YNj+SbC91 +0BmWrj6dRkQZUMJHeb95kBMfFpKG5B6u7HQxZtIwHFAfF0nypbiB7xmdy/gAmUao +ej3Cu34DvWtLVeSh7lRimeEc44WyBDk2YSPqYleAwYMZBn4WSozUS/KVLU2T/AhZ +VlLaEBaFrVngmsw5PCdck0XRSNSAN9HUgPItpOzYig20NeT1/69wIlUZVNpLEYGT +yvZsmqHFnkunAs6av3XmGl0i8rSA6DujunpNXML6hUciFEK5wg4= +=Aq8h +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libdwarf-20130729/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libdwarf-20130729/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack new file mode 100644 index 0000000000000000000000000000000000000000..847e37f2fe1655c524608255f12a7caef85cf17d GIT binary patch literal 4099 
zcmV+e5d7~SiwFP!00000|Lr~NZsJI~t^KviORUA{sJmytxO|1IkcJRQNVqkSPST^6 zW$ZFIF}7oy+kw!ZI&kLN#I9p{5m`iuk3GJAqz9`uB)%~mQR^@Wp zA3E7LKNuJUk{|?E1dG=d_oBcHZz$nMHw?qb^A#6W8An|S1RssRbK^tL&)9m#Q*Ga; zuMFRZR&V6?*TWj$J^p}aSeA@`9B|M0hf%_l7~%WJUl0IK#y<{t==kG{JaRO}hw?eR zkqt|O*YeO-&t=6`nQ!vPcaJ~IGl0l{EU>Qew;lhhJn)F|7m57G1M3|BAgKm>hGK~s z)pj8}W{K|}e~tkFknxWL=q#Q6wWVC|Lq&t`kE!D;<31g8Ao2_LpJ9RchPwVS&6$T+ zKK=lRl%x2A!VgvieO1i>hGuDy-Fogm6PQ6P!~XsHUt)> zo};MeWE!x3NUHuP{(n4>1pUu3%laQb|0DYU#{2zN7{=8d=**ZKuMaOQlnesrb6>4iOR$D#8S* z5T*LiN627sL#(9?Ax@_lI+9I<3}jWlgsx}U7K#BhgZ?yl$x!d|5-;VYoGR&p2$a0S zi%{ehkn4$(&M-anCy3}Oy+`GoL)U_4q@bS!?mt>j$oW#d0rHBtD$nk79kJ1OHk0;W} zr^D0YX{U4|eAqcXK%SIuU>6NYJOCWWOMvDCj?1G`J>wQdFo~yBsQ+E)U`O>O@--Q!wuN8GQiNu9$z}D@9~vpqyA|% z1#=M!ieOXd_qAoIr%`afQg4(Fs+HZHPu%gxdaESV_L}8VS16q}>czvxY8jRg1^EdH zSuVFJGz?UG7W1HQSTwHlMeF(H?q)Gw>BE<8-&XV+qv#uDXg*C`4(mdd|8y)Ki(Iq9 zH@L&S_HjepITZM6qgdsR?^oH}#@M@WaL)>-@^!FLlmO6t)!h$DonUC^=4pbpdsO^% zTG>0@X|ol+czj$eA@hB}wTd4b-4Egiaiw61fXjp2M6x7Fcp;=koV7N>7KJ8XY&So$ zy=WYjXd`g>7Z3w>147JHt^zvutI)goXiIh*0fc{q#eaq2T!ByW2VFl zlubNcAE)4!o`57{{eSWOe}-kq`+u;G}U zmy~ZKnL-ASW_U*8B_3V!IRT*ibjm>n70*b2`;rPD^CN2YA>JW?bVelTLDjZ2FDM2e zuY?yxKA-2<9C8GaT~_UpsraF7g783{Irj&*@3A)5#QyRn!4O>5A#WsiSePkxu-MgJjr$ux$S& zV*ld+kmokX{;yc)Cl0{VAOZGYTJ`)Nxdg=i#{lr!v0a}kl?vH`J%r;qc=mArLw1=L zT>A{FzL$*%BNpcH4hbY4Zv6lEt!c5v(tcUqt#*n#wQ>RceQRWS5aUbJ+e)pt*O}$0 z7Z1yF3onP9$gN^`zYy;%9GZHDp>s6gGVaBIy&UPHesOg^G@RSM!CswRa{@cOkZvS+ zJ8N^ZQK|09oocsS$ZGI1YmH2E+CSWhS)g{*k*keT?P#|w&&RYPw+l2mIa%N6{PN4R zo|BVkttTg8tzmd&Gj~sGyK<#itL+p^hjOjD(=N78ms3<5opQSy##hUoxybokVIM1! zw3a^Vk~AxGJxr~uljc`Sb0n&iP*-N5`M79QZFeqii3U?N;`>^&xEsN=j~erdR%MJ< zbySq+SPmni;d-k&6FwX`If;ZSn0&KPo{5I0*_t~Ci|i4Sd@Ql+;nnSRl%t{v44wi& z%QJlA60W1r`^yOZm7Z<;p6@D7*7K3BL8Vxu>x>F5-}a=;bxxE;KB|9$rO&Yj0us$; zY|*o?y<2{8r44Rxdn5i#5O1$k)w~$E%8(VZJ;QorhaQttHBNo5G5Y}W;mJyG?N*3KkW82;m|SgzdFeBG(ZJ~^R;n;k>`dD^cKR?;4C7z z83(7hKAOmR(R?Aov(TuFGfeMrYn+keK;Kk)9>z@=zIEE&Z#JT0)RxPQkJWawF+=P( z>*c8+bXr(JVzY|9>#T3j*~Lk$+{R_f#r9qyb8MzFE>4_YOx8rgS1Y&WK#9nQ<#Ma= z_U*S5wacwqu~aU+%^XA5wcX4=GRKyk3GNG2h>YOAT3n}9E)`JIe=Ur8bTJ(^P17D? z+p-w_pf2u~caHYtT5}J%3hl?~kFSM!G z>!gSkv0uqpFE)yM<=rs$;m>@qUzrJ(znE<5=YOpCVNvSm#b%Bvw7LtO{U&m@0!e*4 zVf9}plk5M$!aJ5-DwsLT-i_zOVWAH#=o%`8Dn+yZWxqI-m8HtM&35^o!5F9c2GPd} zB7I8qqc)snJMk?P`9&h1cDm*Il(AEYM}r?^*TTyrDDu;r0A*OJInrR{HbA3F!pkH$ z&KTSTC}d{C%Op6dExe7Q$C;;rX;h^4NN^e*wj#WZqGtupwP>u^<^uCk(Kb#I%Sj@| z&rl1=7TP|CTu2)2#{{vQWUM zqjvDVnLhYu+xhSPI-U9B&%@()|JqMCes8z_=g(j0>Ysmn_`kor`Cnsn_5OL=O<+Bc z1pCj5E6#r;_CFQ?xs7`M8@?S!O#Zqc0rsEeIG))5XM!&&EL3cF@gSG_oK6KRT#6qwk-?k7Txj`0F3VPJOTlSWXN2(h zOqrbPN-^k=7s2p=6?6wc2^s_*kRwxZ}9 z*T3vr#<{3oOZwSJyBetC;PU25b6x(TfB>~igASrSv{Ykh@bSAf)BSJvJtAMC1S;6S zamGUzPz9wh4eJ~a#iil;BgGu|25-W9@Fu(Tasy}MHGY~;I5R<<3C~=PH6{Pt5k~{f zHAmLPC9i1bqw9;S8%32adl%~2Wna_=5<4_aUUiHKXFini?mjQS>p)-`lHvcaSpOsY zKVt!qV>ZYCAHPFJoc}cfIsf-{%YPE|KiU8Pe6Ugb|5v<2OpL+Pfaw3%E&qwo|9~au zzdjSN%m%&x{jJl3h#`0sBw+oYVR&{~|C94yV*xBb+glVhT`}CI-bMxNEs?7i4aEMx zZu#GI`#(KHis-3F0h#~3Zuw7y{%2Qw|AQm?KMp{K_GRe*>A9E05PT01{r|e) zi)yhbycyD?Y@G_koa#YU8F`TM2avMSk4xDW^r)T-m2-M4NF!^8>rn;jx5%btDiz#l zqoKp5K5uPpsixw2R4q*Kt~GAVd%Pw}QTSX02e++2r3Sw5Y-h7qS68&-!b=4S03wYA z{cMnv9{58u733PZ=C?B}X+RqBApYb^kli$}s0C$A>fZ_?!kwP)BHOL0p(YekBH!a# zG0(z0FKGhEF%XC>LlcN(Kp;lVNQJcLhEcw2j#LWI|i>u@wN`1VL0yc?tG$vC`! 
zy_O;o{vW%%|A(yq#se&qe=+_a{)WKomE^|(ng73T`A>xYUwr=$>wkgh|2TkUH*Wv` zL%$(IOu+*n0rUTr`XBIQ{vQVKyj9|y4DW%>VP_4oTA0s0>RATIm=hUVWYr2^0i8)b_7)4iDF`vsR>f+*Okei+4hAhx?ti)+~UC>31=M*8IlX9BM zO1utLX=U!|!fRgZ@fs(pYz`G5ihf`*OLR-giJ#+#UFi?|R(@ zynxsT)fZSC1A5!kZ=WORU=SB@KD_qv%qYO$5xYcy009C72oOjr{2Qy^RB8ao001)W BL-YUu literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack new file mode 100644 index 0000000000000000000000000000000000000000..5a56736f2745f61d994d2b00be32de712b5f92be GIT binary patch literal 3633 zcmV-14$ko(iwFP!00000|LtAdZrjKi4tiDiB}!;fB-ynn4v&%-0wPPYEnBBZa%^S0 zASiNVF{VhGq#COL z(HFYsL31#1PF6EDK70HD&#){R|0M8<@eiYf2Tg>}AAdmrJQ@EaaM$t2U-HD(6c5U$ z@JiNA4PMG)OFfkpM`b=bKR$c>S)Ktz{u9Bf#^18NTj#(%#$P1zpA1$x{(e#o4s^v7 zv#RAlHkcBhJ^ma603hR^1n3-{d(~Dh_n@Ld=W)uoW!$4N2O>Xb{}~pDCsm?6E z^6>{iq-@0-6(3mbp+{RP4ncxPA4EyKne~BgbKM|xs|Fb-|sQ-DQ|C0a^ zxuo^Kp%0)j%rbPI23*!T8?on;p*TFdI34Tu`xBkLcz4MO?D$N&lHhw{E*=#`|9{=` zp9=jifJOb!5&fS8fLKVm{tuE9EATi-tNsVlGb$08{L1A&9r~Xo{{IsJNKDH0KkeFz zdP;5s9tUaF|HS`K25Hd$9J56KGerL<0Z`!AL;ve08Ysh%p?R)5mN|x|N569|lh}m^ zAu0VY^8D@cFNh2JpJR#tp9DT+Miv@b%gt!p9aqv>P11~rGk`KrJ*1KrJ(R46nO>Y z2ck4&m;w3|MD#7aM0`%6V?raw=o^9iE6dUhPlN2?U}Aa`s@rH&?M}7YX!qnwzgcS3 z%G43msC~txfIxx57RYb$B6Zj+Q*7q?se3zpNQ3=nC27I_vt<1*5r6_%FZ(}MOnr#> z6XU-Uq`>}50w*lkf1V-sKM8!u(@ow!xE|=ntkT1m5cy^ELip}QEYqrsSb1WUdJ?NME z-SV;Udb_`m{3lPxjv0`6063190L@AaS70UNq3Q2Y43p|k3e~;?ZS0$VX;UC%Z;v`EI|x`=--+E3)mkyx85TciUWh>11GeuE2rmWDq2N zJvt11W(O19(6Z1xr@c$>VO=LlJFSB{zu!Kn937P^y}j3Lx4eDWm$vt7mG;r=<#L3& zjuod}#}xorx4yF`9_~B( zhyL^Rcn(iKPd8~?-WRLX7n_>HbmatoxrS^>n=y&LQpU!^MC70$)cChY;*rR;cK9ZD zu-iFmirWVQUu%|X+|hCkBOncvj}-J;n6{5hn+1>Y%bz`SO`nEo`|4rSsU)=cQm6HX zt=8VMo$5|)ud!2ZcKYS=v9#YV_m)p5Y1Uw^kLUdHPBko5F=$)y)X~Fw^?r9FGrgMX zKE0pht;2G2I@W&|-~VFC^WQ`O^7-{z|HXR5C5ykSLJHP@nPuz093bnzN#H}qvyfaN z`%5$Ewh}30hT~X{6M1Z!ZDil^UCR$2GT~!h)LSPIKOsgsBNlX_YMGkrvjHLpBuoo|Wfxe^QD^w( zB~cO?KF><|Jd+n=Tz}l0at+;X6sM>oL>MZD3nQoJlTHJ%{{cR2|Aj^S4~YFw0wACE z_djl5`6Uj(!;u2}FCneIX#dIkABg~7+Lq%{<#I7Mvc@o26389wz0NJp1;=^^RnN`E z1QH8#c!h)#4>$h*ej{RAD(_Y0O08Sku2+lT`;Cd|LQF45mpk>+ZZ`@wN(WWBjTdH) z<#wsJcf+dH+^Ox#-CD0&%xUmEXHEHEmeGrk+?5ej)l^CUS*qlwY zEMvT=!;(BR-}9pjZS?;AV8Xu>#P=7fYMhN6Wy}h>fo|TjLHEfyG?Y`A$Gz(3#&-40 z%Lz0PXy2UT)cP<Hm1C_^4Bh(BpPx_J!yVRZ$)!rAm_-h}GB9A) zgcn)OQ%>C5Qg?fuS{tR;boZkf&@_=(r4>`F5tmGEWJ&Hc^gDMGf~mDZ^P{^s`|Gvl z0h+8+7E5Ap$=E10OS{!d7<>1pJoxS+>#r~|xy-jet@iOy=G*yd20gUA3EjOGacqu^&)_)mB;uh9_S&^*&CV@Zw{g1)L z|9<}Em+r5>)%X8d`?+KPW3NGHfBXI5=*6#lo6TQ3?SK9L9bNnVx7Yvq!?S|5?U!}}=e2I5;xiI)-{Vx}CnIAVZ{_>mR`4wdFhA;=3o}bIKRLfLw z8r7a4IzG=NbA63BfQ@b4z}vc`+Mww>)6zhLWv+uAVpuWZR5Nci`DLL<4`srf;){Zq zW3ViU_mm9{>X`_}ACCP=9N`9PUkn?;MUbB=6J~CnL=< zCg#~WuV|-}%d?9sMU~D6XX?B26HyyU?AS1P)z;672$|2=-S*>JY;Y&|DAM8oEw2BN z^PiIekP`p>mxsL0M4Z1BgPi~Qb<2M$^gr4E{bX31{ohMoXC=nq;UN0|>z4mC=zoq8 z7yW;7{(m9>g*AEq_hToF5JPZpq+tD*VR&{?|C9CKM1W;zYlEVq^~^2md0fbz6WRKr zLG1t6E&uCm|D)rKh>p59$o=2fE&r*||6p;LdLJ7NYtA4LCu-SVFb z{m(6V{!gC&Bmx$!)${+m9%o8S!5xu`^}l7m|ICy1za)?f{m(A`{uloJZ=(MnA1wHC z{QsLX!ioC7D^j5U0RW5p|5<_P|0M7YRkrLaM?V>PR2K^n)ea6Kp(^kk8kP+mm!`T9 zQsq{=->L2H^{An4Kx$|?lmiys zJS%ZpVJHkmjpr1hkeBkB%1ZnYs?yTj%rtjF8}J$@s%#!HkP3{JA1abI1VbLzNGqV+ zesE&Hm4UZYs`|tm&@j=z|G!)8+oVQ{ zqZzt+>dG3_CN|Vi0nP;C2zMlVR#fbVPw`su_^h&-*M2vmo=4^R3R{Qn7WKoAF|ywW 
zb2JZLdibWor(YAbPC!6FKtMo1KtMo1KtMo1KtMo1KtMo1KtMo1a1Z<+?<07%0LTCU DqZ2%| literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig new file mode 100644 index 00000000000..d50cf662f95 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig @@ -0,0 +1,429 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA256 + +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"libdwarf", +"version":"20130729", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c", +"cxx" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +}, +{ +"name":"libelf", +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa", +"parameters":{ +"deptypes":[ +"build", +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"u5uz3dcch5if4eve4sef67o2rf2lbfgh" +}, +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ 
+"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +}, +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +}, +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"0898457b4cc4b18d71059ea254667fb6690f5933c82e1627f9fed3606488dbca" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc +kv9Xlg//d7uWhVbHjujSXRpoN3hzH5sUvvTSZ9xzvXGAXCoAu2oEGg4hxZPIFQJ3 +pZzKysZMfeFg+UKwDzex5TlKZ3JtKgCTKYl64zZfUl2EQgo/d/Fjz5mSFHW/6sa1 +1uTe3+sVt+HlijN72t2412Qbp+/uGvU+KBvXPA7kgkp88Kd/PL9xe3jlT9ytH5Nw +3LIghe++JiepjFAKXTfIA04EjLb8c50AAxsK5Xx37HOOVHHQ8L9anFnOVYM+DxAz +gn4dBYUQ9Uu5k5uEu5CwtxsED2/Yar7YWIepEnyp6z4zQVbwjO4/w0vZ3wSJ9c4P +UhZs8V2akuqIWyzlQuBOjywnEQc/nw9v0py+Dr/Qr3U4XWh/LARWABMxa4IqXMOK +aVmd6weVjV4U929gaOT/FCtZPfaFNRbk97YP8yAxuLhSdiGS0Mp16Ygz21fVWB7C +UjkGGsKK1cdiJQ0m1CffmydU/nbDjSuw4WZIoIgDzvN7SFm7YBtE+xY+RUPsHU22 +QMAXojF5abwn48HJeP47MYdfR7+nUJq6XJiJ7/80a7Ciy8SAVxinQWqvigf/hmTf 
+kAiQaqOVSlRBJ2yry5fYBKHSIRvghCqS4t4es8o13R7n2wz68VqKu0JkNlT3Ijjc +QjJYtI+844PCDNetPVV8iNWF6upnTJnPHcFmKAEO1663hOc3Dh8= +=3fA5 +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig new file mode 100644 index 00000000000..745b3b61492 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig @@ -0,0 +1,317 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA256 + +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa" +}, +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, 
+"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +}, +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"c068bcd1a27a3081c07ba775d83e90228e340bb6a7f0d55deb18a462760c4bcf" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc +kv/zSg/+NrS4JjT9TFSFR/q2vaN9aL7fSTunxp+M8eAzTmg0sgHc/D6ov2PMpUF7 +1E2mnZ2gL5a5dHtsSCf30ILFzQoD+m+I9yOwcJopcbEjr8pcnXBFe6TT8lkxlXtI +EHNsYGMUHFbFvc+hFdWatQJicdDaIbdyEMGAC7Kobs/4KpdBF5VWV+sIrzD5+XzO +ACiKRjBmcaJpa950nuEaFzBITgq1aDtZ0EEZdXYvjRnzj9Bm6gbqmWzlllW1wf4r +5hSMTpAsRED4TxL433nuf0nKIvTD5Mywzs88kiLCtEABfDy1qccyBAnjyNypFF6B +fPqSDnr33s+JQ35t7RcHKfrgowk69UablE25YOUrQP6LtH4QzLBLj4/Z0zuz33hO +v+YYe51DgixsMQ2WCKWEO6sNcrcrLBJMFVwUP2FyTTdW3jCYRlFiTYLSfoDhTRJ/ +4o7f2eEp3sVoOe12jKI6dw/P+c70dl8K4+1ICcnZkwsb0pd0vt2z4J2kPs2+1/0g +vpywJO1HL5Zy7/ZRlmeeSMHYEDX2eKhm7QRFbxw1IEbg3stQCA7a425JWztyJ05K +sfhFQgPt7F/xanJVFYk/hdza+3+5pFr1K/ARcLFBdLBKGxAXTMMR+NkMp3J5NiOo +SMZJ3jG6xA2ntvSkyx/GFawD0FpnlgEByU3E+R/WiQA4VojLpvo= +=kfWI +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig new file mode 100644 index 00000000000..5e84d71f883 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig @@ -0,0 +1,99 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA256 + +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", 
+"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"2c1c5576e30b7063aa02a22111eb24b3f2a93c35ac0f64b4e491c7078706c0ea" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc +kv/T8BAAhK/v7CP6lMIKILj35nEi+Gftjs7B7f6qvb4QNtqcGHum6z9t3JxkOOrd ++q+Wd329kLYAFs/y9eaGe5X7wY1U7/f863i3XrxHbtmrnMci61D8qMjA1xnBGC+5 +yd746aVeV/VRbJxTeB9kGcKPMcIQYcearlDMgj5fKfpCKM8a+VyJfw7qHNUyrTnu +d6LSGsEey6tGkJecgnJZTNSwryO3BZbg/4EviivMXm38AKGZrSib06qjkoHrPRvB +8ftGSGlK4YmFs5/YjKFL7QzuNJeqPNJt4mD64tsk21urOfbQJe5AmdMLPGY0PbW/ +w++06c8lsd/6FmzUwlnTBUa39lKJjhkhoK7KFGVqZROcXZfhwAyqPZt7ReA5FDMV +l5X7sytjQuSFaQPGi5g1xXQGEI394T2I55p5T5/RuQ2PXcFxxSOmIcEcD8o6Z7+x +XWLq44KUWQyQP/StjaVhIz9YPogeBBJllA9hN+GzVrr2i+Esu1QO5uDgVuJP7pTA +9wwCLV/t0hf2TZcpU2fwEu+DMniaHm6haVwqiu6QGkbkMBx49zkV9b5i9L441GoC +Q86R2Gs9O0+QzHuN6egbQ0xKm/lfU8dmJSzV0snXawAeQ/vgCpdinx40EMc7Nz03 +rgZ3j88c/ADvCb1DVKmu1Phf6U7WqG6/AvB9tYl4Zl30VX7ETaw= +=ifvQ +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json.sig new file mode 100644 index 00000000000..7ca58d17725 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json.sig @@ -0,0 +1,151 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA256 + +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, 
+"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"f33e7a6798a5fb2db6e538d3a530cc79b298e36d56a1df385d93889a9ba431d0" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc +kv+MsRAAsaQjZbB9iW/Lq9b87H/E5Zmv6RrClvpjSnwvhLR4nhPL3p0G70k6tI/b +NEdXctDyvBOJOEoLaEBrCODl/3GjV8B9Gj7OhT/BIKQjlOfJqVdwIrnHgav5ri+Q +UUXLtejhJiUNoxeILI/xZx2CoKT9q/3EpQ5ysqdybJmYJCf/hv+lXEhnwUIv8vV/ +xdRYY//rfeMowCNIZtFPjSejMywXJfFKjl7h5dN5kwM63D6z/sh4zW7tqHq4kk+A +2m0WcorVg93wAm+YoJaQJVx8bYeMGfV/TjmY/cSouCt8PM4Vi93vwieZCkzEpXbM +BkVN4X3PTMZSOf0WTkEbnQD5v090/DoQPZyBrcDoJ/HmWDiz5Is2wUI0mLVkbg2L ++rKNC3ZajJhsWElMGNNtZRLmGeTIe8hT+LNAejo221vrOJbnUmpIjKxVjStDbXmW +nulgyEPSTfsJaXgbXmeJ8LOk0tWpBAGC16VzgXrPxoGD2XKxoiPCGLNrF/l1wyl+ +n+nw3TchNFrofpPrqJzT/vS71B6KDb0PVSTQZfM9+FahrQ+YbsIkzDAuxVZb5t3q +HUME95RgoIBbccUGxAPwkaNme2OLaLzsJZ/Xhl5I8T1fraLYapsKNjQ5+CSKO8+t +MlJYgSHuazWSetRbZ2H7g7QJWqeHUAWi9i1szpNDYxTFSs8wgDY= +=edPy +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack new file mode 100644 index 0000000000000000000000000000000000000000..dbed0b978af0378673c607d208d504c2d4566fd3 GIT binary patch literal 9063 zcmV-tBbeMDiwFP!00000|LuKgciT3z@blTff>-%axshmb5heLv-&@DooYjq!CTW_( zLy@3Fn<5!5T9T9Zx4$z0NpX|6q_Sdh+L$7i0WcU027>|U`PNhm$=-kdMd$$Zx?TDN zC_XEG_hg~>AFlgvRw{ug2sIyyTt`_O2wvR(Q_cU^c;MRoFLcCC-T!NWZ2z0CZ4f{B z>3neg{udOvTlfDupsN3a;m>7(>-WDXc7?kC*8wZ~KTys@^Q`ZPi-FLwC*!U(?G03? 
z+a1ZFJh&!%T)+Q&9Z9VFe;shM{(pNd4!A-8i&8EB*8(@~e{T_zAZ)v4JJ8)Q4d;i8g;qH0b+(r;HW- zp7y!eQA+DSLHZ9j|97gBhb`}aL6SL73&#ilrXawFYFk5Nrun97IY4sTRwu50u4=yC z+1t5oDr_&TquZ}9|9d@2s^$MWU?b!|D0?OO->K#QIzW*6w=e%)FRbT)+ksmCZ}<4$ z2>CBo$bX@h|LXujRyITa)1(ujo(I+owfx`i@xKZ3U+Q$r^1rtKtObNYZ?oiolB_Lu z1gX;#2kodEeU%JdA9pQNCQ~8@#8i5&=$oQFG)Lol%J>1O<^Oh%|BaCUf?Ui0TLQ7O z0rvkNl>dC-Y5I9R4cr83`M=%ce zf$P-s!UeD{`L8U;cxn90Mf-0o|JO1mw|xA6Ym8m!B{yj7Hw-egG)HN)8lJ6%rt422 z5IYehW^Z7Up=`=Z*Cd+03wX|n?+`nK=%MD%EQfne!@0j$I48=y#CP_& zx8sO=ra2rZa$;v+l=lUNJ2`y|FJkw>d|zCw(wD9YWS=_(uHkCFJ*Q|Y+-;N9c54-b#z_a7d= zd;Ql(>E(kH>G{*APaeH~UPWT1(~~-)oRC=T$#;Xobkyz*Bg+mghezyNJcP@{*u!vb zM7CM5RZsYEd9i{_fR)&0O`KK4p;&A3RFY?;>uo=B zn6#-WbOj-l#PHtY$0h;suh++aAHI2g^62>K>Bo1+$0yJJe0V6IK7H`+>FLSer{zhY z&v#{|*B>a!Bp{%}%H8h*Cg6S=;UD~U{Nm-a*C!uep8WOZ<7?^J^N)Z2-F^1_@rR>V z&z?PhUAfZebOwDXp}Qh(57li~*KFR?LVf&WWS5R*@u_sGyn6NcPx0mHUmtoO{(k=P z@Z;&xhwh&bK0aGPb|qcjb+&@$6zfmpb?LqQ=b`lB)!R2mFMA()FO;KqPo$Fv?;aig z-F+%5l}o8=Qn*u0)*<;S-`(LrY* z^zI5pA5>RyHGK?0re+5uRedhi4DRA2e*tX7 z{4ZAA|In@H|8)SY;_u4*pZd$x)Be@r_UC^=fL<=o|4KdouLHg{aM<8_Se=lNT58f0 zSGX7Bz$!U6|7RxH<UJbSIXMd37(cFt6_Gk_b;j_gsnsf6%9e zbtcm=2z|_I>JtqgwlYlM#Le$7E-XSf4MZb~JOWaiEJ|=0P~|+(|fhCFrvf zO`wQy;V~Yh^g$|5EkBGv$)pX+Xt*O?UluhrdQ&+FMvnW%@a)liVT~l$k0wgbk*AaK zmunZ*pj|PaH;QGZZU<@E*4NQj?<=v-W}4+|ws|&@&K1)MheJhnLdChTY}<0qFKT7% zB$F0)=1U99Tg(?{k*yn3{d_i?o%zsGE9i}EPrE`xTPq5M0L^h=xq^I8^SZt@vK-A; zQHYt)$!bn`U0LUEUBoQ8qpY=rt0mlzNw^^BErz4%7gIDf(Vm_ID-vyL+vePxhhuAE zi-tQ4&y6q2%;|kS*T04&k#INS{!4{iNn~XhzQc>W@IyS`&@iteH-1Xb*qSqnnEPxj z0q@^Owsif_*Br_bKeg}39IE7K)GakOQS8xY3-=Eq7`FQkusBWVYpNE8zBP*zhd zXN_l)BY}QzMDjVAe$gb+U5JZ7`w`Z`%+{~e9bVra?tzY_QbJviQ@S0DsVrb&|Ip}Rq1ETA=lyiydc#> zt{!qfHslEC`l8(n&pjm?8@?SmU#7BVoJaF7vxTPjro%7#*>t2BFB@or8M*-|q2$x99)# z8iHDKt$=#{x837^BjkUt;`DwaMF@1N!g$vJeMqC{H z!V1EjBn^5n({tat7wxdrw(ZzF;z=^#nUORD6%~AXGJ}WGea*K>kWY|!x(>B~FtoYw z1oiJbJ3G3q1p!czVSLZzOY?*!+(p|e$2A%aM%<)eE|x=Fp!=2=@;fwHf&`OIG{*?I zk;{c+-;G9Ncv?X@zvaZ;M*U8qD-h4yF{wxya2g_UfFKKa*8r_!KnUSr2sno38{CpE z6wh(U5l0)^B;W$z24AzebF$z(i|7#Knw%LqI{KDswiPb+StA4<^MQbOXF*)I(B*=e z<}J}fwIK`&<=iMlM6C84(E`g}u);-#$1s+K!#shGtcWjMHj%R}2ikyPTHJpW0YD=E zIbCyrZCP_IWZb3uF$bAw@f`Z&(n|S}Ltpk2Jndr)-vgF!Tg=NE$Ty1c^ASNj5@Li4 zF1byRGb|s*k&Dd5r-)u~MOcjpc(WSGd%M;4aqQd_i9~7Z`R>#*kOJI<=wr>Xf(g>r za;N~HUf^8J{6r5DPG;Rab|=Kcp>V)8#$o6M`+IvMD;!5d99?_Kl??cuDw;m>ZAfZ1 zPIc@=T9P^oWSpPW-`jv`?#HwcVt%Ta;*ngOxR zU9SDVT-xjKT;jb4A`9gc4t5x+^e`RAz9ie=f@laRTImR<5{rsWI#`3n1ijw_&TyUI zLT*ID3|cnaXWAl>8Bi%LvO{iaky)C~Ct>6}xwZ~chuuFNv?t&pf8bVQyhDPz*UqDlarQ?hkM=@VAXKCZ*P2l)$g0-vvFz7DJ> zcG56iSL3ceD0B~!gkCJVRsl{cBH3Ny4@jsY(?uc9#O0K^cK8GY7sti}a6HPz%y5DE z9=c*FCh4YcnjQNNLr|wEh$7qIZ9JG?3LUvs!Nm|WbV@-*A=EGj-EvS-0yfNEuNb6z zUMXlWCyuE}k}m@=-Hw?6xk7 zhAOU1V6MoIXW%$|tSTU%pt6WG$%;aBP>k`SHkkda(3%oVeRNcjPNz{&;gwOowzwo3 z`DaypbsVkqY7*z;s|b*$SkZp-j7C;fJdX0ia@2@*+^8yB5?585BrZ7=UP=J^G_f&> zRfRee>s=U)_{;H*MkxUgh_8sAjy+mEXIgua;Z7pW3!*?=Rz#|Pj8?7ayiD}{`xhrL zFNq#>@(ET8R3;#I<7t)5VMVXH=M^eRDbF>f@jdPlRV7m$5ykTnNR*|50WYz7kejs$ z)$vMFu9WZMHZ)W8drXp{h`hM&m02&(ma3tkE<~Q1tN=rm9SzQW?=V(SA*97ElcBzP z13k5`vcA@5OpQfm3a^>T=srGYDOEmigD0xdYVJhrH&e3+#O=1Qw`F&C{=*;8~A4_4k* zs8TCPK)IPN0O%t9b1TbxIOLOSV_F>E(4wLUX(dY&S|=7lmvrens%|SPoU~c>&PPyU zzOz3irF~A!g}J;jDS#ueF3l{eO!W#3!3uRk%ZbQNWmTXWu=KqUWq#4`OnK;|X1>j> z6yNSnvDQhmX!U_M*lNz?N;`pis@j!rPQp-a9ljc@*I#4WI z^lmks%y@8oD`NPrzBxI3`&2!A^Q6(r*OhXos`;ZrWg;|Nm-vRqP*`^Bcy$KPZraFY zi>)uJ6HSIV-c&%5HGuO;0;~imJ+M|SuikSNq&eeUyy1o+%qV~zFON`xE|xR8V67hd zi*namfrbr+n$bq`XtJGHU+E15Dzzx)*ppA{Pf6L_Z50|>=!>~dU>V!|OS2>L$oE|! 
zLXH6MoFMYhGEa<`mhQV*idKxMWn~kqW*TEAk>y-Pj(7__|L86D|Ec;d9c_F>_W^dl z15?DP3S4lHJ2VU?b7BK@W|pSb4sK?_pQ0@VZ7+U+L$H7@q+&W8^W1xawzO(X45k3# zH@1KUV^ejIn8gYR(*m2i4y=9uev5+GrKc(x&$cT-(Fmb(plIQ}G0e~xZwlb=0y@nnT&k2MroG>h?7!n~LayBbVKxx9 z$3+GEY)p!@n~~oDCoK{8nkeBhS3&|lpT3a$$aPGZkU}&wb48_*V><>wMcXwEzcTjD$5Z;8+53*G$D7%H^`-RFK=W9SmucqtKB(mu?iw2T9 z2yn|VvqKoTEjmtd>koN$L9VI^_yu(B@ynAJ$4AF+nc{GYiuZnl zKXWaogfBzq0=7r54j;UDq`rFeUVZrX@Cjm}$}-!qRUI-@4cxWbU4`J`@o6;%1E#A2 zikvc?jLekV_e-tze5%>d=u-3Fzd1ZPfhRT|Cr3IuSFT@U6*^wEvP%~)ShykzmFcu8 z!=|?~Hh*uWTI@b5esoI;oD)4H8lX7|lqlyuSFvhaGI7-F78b8bq(N+0#=(-87{pL9p-PR5p5_Mxf2OB;p6L1;CN;_jdg1^| zh$lx!OS4=oXSj5cdGVPEUFSzm#Jm28Et0F9=~o>*sLdE0U#>f};f+-HMk%mkR!W1?Af>kqIt8)HoaT9D)j zM0!Z#$;peh#CO`R-$u*7&%Hl>`xJ@SW;?r3ABY!{d@qh4yg7Vx{OA;`ml_ZnTA*d! ziQ^ANWJ5;iU7ZU^JzX+k&|lg2@vMm03MbiDyAb^)(u@iH7SJ**2y!@s(8xO(H^Ksv zXt)eWyA(JlsWEr>;Fwt;;=?+roviJL*ddv;@rjIO%1qoJQiJWM95nveS1wE`FH`m| zOxRr}?C&>H8$Md}0u@h*Kw`Ag;a3U{*e42-3>mU47v zSH>>Y1dY-0h+ADc#iwE&(omI(%VrjL_V5mZuUP@%VwpxgD4LCo(*&1(2_|E~jSoLi z@}lJ|Z`?&^ptJ`0!oqODgDMA|dE`d6L6b5A8mBo6Jk5mC31w$lWL|tM1Z6JsV@Mpe zR#YKj6qeWxck@M`%gj;q+B1CDQ!^t({R%H-%26X{RnJk=>k=oP z8pY;mK`}W_X2Xc>X5yUKevMo5>12~2(DfXnB3LA}pG&BmXW{~> zO_mI~mkHn%J6?>n>DZv@(4{b*jc_yAG4QnJZy`5i2U(b(VcVu@TNNmg5IC0y)EmVF zTI0SHI}LX3xHIaQI3sL&=U}p9hlr!HDP;RAwhL}e!k%9xFRg0|hH zb^C|^)M*d+&-Zu#32^IPZ#LR<>ZRJIe;B{FS+i-#y?giINpKh*;BI88h;j%}nN;s5N>uyf>0gGRxHI{Lu+xDrj@09a z>cfw~shsU4?l3n3NX6Eql`ozFN zz?4)zQ72> zoymRmLZ17!Y}fZ2GhDW$D>GiDbWA;|vnX2`;%6t4%!W`2QDHJjrE_5xxLA!L^OSlh z4sudE^+jaUSA43B`LD0oY_0U^%#@M17^e!(D(_}n^IMyjM;-lR%PcUMPATT zxj>xg9xV?^CfW0q%Cqw-<>;7o5mS!LhEBhr#V!v(ttDONA%(xibf7zh-!n=};xVAF z+IGdSsug=0*5Z+m-f%WOj2cjVh^^(AmgSsd>zH$5+it~fQML_h#wkf>KfYLt7gTRp z;PzfI)+$EA48%(uY^%0fnPyl}m z=+XPob?x}%0N1QoAn!qrua<3Vd%@V9sqn_@BkTVx{SV_nO%7C+(T+60EK~1bgG>8Z@WVt|1@)2W!&`q!t8fG zaDPu65OBI6G$F&ai<3pCNN@T|I~sZ^lloKVGvM}z_tAAU;LQCDKb!QBbZoCooD%psKrGm=+I*#l6v=3%orrz2oaJ;BgxEKCe4(CGmppX2## zKMb%HFymlmpaYPhMfo9D5sJA|r72XQ+IEADrAtA?lQ8G%^;u?JwzTIY%0uOPgtqn3 zi^nB3yC`{*B6vOa96*Nf$%xFEG8xA=eVBrW5cf%Ie{HLu@Bag&`G^0qh$E*;iTrh877+TznF3spcahHmiRt5!41>Hg!5o*X$2T|12 zsh*C(c!7&Hv`Vt)ifSY<#3fTYwQzB0+y`DNX+r359YnfLf}qIQmuj5lN&g-&;0)=7 z9;)y!oVm%RGg-V9io!{N%1U2 z08=wy=$A%5LgmDLk9+IWn=jbXN2{)C0)ZQI*yS<=M>|W7qdn0g8&PPs`;AIX;89}W zTd^oKHOr2W;;h0AT89Q!8Mu>??7uT~sz8!(Hd_h;4p=|pNeZ^C1RWF`O%=FOW)s$U zOdA^&^j`=4w7-}Qjl-7fiGJAdWj_Wi#)LRTn% z|4r`H{$J~W+w%Xq@J?m42-u7GQUsm&=vahSiI848Ob0To?sYA>-jE&Qg>`A-kLKjZ z;l$2fb$daEl8nRKKKRks#L5iAhT%xcXKUK@_SGaCG-qOT4;qR*ltpDAl7ZYaxzo6m$>lM|_JyH`2!$&kow1yT9O+vJv`Ux8nQn_4hy40;04P-~V9JqF()% zpkDuP_xRrk`QH=E_TOGD|JMOxx3fj^A9urS+>O%5^;AG%1J?hQ@?WgK|Fjm^4EbMw z|Mj+DOTPaa%m3Q?TZ9de|AJ85|402_uI2waU?c6nOQS40pl?b*lTD)cDs!iX*MTvV4N1|(J|O4~ zI!52rdWI>OGS=x6Gsbu&L+XziNzt#omb;q;8=?OR75ZPj|GyRh zE$f%p|Av-RTXt^hFK71w`R) z$p2LL*Xo(zI?o&8Sw3U0A>lv_o~?1+)Ssl*%ZZmDp!9&GI$d zJex@8is^*Ip&~n>;%rd!GT&)$3msnMh4y8&Z*gb7w6MIzd~p`px-r$yXS3Ov??)3W z=#6Yo+o1O4Zq8_5R`Kt2M9_~DsmV9h_PI}=Gr^R>3t=Idhj{%64gX$G?={p=Lk%_5 ZP(uwh)KEhWHPmn=`2Q0`zyAPe0RYM{9RvUX literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack new file mode 100644 index 0000000000000000000000000000000000000000..a31d664cb9214dfcbfc938663cd6603234537a21 GIT binary patch literal 8625 
zcma)>Wmwcv*Y2qy1*Ab*Qd&xoE)j=LDM3QI5s(@hh7^#HPHB*ChLY~?7?6}0q#5S_ z`aJJ-&iQ)why87>dwse0Uf1ttjKM|w?*UE%?7UaSl20F&VIhV7wyiAF@Pcun9J@g; zQVw-^Q})DyxFk8=yVw^=+XZamt*}RZbk<*L8%aZT<45sq80P4l*R3(m#B|Mc8KE@a;`I>YZd`f~ygPW+03n}OA>aPjgv3}6FvCL_BNxmDkN${j z<)1(_7{Du1b{@6!FUA0@Lz%TdH{={(;*Lj<%ASb`^$Aj)shjwhAkMw+)stTha=WR; zvGgYBsMTja>xsZ^hM+p77nLS?d13-ZS0!i;@e79v- zSyHDR5mFz%W|&FZIk){?MuKha?iD3+)l1AI`ROtPy=`0VMNE*adbhyO2f%0xrg;kJ zZ3AZXq8s5g1i}DV=Je;J=U65{ z|Ec>ARu6=OZS`(96Mq*!noZOIZ*E~p(s$htz|G1~u)aHT#Y>F?Q>Rg)@y&SST(@h- zODqc(d0aJ6(z7Hqj6r#Fs|w4I$$Xp)Ay)ymldmIK#4>PW(jFb~lL4HJ) z)2So5JHKw`j-tAwQ_Mxv)ONhaRN{ijrY-w&oJ`OGuC4kS=il^j|ax;~3( zeY2~ltUvKO&%cqedHs1saU(TQ?B#|$)7=5M-jatu2%{48R)vFfL~hLzogql7*Huj> z)WC0hq#0g7jnn#c4zX!x(_^Qom*-E}Fm(-5S`iz2GUA+mF9YoMx^<_yh#a!Er&#r> zt^C)%y$J)^Rf3r9k^0BMcU#X^8brAJkEAQ@6)qz6tz};#IV2l=mAx{RO|pEYhuR+a zrc1NiN@1*cC)^8+dzz$P*5i+?s@$-q8;;49vupJYY4I-?r-jHrzc#>b8pzwD-y74BRP80o*4Y}T z-hT8}qpx$%bm?$?l(8tfm2MIrWG`+jLOKzUjNRY{))T$b6{;l4;c-CLa zo~Q8Zky2*ob~FsNo7-1tz`sEU@%fCL9FBZ76O+b>LM;U%E)E|$7x_PQ`0I#%YcQ?~ z)QGm;AYqK?=T+9>-O$5yy7YLSMr4g>^<4S#cz?JFub72wcyB`d?>^r29d0~aZ)!+) zynlY7&Y$XY95^Zgg27bBiOo)78xX|q9r$iN@aD1IrUoUREOC}|dcxRsLKn5}b!ep- z=D({rUc6zN^MpvXU0@5G#ObM@*WqrK^co&8wPE}C(l?HAJ%Xn_#FO6&Z`e}&=AzzK@^upJRVO!iv_ZqSScPM|8ds8Q9myBfY7Z~G1>&bX0`M@60J_xW6KKm z@)7O*?829u`hKYW5pku}{pY6! zxB}lFxza)4F%e*3D(lH)1A=$x$Zd{(o7$FS?f*t2!>ir_Gd1Vhlsl9vb?0Hp^=S7^{Jc-fk^W= zq;NL;*E=H$t+G~VPSJ6}iEZ%7#~^c-89KGfu!zrt9kxWy_jaOSDVnmg)SNG9pn^?^ zrr}m~Btf>zTZ9T24c*<=y;n$1M%A@`5KgXWc_Upr271!2+e+~+(#hQ2wJ;rM! zWS1=ZLD|erpFTA#)6#j_$AGyM^& zE6w>EdI(}(N^g*V@z<-=E~b***ZcVcYRaqYs9Gdzd^~@*n|k4J<-$tx-uI-^-iSBX zxGphw*FKOo&;v$tJgO@qH80o|Tl_cvBTlHCa#Lu0z{l!TyIiVc9Q1OhK7aI-gsvT* zy{QI0upIeI*_n6PQq_*$ulVe1#|(S%KXIksiKinTPyGe5{6!QMD5{2Arw>U#TNndvs=Y(=5w{Gz{;t`q5b@NwfOqn@2kSA4~|d` z^wi0<>kR6Vn8R+ot(IllVxcssR*vxdC7sWmybtR&>IQUWY42TNi_!OZp?lh^M}zwk zb*62U#}N79Ggw2#6-@8zl2QFbE@WV`+6dV$s&@mE`h$W|-sYMCO2{SDFn##~AATgj_d3*s5Q}Q+*&m?U=tP|akeEjST)2M2 z*Vuz>F)^!ajEBybIQ?bSt>bD~U3ynVe#-Oz-6$UEfn<*+3VT!8dF0uD7Z^YtfwHN+ z06ySB-5}B8oUtRBZC1lMtL|@+GVS5{Z*JazYwme?RXmLpaI_m(8)VKdc z{B%tyvM@;ES9}k>-vm_wXifV2)6rlQ{O{Te=CzH?9b%5i$(&rb1Ij^~_`yR~O|{Ye zR}4;)aP@?n%~uJkaZA#63`hM$s(q{*q6V9v5dzLSyT)soWT*4n%X<;xUiAEVLzwua zGWUf70#&t4l4?L_|k%p<}B)=(iN9~a`z%=c^a*kDHHtRR8!XCUxIr< zxnkIVzqXh7vLov8EH(#%kH`i@$r$QU9+s90^}3rlF9hSys0Zc}413i~jDNQnRSksE z+Kef^@4w5iX+wv}CI3sP-^+3!B@Cep6_Phr zSX7x8480Sr2ZG-5t$a+<#$Z|AYOf$ zyT=^i5i+zkVqxf8Z#nf+M=>o=z&erFA@9T9{6ERsgqa7~A7msl$?`XdK?PZ_cBF{q&ssBRk8PRGWeVQioB z06-TR8BMGUq$lv_R6D+;vTSO^64oJ8zdF{J%j~k7Ru+E+myU)}XIHGMueoJ){ z!ec1wD#(B8EjAK1Z^AKEqh^eLQt@Cu>*PKT{7%x3oCIwuqZvkNEG_4k%+%A-J+QSt z+mFOFpML8rro@Av*-(G~_hpX$U-9)?0Jnu1PutPZJGcBVwNRd>y)9_1pLLcsTRCK^ zUdX!Q0i9dq%(q{_CNp!5QkfVw5vAXEmgvl(7dK{fuN``wj!o6sUTXV+*6UpZIv%O5 zmEy_Er(C?sJnxLY>iWBDI@5%fN+5PT{H0#I{`A-jxAHjT)|q?~MJlUcm6F88?!;}6 zHT9CsR9`VS&+*dD@sxgxKNiowDQ{WuXX?`1USTz8hA+pWBqyow1MZnbehCSvu*~uK zT$G`rJ9m^DOEJ4GX7ME#a2j3^vyy!flQdvB)64U&cZ@bj+53YA7j`*DMygF+>(*Ja ztXwexCLJ{9h0xVGE*jO~J*KTB+@id_+lN@KHb|xIFta1xDU#V%XE$$X=F8}e$Isw= zg%TbyMhh%2W=>*xk-|+r?B66mKY6y_F^zvFkb4YdE;V@4Tj)U0F?!v7UqGV8ndu)U z^lr;!4x6)|`sYZvVNDrPjzchVTCmDVlrK8sIiH$2&lg4wDYx5No>0e-8J;9ou;6sJ zz(A(2`K1u1hrGbm#s`(xac|^;)82eJLMGYH$2FN~<%j;LH2ToI{Y!dcrX6=SMLg>lxcuklk^!%sS=y}PPlW`5FAJYmQk-XC5Z(5N-Df#Q!VTlN)(Ual_X!q=n+N5j8+vuFjqF*iK2(=UVn zCJ(W;$N52@6gN{K`_lZ#k9qV@(Pqy1)SD~EY+5|nc$|OPj{P(|)Ny8+mAm{Mj~>2W zgG#kg@~v9i&y{F`Frf1k|$g86AJgW6tAK_eB0G{?ccSH9gJ z^0}jFu6B1WbhH*;(YC(ZTe0{-l0L8iXjOfs^U1-KAHv zueIc(Cj@;7AA%cyi4zM=l@aT^meK3t=!_#ilYj2wn;4lhOkDKvq z49(0q=YXY#IPm8rm_M$u0HV$KE`}`$7LI-NTt0 
z>pk7USX9gVJOJv7fHYs+?JluaVUTVVj|4fCs28xEuWyekH?|}e$hF`;UmV=~m6xvc z&DKqDRmgQn)GZlwgnThlq4l9-viMQx8S(Q>9789{;Hj|kX%jx-OG^yJDbjoQ&5N?z^}_2Jdh5s*zx>WHeRM@ja-#|B zm%jV<=b-=&({EX|nc%K$?zN9iSPeJj8Im6WoN(F;)QplBI)^Z*)ag+L!;VZEwSYR|#J;IvNNrf~Jd3fhAw_JRpF zVy#6vF}$zn;fB4L27{5xQw@;uF(-UUU2|C-7DDa-GsO^PK|di9X)3a z@y3e>oA$}C1R|-+5mgq+Wd%Fc^Lv8W50NFm={%{c-FXBDJlBNCoHqI9R<25|6$%OF zA>MU<{!en>f1#tZJ2QC=iBmnWT^Kn?gEJI&9y(-Kmp$>yraq8NkVNbeAkr`aFbo0?q6Zo?*3=`@5?jPLyn09Bp z8rD~@?q<-UlJYtHgPum(Kx(HLTL+q}E4QYg-ac_MuoTHO?d5JgKY52Nn0sa9GNf_y zXbY!w@aEMvH-W#e9&_9F{<>Xbbo*YPyzR)XxQ=Pktd+8^NO3?fJo)E(2dQyep1WGt zH-y=eF_dbIjFTJf(1KPnR6L5tQ-Z8`r%!kIl`4EO+2O{^a~|`_Fai6_$V#t1R3T%W z^O)Ugy~uDel|oSFLuGX=&%P?jD^CBk_l&D-frkFOmRDF^vlxXoOB|e^t;mj8^RNZ@ zSa|>5!!6S)cHZ(mOoOj{e8(Et86GTi{JUa#3`Y{XVYFSv4%@)G9R(6r@c_|RC|GMd zLv0>z8g|lV|9gqEfiM4{CGR=R_~HSw{vW1k@gTPi`FzQt+zde}e3TpFU#z zgI=2>4Y?~#FKi7CauXjYS7}BCIdqC@>wN`56J%K@ zeCxfWf&Ytt*RyKZbnjy1gLU)GzmK)ra{;3O;%i?>m?&diz z&8;tQGc5hmZqGT?JfHO=)TU4U+h@zfz{u(xwrANZDYf`)J--f02YDy6FLZuBQ7M(* zuFKiWmhK4ME&HL-f;(G^qtuz|p0Sm*iF#YL^(K%yA6Ub3e`&*wM1_Xgw86eVC*Dg=V+^d2DGAzBZ6p;~wv$AKk|uw?xI>z1@VEM*a+@18ct19#16# z*KS-bdrKY>BYg1<9vXYXjoy2i(Pt5B!h3C;iW*i z#tE}%nx%efKzyU+1XQhpq*cj~a*T$9mFDr*|3!T1y1?Mc^nJ-BxVeI*VKj})qL(s? zN^(ixsr6T=6uRR;c+9v(5yoFDnIgVSt|w~>bE*Pp!1>|yL$_Py!7iEs|92*r9@3xSU>5{rh1B zDIJRx0^S&FUf0|u13tP>AA@VSH zJ`p>2B~G*Uo{^a{rVD6_#~qSKgx|PwzkfjC&_8V9YU{wRXKaE+BMbBoU}Kg(WuQr- z5n?aAZNpJGM}te3jk~D*{<{N_69z7TqXd9{j*<@ttm^SBw`mmoNk;7c_5N@$@-0Qd zH;+BSC#Er(Gi5YVoTr*mc17=}bf|1cWf#0`vD-Qi#?PT7(id{Cj># z+kerryvTmr$2a)&&k2wC_4QJ8Wq9!PpZRyZ!J$6EF2hArnrhsN(0YtTTN?ZKR#WQi zb%6SCS(r-YvLhF+q&X^zgSqh3m*f|b%|_@r$CK#e<+s^THnK>j!wm_LK9QA!1E zLZIg`_eb7Bk#30G0j|=6S@k)D!qXT9GicQTqP*6!_MJ8&z5E;q1cDj;oaDAX*Zr1w zgC?NR6eZQ53h7S>sZHq$LBg8_&qLD$5PdF*H_q;NA~BUBEwvjPWy2wJ)9sjs4#REJ zhlH;?)!SFK3HAP5`bp`zma?>gvr*^GJ zZ=N1w8m7I@Si;0!4+Njl*c-gUQWzjTeDsk@H|o4H8o~O|;{1jVY$50D@8QIb zj9>9HkhA=8G+^-=<;~W>aLZ4Qfc*<~x!yYBc|QXRX=5wIy9w<~5wJSIK*1pxu)sqZ zJV3aaYv*B9x04@sxEJFcX4>EoYoI1#0RyPG0*njWyHLc#+^YbG?f0uV9RRP1^zpZI&{RhyaHo)C@+mCDM|3bm)`v zv*tjN*t7tXo1c|>JKF=9)|Yy`Y|&Mq@0$gptZ3C_P;* WC5r$5YdWA9AV%9lYN6Spqx}zPR9c4s literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/compiler-wrapper-1.0/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/compiler-wrapper-1.0/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack new file mode 100644 index 0000000000000000000000000000000000000000..c3afd0b19e32e1a822b38cb937af0a5cf12b8ea5 GIT binary patch literal 5589 zcmaKwXEfXav&Kb@5`+*$FF};7C{YqiBzlQ%^)5=H1VOO6AbM}B6A@(fDB0C}w`%m> zdn|VMe{|-Ky$x05?SiYmGhC>jFr!P-dqzD0rRL@3bvcQO;T~;N zG*4JWPDFh_hC3 zI{9lsHr#;NohQBwW&1#KE#{Ja^;QQ}tho9D43>y<0ut&ryPPsuusdw5k7L{f*~{)n+4ds&BVQ&A3e zTM%y~SiGmOIzXndcfpCU5R3iXh$t3&vjn8S0vBMDzZuxy{Z}MyY6T1B=RUYA1AP7- z8~=^?J@9eBXzU1*trlxwh2tGSB%Y4mcYsN>V&6bGVDyxK10Pebys58*xtCm*I!Wbw zEEq&q1(rMgUYecra}jb87Al%Mjt34K1pm1qt|Y{T)*2!K;$FZmf$tM58({Wy!v?ud z-3tsK0Rq8*4k<2%8Az8eoG`wJaZrimWFtvaV+NkL0M+=oLF;QAxEILN(kX%)qpMpK zMr+Cu0h`V+5<~5GE`Y|a9GjM+x96r&s9ZXZm1K47W8~^=>U)drq8^2)b-?}#Kp_F| zalqC6pOtpG7E1I}LRhKAnbpct*D1927EoP5=yXA&ssX`pZip!tMhl|rV0&F9TCBr!GQbX3)FA zvtOt?bb?E7#KMpT8H7^v32Olxp_><`>ivF&()bEBrX*|mKnIkVMO+!BH*cnL+(%YY z!=VzJCfx5=HZwl#JyZO$ThCY{*~#x7+Eiz`;4ekdb{5o5YD1iymu=0BlRrdI%)7sv zowHgjk~&H`)=XuN&Sc{Pga4s;3rf4})wu|#eIHZEEfF5a#|Q8p=di$-kMh{Q-#WM& zNBfQAuUGte;TF 
z(_C}zeC5Bhm%6w=FeZ+?f8T@!^4;){K;(@ZulNF4Q~I!X+r-skUno1XOS4WH*U}X9 zGOIVEu^{yrl0AQrR?s>(AI8FDp#zWlqJ18Z!L&y&-t+closL zEfy5sxwjAl((?wr(bWE82Elf?_ zv{kt;6EeFaVWbj#N!8`alv_W(S6I>sE!${RI&(jtyM6d@^olAaKF_=rdbL`M*_tcI z-0bXZuC~mSs2Viq8kA)|fj9{`7uimK{^XZv7n#Ooc_(`@HRfiv-K|pVIrF;egHZX7 z*N^6bDka?Y?t$I;di&h1gTtt7ptkvMX&)gLWs9X&S;=dR6M!lHp(H^Eaq*g@EU%k} z43}f+7(Rx!R+LUKv}a>vOj2cAWpFjnzeg7*=pD@Y?fUwpAyjtRa3mhq29^e6J!sK)AdRbE*j=q#H*#mEoTa5o&4n_L?u#%-KWFEl>Ir7Yz@(NnmK&C6}<=r&1S z7F6~Y^IfmeL+K#8-02bGw{h|>TaHGM58-A9RY{BD0MQn@I^V?R+4Cr~!obw3);l*S z);EpA1|I`FZ-2rAI~z1-LJVvG!;NFWhdZfwi|Gj9lq2y!zoNWA-y%RceVkz6QrXjt z)NJS3O5NRizbgB59;W+r$4%@Ye;x;Gg{f&n%?y*0&GRc|yJ@Y17)2ZJpv-!MQp_^A z>*t%y3&o6el0Q60>W?kwAw{EL@AwXZ|EqNZQU}L zX?0r3VCv z2deFu_5zsP_|M3O1Q2iE?j=OH>~hPv$L{yUISmKMtbPqp?023bgr?P&_cKaUj59URb#O*P8L+7m5OTxCm(CZKw*h zj>~ImF4KznxEqj-m-q0Hq5kxv9o$iQWg*@|~m7TR8Z40dr-gwW`p;{NRE z*f6j6Xs$u(Z_g@{X+CN7Z-?6Y@pS4WZd(W7EruT7v09g=^r`(u`xvor5->RKLihd; zVTJy-%k9YTm6=Pkd1F#Q%ZVO~czgw<$tUquCjt+TCp@-~kGk@Jr2`y-23?)f3TUmO z5fn+qURgkT&cteHd*%NwV5siaC?HFtawdHUEP1Igopg7F2&fS*Y zBYKXxtQFzwfCd0V>7M0qm~{U_VJUn%4vj|4VWEe;zybd~N^`reZD{r)V2BSabYFo_ zufW2Q#iex|XbT_Jc&fVycg*qy4ng+MP5_X43_J}lp6*!#F8B}cv68Yvu~hc?HKs^t zI2eFG@OwpR>bhovl~*2tD<$YIV-+tGUX*b;wTx-w2j}RjQZD5N%E|=#ddi=0vxu1G zF^EkG%1}4JTwZc)myLqo=NU3NCJJXD1VqX~`xuDLtlCkvtVF(@I*Q6}a0tA?7oc!g4d+_D(?a{X(1&d!ATm zr4&|>pp%rNJ47yMB4D`j9Vn0eWzy^4y6kx| zcZ5_JsqbY<<|mN;aYjyyu!qs?;KvnQ6KC#)pGr}Zdey-+2xx-D(ODY8yJ0@`D2_hy zC`Ku1Si_WyK+Zu=DmBp%3Er_eQIt!8T8O}Kb`1%fgMy6b!voGDBfnSm$QoYvRh^+; zP-%FlWU!JHVYF>?y)wLdc%5 z0Wz{tWVZYle*7XOYRGf{`l#>OTLN3<%it}ec4{wYDGIzMfJx1+*T=B(0jNqR zM%U;uer5v9pBEsyX|7?0e7Bu`lcd{^5)5+8Vkyld??_%9o;H8uDqdg3amlSvFy&HT zZ?1bRZp_mC?zwTB`HYKLRvT=p$gJCCKvBub%l3=nSEdnB)ybi>qdtd;L^*$WyEBtk6mqFg;FK+1gJnyqmo_c)&%?9jkhOA% zgsJuxR41Y}&gf`fzqHgI^^;T(B+mX(OPtJshqRNUn7Mw*9l$j5Gq&Y6RYFCCAZ&tD zhMOzK+-}#ON%6NRuZj3wn{uMjvQqIXR3BxzoFI#CTL^-7VdQ&OQ@x;W?q!Cw%9*A> zR%1jPjk8<`p)?B!N|7?Kp{(Cem>D(7+GW(mzN^`BLQ~5-N_t?1zF#RcwZ_7O(?EIAGJj0^e0P%_9EE+;7Q(<0gC5PGq$|gF;P9EoE!d`;ABVRJER+_ zhnzT5chbe(xu-AI%-5CJl;KguDoeD&=?=wWEJR29SoZ4%A6Z(amHiP_Zy;GI&X$)D0 z)?4G(X}0nl(vm&sVu`PT2Jd3|N@XSaRLW}ksC}$bsx^Mo#{RA4`~4!0w9Tmd{M3%D zPeU#4C*2p z5VI{EAyAIUK!HLtj$}46@lrkso9Y*1ys4l%(LKjyNl*Brh|N}P zclQNbt_ulso&_me70>N?Hj_1(^k-`HXv-;0hx;*~q$#7q=@#P4Wo9j&s*>ZOw0*PalM_^Ji`;7RF41JSij3QplEQX$?#laj(92GC_&>L#XmnUQo%5p^v+BN|v=**fJ| zpMlQLp0lo@(-%iQ34Jon{i#wVD_O{8xZ~7}oJQ^rN^c+sEDald)>+U z_c@)>kZ=&WBmM*Y9M+5qdWYkW_%%5y!tHLI)QS&Q;E9E-L zLT!H};S0}ERzhb*ub-G`=ZI=?q~?{^7*}jR$#jl^Yg#dxVDsnhzq|-|m;x#by3(Zf zZF;5^`#kNarItx(S6kO=Qh!*mN1ZF?*BQP09U-Vs@FBFy+@Ve5%dD*2A@NsEtJK;M zUfX4m^^gjEzJubwt4Y2}5S?tH)?WV*j1QG$Vjkf*Z zOlnudprZRBZY%Fr$z|8Vf7|NfLAPfeWqC4n(}WMP!A=0dC)%w&mNB1u+)L}$$UyOY z-$k?WekobmQ1vVG1M_=Gi|Ld!HP0c(d3apP|4RDQ@!`@sJT}uBEHAdl&+sCRl1YAgZB+hX|pqkWEkjdBziWf+GQytD{TAMAzH{Y z+H?_VKbAxk%9N;7@oUy&h9j4!_47wRe|(M4n5~Nq^a`5On|WmXbgA<0vgsiIN%r{1 zxP{dPtOIZ4-a=>m-gyi*y)Rs9@HC=wZ%hD%G^?2BBp!_)9h71KnQj^lNliMA4*dvd zeCs^w0J|3>tOu;m;dc2=;b)SPHA}zTa9K@b{XgiK#AA0uTSyAor@A~sv@AZa|MQ7ba&pAGMif8vZ8Kf?<9o=J7MSa5bRhn037226 zYX{dtd)A^ft)MVP(viUlar#UZIbVAGvW5dQ1*B2RC)sIFKLa7vlsn(8fCq3RIv?oK(}v@fj3)9nHs|sscN{2 zX%tNJ2XYa>Ke8pB;w1^hE=S&oOaFK?^ofp`bVuO3>sDGMw3k(*dBB40kwo;L}FL1s_1v$UJ zYVc3HjRQR1@tovU17N1g{u^*0IgATS@b3R;uvh;y*EpFqFh-yWU}Aglp-jIUdIUU; zG#~)fegm^x|M`)Qq}LH5?K6Wf8}16u$P z%71uGjb|T>xn)!1D2LPI!2e3qUbDMtt^MaQ`sXGO@D(#oJ_rBn$1knu8a>_09%w$} z!6M84M>QV6#z%bnVtpUr`HickXr}bbnjF31e$_5K=j#GO_WEX6UZPXgD>w|!LXhZ^ zBleNoAkw#%j8HkA3@q`hT=dlR$?z+uEUd*H^IQtCWy_~yI_rkx49M|#K1D(TanD+h 
zDn#%8Dc$Wc5{TfRK^5RXd4wm(bWqbMXWy=_{R`y(>*en*SordZRR}GwW{#x$T36Kn SpCFj9U^a^GUOa|7c>e`JZ&Ats literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/gcc-runtime-10.2.1/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/gcc-runtime-10.2.1/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack new file mode 100644 index 0000000000000000000000000000000000000000..71132aebce6d20007a4e164e84d07dd65621aadf GIT binary patch literal 5077 zcmV;`6DsT}eo>r~e+RIUU+Md?M zA}~p)1Spr7)aCoPzk3f5Nr?4wT1=DKU6n8lo0)z2?LET)LFf$3n6Cf!6G;MU={nm2 zO>bxXZfSDsH=_T{M&*=5u^GZ8tGL!2QjqC?V1_@<2JWqYSyCjf|4OK+|Bmn4G>m?@ z9o%34a#Pc}{;Qx_`j6T_)&%aae?`${uKz0N2K`6PFG}ajhNp}+Wy$ULbal|$G(27J zYO%I?PxH9H{#%l&aQ#<757YnYy)57X`d3tr|CP`~^&gC48pRF2(}*lTj>QWnj;Ys9 z!fx3#?yY}C(`D}emC#Sne|D%jHiY}@zo|&v|0^MJU0nb3tvNiPCg}d>T;m44k9Dp{ z&C>W!R(~VIpVOR4T5|o%szw4c?rr}}Ai#~`I&J&H3_FJ7fyiCg==;{WVTP8pzVg^o zSYE0^?f1w3R!dbm{#QYBf&Y-~mGEEU_+JId>c->azaPXr20RXN{9kVWp9}m~X5hcf z@xKa^wdQ=_Kdai8c^s&gIQ}oU|IY*dtFz{RG>-pOki0o3`G1c8UCU~Oi5EM4+K?qt z5#@&St@|zbM%^zLrW2ZO=SyEbZ+5)6-EL}L-1KygIHB+LsTYf1BH!b2;by7| z|C^e2qyKBu^S>GXSK^aP?*ErU&j&5}k+3D$wC$K)Q>Y7pYsMWv>_Z_c5lUDskW`n( zP}rBVBG2qo+)qNVoVJfOnJ*dxFeJzK;Fv5*@Xw|LiPSn;Yg4f{RE<*ARurjeDXO*6 z-mp5Y%?@pA9j&Q%sA;VMpK}^|)GaXDHNy+XBQIGw`8zEqiYBU}v_(#P33*|9geXLj zwiInkZj!err?8{w|0SXC$6G@$X~?o7>*9tiYX7IMAMYLQJw4eoo*utyYQ||3j8C|F zY>iY)^AZ1N-2dU@ze-3^=Is8@4~YML({nn&jtBqS>G9*gtV*(6j{h=`|5ebXu>LE1 zKDfHDbt&|H8`2QDxuGg`!N2fm2z!d&(jd9iTXmru`bhvq5NfndWn)9Ws{h*kP7!Y@ zyWVz|Q{|3lxe4}p3HuJLm$KggZ}?wc%Fbt5T}T5Qoi48cqJUZ~mAw{%s>=Eug#hDp zGwQ+c`FQW2SFiRD{(617yZh{L|JgtK>FYNyPLGaW>^(nyb8rAdZfqT#+tM^eRWx}+ zY|2et-GnPe&Nn=wtLlOX^`^PPe<$PD2kN2z?A`Oz-KRU+`w!3Gz4`m2`s&$R_2t3A z{@$CHGi9u9Y-&<7m$A~)7DU9%ao5|4vE1y^r3SYdV!*~!)tc|0@9ZAFYwjs6eWrOT za2+kTP7EBsy6|BzH`@SJRgs(b6sdZsDF^CF^Y!cJe<`m{{{GPV@XyPSPd}dQe9-@T z_VLARk5~29*x{vLU7pz>v3Rc7dkm?zHd@-kNEKM)|DsUs(ZravI>By8QfrX~Yd(%p zTgsvc^dc%4ja`|EjIh{a=MY|6K_win?(3fAQRi-~7E3J^uY)T>qt&@Bixj z{%;j@Da1ZF4WvDyq^TRNrreTcS&^EOuGfVCvMQXP`gAE|E4@HkU5YEj05b}^5w(2J zhMJqAt~OhmDmOK$spzzU*{bD(Lqoc8RUh+#!FJrp9QK;sROy&2G){prF(hk>sw>JS zBrQsT?O8o%TO3P51=H$5X~%RUI?fPp&7YPk6`G6qugrM@cyqKeWQ$r z{X5ef#D9qPdMWy;RvqSmxho*30GHXri6Hjf}XfJ%ecV7 z7<7utz=O6CjiQ*M64@0U?1Y7V$7``G1jW7Ku&~!D{C4e$9Y6*4`-;f|Z>ShqdX@xA zU`H2v+!^#ey?fDZtA5~WP1jVDAq0x^P)+Q9Xxg@}MXj%XOX->0K&UYqfXz_Pa!MvE zjMKk?-5O|7)b;$YcHnl0Bd4qSVbX85JZ;eLeJzOrxW=O(H9IB}6zsqW;{?)5)(6Z# zP0xco8uh_W@8WUc1=Tx*l)rlYKZ-R;EHZ|dgScPkP-6Xx8{YhYE z^5hA+gdfJC=`m~kp>{)u&?L`fcZsv%!#dm6ar!d>$k_70y;#N2G|V^-oputZx7#hh zAAnZUE?N1h4;$T*YHLkA8W^K0Y=EcBO`aR9J)g+C{{#$PUp9}nNmf!!W@bAA=LW;IH?|;7QBb+=ITocFt z<@W!%!2i~a=YM$qUkNGt^5OsWBm6uPJpTNjtd;UVz<-U;|5ZY(WXBIiq0{Zf1MHerOyy z7WE?9IJ7DFTF0T`7O6cA@Hw1@B1+`VF?n@(y0)_7^aDSPN!Rt;`ENhUe}!~qr4#yn z(uWg$upp8hcm?}!>a5uCvpUT8#muMkihmT3C9&g*)a~R4ZR*mP8Xl!KwGFIjz|lJ3 zU>H&Sxw5iixn>lReakvdL8JCIy}%kymJ%WeLRP>8vA?rJ@+E~NgBb$t4$TnPq6)$a zJFDmSX@KW!6QLK!LA14wz$A9PMD)Y1kXAAy!}4vc^c){tN>7c=ZUiNtgnaGwSz5kI z3dlyXm4Fd3Lkiu5PK4AZ_Q*5)js=)IV&_EXPLMAeG|a?yV#YaNF1qbHUNRKwY+i1K#yO~(Uc@PKPg=$R(Sf(mQtF*h}eny~-&sG*8d!w(z421ND#@bmzM z*Pug-2Dro#j?4LZM~BajpB^9XonZ4=2@|~;nb4HNK3mgWECG=i(L_PMWV2WX`YP`C zE~Xhfalg3hP96T5n0B8nIK3{5q6wZ+Y_nCc*Cp{3lcarJFjgtP5+Q3bdHU>-l?I_d zaBK?F%~zU+F)E}`7vwfsvvBNB6)Io!sJAWrxz+q`P3^j7>$?^8>xvs&xzf@_*pcCT ztTO0@;PUBh*zwNCCrsJejC0v~=j_yU+4Z;*IC7do)FS4$xDXb+H>4eBnE59XT0wB$ 
z1>?bLpBTrp@lmJ4&J**N08j9NvFUe0O52ewoL%!vO*!*v;M%1zf`u_ItK|@|on5+q zU4I+fAdW41ZOoOrrIu_1JY*hs^VJzY1KThzH5gphio7E_$sshIh>{G`_QKE)Yr;D( zN&*B%fQs=V@=VxB{d3z1YpHlexLDy3G~M&#>vm0EERIJaWPG5^9Y1kxRuw`91{c#C zk*LSe39+*bnQN2Hrg0K_*VjeN#jLf??pvYiZfHVoC~0Ls67h%l#do|yhf*$Ull>|g zN3CoeYm*l!nlO~yJPQ_r;xJ6l9w+5Yo&dLJ6SB^wJZZ#JqVA;C(0Wq|mW3b+FCYL_=qn6nkt^B>L2l&%~@&RBkkMu*gTW`ZhX2 znxu+_w#?QQWX>oy;&(da&yYP(lEpF*Zb%>t_rNI#NX>VXm@@s8?G=fsB=_K4-fc6Y zSX9HViD9auw(xlv<*Dhhl*jiJM_ z^bigvP#Yh4GIHpy2`T8{Q%xAc@|1BZWQU`&a0=Ieh9cM6IKdfTW72}se4aebAG%_x zMwxQ}gRGg#%P;vIO-&SHpxYZ#D~ZjvOKbV*lZ~1*=bMtF!&F6Sa?}wl_V^Wyc|DtD zE#qMxO87F9gNWI}RUv7~^ln*c@BV}Y8b;FbXsmL4kZoGiFcwcje2&u{A&f;VA4p$cf_Gr?a)2nv|j4bgMEZ=>+jL zj}>OPvojv_1jre;2s$Zg{7)fLL3*;AP!jvl+;-AM%Y+2*cj#pj`?2r45twW8d~;maT#tJGg#mk_)ph>Yv9~u`@`kcj&xV`rZxHhPl)VRtpu0lk$0Wjp&e(}( zn_f)PCm6~2nb8pdaB<)zl3g}U011Pb=TsBfzw5U}{8O9ADzmOn4$S@#BmWN}16d3g zlvPZ5tzoc`rsd6cnW8~Uh1#E_GvFPzp9pK%qs~^ro;%R2@f=32Q_lzfMg%E0WLMj# zVN&qZ+|?kZ!#Rg^I%LWtuG=z}>^3A@!?;!(G7lJ{29bpw=MyTR^az;`MVwlTBkTo4 z96TB10Ai>!dzfel<2*B^nQFr9xf|>(8ww(y+&FI@pB2Vs*Yi1f@-TBe!uq;*^t_a2 zPa{u$3DL6Fa+@(i+NHxZnaqyYLI}ZaIQGeGd}|nIPd$v$927gv61dXI z9T?-$^O{u7g3~J-Y`nx^f9%d8W&}93&=Dc6uxF*v8PZSkSR?=0Ch|RpZDZ9;@`7Mc zlKqq|yJ-rJ-kRYElj0=6U7Oq867sY;wd@d?3*rgM00Wf zU!L*(N51~65>h1P*T4UdFW}%#{UGA&zn0to=K}wg^7>y{=lEX*De?k5|1tFy5grQe zPVwZF+?zQ5 zFSq~C1^)Bz|2-Nl()a(^v-u;#cB1sHBaZL!fN%>v{`+4gS(D1|e^qrp|5F8BMs3sU zj(w87bBC-yHW=UoJ*;7M!ruKjAI++#?~_em&IQeR7qJGESf($~g(D1=*xxz{scXj0 zfExMf{{rypq-TaUzE2`D@U!s&wSixlP&^RzjMyJvjPEmG2Qu$jmBXchJcc_*s`^yB&Y2x zBm{~KO0+3bCMj>(o(s%b=BrV6=mH9u|GB^%V>*JQBx-SbQ__?jFOe4G#s1=L@e~ZX z!%_O(4+aK-BnaLog2m4j&!WH!? zU+S(4&EDAVuZJ}rz5XoE06^A13b<$egDmk`jPUsN7X+5beVOKX1`3!^kE`L0F{Q)OQ3|apuU|s8PjodqV;PLCv3lh=)k-$3F-z%!Zo-UhW zO0jGRe3p3h`g6zwl*sx=0d$&9|JszV_Mogn`^WTg$GT6~oE7;6`_BMYd`DgVnBmOB z8()8x6{(Tz4st(O5%d)$#WFOY+4R;+&zZmsB3bsw>wk$M_CFGcgZ>A+w50z5L-cZNRJDZsEu(m^)moPqxN`3-Lh=IQX_%)M4j6$qE-Ksde9&(CaPGJY5k z{r|S*KNk95SfT$}qW_}+RuVQq|4*tAJFsquss0xw;T;u;OuljXkB9yjiU0pXz)JiE z=zrQ7$;uho2CN@qs{e`q9}&bs|08q19RKHu{*MCKY-V%xf6z%6nisllnL}v0^r>T+ z#4bD;qSF5&&)+Toi~b+t3YH=Me-!X7F|g3eT25j+5iDIInHU+et6BCCA@QOjj8_U# zst;X+^cFY3n(`3haGc(-S3)VS4DFAfiv{BWmXi+9otY4gDl={|Nxi@HCq`?2S!#OtouGs@W=)YRyi! zcv8>TDh28Ys?@%0QmjC+*=;tn&5P7wr$B+k?MqWTeh?4)&n?@3690tEG@*@BAcOn` zmXi>wZ(HNh1jPs}cbkm)+$L3Kv*S3xiVvF|a=v6|fV?8E!h=1oE!OMhPoUQRd@LP* zIXKCmvE41@vsdH4#?}&TVl{~>^5Kjmb zreFcjZw}Sa-;~#R3Yr(Rd*wcleqWAuKjrt@o#S0D|G7{q9+!&6<~~ zQ&0o~mz|JMWH*I?cg3ZiN5Z{wtzOu#l#4rGxTDXtWLxw3UaQf+bGm7am;3N_8@Lto#whw)9vaURmxG#6;lCV-Mw=o9qJKVFvt9%=56ea*P-t_dnRwo#Gbp1TRDjw#) zoRoJ@c3Pm!=Z}u^1!TYXxn}-zz4J-@B(4^$Y)G&qNq8lcMU1sJy5@xjpKmojgHq)S zXqC#9y;`|YZz0e9SlVwEIxFeQA{U95CWIBT8{=d?fUt(8$Ob)^kGXNPH zAR>#CMb;ItY%-x(W79=;=r%cRCt#deOEW~cEryX0FYsA`5k%w%a6G@VHRM{jsih_t zuygH7B9BHWX0yBqGFe6pHTyL=X?D7mLb;gh6Wf|>IB@D`+-sxbf8Zb<_Mct0{}QqP zQ2?9eHpl+2i1QN%;CT=O`!B8h{$C{aKMH_XBg=NFLLrwPSVQRV!P5tOpVG^`U|Xk9 zah-HX7_l%1Pe>r~a^wGh-+{3x|f5V(1Lbaw+?K04~OwsGVP)4fWAYUk8_`SDXNb=hC$VZ)Rg| z)XSCKZoASc<b?0-Mp~nT993QW5b$(S%3GyqY_FU$CtzeF2Nm|Ps zHA$MaxgMt0l*#bRg*g(H3ur2{)O=nzt6H4PTcW`fjrhLW$QMJH)?t0V(5i~zt`76v zIhMnSaJts4%!Cgoj*mmBGA7?Fm1V-IX|=}A{vvw>B=;qDJ-oWTj&>BZfx&wKXga#9 zU%+)VdVd>%ztXcT*Kuumly+RCYfvlZ_$sA9)3qEab(ImjA|KX2Ug*=;K&(WAlqEXW zm2<-nF4e)!O>fMf3gXSBq8R4`TOI---P6q{PUtB$HA6mw8BA5LHrA)-Zd#yOf%eoW zp3O8z8KT1e-xNo4EX}0+=?BxE795Ur{9hTQd74EH1^U`AFm~)!Y2V6RsB^922dIUt#)$M>86aM$k3no`Z1{hHsv9_8RrD7`3{k`sYfkQJ*398nx0? 
z5ZXT^f;; zMeZFHYx#P9w^R&rAO4*W{st@Mg}<0|;^%*?_sgQh&x^zKDYSYD?Y#zawmeCFKVkJ> zC!6d4z{2axF6H&wb*>#34s(5ILR(iT*;df*f7ve%WM!$dPNP-2XEFRT-yynRAT*~~ zf7OPoY{!0tBELxFlXj<6n=*C+@u>F)*|p$fk`(#rgNM>h#TctF^yr~cC&9-g=~wg~ zJQOms!N(*y=`DB+qy5U$#560^dnh^04n`3?hS9SI=SJi!wz! zv4yeEAs32<=P^Mn7g_8ynAxI0b&%pKb+GzFYn>W;GVSpdl^VKZ6Z#2MYjOV{kNEG3 z`(FWx|3(39HuC#l*L_f#gn*v`F^K=-%R!d!{~+<-NZ@b(^;iGl|Nr^>@9ls6QQiO8 zNbdi$HTv(p8lC#%&x4~6|JqB|e{VJa=g(j0%AbFH`oF)t`(J&0`SE4PO<+9`2m24i z756_9`yUCgnT`7XH+VaanEZ7?4D3JPIG))57lLmIEL8X~ad!w^89z)fT*0ryn3g(! z04#JSYIZZrM3NB_Z#{7<%U8%uFpHPXOm3B#b}_kK<^r0kWh2BcG{?FLE2azaK`srk z-q`krl^71M@X;;@`hS7<;Xp3&HJR`tT(av8k-?|JY-qY(Ez?pgQ^sYK(HPTv zTYLy?IMNMhhem@~cy6w~7E9dvr^K*g!lmY3T=V)unJ%h?TMEbBaEpHA4WBC047ra- zB;^S9qrU&5e`-*&t5~yMKdgL#i zM-^1U(9JWv6c@Vfj%CB24L*hU;8S+Vw##iT;*RmpA^v;#ji@vB1Brr4#UK!~V&U`52-F{tu*MYz?#KZqz5&t9SKO+G) z!)%WK@4rJvoc}cfx&QZV%YPj7KRN&Zad z{g06V0IyB|&#vbsmf(JfLHv*9ndSQ*$@$-C;3ukJjjnCIKX9ow79FbTzw1vG;RQ6T z5wsneYC}jB8qJedWp}SbX}SR^&9W&Ky0UIK^wv*XKT+*w@iY5Ji zZR$gQgjps9XohOEs9L4-acfJnts&(}H5~{nr4PLqT7TX`jWtZ;+J9Z0QlO1`F)4f) z(xEJk^2D6#K}8-rka7o*vd|x!vP|euJ;ax1^p;nqtLnBx<*466hnA^?_n?l4BaryI zwY8-fvg1(IpumTwKbVg=CQ4EGUW5@ITaHQ$Tz9mcPG4SL(jyyQ$fk-(V?sCW)uadR z&`5Z-#c7|=t$K3HYWXVc^Sb;&kv#F*3?iHatV>|@j%Q1nB^r^ zU^5J4MF4qG5i?m;>uHIPE9wJAn$%}l!1018WwLTlQK7b&UQwTAk(55YKEBqI;T}=n zYrsF2>~wJ(!Aaj~jz$H#=!1jsNeUlpPg02Rntm0mCI;6UD}xU{nx3q~$G2-KV&VUR zPdCi`mqKl~d4Z ^24w&Lw&gz-`hW5LKdk=+qW_}+05`fn{kGrX5Rjk#>P;X!Z@=8GJV`yIgDTCjh2l+4=D0ck#HoZ$?*t~KrY+kI`Vd5zXT)%z}Jih2T%_+77Cj~5X8piVrC uqeEw#`t557?JeR0&V^Sl-WhrLTVj_85FkK+009DVg?|H>j+nOq$N&I2m@gIp literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack new file mode 100644 index 0000000000000000000000000000000000000000..101ecf4f351db27027bfb5ba96177020119ae4ee GIT binary patch literal 3622 zcmV+>4%zV^iwFP!00000|LtAra^lDxpUS6{Um_}|QZus)Lbt$MR1M~^8FLx14YQe| zgwz7f2nkEV=Q)i}dZ z^3@ZAf)IJ${f1clUU4n(9RG^opSocPW80A|SY;S>MGy|0|Ivj5Y$su=30pQDhdk39 z2N=Dv)n5;5+ZfUax- z$pKx>fWxQj)UEt+@A+p~QKazvhkb50A~g8io$T6jgAKTUJy@xtey zrUhaoIfKj-D}uf(CuoXfNIJRo+oA&;lE_ z{@1l0(A5M*rbwDiSSJJKbgT;M$=R2oHoEL<%-QF2mS=`1;)MtK}7YxAo8z>P-OCj%YQWVKZE`MX96u!5!e5uJ(A=v z_%`5a5K;Y){r`{<1^v%bEA&5w^?w+k)7)n0f4_n*FithgG={)%$j`QEV7u@rgr)xl zj=Nd@d0|QaBl*Yv|I_0tHZbALns#hE=1*NL9vkVBqng$b649hWikk|KR3A8y=uWOr zHKZXx=_E~soB)Zgymn84WoxDZV`!3se+oFIh_`8x6VqZ!7FAxLrL@EeK;R@g)e}UO zqI&R;7vQ(}4wmx;SO(Ao1-<6kzcLI-aU`8M?2Qd)Of)NXqTVPJD)m-3->GIRr5te# z6yk$q5HwHF>1{f-%?ZR|D@QP~>*pTr=phRBpAp3+`_JI@zfeG@>CLkLL&?xoSUxuX zYe59;zsR%Pvi-;Ff1%(imW;UM$F{FxLsNl11}&smCLS}-3}C?!lY+hi&5DrJx6JWq zLZWz@y^cq?UtaQAzgU)`g}e0&x?GYyfW8o0;+Q?QDO9V)1E$hEYK!fU`<-m3nQQY0 zJDm^Ee{wYBn9(9f(=01eJjqijhUZeyL(?u{3>E1?^3}ctM#wk))>y#rojn4wYsiAk zK=XI(;glJU_ydt`bv`yaT(eW(J!;fG3QYYYCp3%YW}U6CVg`z&ItHbLVkGcfjv-8bwwtwnhFZ%Qj1`Qm5Ih^TmVe-ch4cSSd#|2jxhZ3>}-M z=}j9ubL^n8Bm0++8@@Wfcd)<9G%8$Yw^V6=XdLBoJDvS{t&wjW@=SL1@Ug++}nFxr9Sv}?bwPwBNBNb4fV%K$bN+{aUYL`V}Y#|xhlKA+c>TY zJNrCWs%A^<@m=)uMp(96*k;LL{PwwLq3e?{Il6cpJ7xJAmu=LJm_q3z(%YtIe=+#^Zz!Nssm)sdMT*46i@)na1lE5kp5>R;e<=~K|Av99m}9~jga(+TI9^DJ zf(Wg$K#3d!joJtrcy!zH<5kRm%nL`W56}~0s51gV8_1@i*lsZl)aNNqM7wy|v;Z&T zG1(j&4m6I}@o7Om=gf+l5rT!Gvw+gDL{_93noltlyRtOs>o|#}A{VH0<%%@VG9n`| 
zDKW*-sX(cp;*(N+zZ=FOo{w#-l5T^k*Yj1cVG|sHXxM*#+5Xem{)YiN74`Q&ZeIDt z4#49P0sAlVl!%}IJ`=$C$g~_Hm&+sv<`8&m0?GZogXA(VSmtLSJ9aW4kVu&QD=3s` zy3zj+Thn5*xxGR+UutG|%7qO5!`9fa0ix%om&I~+w>b?}vipT@9j(l?yY*~qZ&p;T zS}g5$o26DElT^TI(irRdG@F-n5-1-wyQONbe3&nE=VR)DOTXet`R(@lhT!|}r}ebk z!CKqx`R=Z4X1-I-cZ=C_c_*9O@0LqDjclW{oT5~178)%-zEo(=Mb78)`&bmkwe(RH z#aWr_;T%<-G`~2{5yc$r$}BV=7mOl2j&=&x zQRvsp@EwDmX*#xJNu#9gK$`~pHHw2I4fR;=@n|tXgnGLN0)jwqD>gS-f8XCszEVobPLs^QlnO#5%y}8!c-8N zb)+DX?LsbW(lO_Z&m^|nKxKBbjonP*SdS+xlsLYatOkXzTxfJ%CDPq56zZ8bZ*EW2 zDAdc@Tp{x&aSSZWv=Tohjtw*6US|lO7@*x)T(e%tWnk0aXFMKVOovR#HEJoj{ zi}}LN;cmBF+lBVH@i6^yV~M4F#&_#|e||A#3k_JS*3IVgnFMs;rJfEF1~^M7j?}kr zO5JQVN_CiC(d=6@KsM>FODjfJBd%a>Xi4rg^~Jjh!Nl4?{%9_9f4NlMhs-)DVnyr? z87tXpcDIoCWAFZ$2R*b(xGPLlGWOk1>wP{H`);wB-UzMkLUXSMT`gBm-*}Av-n)PO zGZI@*Y6-W-u6LmIA?srW1TI}Nti=DC1ig&zg-e2l z)&H6VC$;&PL9&;58kj}}Y7Ydb(S9rZ%OH7H;9QG5Wi}U>j|#SNN?1-3D1Js-NVd@S zIpRXnU_T~=D)xZDtm;b(b-d1C*3sJED zjIiSTS8V@70WEIK^I!i}DQxoBi3r$#niBE(AJ2uW7!oYB!MQsGj)X3z2X3JECJa-V z-vBn<6E!=R!;@r4Lifk0gyrKyCX~g4MkXhUOpBkKEOH7A#k3${7Z_vB042i#Xb+bP z*=}sPO^OYN=V(`#4ZKgr{jvcY`xKA4%Wsn7HjqLa!Yp7oZZ5-=O+!Lyua6b@%pn{k>`6v&AyH=zJLotMlaw#x z^OLg+Nfu9gC-UdhzMu?5W~l3&Jkm}w5V3pgZu_(uJKQmDLp1!q<@FzY{&P5>#n6BM zS^jpA8$c|9i#ztk@Vl9$5dsZ26CZ{%0v++5gAq|AzuP zy&>=a-gd+YHUtkw1lE5kier}bKVJV01xSXrw+Ldop1DoD2@2U8EL-0+u>F78^1sRU ze|nw~)=>`zzW;mK@*fHPPcN_kVf`Nt7>eI~{qLV=ht0tKf%X5(mj6iTe|E+5fBgI> z6fpEgJ^#P!d8XJD+!2vj|6BF@&kSDw3j>kR|IG65f6*e=|6zckza9U7c1So@|93?M z^gm70%lrQ+7VG~o@S4b(qYF#x4;-S21c<162aynYa0+yD1T32*ng9^FTD{XK?e4V* zRnq~Xniin|N78hg+AcmN{o$&C z>6DmKWJcsvAd4$=QQz}3ue4&18!;y#+=>3p5F|vTFo%?fu;Fx1rq(>&bQG18g?#3Ys{vY1| z`Fz-z{hvL}kgSWYqS=n6^~UJUF05`Ik0|(m472?Euh{>^dLO|11m zE_CRVTS?qIAZ`6s#63sS^F?6M4>@WtPFO&f&{2KeNjdA(VsT<1S&FV1U+k^|l<^2C zumWogP=r0`I_9+6tFM){=J{D?H?Q4cgd-0taTT@(>}}%5PXV$!1`9L?oIB{I!lhqh swT^*-fq{X6fq{X6fq{X6fq{X6fq{X6fq{X6f$;$R9|R}?&;ZB)0OHj)r~m)} literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json new file mode 100644 index 00000000000..3b9fa2ec229 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json @@ -0,0 +1,410 @@ +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"libdwarf", +"version":"20130729", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ 
+"deptypes":[ +"build" +], +"virtuals":[ +"c", +"cxx" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +}, +{ +"name":"libelf", +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa", +"parameters":{ +"deptypes":[ +"build", +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"u5uz3dcch5if4eve4sef67o2rf2lbfgh" +}, +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +}, +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +}, +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], 
+"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"2a6f045996e4998ec37679e1aa8a245795fbbffaf9844692ba2de6eeffcbc722" +} +} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json new file mode 100644 index 00000000000..7584a164998 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json @@ -0,0 +1,298 @@ +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa" +}, +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", 
+"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +}, +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"59141e49bd05abe40639360cd9422020513781270a3461083fee0eba2af62ca0" +} +} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json new file mode 100644 index 00000000000..36535520cef --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json @@ -0,0 +1,80 @@ +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], 
+"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"f27ddff0ef4268acbe816c51f6f1fc907dc1010d31f2d6556b699c80f026c47d" +} +} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json new file mode 100644 index 00000000000..fb3a00f77c3 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json @@ -0,0 +1,132 @@ +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"348f23717c5641fa6bbb90862e62bf632367511c53e9c6450584f0d000841320" +} +} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack new file mode 100644 index 0000000000000000000000000000000000000000..eb99e6b274838d045a6e1561139a55d3a21d879a GIT binary patch literal 9058 
zcmYkCWl)_x7p`&F;>C-*7Fyi3xVuAfcc(ZMcXyZK&W09>yL*A+4jX6h=kz=8oSBm! z`I9x1m1I`t&UJ?}4h81_HsB)g%zvS?<@(AUKr6g9TcC5@_g{DSo6b-#QY}(63$^mS z{nVb<>FKbq$bNkdy|{|E6im1PBjrZ^Zc$7<=1!5}DV_Uj`5G#`Ow)W<7>p|blT$LQ z60&DA?5#xD3b>4e)~fOMIOpNNx)C_mQ$LNpCEour*$*Y10nOqH-oG^>+}=6+Cdh|f zSKbXy9hsZay0`2l02(5GL4b+E=ylD=Vu3iRMLN}&CqH%J>ZNjzW?~UzXMc1O0!r#o+WsW~jd)gRPK(EN5&>#flh2+_5U&N_rYS35z$9~Kmq0#W#-u;r zk-03}1%PlOx(VJnnf~DL^oE`?`RvEL3GfrZ27$?XoRz@?r)%su0% z-S*55>~-er&JDoF`gTAE-o6+ZTxB@{lN>aQa5Q%;lV3-R%4F4wTdJ}MC%fhDhbxbrf;9~UUVa><0GTsyP-`p^p6kah(!28_P!Z7Y1 zHlAP57eQHayJPsrcW!&}`$iZQ8rlwl&odw|=$xROfHj@qhwN-z<91b#Ho5W?w8GP@ zubX&pQuP?@jZjjx*>N9S*3*u-dO8fecX>Pm6^+Ge>jUY25EIk@`}4P-DcB|3tFOz% zzVy1(Ve9Eb+P`u;X+um(O2$kLnDuM9P z4?3gCT3UUSyhevKZ(Q#{e)QxliQB@yuTOtegMi(!o|WLhG-DBgbYmg^2k`NI9P_)l ze0Ehmd3jd5TSyUS`g(qlgJAN-ZFJx_>CP|lE- zudsJ|{lSNGub_j<8Elj2sJ$Hy6S9JUfFKtyb-L&fBHAXw70<_vQKqvC;_cB6@^`Ha zp_i|6+c`umqX@b&7E42VGGwuB^eS||NRe|`=9#v+IT94ih8+XR?Q(Jm`-p>rl9 z3><|Z5y;amxRIaIL~+I>;O)-&;5m+g63aw57`o?qpj=9$n3I-0Skw90(pyN<+s7m^ zem8N>NFZa_Wn9|4hZg z`=syfC=|TAw80<6(T2SqD@Nh}5o7+`>T{#DR z8s67QEEM&7kO%^FU$3Dj6QJ$!(^rP5#KfIy*^&y)6Q7}6TOX~{OXyzN)rM090lj=l zQG-}0qrU7`AO8!D>kk{P=UpRJ&C9RrfyIoP*%}%kJiFMQ+rF}j&jz3H_2{IBUnS-? z9LFzBTvqjmg5?Qjfcev>My(LW*he-fI)RQZa+(AqTS(IIm+{|K^;Sh|+#qVp zuU~8|7z#QcIC+OS7C7HOZ!_ljy_?jFp`VrFy(7U^f1yer@xmE4wVj7OaM%nro6LQm zyRM7%EA%XoE6|2Be+_;NeJ{DD>(+<&&27Kpy$5r}f=tsC>p7ME22|kbG?;GsS*inm zt7%$xKEkd`4b(X9RMwr@ezAHvY*qRt%(D6SG`y|#Ys<;GkQau#gW(tY)H@g%yQuYn zn|phV*0wS=xmhEkc9zk{)&`HA}3T7^J`mTrg)S)xNC&pkU6j`&7g`H!3Roqz(Jj=J9G z$gArQ9B()?{2q0+3p0 zTOulvxuP!$&{>tyk^EE=O9n1SaM7=nJW*ehwabs%t12UuO9qL!DMTzuPLc?@Q>9pM zApY5H9#fhO7P4fRRx@dPS#m8WL^B^-#ag29#?lA+`7NHq;z7qL$5TUkRRD^;QCAh4 zQFah>+>%|JcJ237qClQJ#^jF_UE`+Q$BG44qcx?9H;Z?rabmGkYDKvfAEMTYRPdr+ z2qO1E_5+X*arbg@9jn4A>4sn4zT!2~4TN2kc%R|g`;WiQMKu1FpBrB&d)am@ap+v`_6PSz72}Yip1DFbZlK6bB7h!Z zw8T+=9`J+s@5}X(jH)Q$`JcWiOaG^FK!771RqW&88)y$pp^w-Q@&755Mvw7_V947{ zdI;lI?$gx-1&cZdg5saceGm+OTODxr)R+#VFuF}RV%2aceA&^90zvZacD{182)$ZV z_qZxD2KWu2SC21+!rzLP4Hb*F0mt^Y#t7&N^ERLg0&MB1#(Q#$07Y!lS|S9b|7Jyo zaCn!!ya5SfntU3-45d{28q46;DkTbJk7>f(9};hmTMV9 z-aAL{0lF~;FJi}XE6%X&?N`1xpSW}{vXxz26@A!$dYquD=1)s5iBK!Waj4SIVL00| zdlPI_$)2Qw5@?{k{e%mc6M>mK{wh1jwxBs|Jt~-NpBqz@LNDD0=bo|FxlWBNw4pmMIc9~(yaYKzs!@!fvykPF)DbRJ za1GVS^!%0R!|r6RLrbq%s9iXn)U)K5VUwSB=#glB32lZOLYvCFEo57S`A|Yf{EGhc z$!CGtZaZB{Ufsd7j&k2*K651C;GPM~f@i&~>s*s}xu$&ge<+($vFybiO8zC=h~P5K zDVU^`hDkaNo}M)_HQ+NOCGn;RniQv77mB*+Y}F7ypQrR^DtO?kxPqDJa3r+C{7X9Y z^b*aP9FDLXuUJSon(YPS_LtiIhNJ|q0c8Rg)>)gz_o{Urc6{0~SuU+T=+AdYJf?x~ z(Oip0f@IW=j+x9b?-8fzCRO4&4=nknB%P%K@FM~nxZ*#EZf>lRtj=*>mU>*2C0Y(V zYR=ud{t;8F4IPJ%y$n#aEEN3??}oOM+ed*Zcj;noO>C=vTJ_%OBmy>haPqyslX8Vi#qxK9p zF9^>tx`i3>N%*gul``+!A=&riW-kqEj3=GH`B=;hsz#r{5{Qo`VvR8u3g$HX?Cddv zPS&Vt^WgRqiFd}IR2LzU#mp}aOb^l;^k30W0*8;i7aJVx$lG>y+62hUcZgmsb=vXA z&SUL#!dRtGav=?6B#5n+r|{laqQxo`V{Yvy{BX7h3G@AqHe-^)=Ol7Nfe9bXX!ub_ z+b$kLUUR5j>l_;(AVa>h>(3*vFq(v?4ESD2nts^{7S1V-E@qp^`1N0YR{t)sOGUH) zL}~{1AI&a!H5kH_OOu=!3M`pX&xg&Qw_w=&&6jO?0QYVVT+p02NHu5YOBEi(Io|6Iof9uA*#G-=&JOmjUkEv=QN>+QT| zh#Mj1(@iOQU4fzKS}^I5rN3>uys2y%(V!G7H=WfN0i8vZB724Y#;U-QnxzrTQlM*eT&6L9y6CJ?{8S%p>7O7sNMskVNU}}pBHjf}EQc`HA z@q8up@Uhh{nrRr7f$C+LYaxs(u#$j0m3e9f3M>)TKHCq_JE>~1P1T+% zQusZ0yO9&H?@zz$p&_BIR^Dm3`mY6zX5^JSk4 z%*8bI^KB3I?bk{s_xT+?r$d>oNL*o9VaJMIp20btrtWvDnZ!I%^E2OCKhrX_yKeLx z5p~^o`0qh8rATjYh^O>}lTFirTf{j2U$5r66sGfI|840AdG#81NyoPk!IBC~Ynl>2 z5rpsD{G5Me%_&&x*2C9*=-6~>yq}44NM6V*##E(^w;#uAbaW;rRXK^wL7A5@r4&8y z^Za+r%!?!x;&O>3{qG6A?Bw! zMwZ$0HDMU2t){>S&nRPKnShhzy1GE(xeF(r`Vs1@?605RuBlhJ-#U#Jvd4RxpId3! 
z+3p%uX2}$KLPgsTm*+=M)6S(>r^p^wolZ%z4t=B&v)+%yt%?du~ zvYbsH7`852&dh!fSoPgdOY{?$7@zWh7vn!EH1OS)Oje;JUD&%uWfRt|?bBOJhRg z1`amD*NQ$?uqe6Ha2fpYt&+3O!k9w%z5(v9$L-vOZyhuLp56s~`f-8fn`;Z-#EW^j zy6Y7KepxU(^k4>fAk7rCHmS1R^n*8j7G-_UU}JmX^Hk7H83v9+`|Nxl`)9@Vd8&tV zjqwL#;Z|>UafOTGF0k0wYGnuOgxb8S_CtzoJd%XE9b_MLUJ$}4?ityCFcdx%rZjhL zF=I4y5@~!J;A?-*yd3(=h$2Mw6ApH}%j~?=rXY*Mk>>gqzdotT*G8k!I>Vwo`#@hh z!3Uiu7)*Es)IGAL@`+R&!EEe=GQHD8WcL{$&!S)W90M=Vp`hW6%-^)DGM5?FpGhT! z%UTpym6H%D^jJ{DQNND&jZW+t)6sNrNQ=C}sR{09-C%_ojJ;^U>Ygcp8SEv+Mm7ve^G7SFx?22#ZEtaLWZyp6@n2Q5? zG#n~J=yM5*u3@c^!1;b8nFHR5jmAs!@(P5z&F{LpoC-K%op5QhgnW2^|FUSKPr?sRe`m(K-1M9< z_KHBjyLT0yxv$%7sC2b>gjL|{Dd!N)uT505>Xk;J(d$B;hUQxw}L1I!1_~O@owMBpBdo|w%v)ju8@o4rNPqF((T&P$G#C~?1B%3cC0P% z8wHB=!gwn6?U~d*0Rh_I2g46)Ld*xVl<9c(ANuC2AR+pEygj^ckIq=iD!X{diE%XP z)8(%#L*dF^_24Vw328T(AJ*frdh$aVN-+Dj*&1i~#5KwC<3fR!O$UX+oZ}7p7OgI5P zYvg$$wBEils&l_cG30+MK0&&rQ~kG>%Zc3Ge0fg^`5J92J%h@{hwIg=UT4ppkmVd`cT|hxGz2z2Du-mM3c;HQs@r|;R>k1LRUA_UU!o&-9 z7dBJF@no0Dy(t#~2^S}o$EHD~8FRMFVIxue{fYaIzn{Z8uf9!xy0-&=g*f~0RG^6M zPs0#fQJHR~L@11X3)iA&5u$vfm}MqyK|w()UH`&E7@r>${t|MtVI$YeC%6^?73)u0 z7W^CpKKwIvGe_dZ;ZDU1R-K8$W8jD#t;FE$_M@hb^B5~13^)kJW{@88;vEM((AG@M zx5(zaVW$>;y1?2HXUB>E{$3SAdpU#G%!D{C?>|W{Bw$Su{kK{9kii}qS6QyX!Hvek zm^Giwl!=j?%_+83+Ibyo+;H5Fm-EOHY$N4TBE@Qm0K!D<)h`+SBe(``k zfv4UmOYQh}b&?=$=duy{U%11;5%WUE-w{c?rxW8s_dd(3qq;k2C?`?Yr7rzhW4!1c z;#PAL^Ub4?kg}8I-Zzo^0xPn7!E{#zVR0Le53Umil}~=FQLJsUOIAFIg2BGX)Uy{! zE-}ibxcE~t>|etswh>bA{?KPqGWyQBw;yi~n-ZqBpgXXvST}u7-y{eL3GP25=PNBg*@~FKW~}t@M}hPDRG+( zQo+uIyxbMb8>2>oz~@@)gl^-bPdCeqSSr|Y!>(LsBj>z$D?<WLMu|TnD!<49jzc@4o?`MWL7NMY5wa@76!bWKT zDjTEM`w{{i5n5syO{}n@__Zen2SKIvj^HPi{0Da6dG|q-nqrG|O2%KIRC5E2z5mEZ z%BSRe{RcMS9saLCq3^68)s$hGTN+^Jj9~|B;BlK_ddy|G`|uG(|G{u)vsw6H_d>hVN&kEJ+QifZLp;~l-J%vorJEg9E7{vuM&s@~hx;!<_K_1E9*)0j7ZdboJt?eM zWZ&K-QUr0BshL@kvc1^r;4*W$EOXSbmj=U@r_FKKZqqlJEpE;fu?$!&1l%3VsDEdN zwy<_*N!wAB9aStYGFm*CrD}clwN1^OT$K30?jd`pm|}ZBtjyMY$Gv}?rG(3qP!}n_ zQ#4FL5u&ZwJ|&43tavP@f(AN@&)_v5(Dgcxb=G2;<~<+Yhbi49^CS6K_JFf7!NoGs zH!#=qXQ+Ka;b#dM@7ZuG<GZHFZ9qTf0juHbx^V)P-T*}C+T0_-P3(|LdZZ<7W z>0!QwsyNmlp6%Sc2tjSxb<@XYiTJHScg2tX??XZ+5Q`B>8XOH-1*H8J3O!29GTh2V zJw@OzivS!+S;^`3iJKN_4tWNV_=+V3erR9PY^4W22RlOv5aRv`A-NQF0-QEk9fk)CcPpbPt4Fwb1FtQbQPsoN zAJY?UoSU-Au7Kc-jm#;?=jt2PF3MsHVF6!t?v}V52FEb|lu=mA=n#z2G?G;LAf6Dq zPHf*o@WYDN)!nb)5MJOK$ifCr_z$U(6~DOW6n0Foc!_qt_z|r|yoD2_r0@`8Vw4Ok z9WTcl<3Co#+8(IRs&lfX;TA`cVd6+D=C7Y*#bdk7y?V(%8ZN{mM0T3XS5mNJ<-ZOn z(kY_AH~d=Vlp`i2c~yvO={**OLCT}*zKq${AkhN8*AC)1~yKpLj&JV22zBEw0LOFhrCQ~Y&yfF0hOI|bsMpiCgA zK5cCvJ^=eF+bciVo^JYWuc#VUr^aEBBGHgn(26CiZL{>1uI7Qq^>FTk*h<*&H$5}$ z?Ae$KlcA>X)WttBI~>;*S@*(*i=*Q1UK_BDmQj}X-c0I!pe^c!|D0ASz18qL0NNzYHuY!$X&1Wm;?s-O19kRj&3X-tMz%V}rNXqi+|?wFAr zE-uc$(Bt)P3oPI0#Zq6Te2rTE8#OmNCVKjif<{LkrSkd**+hebCmQota{J6f>)L|O zf*ogGa`idCW*0}?X@i`X)s$wqQxr;#oaK^gPfx{C$Ggl)^T4Lk@_WHB#vYDoJ1#71!7M!j%PNBBC>vvvz2d@-q;1tk7w?3*90>;R=umD;7V zfhkj?dyuXuU{CksUxe!Ti%G-(_UoJx)C*PFHrKp@b%H&s0qkFcKmIR(bp%+tp{#{f z>lA$vtsh?*Bwva2Tmsr4p!d4y5R+Hn3ghw{Kv}cc4zQUS-3L1YjY!K9K-|IWjsygn z_>Y@!VnYhopniStHW8M!fRzWp&Qus2d;xT^bYepb5g{stzYH5;SN?r6`0orGA)?+$ zkAncu{MpmY#?S6kVEvz3*2RXH1OfJtx0R)kRkf=KXjHI7AIc_~9R&2>T5EiuI{zj| zkWza&xDR#%3j1@cmU%9c`u-98O+WEYwBaxJV$+yf5ggO zq@U^DS@_Qa`N>rV`XY4mFp}LLM80}7I}JZQA2lJcQ#*U-jo%ufI(vC49)h#vX92WE zRT-rBuQlf~H9a!FCi*J-*gSlaiPm-Q`-k(9=O-U*7S?>vi4Rvhv%b(DiQ^=mtCD`| z@0Zy(@NtuQtw{*3d?#+Ayr{TS-98fdlBZaKVJsIn%KLL9aTa`ae`_p>`EgP8|L<19 P0PI5)AR6W?EX@A_w@yge literal 0 HcmV?d00001 diff --git 
a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack new file mode 100644 index 0000000000000000000000000000000000000000..37023e4cf3cdc9ce0739adf10e474f74e3afb248 GIT binary patch literal 8627 zcma)>RX~+dx3%f+?rxColtvJc?r!Oj#!X6>bZ)x41qqSv4v_}w*pi#r@9{h5|IhVV z7xQk8F|XDfYdy5_XmJ01fQyi4lZ~#{hX)532KY7_dt+!QOEk<|)Pa32GqZzh?!xu? z(bGlD)6-=&E2T|yyZ^Ny)n&BP!8q2yhbIhRd0aU(_KM}?bv&$ z{ie7L+pus3zq5M_6uov3?_)cM$Ll@maKndAfZtK}8Fhkm{$;^$XuV>td9<`a@Wi!7 zqf;VXKHw(z)bsWJ>JwA58(=p#{R_A?rtpZkCoQJ{vgAZuLGC19>0Em3CFVAH4`%Cm znP%wzM%yiS`98;c8B{$3+Q%YoAgX0`;@zi<#u(~v|3vWmf4IMVDpLm&V*i3Y95feg zE(()Ms5-$|Re;@!{z}n+z?};o75*cqUB_oX1o+QY9YVY_Lfg7;P!-%ZPS=)ab*EP) zD4jdsL`ADjI^Lh{gBlGsfXU*F0TA!K>f?4b1VqlH^13k!TE#r$+wrX$0irhn0|a0g zc}kO=puy0-z2E))J9QQA60RdOpiy2FXzaSvXl~u zCrYwMr%Xv0oD@8l9~RH<6s^lzq>_2|Hh;ixM<(LExanegP+rMamaQCw_9mCJZ3j^2sp9!ggd?qV8aeokM`tmFkN2@+$wlR${b~$Vf`>EtEpW ztw%9ajgt?iw)yw5acLjmS0Rde7K9}Wjq-=-NIhDlx`AFu(=|E`C;-TdffNk*{Is>! zRVqc>9cXhFWOGho8HT26L12RY@S9#*GIp~N%h#@R^&?h@$X_}%`<%ZOYgd>u#(o_W zSm!jx^{6Wm&W@Aw7ihK5O*Po#w8k0m!`I;Jxl`&5S80gHDU|Mv#$Tb*_w+mLE0a&7 zAt)Q<`~$6G7zFaa|Fb^63mH9nb~o=6C6T%O1b$Zu>JRaK@qCz`hnQrmMw84eL=|$y ze?=o9ZS?7B`JH9e;uiAL#m?&Ha?*RAVrMq?;m-S{==tDcak00+47&MT>AwAPBO7>k zpb`jufeNJfm-|0kE1$5(`VSWdKa{HlosQ78oX>grAH2NnaW%rDBx}GPmq%{Fh_%F% zSsc07wBC5$ci8mn8FdF!$pk!TeZIGTDDUF@9DL{XaQ!pBAnm8u5n%wleR|81rZ~9n zw;DVvM`v-5&1_5Tw)BJ0to;0nxFH_?z9D7>Q+9Df=j)i2Syko?vlM6SZtwbRaqi=6 zUPQ9QsC&{qZXr0>Kgh%5cJHc}8hQXt>zvh0k}lNEAqxHYs?OfNhICuiwY}#+1o|c z6q1~lg44eF(8bS|6CU23Rf6IhHDCeTK>*}1x&W88=c)%J1HHBmo053_EFSE4**PSD z_A)8>x^uX7)=R2c*q^?AakteUbbkugF$#F_@w?ioZ{fM`*1HZ5*749PgP9fL;BZw6 z9;kg~V^f>knyF}E-^4sj%%a<0eVqSmhi>!xk16|A3m1I=gR)Jg$$@Dh7reo65R2M{ z{sEiMr99A=(+dpb+t9$(e*rXyp;u3^%JLMpy0KT>p#ca2ZNp+`~JJEWfABd5Ea9*F^pBORPuEae^3w6+zQUM>k!) z9QBlblDq?Kg1+Iuh3c6r9S*0{iulzyIEz9Ncec+c7Gl8#1{$wGMmVtr)6cxH%08mI zCXkeI7{ua4ReK#Us2Tltdq1G~Qk!Cb7-cG(L^z41{C(kJ%IFSP#(4f>iE~zqhg6W$ zK$w|~%S=BUp)?*_k}f4>BuJmM`*hv5A0V7d$}&GVR*CH7cnxoayt1Yujm-nKPZ}9t z)uK426y)_8%}lyFHMiaE>26eG2&U*h(k6L~`fO}y&yOB4H)^0BQnwpjk3Blh+UFQ} zi#t7j`v!Hp#l_;m=kYJ|94lFhB%+4XxlIi1eoT%i;)}on(mj0(ZfuAT@)H$S(~?$^ zvlZg|0(#Gw#wgu;YTIK%R@)NDXqq&Sb2bJCI-kE`t-wW$_5LWXJrlioNs)JzS`&mPlSy$5g;GEWSRMhNW*w`!ymw&cE$@oT|>*Gv~Y|*`2ZxS2>K(E;Rg17MA#q zGo8;px?6hoqxCI|7akV%L+$SR&yy(%Rqg~U)!9_m{rOdAhvhlS;ol1(FMtiH`$clE`GjvaAq*5PH# zRDq`Jmu_ZD*56tqg{GExZ@hU4k$TrHMWG+#Dp-D{3Fw^QyIODjS<3su=g8d;@n4K6nw2)_swQ$e}^_lv@*3?2cWM0<~r_9HjYb zaDu`oR}pH75H+DJ`FV=rE5{+j{@eGEePyHew!-3kO&3qeuV(Ub3=Gp17G}ybQYA!DfyqW*bSI3^K~zvPptEaZhKpiQ{QqMB z_zHUl(%%O-vopm#3<6grjAM6iJ>csHV4%uHy@^MQZJ?cK(7dI6#PeitkK{qQgYp&L zGuOe@lQ8URN$O-vOA01)4g-Nkf!hSvL}StqQo#QhTd;2fTr03`HF~YfEx#9@&3$cL zH18S+VD$iG3;^bh3BVm}R$JvbJ6<|4zrqKKJokSSC6m=K4wy1J14Kuc(N)7 zU2Q~XYUGEu;bQGYW;)@6;+-%3{mDAt{H4B}=6g8Z@d=rx2x_3!K7dDlM-!n%WbFGS zh`I89o7#JKsx_aU6qVLkwkxwt7OWR|ulZ0Sl=-WJI!{@89P_v(zwhmCxF&T(z}V($ za6A$eDb{`eEhd^x9;T)t@U5exV#63Ol!$!P56dQ3nQK?Pq29~1bl!4Mo&S$D*B-)V z=l2wg(s2ToY?nGQr2%X+ZYrpoAk~jb;`Hs#{I2z?XhuQgzhcy>ap~}t^n|EL{1Fb= zYvL%8csSjmHjaHirVr(yRFPDkZDHyN+;FbfLU|I50yC{KII<1nIjNbgldE`RT)M`StPQ*|pX%0W}*X*&p{FA%Y|;WFG! 
zrW#3xL=vv!PUtSt`F@gT{mvgI&%}KlQ#{<$QHnNAB@sZ_th4_+E+JyaptT;@ zC((?R4kyC$q~cv?nb6Wsg#ov8l@*@>K9(&Rugg)s%=TX+?)WyshSh87Y-8gXRq7bp z>&z&Y8Q*UpyQtiU{caj19F=MFk>();#`bp{KiIv7UQV??kMge=L%D`n&(G&{PK6}Ek-N+$$F-d z_JrIb?DYBCy&lpyZZ|*cDYabNcbEb^=mPF=G`&Uw^<-5Tj>f6uPx+v?a>Na5n&Txw$G4VN<2>Vik!&EFM74A>s9n@cn)U`qxHg;S$*alF(;B40HSjUiy zmlrXh2~ADAiB-jfVz;hxqMQ^OVEp>cg?u+D>+j>5pd1eHOPpxcy;z=Zd%`rC+Jp>gUuBv`$l#)KNWYXWCUP~!L zznf0mnHp*Y-ffTu^HH!Kk= z$Pjx^I>7{>=;WUMogg+3k$b{3Cv+ zruz7jl7jyhvO*3e*)?7s>Nl;t50s6OUgZzm4e|nZ{XbH$=>P7u81w}~ovX<){>U9v zk7M!bu3ODbxibu-a74Cs1@N~SxLa0|TnK$Cf!1KR42&-varFf2OKOnswPv3NcVl#I zuvMu~q$w3+rc9mgwQpBhs$8XiPow3Cu91C;l@j?YrGSdw_*2GASPQ zU9DA9dvA^(cp{oEE+czv>iaCOL@p~mjrkYfOjFgM&!dm3#Lj64%T!H0L|hhghAUaU zrkci{Io!Q`_?uxoN$xp&Pcy23JE!gmhsxXb91XZ*rdMyy+Mu<=G!sl?Ds#LdhRC+B z)#z^sF=Ij9lAo0pU1h`4#A~RHU*gjxoKbep5%bfVJNVaJSSRCn3xQEmmo|YN6J<{d52%p5&%I5xah^U117H z<94yzeXJpe(1qG-Psl;jPW1)FRk-r*0^9Say1avVydH~bXfN`m4g8Y8LdMV*?F(HY zkRjXq$jKGv-d#STicN9z%z@S(Vz0*GTAmuhMAWryUCUH6xT7>ALp0t%%Le1Z-Xjc^ z?S$<;Y!NIg1^7#Uvrr7L=KNh`@4Gp0M&1gBU>$FIg8tW0=8fjNM4BdplQ?S)DQMY? zZu<0{e(~3+j_gyUisosMpajEn-&HuC7lQv8Qff5k&5*y{Nnc)uqhfL{sj$)Vdz&ey zmKS|3Mvdab2hHYJ&7~_R_kG50+$I=eI)lC5T)6V@BrD#jSkfYaQbDteotZa}I;Ltr z+DT;wxD72ywX<=QB_EP@jE(P9u`1xMDUFK4A=;l(xZ;V)D#(RDQR_ZK_DN80+#g%l z!!myb^rBwRp7VEqmn@|=Y0WR$l2lcjAE{`0ucoQaU+8KOT8uBZJ1{qxC$}bN@tr+% z8}nQc>`|?i$#wA>YYhpqU_A1(_19%_^6%^?3h9y)F~mGbbdL?8}G=ovq~g z-g_p_f~7*wE#R$Knrb6~LZ`SP&bVY>jG5)5ouJ3gODW5g?fctuBtVPI+OtT2anCvo z#yT<#Y_k2)u}tg0Q1OrEKx`=JuFf3r%W9-q{;Cv*y2~G9N}G2SVNIj8H#{CZJ3zlN zs7_LKQz_c0{|O>(a6juW8RJT0+is2`hgqZK{cUbe6Ae$c)1T8aQ$=ibBL9=~4_==i zj$Z!PPlqtiWsmTM$VNSt772z>hD@V1W+X<&_6$*b&OpHL7zGpp0O-pKiFR-2|u2}3=Bv3UjxwU_gYER$s%SXi? z-QAp;no4ew=DWDKf57!iahq~Klil9^IO1u)nBDzDgLNOLtK$Rz>7}Y&bIMDxpR6J&ZBmqr6e^Yif~DUL8FnSo!|0(+dOnK34eZ z2c`pVS9+0w3Zs2GPf?-4o1P&wclREO+LK!X^OGodgFVv9zWG3T&u-cw!cK)A%4URp zf>CQb4UGOoEYR}cXi3SOUNUC}asX_iJq4MWzmX$9{o*NOZ2>vM`EPjwFu0}q;*lw5 zgsyV5u_HDmJ5{ui!&U%Sp&I6tT9jkn)9LCcZKmaXFJZHg!E(c`i&Vn;9jot7ihQw{=MtHj{2TK4j`L5H=hY+ z?GV_}{|o)YSHRtWJ~d-EQKNvnc`(8z3`#4@T>y;S$lSfks1=)t18Mmky$tI-55gK2?I`2D`+gc_E0RrZJS`1ry< zs7U=4m4SsPPki!8f5QqzLoxQ&FE4|QxJM|s3vA7U?d4XX#e|Rs8QZjjj}A|YsCYD< zH@$%rbewg19&R6PQRho5@3ybcaHpg0MIH41 zj9?v=7)5<%HT>=aUtElAyiynmVWkYk_?WMikG+Vo{^4u)DNkVgN`(v5&V^;}jmA@(y!9EA|)?okiXKl8C;v#3_ zEB-8ldybx7nbvn6<*g)D$S%mVX(1(4-rP<;Z#d1WM|($>f8;NsaKnr=i*eJcb>gA z)11D=yQ7p$XI3T^t2IvI?wtwPtJbP3Bq2)Mw-k<9b7M)0y%6bQ2IB~$Bbv>!tCkd! zV_V^K1M&0^bxS0+_jDe%X$H2qK5$S%X;Ds6V5qM{`F(93%67B!yK90chXg zCB_C{ur`F%H)n1buIbXS|^L z!1VDIa?4}$;df53(%Q+uPqRBq&CO@H+==xDd#0mmH4PTB>XFnp8m?h(Unn@lr@zw9 zE2i$~m^@CgDz|WcADg10HK(OqL(fi~aG22E)VCBXT@zgu{MLW$HmD?-8Rij!Q|Ey? 
zs}R0qDiPNiKKT@?Wz0{P&)>~u-0D2m^Bd~?Gmf(ZwH=H)_y=ck zm~ya-2}%boQ%bz8h`QDi=ofVZ(NM*fG(zy#uZh7NHn1~HVH3^}ae)9P?vd)S*0G-K zfRC3<&lO8cY~J^lU-j1g-(`}v>2l+tcE3XD#}Dp;i~pW}9}^X~R3A!UY%N}-jj_%` zEn5Dnar0qt+eU1zV33x)lo>)cN9x+McjZWILh)xWHF?WJSSo?yLr~20VRpu0zSTvx zxAlhGu$xu~w)Y77J7t+Z!#MemB2ykgZP1gO!HZL}i@5T*oF?QwG29M46IY0(CS0E9 zrL_QU^?>AheAQ$H3gQEv5Fyrp3_{?ow2zsDwAg?UC(9V+^@zx1M|+~>L1FFn(3$)h zz-803P55LTe2?Y@pvc3rEw8ZV5>ZgDL#g#c;YGc$rqOLGr_S}z8*&Q#A-`$NSl}YU zG+k=)@7#M3X#hcQeLK^;m!We_X%FB%6`n^Iogn>wds2jAC7b(`3MGXxd)kxEk-+sm zYV!Oi!JVo41nIon6rb;3<^@g0hb*wsddo}wJ;W|QS}yG7BAVb-OJ=7^YLB}`Gz($p zO|xIX>zty0F}Ja5DMCo3p}S!(oW1`E=M2w?(}(2+$JbUReSje+W{@iw#h}}`X=!(& ziw9+z_~!pPGbd|$)tg|y*h^aOtQ+*$W@}HH$~qptYBLM3iw4FTPbW|F3|@?;_lNTY zx7~MkJV7D-bh#iXC~*|Pq43U%K&}u=oc(o3+)7Iove|n3_H!ME=LGo;%9n=Wg2ifb zDYi@P7$-Z!Io&xfUHsxms!w=|Y6ZVe$P!4q91&|Hr3?faW$C%e7fvupqcLBK)R@)K zs2d_wR^QbHL@w>>*(K1z5B4-RH!B~mG5^LXD zO?Mp%Wh^YSTvTn6ZC+Rk041wArUE37yw4{j2e4b$z%a1%O4>CMa`iQ{UHt5@%K)8N@U>|hxWTMC! z3WOJ-**#m-{A>3yf%O}F(*aU^e}^Br&PV$?+aU6W(LS5W?qIjzciE%y$_vK`wAEV< zB@Z?i^oGCeb$VS#Wbs0Nx;H<#1ww`5UD~>u=QrKP!pq2d!|MEOf9gIVmN)tYQBN-^ z*tKr2-ug>P0v*}k)fIl6$4iZ`3HDT2k9}R)Jb!rl!hLU>cHBGBDKWm0H}Y1PNs;t< zpr~`5@2O%%;Qf#EW`z;^7Ml0V(o!B)?JP9?vPSK65GHOpKAbp!NT1Ebr(>C&0ZWYN`+x87fzz-4|7-jto#DE>9k zBz42{rAM_|btFAaq12Aid*87g;Z?cTkumV(i(_G~p3=?jbGdjz0k8IjJo(KYn(O+RT+JcwBr+n5 zL85rC?!z*F|Lh8h`1di)i%C@=q6V|~n}Iqv)%Kor$PAJWHd|zrEydm@mEeoA6l_^n XM_uUuUKT^j0o*Ga2*N~Mte{dlozygbdkxBeg&0T0J7CWn8zF5ZZ&hf%-Ik7#tveG z4yKn8N47PdPg*X3Ewvlb?0^#c0U!}&h6nU5ivH{wK8D}p6JCuPf};|zSdJvW9>W`N z0lgnR;81{W2yjR$X)(tKET)UT4u$^u8yG(TszZR7=Uo8m7}!n~i>ZDOM9QDJM-eU0 zhaTK|r4xAHdg>7m21@&^yUue&L}D%#;v&1{;|BBB35Yml5A^Q?@<`w_5jNl(7M{BS zNRtpP`S-%P&-^!HsPO{?5`f%Qz#I>lmBx<3?%oLViB7Zrgb(I)@G2dxaQpNA)c@TM z1HaT7{Q|U9J=gmZ+?R%^{13VVjHSi&__|LfRapqJ z7D@}Lj;=a8e~9>!sd)1$qQXNa1jR@R-W`*KRuRv)=vyPaP9Bg7B1DA3+CUG8rGko( z5yM(cm#_VeTv8ij_1luIfTwt<*o#4rPH|ntce;+0LTmxTH`ovA=jC0(p4a12z2v|a z_xWPK$m(@_6YECLw{1AJ7NV2q7EtxWaPHQ*<7%p0!4o-xF_{dyE}27s94C;eZsV%d z13^XASufvOvOxtDcubJh!{a#t`_niFn`R)wOT?`6+)XI+ElouDc-_jr9y+H7I!-^f zJo)W%@2NXvSvHTwwJW)qc#F-_7e^5z=QPc&Rn^|7buZft+?1l#rX&d|fGFp%8DL1{wOw-}22 z?8yuVuX%>7&1FE%zy!J@V<-ysjL3-SdL1w*((~5$WakHFd~s^zcwneiQ1Fce76vV}zONee7Uq4i^Xc15 z{%5-fA})?KbO&9)O)bu87aN7iv)7gHK&v8xRj?XxlTAo zY3TrB`Or;O{$We01y9LzJg$~S2mSQQ_-!3|6YbiVUL2rzeQkur06rvTAXgdewv|0> zp@+Y=_e!F&Hf9_CTm>r;-aS{3`r&88HvOSCZ&R?&dvW1>EQq-9Rp3XZKa`h~<%=hF z*9<{SCd!`q>|!W=JK{0WciAqS11cC=U84UJl!4eFKZlB0NTeys5h8oHmnqBQs7f4U zYm4X|W?LfKlEo|xc52%SNrRvEU$T&6`s(Dq8GxR#5N6FvM=Dz1?@}GB%bGe_s$^_}rKyQw-qHxX(t*8W+BE4rIMfo`2&1nMD?;T<|m!pgvg z&-+w7FM@VJ`!)a4OH%wl2@VZ(m__7q;MPRW^?sJGdRjBJWuG!?kgZx3XF_%E) z{lBDw=rlY9Vl)+Dl5NQfSWgIGLyGYf#$2O`c`Pi-@6fpkl&7JEF=n=R<_uPhDB$D{ zT}lH<>=M^Fk?nhYa_a_Mv#1Pw;Fa4^+~c#!5%&sh=&Cr$9iRnN{Hobkw}_694825iVi_TvYD9nB>+WjbZd4d<8| zV`&5+0tXIFe$zfW!|2*x64SQ#a;Q(>9g@rPoTMdGY^Z}s34PRsAHjStjxn3p$NYV% zeqT@UUQO$?el%WyHl2R}=RLlwf9JyXopR?QT6cB0#f^0@`6e_GW#cv)rn8opYA%== zFV3duNh7t1i$CJguYP~gFj$_nwWgOX#c6%nqoAa(9Kz!6;CxgetaRpnb!?Whe@gHX+ z%G6Xc@hy|3`4pwS)w${0=0j^;HAwg;3Bjh+2GX4f4 zE^0|{Xhk8E%I=j)5Sc}N5pL2fYMeDBmFXf+V#Iuhssr^aO%?{dP6Z9^=(C93qJ4vu zFTWxZoejyOyzvqtJUvd9Pu3S6e8=rSYr12>FQmXk7a?t1bp73Bzy%$)pNM3XLk(&I z!;C1Ui^3KhgVZM!aAzHd}0d~<}X zbKd(?MSR3tAEe~#)otS%%I<%_+1>1}x@jpgx19yzKWp$o zj>J)W);;F;^@u`)7HtmYNzHLJ{sa0!zRGspBAewpqB(QwN_*Mi3%JhOuGhztcLYiA zuOdFOLq*s@p@u2)Bl9{emmyCc9{!|gU~xlCuo_0<;FxnuJ-)pBaON!2FSm0rf5X>L zL8F)W@k5*&^kX@>69e%#PXtTic6`4sKL)5o)v58uJqYucMf6jbk079 zZ(OO?X^Qniv<8=yj>WGRBV{a-%4^C`|5`u=PGe6S-lRm@FFKa5B3KW<_qC}(a@5}z z^gOnZdn-QWo`9?97D2s~gPECDL50iNG5Cbzho 
zw;AzGarH%|oei#|WTq#7uakh*eZI9r2^d0R`R;JVIJ~{fkiCJ_Po>`Lw(R7-8EWaL zITI!`-pmTy$Rb?mo$NMe1XDEdv@$#=D9UMG79nP&t#|ZS9N_A|)K^C$OA9?7{XX8v zE*ykW*BG(UrccwD=nr|R)Ll^YoC@m(Rkb8xvB@lUPk|L zc!qmQDiS-KDkk(5XNZM!CUOduutz)aJt~TEhk}U^Pa=-&Y$V{*R4~(k`H)l*b9JCf zw;pa#4B>fecm1$P-%JIU2$N*3ptaN(KID)I>*#UgMs8?}WO zE?iz&H43gGjC|;r!=c1j^{u!n@J;d@RbTjpf3@?ns`7xw(#PIuMQm%TrB#OG%2&RXly@=R+DJF9OFP_B z{O)_9M4G~_wI=q~VeatF=&G?7^DCQC$B^U<<^D&fW*gtt-zN!V3uwa6If7lLY-Y5f zg^vo!^TgieJILFlg_si22*>z?at+OK16&CkM;?*=*6DC$pW)-xliut_$2NxdXR?Yo zm+c=KZw10~O}H5|a=}!puq@$w9tY-rR6%HQ75!+<(X;J`eo@j=M`crOg2nfqsN|Y& z^0wrseTMynJ<>o;f4$e1kGydUW3pZhmacS!C{)vqrN{aCpkK8K0}Xyzw>%=R;0(!&yHufKv+X$X zx#2~cVl@*zr{6;>gYHjN4weqa138;5vLil-_q7%Z-R|mmLWTLd7D=kr9#4wH2OVQQ zp8pxqQyQA*iSk^!UhMnT0P(_h&gEsMTa=d(--Ss^J+R8$)9y9Sg0NjB;ByD@e*w4n zoj>v2u%o`sB@%OF?tL$YONMJmJ@#U>@$wO_#e__zE%X@yFPYa+IYxFecK^aNs~Xf- zveE>puz#;EZKjKx=@xaNx|a=$uN9ieE>9XkJ-6_bAx{@;8dxZljIZsP`IyB)7)q>{ zW!h^cANLNNUA`r{Owwr)Z+2EuS-)82$YJl0{0safwk;w${8Qii;FTmhCBaGlx~1j09z_|> z5!-@o*p7*RmUt{9%NF4rCBDJ!@rFR?wLE5#v8a^FT9xq12qWFnscgq?)w=p=Rry=# z!eALfe0lgEY3{{391t(w&)%so%rHLkcROV`*a!79_sb;C*iv6Q?PaplcWq%fIH>;0?{(Dgo6RT}Uib2~A^rR0>f?`<}fXr;T_ zC!H;}oYUFi0uv=8?N9xT^FV2S-ag%Lek8eX2#|lj{@hgK*c@0OdizF^qeO)o;gS|Q z8Clg};^BAp5Kl|et&fV{s08fdFcR5i%hF*d^XPHV-O2Ax7X@|qFDMbdNB51@XqQFy z=)zKyZ}s?&)3@wA))MJ{D`+${YXq|Ep$iyGf9x@(95O~IWoL$*Tdzv4+wIl6GqEQr zQh1MgIYvpI<oV2-?DwoX${Kk#&SZ<8VCVsmzJXXNLI!@2^vaHGqd zzB{nRYF)l~8?Uu^p4iARv1$JASuM{bMkSN`W~5SDjhD{nL?g}OpHSrIgP;3K+O>() zXZe;j=4V^oT35~m_bRh7{Zoa#Jq>Ss*!Jp|x7X_~-=7#+|DqWSQ5mPbNj8DHVGVuQ z(u%FOLCafjtv_%XUkL~`yJ`zR;XLQ=Dm(z`>C)~A_9|Q5_j^M?ROBW~_LMb*`v}uJ z$au$4FA6%NL#Khd!)1}aegT~i?8b9pqUBs(YUj?>)(UTXO9_fK*o2FNaN~n!eDvaM z<%0FiHjV=V9U6ZwAM2ZW)+{L?#?a zckWdS9_5n83HD?0DoqO}OK5#Vyze*4*I;0wSXHPKz6&;Z7^ zN#GD}jbV3H;aSnCjsaEx8>(6Y?CCi`Tr6t=3>o$Lmh=$+A50H`{fj}vL6|5w|E0}# z_?Pjfsw>nT@N3fL2Yld?Q|Z4y5_00o)9~orhq`u?@qhKI>l;+~R89>*;UA1Wwf;A5 z9-g5o8VdLf+)xA9p}&jHe*ykonvRp;KBH~ugUIAahCn6XG4?K z`JMf15Suivo>(e%MMNW7%_${BQL)=XXA}X5_PVBoa%AlhT3Vinu+l3KNez7PDtuyX zJu!cH3=JH>DjflXX@Bk+}eo>r~e+RIUU+Md?M zA}}OW0+dTk>hk^D-@ONjB*c0-ZA_EdU6n8lo0)z2?LET)VdVDBgs%Vg6G;MU={nm2 zO>U?BZfSDsH=_T{MirFCi5bBqXK}4Lq%hZi-;92m4cuG*vZP2{|1+Vo{++q+8@@8!lqIj%)zyA$)9`h@ zqb1trJfg(oJMQ~Ng`gjNkaX0 z8g;6sac})Anl5wyp9%d0{pW}Dks;h)|4l{Gxc+BB;<~v0=Ua1dKuys7&$-48dLQXr zk(!n9pRE2y20y1cx0NW&*%l#xnV|@w7&A# zQdnM^h1&0r|E(6!|7SyUf&Y-~Rq$Wp_&*Dh)s4r;{~%0w40s&k_`lr#KNt9~Ou>Jd z;srkZwhrG^vuKwq8=2Y5}}0E0!ejf z0);&}FY?VE#r-q_%V`H#llh|22SaiLACAeQ1pjQ>mq^XgTAPZsp=y+>wxUQ)OHr+j z_J-xOHXYj59IdH4)U?)s&pC~J>XjJnn$d;plb5WV{GF8(MH5v~+9Idjl)Nx~LKLD% zTZ*G9*gtb)H+;=j`3@qZR{DXjm> zo)4}rY+VYyz=kwLZf>YbT?j6G8o{2Tw=_sD^;TWzL_r!t5ri6TQ`y*%uj;>czca*J z%C5KF!s{Bz#IOTm$LJDRu|C_N2kjxfHcn14F%Z7?l(N5tLlOX^`^PPe<$PD2kN2z?A`Oz-KRU+`w!3Gz4`m2`s&$R_2t3A z{@$CHQ)RrVsk%IpvC`5OM8wQ-*W0PJ_Pl@R zi+_DM-q+vn9KU;Z{OtWd+Ww23m&%Uv&#RX^M;~82Jvo}@i+W4aWvN;CqSjnoDHq09 zX3GunRuL72VmSPF=6U#X_x0|JX9q8}_q(kR@AqH*eX2FzKi}CseAnDlTKaVJ)FiDi z&ZfGt`K!ww#&Xl`PgNDUc~6O|hnjMro-|*-e*Ty8>g4Yatq=da{P^_a$<7D;uV)`$ zOqFO;Z(Zj%(yuPhe2iE=SL{B9R5x4AjfIgau*Uyoq1wZ-A?XYSyD2S>xlR$R`8Yyt zEsRhf7Q>h9t|zC0=HmXZ+N#|DRrvGYGa*G$7w-Nqo*VO+2LWcxwi=Ov%t(#D8VV^Pf!~|7SspJO}Up`q4Mac-X%) z%|ZMJG_S^gjj#Wj30(^4H8^Qt^k~9nUq&mg^(zRgfju7o#{+C>nK$!QC<2dWQ;yX& ztuS@*Tr09v1-7;o28;#%U|Sd4O#4ZkM0l!W^-SD&W~Wzd@kA$Nf7q3r z$o26o*Nd#sf*)UEtB(c6KH!v@Z92H=rCFNO#y?$C&NfQ6QL~LMv;DY@txjXhOaw@G zI@G6;nK0Q)$S|UQl8Gkj^*2P##9GivID#wor2&kog{!M9Q9`^#L66;>WnAE30y;%y z;6dAnhjBttiR_9lcEZBG+ zIQ^clcP=_@H3&Ve>6vOefIx8`sj1zIOxxDAxb-z?DP40L2sK7Suo>!GZpCDUQTErj 
zTYW8#JAUxh4!zD`=yuc~N_)+gul0M~uN5%>*JK!GX2(Q=g6+Fel0sI=`he-D>HCmJ zqdwS4LF9H^-}DUhF30t#G3Hfw&P3}Io%o^tCYX3`QxkrORd}Y~NweG8p9E$mPoAJl z1W^*1KC{LjYBzKMP4Z24mpC6itg~$$=RZ?`j4dDBi&czF!%ULMZKp|gyWI+UA!sG* zl2x2Kx*sJ=0&CRxfhSM014Wm{4(5ee z-&;%01Hp8fgZY0+(wkNM=lB0-Lg*k{D};>Z|F+2QWe@rtj+vpmMf$QRaVJ_h;_-jE z{eQvZ|L74O4qelUkN=k2|K|e#o7MM!D*XE|Ga*G=ocBN9^$|`U3$BUd|8o2PT;P9e z%JV-w|DOpd`tsrb^&|W|5pDQaX zmS@H>*|)6Y3^ZzQvkR=@WGNwnAmjy15c@kjq*zi&a+o2|?#PUAEvg`_u(P^BkA`^8 zHW9i>62@EW2uxzfPsJeW2w5dFHmtzLO3(4RrR>zi?8H#=NhsD{pJnB%q=alFUkMl! zGosK<=tM+qVh??@=URZdLv~Jl?uNypLBmXKH({KM<)Yi3>!$;uPOeLA_Ui4iBG|Ws zi@0b<&vbn-1|PWAgsy3VEU2)S9rH4ys0sUTj~c2dHG-%CY(P}+4^IzJcnvzRXoyQ3 z;kaC!cXasd`04TC-U&93l`zqpu?bBn?XxxAI}!30lc&!PS!oyreb=TS-D0I_ zl%PUNbwO_9HA~0-RH5=km-^enpIgoE*3_oI_-~F(o;s?L|=#)r5C`oQ4RD02QM} zI3@)ZWBypFa6Jlo- zGS|kNO_Mb8udj=ki&<-(-M2#3-Oz->P_oKFEaDIGOW^vY4rN@{#`{$KM%0X~N`yF|qN6v|WD`YzhXJHD)M< zd6p~$#bKDfJxa=%JOOUc#$;VcdEAI6MD1kN(0Vff9zTw>O5XS+Gi~M@tR8XWB5;8@ zKx2qy`oAZnjrQQ5V_8fZ9;;iQ!V(36u^gfviBkt;O)en%1<2#v4=w;80xV(Soj|Z- zsX@}s*yA56R@eftTU`)DM8%lmV-mGW6=s;i=eXh3Sld2}pQOg7c=lxNGXx(Heobf$ z5X6KA`;*ywBkMLRSzTR)Ek!rnj+1Dua*1jQNI7d72<wBZ^mG}+A7(n3DfJ*j&Zna?0y7GC0=0Kn49&$ zu*RyRUE@I`KUgP59c(i((a_l?#V(r^i9a>uGcm6fR~ro-Eb|eqo{dhBC8=VmEz@-c znKO!w1dc=gjMxJuc`O6rMg+2OADn`K)B-O}DAP~XUXhqe3J)&i-8N&2MK$c27-cGI z3!ev)B&>;$BSNZKTdNuxQahsN`Ph+aM7ddQtK=xKAk8D~Vb(NeedD;E;lA-*o4dU3 z*VTg!u6(6C8{SlO6wMH`l-d}}dP(@@K#~Qn>5L|V!3dovqQLjl7&;h958+@8wegWB zqk!(3kbw?9)r29eNEs(Wb~LOCr*sWyC~~ch5}eUB#w{qz=gHIJp(~bZR5^z*$eOCW z;*#Id)L0<~y1fCl(!^|gv{sxx-l%bNz9~66%v6*mM~+~z$FE?_>)R}A84dGL!k4)m zM9dbhN=ZwucgxCp_a`LKFp^G&BbDQWY?GRXv3MHcbDZ7~VJv3(P&Ss!vMm1`dq*x@ z&_%D&`C+-e@c-G(v#hkk|coUw$lz;CM1BrLod@LNP@tN!OW91B(*66K`j{Co^y}i+rH^(pP-4!A~CJ`od#!h|P^b?Xj!AM5W zj1B>S%L6Zw?6PSBNEpN-ry9%ty`U}PpW0YfnRk7BVD^U?2Y(1T$YQvltYXG%4TFU= zDQ~vR6b)J`)&3-%0q>~&L|DTfIa?)rcA#0KIgFZ9F9!Zb3@JBcSKFsiTJqDv)gYzA zIfraIWWpn^+cJ{uHY8hvq*fa+4;Y{Zk%gTU6Dpwe7?}@5oLWm_>;*&|JQ?HwVyH8F z7;6aQJT;}6Xu|Zl8|*9_3L>AvIBy=GmBwY)^EpNGFm*h_`nq@Yypm>5B2RG%(X!SG zn=vBVp@S@$%#YV12*GVQ_Q`B~YZzxwK7(jJi=XW$pWDTl`{vY~9WUqQQ${}+eS>pW z3wxEDLS7T`#Y`}?kP9|6OJnAf2r=~%3swu%!VD2wNbsRspM7@?^K>kRa16PLTfv?? zrjcL}w}f;y!$ojzA8e_T2@z;YoLUxzKr&(2w`QExpuY+Rc!BZ)$VJ#qE&}jB_ULU{ zva-lz%H}~`)+RI!+$~j7CMsovnTW&|bP(q_9k_~bxqgS$GcA0eywJ~x&ou;OX$H7{ z#q*J-2JTgIimDFsp^rJesRhMBZeh1$2zq-~9lgECB84<@8XLmYOkl5=;JaZ^=$NjT zqQtWX3#<(SHW{ol6WM=Da+<=VuTH6sb-uP1cQp~Cv4fvQgHOv z97mWG#{urz-0qf;XU(}a8NY8LUiHWOB;Z-2isbvdiT7jQbeSWXi~Ilblx&41G@PG3CuZaIT$NyQ7A}_%69}}Mu;i2H}G_Uxt@bzCa zp}D|+dCL2LIsVUt6lnpz|6Y9dhy(wzxxjyQ%KIPr^B*%IMc!N-{2%-5 z9S;U~p?SrBp8wB=<^uou{QslTLd^f;cO*I7KZxcP|9SpD8=4FJSEt}VfB*YTNRc-d z1OF$#Bh2H$ooHV1pXdLxq4~i7Df9m=j{mbDMOl#ie**v6<2XD9+)5n(m)rm60{{8^ z&!f>o%>VREzW(!pG_Uy2=YM8H^MU`>_y6(lf6s;#skIpR5C3Hz1nx~7|CihU=K}xv z_x~P^7U}zc?AiRGVY_km))6=Gc|f>@9{>HXlB`M9_rIzdpZ}Q!UB+$G?~Htsy>o}G zKQHh-o z>ZEH%Hoi|HHt@6YA+>>Dlu|qpca0<%UFGsNUXL%tW?az kG*On&L4xlyVC7dlNO0oBi4!MIoR*3HAA5!WmjLhp03TRJcK`qY literal 0 HcmV?d00001 diff --git a/lib/spack/spack/url_buildcache.py b/lib/spack/spack/url_buildcache.py new file mode 100644 index 00000000000..29caa91806d --- /dev/null +++ b/lib/spack/spack/url_buildcache.py @@ -0,0 +1,1239 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. 
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import codecs
+import enum
+import gzip
+import io
+import json
+import os
+import re
+import shutil
+from contextlib import closing, contextmanager
+from tempfile import TemporaryDirectory
+from typing import Any, Dict, List, Optional, Tuple, Type
+
+import jsonschema
+
+import llnl.util.filesystem as fsys
+import llnl.util.tty as tty
+
+import spack.config as config
+import spack.database
+import spack.error
+import spack.hash_types as ht
+import spack.mirrors.mirror
+import spack.spec
+import spack.stage
+import spack.util.crypto
+import spack.util.gpg
+import spack.util.url as url_util
+import spack.util.web as web_util
+from spack.schema.url_buildcache_manifest import schema as buildcache_manifest_schema
+from spack.util.archive import ChecksumWriter
+from spack.util.crypto import hash_fun_for_algo
+
+#: The build cache layout version that this version of Spack creates.
+#: Version 3: Introduces content-addressable tarballs
+CURRENT_BUILD_CACHE_LAYOUT_VERSION = 3
+
+#: The layout versions spack can currently install
+SUPPORTED_LAYOUT_VERSIONS = (3, 2)
+
+#: The name of the default buildcache index manifest file
+INDEX_MANIFEST_FILE = "index.manifest.json"
+
+
+class BuildcacheComponent(enum.Enum):
+    """Enumeration of the kinds of things that live in a URL buildcache
+
+    These enums serve two purposes: They allow different buildcache layout
+    versions to specify different relative locations of these entities, and
+    they're used to map buildcache objects to their respective media types.
+    """
+
+    # metadata file for a binary package
+    SPEC = enum.auto()
+    # things that live in the blobs directory
+    BLOB = enum.auto()
+    # binary mirror index
+    INDEX = enum.auto()
+    # public key used for verifying signed binary packages
+    KEY = enum.auto()
+    # index of all public keys found in the mirror
+    KEY_INDEX = enum.auto()
+    # compressed archive of spec installation directory
+    TARBALL = enum.auto()
+    # binary mirror descriptor file
+    LAYOUT_JSON = enum.auto()
+
+
+class BlobRecord:
+    """Class to describe a single data element (blob) from a manifest"""
+
+    def __init__(
+        self,
+        content_length: int,
+        media_type: str,
+        compression_alg: str,
+        checksum_alg: str,
+        checksum: str,
+    ) -> None:
+        self.content_length = content_length
+        self.media_type = media_type
+        self.compression_alg = compression_alg
+        self.checksum_alg = checksum_alg
+        self.checksum = checksum
+
+    @classmethod
+    def from_dict(cls, record_dict):
+        return BlobRecord(
+            record_dict["contentLength"],
+            record_dict["mediaType"],
+            record_dict["compression"],
+            record_dict["checksumAlgorithm"],
+            record_dict["checksum"],
+        )
+
+    def to_dict(self):
+        return {
+            "contentLength": self.content_length,
+            "mediaType": self.media_type,
+            "compression": self.compression_alg,
+            "checksumAlgorithm": self.checksum_alg,
+            "checksum": self.checksum,
+        }
+
+
+class BuildcacheManifest:
+    """A class to represent a buildcache manifest, which consists of a version
+    number and an array of data blobs, each of which is represented by a
+    BlobRecord."""
+
+    def __init__(self, layout_version: int, data: Optional[List[BlobRecord]] = None):
+        self.version: int = layout_version
+        if data:
+            self.data: List[BlobRecord] = [
+                BlobRecord(
+                    rec.content_length,
+                    rec.media_type,
+                    rec.compression_alg,
+                    rec.checksum_alg,
+                    rec.checksum,
+                )
+                for rec in data
+            ]
+        else:
+            self.data = []
+
+    def to_dict(self):
+        return {"version": self.version, "data": [rec.to_dict() for rec in self.data]}
+
+    @classmethod
+    def from_dict(cls, manifest_json: Dict[str, Any]) -> "BuildcacheManifest":
+        jsonschema.validate(manifest_json, buildcache_manifest_schema)
+        return BuildcacheManifest(
+            layout_version=manifest_json["version"],
+            data=[BlobRecord.from_dict(blob_json) for blob_json in manifest_json["data"]],
+        )
+
+    def get_blob_records(self, media_type: str) -> List[BlobRecord]:
+        """Return any blob records from the manifest matching the given media type"""
+        matches: List[BlobRecord] = []
+
+        for record in self.data:
+            if record.media_type == media_type:
+                matches.append(record)
+
+        if matches:
+            return matches
+
+        raise NoSuchBlobException(f"Manifest has no blobs of type {media_type}")
+
+
+class URLBuildcacheEntry:
+    """A class for managing URL-style buildcache entries
+
+    This class manages access to a versioned buildcache entry by providing
+    a means to download both the metadata (spec file) and compressed archive.
+    It also provides methods for accessing the paths/urls associated with
+    buildcache entries.
+
+    Starting with buildcache layout version 3, it is not possible to know
+    the full path to a compressed archive without either building it locally,
+    or else fetching and reading the metadata first. This class provides an api
+    for fetching the metadata, as well as fetching the archive, and it enforces
+    the need to fetch the metadata first.
+
+    To help with downloading, this class manages two spack.stage.Stage objects
+    internally, which must be destroyed when finished. Specifically, if you
+    call either of the following methods on an instance, you must eventually also
+    call destroy():
+
+        fetch_metadata()
+        fetch_archive()
+
+    This class also provides a generic manifest and blob management api, and it
+    can be used to fetch and push other kinds of buildcache entries aside from
+    just binary packages. It can be used to work with public keys, buildcache
+    indices, and any other type of data represented as a manifest which refers
+    to blobs of data.
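+
+    A rough usage sketch (the mirror URL and spec below are hypothetical,
+    not part of this module):
+
+        entry = URLBuildcacheEntry("s3://my-mirror", spec=concrete_spec)
+        try:
+            spec_dict = entry.fetch_metadata()
+            tarball_path = entry.fetch_archive()
+        finally:
+            entry.destroy()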
+ + """ + + SPEC_URL_REGEX = re.compile(r"(.+)/v([\d]+)/manifests/.+") + LAYOUT_VERSION = 3 + BUILDCACHE_INDEX_MEDIATYPE = f"application/vnd.spack.db.v{spack.database._DB_VERSION}+json" + SPEC_MEDIATYPE = f"application/vnd.spack.spec.v{spack.spec.SPECFILE_FORMAT_VERSION}+json" + TARBALL_MEDIATYPE = "application/vnd.spack.install.v2.tar+gzip" + PUBLIC_KEY_MEDIATYPE = "application/pgp-keys" + PUBLIC_KEY_INDEX_MEDIATYPE = "application/vnd.spack.keyindex.v1+json" + BUILDCACHE_INDEX_FILE = "index.manifest.json" + COMPONENT_PATHS = { + BuildcacheComponent.BLOB: ["blobs"], + BuildcacheComponent.INDEX: [f"v{LAYOUT_VERSION}", "manifests", "index"], + BuildcacheComponent.KEY: [f"v{LAYOUT_VERSION}", "manifests", "key"], + BuildcacheComponent.SPEC: [f"v{LAYOUT_VERSION}", "manifests", "spec"], + BuildcacheComponent.KEY_INDEX: [f"v{LAYOUT_VERSION}", "manifests", "key"], + BuildcacheComponent.TARBALL: ["blobs"], + BuildcacheComponent.LAYOUT_JSON: [f"v{LAYOUT_VERSION}", "layout.json"], + } + + def __init__( + self, mirror_url: str, spec: Optional[spack.spec.Spec] = None, allow_unsigned: bool = False + ): + """Lazily initialize the object""" + self.mirror_url: str = mirror_url + self.spec: Optional[spack.spec.Spec] = spec + self.allow_unsigned: bool = allow_unsigned + self.manifest: Optional[BuildcacheManifest] = None + self.remote_manifest_url: str = "" + self.stages: Dict[BlobRecord, spack.stage.Stage] = {} + + @classmethod + def get_layout_version(cls) -> int: + """Returns the layout version of this class""" + return cls.LAYOUT_VERSION + + @classmethod + def check_layout_json_exists(cls, mirror_url: str) -> bool: + """Return True if layout.json exists in the expected location, False otherwise""" + layout_json_url = url_util.join( + mirror_url, *cls.get_relative_path_components(BuildcacheComponent.LAYOUT_JSON) + ) + return web_util.url_exists(layout_json_url) + + @classmethod + def maybe_push_layout_json(cls, mirror_url: str) -> None: + """This function does nothing if layout.json already exists, otherwise it + pushes layout.json to the expected location in the mirror""" + if cls.check_layout_json_exists(mirror_url): + return + + layout_contents = {"signing": "gpg"} + + with TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: + local_layout_path = os.path.join(tmpdir, "layout.json") + with open(local_layout_path, "w", encoding="utf-8") as fd: + json.dump(layout_contents, fd) + remote_layout_url = url_util.join( + mirror_url, *cls.get_relative_path_components(BuildcacheComponent.LAYOUT_JSON) + ) + web_util.push_to_url(local_layout_path, remote_layout_url, keep_original=False) + + @classmethod + def get_base_url(cls, manifest_url: str) -> str: + """Given any manifest url (i.e. 
+        """Given any manifest url (i.e. one containing 'v3/manifests/') return the
+        base part of the url"""
+        rematch = cls.SPEC_URL_REGEX.match(manifest_url)
+        if not rematch:
+            raise BuildcacheEntryError(f"Unable to parse spec url: {manifest_url}")
+        return rematch.group(1)
+
+    @classmethod
+    def get_index_url(cls, mirror_url: str):
+        return url_util.join(
+            mirror_url,
+            *cls.get_relative_path_components(BuildcacheComponent.INDEX),
+            cls.BUILDCACHE_INDEX_FILE,
+        )
+
+    @classmethod
+    def get_relative_path_components(cls, component: BuildcacheComponent) -> List[str]:
+        """Given any type of buildcache component, return its relative location within
+        a mirror as a list of path elements"""
+        return cls.COMPONENT_PATHS[component]
+
+    @classmethod
+    def get_manifest_filename(cls, spec: spack.spec.Spec) -> str:
+        """Given a concrete spec, compute and return the name (i.e. basename) of
+        the manifest file representing it"""
+        spec_formatted = spec.format_path("{name}-{version}-{hash}")
+        return f"{spec_formatted}.spec.manifest.json"
+
+    @classmethod
+    def get_manifest_url(cls, spec: spack.spec.Spec, mirror_url: str) -> str:
+        """Given a concrete spec and a base url, return the full url where the
+        spec manifest should be found"""
+        path_components = cls.get_relative_path_components(BuildcacheComponent.SPEC)
+        return url_util.join(
+            mirror_url, *path_components, spec.name, cls.get_manifest_filename(spec)
+        )
+
+    @classmethod
+    def component_to_media_type(cls, component: BuildcacheComponent) -> str:
+        """Mapping from buildcache component to media type"""
+        if component == BuildcacheComponent.SPEC:
+            return cls.SPEC_MEDIATYPE
+        elif component == BuildcacheComponent.TARBALL:
+            return cls.TARBALL_MEDIATYPE
+        elif component == BuildcacheComponent.INDEX:
+            return cls.BUILDCACHE_INDEX_MEDIATYPE
+        elif component == BuildcacheComponent.KEY:
+            return cls.PUBLIC_KEY_MEDIATYPE
+        elif component == BuildcacheComponent.KEY_INDEX:
+            return cls.PUBLIC_KEY_INDEX_MEDIATYPE
+
+        raise BuildcacheEntryError(f"Not a blob component: {component}")
+
+    def get_local_spec_path(self) -> str:
+        """Convenience method to return the local path of a fetched spec file"""
+        return self.get_staged_blob_path(self.get_blob_record(BuildcacheComponent.SPEC))
+
+    def get_local_archive_path(self) -> str:
+        """Convenience method to return the local path of a fetched tarball"""
+        return self.get_staged_blob_path(self.get_blob_record(BuildcacheComponent.TARBALL))
+
+    def get_blob_record(self, blob_type: BuildcacheComponent) -> BlobRecord:
+        """Return the first blob record of the given type. Assumes the manifest has
+        already been fetched."""
+        if not self.manifest:
+            raise BuildcacheEntryError("Read manifest before accessing blob records")
+
+        records = self.manifest.get_blob_records(self.component_to_media_type(blob_type))
+
+        if len(records) == 0:
+            raise BuildcacheEntryError(f"Manifest has no blob record of type {blob_type}")
+
+        return records[0]
+
+    def check_blob_exists(self, record: BlobRecord) -> bool:
+        """Return True if the blob given by record exists on the mirror, False otherwise"""
+        blob_url = self.get_blob_url(self.mirror_url, record)
+        return web_util.url_exists(blob_url)
+
+    @classmethod
+    def get_blob_path_components(cls, record: BlobRecord) -> List[str]:
+        """Given a BlobRecord, return the relative path of the blob within a mirror
+        as a list of path components"""
+        return [
+            *cls.get_relative_path_components(BuildcacheComponent.BLOB),
+            record.checksum_alg,
+            record.checksum[:2],
+            record.checksum,
+        ]
+
+    @classmethod
+    def get_blob_url(cls, mirror_url: str, record: BlobRecord) -> str:
+        """Return the full url of the blob given by record"""
+        return url_util.join(mirror_url, *cls.get_blob_path_components(record))
+
+    def fetch_blob(self, record: BlobRecord) -> str:
+        """Given a blob record, find associated blob in the manifest and stage it
+
+        Returns the local path to the staged blob
+        """
+        if record not in self.stages:
+            blob_url = self.get_blob_url(self.mirror_url, record)
+            blob_stage = spack.stage.Stage(blob_url)
+
+            # Fetch the blob, or else cleanup and exit early
+            try:
+                blob_stage.create()
+                blob_stage.fetch()
+            except spack.error.FetchError as e:
+                self.destroy()
+                raise BuildcacheEntryError(f"Unable to fetch blob from {blob_url}") from e
+
+            # Raises if checksum does not match expectation
+            validate_checksum(blob_stage.save_filename, record.checksum_alg, record.checksum)
+
+            self.stages[record] = blob_stage
+
+        return self.get_staged_blob_path(record)
+
+    def get_staged_blob_path(self, record: BlobRecord) -> str:
+        """Convenience method to return the local path of a staged blob"""
+        if record not in self.stages:
+            raise BuildcacheEntryError(f"Blob not staged: {record}")
+
+        return self.stages[record].save_filename
+
+    def exists(self, components: List[BuildcacheComponent]) -> bool:
+        """Check whether blobs exist for all specified components
+
+        Returns True if there is a blob present in the mirror for every
+        given component type.
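+
+        As a hypothetical illustration, a binary package entry is complete when
+        ``entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])``
+        returns True.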
+        """
+        try:
+            self.read_manifest()
+        except BuildcacheEntryError:
+            return False
+
+        if not self.manifest:
+            return False
+
+        for component in components:
+            component_blobs = self.manifest.get_blob_records(
+                self.component_to_media_type(component)
+            )
+
+            if len(component_blobs) == 0:
+                return False
+
+            if not self.check_blob_exists(component_blobs[0]):
+                return False
+
+        return True
+
+    @classmethod
+    def verify_and_extract_manifest(cls, manifest_contents: str, verify: bool = False) -> dict:
+        """Possibly verify clearsig, then extract contents and return as json"""
+        magic_string = "-----BEGIN PGP SIGNED MESSAGE-----"
+        if manifest_contents.startswith(magic_string):
+            if verify:
+                # Try to verify and raise if we fail
+                with TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
+                    manifest_path = os.path.join(tmpdir, "manifest.json.sig")
+                    with open(manifest_path, "w", encoding="utf-8") as fd:
+                        fd.write(manifest_contents)
+                    if not try_verify(manifest_path):
+                        raise NoVerifyException("Signature could not be verified")
+
+            return spack.spec.Spec.extract_json_from_clearsig(manifest_contents)
+        else:
+            if verify:
+                raise NoVerifyException("Required signature was not found on manifest")
+            return json.loads(manifest_contents)
+
+    def read_manifest(self, manifest_url: Optional[str] = None) -> BuildcacheManifest:
+        """Read and process the buildcache entry manifest.
+
+        If no manifest url is provided, build the url from the internal spec and
+        base push url."""
+
+        if self.manifest:
+            if not manifest_url or manifest_url == self.remote_manifest_url:
+                # We already have a manifest; if this method is called without a
+                # specific manifest url, or with the same one we read before, skip
+                # reading again and just return the manifest we already have.
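+                # (Illustration, not part of the original change: a caller doing
+                #     manifest = entry.read_manifest()
+                # twice only pays for the network fetch once.)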
+                return self.manifest
+
+            self.manifest = None
+
+        if not manifest_url:
+            if not self.spec or not self.mirror_url:
+                raise BuildcacheEntryError(
+                    "Either manifest url or spec and mirror are required to read manifest"
+                )
+            manifest_url = self.get_manifest_url(self.spec, self.mirror_url)
+
+        self.remote_manifest_url = manifest_url
+        manifest_contents = ""
+
+        try:
+            _, _, manifest_file = web_util.read_from_url(manifest_url)
+            manifest_contents = codecs.getreader("utf-8")(manifest_file).read()
+        except (web_util.SpackWebError, OSError) as e:
+            raise BuildcacheEntryError(f"Error reading manifest at {manifest_url}") from e
+
+        if not manifest_contents:
+            raise BuildcacheEntryError("Unable to read manifest or manifest empty")
+
+        manifest_contents = self.verify_and_extract_manifest(
+            manifest_contents, verify=not self.allow_unsigned
+        )
+
+        self.manifest = BuildcacheManifest.from_dict(manifest_contents)
+
+        if self.manifest.version != 3:
+            raise BuildcacheEntryError("Layout version mismatch in fetched manifest")
+
+        return self.manifest
+
+    def fetch_metadata(self) -> dict:
+        """Retrieve metadata for the spec, returns the validated spec dict"""
+        if not self.manifest:
+            # Reading the manifest will either successfully compute the remote
+            # spec url, or else raise an exception
+            self.read_manifest()
+
+        local_specfile_path = self.fetch_blob(self.get_blob_record(BuildcacheComponent.SPEC))
+
+        # Check spec file for validity and read it, or else cleanup and exit early
+        try:
+            spec_dict, _ = get_valid_spec_file(local_specfile_path, self.get_layout_version())
+        except InvalidMetadataFile as e:
+            self.destroy()
+            raise BuildcacheEntryError("Buildcache entry does not have valid metadata file") from e
+
+        return spec_dict
+
+    def fetch_archive(self) -> str:
+        """Retrieve the archive file and return the local archive file path"""
+        if not self.manifest:
+            # Raises if problems encountered, including not being able to verify signature
+            self.read_manifest()
+
+        return self.fetch_blob(self.get_blob_record(BuildcacheComponent.TARBALL))
+
+    def get_archive_stage(self) -> Optional[spack.stage.Stage]:
+        return self.stages[self.get_blob_record(BuildcacheComponent.TARBALL)]
+
+    def remove(self):
+        """Remove a binary package (spec file and tarball) and the associated
+        manifest from the mirror."""
+        if self.manifest:
+            try:
+                web_util.remove_url(self.remote_manifest_url)
+            except Exception as e:
+                tty.debug(f"Failed to remove previous manifest: {e}")
+
+            try:
+                web_util.remove_url(
+                    self.get_blob_url(
+                        self.mirror_url, self.get_blob_record(BuildcacheComponent.TARBALL)
+                    )
+                )
+            except Exception as e:
+                tty.debug(f"Failed to remove previous archive: {e}")
+
+            try:
+                web_util.remove_url(
+                    self.get_blob_url(
+                        self.mirror_url, self.get_blob_record(BuildcacheComponent.SPEC)
+                    )
+                )
+            except Exception as e:
+                tty.debug(f"Failed to remove previous metadata: {e}")
+
+            self.manifest = None
+
+    @classmethod
+    def push_blob(cls, mirror_url: str, blob_path: str, record: BlobRecord) -> None:
+        """Push the blob_path file to mirror as a blob represented by the given
+        record"""
+        blob_destination_url = cls.get_blob_url(mirror_url, record)
+        web_util.push_to_url(blob_path, blob_destination_url, keep_original=False)
+
+    @classmethod
+    def push_manifest(
+        cls,
+        mirror_url: str,
+        manifest_name: str,
+        manifest: BuildcacheManifest,
+        tmpdir: str,
+        component_type: BuildcacheComponent = BuildcacheComponent.SPEC,
+        signing_key: Optional[str] = None,
+    ) -> None:
+        """Given a BuildcacheManifest, push it to the mirror using the given manifest
+        name. The component_type is used to indicate what type of thing the manifest
+        represents, so it can be placed in the correct relative path within the mirror.
+        If a signing_key is provided, it will be used to clearsign the manifest before
+        pushing it."""
+        # write the manifest to a temporary location
+        manifest_file_name = f"{manifest_name}.manifest.json"
+        manifest_path = os.path.join(tmpdir, manifest_file_name)
+        with open(manifest_path, "w", encoding="utf-8") as f:
+            json.dump(manifest.to_dict(), f, indent=0, separators=(",", ":"))
+            # Note: when using gpg clear sign, we need to avoid long lines (19995
+            # chars). If lines are longer, they are truncated without error. So,
+            # here we still add newlines, but no indent, to save on file size and
+            # line length.
+
+        if signing_key:
+            manifest_path = sign_file(signing_key, manifest_path)
+
+        manifest_destination_url = url_util.join(
+            mirror_url, *cls.get_relative_path_components(component_type), manifest_file_name
+        )
+
+        web_util.push_to_url(manifest_path, manifest_destination_url, keep_original=False)
+
+    @classmethod
+    def push_local_file_as_blob(
+        cls,
+        local_file_path: str,
+        mirror_url: str,
+        manifest_name: str,
+        component_type: BuildcacheComponent,
+        compression: str = "none",
+    ) -> None:
+        """Convenience method to push a local file to a mirror as a blob. Both manifest
+        and blob are pushed as a component of the given component_type. If compression
+        is 'gzip' the blob will be compressed before pushing, otherwise it will be pushed
+        uncompressed."""
+        cache_class = get_url_buildcache_class()
+        checksum_algo = "sha256"
+        blob_to_push = local_file_path
+
+        with TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
+            blob_to_push = os.path.join(tmpdir, os.path.basename(local_file_path))
+
+            with compression_writer(blob_to_push, compression, checksum_algo) as (
+                fout,
+                checker,
+            ), open(local_file_path, "rb") as fin:
+                shutil.copyfileobj(fin, fout)
+
+            record = BlobRecord(
+                checker.length,
+                cache_class.component_to_media_type(component_type),
+                compression,
+                checksum_algo,
+                checker.hexdigest(),
+            )
+            manifest = BuildcacheManifest(
+                layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION, data=[record]
+            )
+            cls.push_blob(mirror_url, blob_to_push, record)
+            cls.push_manifest(
+                mirror_url, manifest_name, manifest, tmpdir, component_type=component_type
+            )
+
+    def push_binary_package(
+        self,
+        spec: spack.spec.Spec,
+        tarball_path: str,
+        checksum_algorithm: str,
+        tarball_checksum: str,
+        tmpdir: str,
+        signing_key: Optional[str],
+    ) -> None:
+        """Convenience method to push tarball, specfile, and manifest to the remote mirror
+
+        Pushing should only be done after checking for the pre-existence of a
+        buildcache entry for this spec, and represents a force push if one is
+        found. Thus, any pre-existing files are first removed.
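+
+        As a rough illustration (package name and hashes shortened and
+        hypothetical), a push lands in the mirror as::
+
+            blobs/sha256/2f/2f0a1...          <- the installation tarball
+            blobs/sha256/9c/9c3b2...          <- the gzipped spec.json metadata
+            v3/manifests/spec/zlib/zlib-1.3.1-abcdef.spec.manifest.json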
+ """ + + spec_dict = spec.to_dict(hash=ht.dag_hash) + # TODO: Remove this key once oci buildcache no longer uses it + spec_dict["buildcache_layout_version"] = 2 + tarball_content_length = os.stat(tarball_path).st_size + compression = "gzip" + + # Delete the previously existing version + self.remove() + + if not self.remote_manifest_url: + self.remote_manifest_url = self.get_manifest_url(spec, self.mirror_url) + + # Any previous archive/tarball is gone, compute the path to the new one + remote_archive_url = url_util.join( + self.mirror_url, + *self.get_relative_path_components(BuildcacheComponent.BLOB), + checksum_algorithm, + tarball_checksum[:2], + tarball_checksum, + ) + + # push the archive/tarball blob to the remote + web_util.push_to_url(tarball_path, remote_archive_url, keep_original=False) + + # Clear out the previous data, then add a record for the new blob + blobs: List[BlobRecord] = [] + blobs.append( + BlobRecord( + tarball_content_length, + self.TARBALL_MEDIATYPE, + compression, + checksum_algorithm, + tarball_checksum, + ) + ) + + # compress the spec dict and compute its checksum + specfile = os.path.join(tmpdir, f"{spec.dag_hash()}.spec.json") + metadata_checksum, metadata_size = compressed_json_from_dict( + specfile, spec_dict, checksum_algorithm + ) + + # Any previous metadata blob is gone, compute the path to the new one + remote_spec_url = url_util.join( + self.mirror_url, + *self.get_relative_path_components(BuildcacheComponent.BLOB), + checksum_algorithm, + metadata_checksum[:2], + metadata_checksum, + ) + + # push the metadata/spec blob to the remote + web_util.push_to_url(specfile, remote_spec_url, keep_original=False) + + blobs.append( + BlobRecord( + metadata_size, + self.SPEC_MEDIATYPE, + compression, + checksum_algorithm, + metadata_checksum, + ) + ) + + # generate the manifest + manifest = { + "version": self.get_layout_version(), + "data": [record.to_dict() for record in blobs], + } + + # write the manifest to a temporary location + manifest_path = os.path.join(tmpdir, f"{spec.dag_hash()}.manifest.json") + with open(manifest_path, "w", encoding="utf-8") as f: + json.dump(manifest, f, indent=0, separators=(",", ":")) + # Note: when using gpg clear sign, we need to avoid long lines (19995 + # chars). If lines are longer, they are truncated without error. So, + # here we still add newlines, but no indent, so save on file size and + # line length. + + # possibly sign the manifest + if signing_key: + manifest_path = sign_file(signing_key, manifest_path) + + # Push the manifest file to the remote. The remote manifest url for + # a given concrete spec is fixed, so we don't have to recompute it, + # even if we deleted the pre-existing one. + web_util.push_to_url(manifest_path, self.remote_manifest_url, keep_original=False) + + def destroy(self): + """Destroy any existing stages""" + for blob_stage in self.stages.values(): + blob_stage.destroy() + + self.stages = {} + + +class URLBuildcacheEntryV2(URLBuildcacheEntry): + """This class exists to provide read-only support for reading older buildcache + layouts in a way that is transparent to binary_distribution code responsible for + downloading and extracting binary packages. 
+    Since support for layout v2 is read-only, and since v2 did not have support
+    for manifests and blobs, many class and instance methods are overridden simply
+    to raise, hopefully making the intended use and limitations of the class clear
+    to developers."""
+
+    SPEC_URL_REGEX = re.compile(r"(.+)/build_cache/.+")
+    LAYOUT_VERSION = 2
+    BUILDCACHE_INDEX_FILE = "index.json"
+    COMPONENT_PATHS = {
+        BuildcacheComponent.BLOB: ["build_cache"],
+        BuildcacheComponent.INDEX: ["build_cache"],
+        BuildcacheComponent.KEY: ["build_cache", "_pgp"],
+        BuildcacheComponent.SPEC: ["build_cache"],
+        BuildcacheComponent.KEY_INDEX: ["build_cache", "_pgp"],
+        BuildcacheComponent.TARBALL: ["build_cache"],
+        BuildcacheComponent.LAYOUT_JSON: ["build_cache", "layout.json"],
+    }
+
+    def __init__(
+        self,
+        push_url_base: str,
+        spec: Optional[spack.spec.Spec] = None,
+        allow_unsigned: bool = False,
+    ):
+        """Lazily initialize the object"""
+        self.mirror_url: str = push_url_base
+        self.spec: Optional[spack.spec.Spec] = spec
+        self.allow_unsigned: bool = allow_unsigned
+
+        self.has_metadata: bool = False
+        self.has_tarball: bool = False
+        self.has_signed: bool = False
+        self.has_unsigned: bool = False
+        self.spec_stage: Optional[spack.stage.Stage] = None
+        self.local_specfile_path: str = ""
+        self.archive_stage: Optional[spack.stage.Stage] = None
+        self.local_archive_path: str = ""
+
+        self.remote_spec_url: str = ""
+        self.remote_archive_url: str = ""
+        self.remote_archive_checksum_algorithm: str = ""
+        self.remote_archive_checksum_hash: str = ""
+        self.spec_dict: Dict[Any, Any] = {}
+
+        self._checked_signed = False
+        self._checked_unsigned = False
+        self._checked_exists = False
+
+    @classmethod
+    def get_layout_version(cls) -> int:
+        return cls.LAYOUT_VERSION
+
+    @classmethod
+    def maybe_push_layout_json(cls, mirror_url: str) -> None:
+        raise BuildcacheEntryError("spack can no longer write to v2 buildcaches")
+
+    def _get_spec_url(
+        self, spec: spack.spec.Spec, mirror_url: str, ext: str = ".spec.json.sig"
+    ) -> str:
+        spec_formatted = spec.format_path(
+            "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
+        )
+        path_components = self.get_relative_path_components(BuildcacheComponent.SPEC)
+        return url_util.join(mirror_url, *path_components, f"{spec_formatted}{ext}")
+
+    def _get_tarball_url(self, spec: spack.spec.Spec, mirror_url: str) -> str:
+        directory_name = spec.format_path(
+            "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}"
+        )
+        spec_formatted = spec.format_path(
+            "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
+        )
+        filename = f"{spec_formatted}.spack"
+        return url_util.join(
+            mirror_url,
+            *self.get_relative_path_components(BuildcacheComponent.BLOB),
+            directory_name,
+            filename,
+        )
+
+    def _check_metadata_exists(self):
+        if not self.spec:
+            return
+
+        if not self._checked_signed:
+            signed_url = self._get_spec_url(self.spec, self.mirror_url, ext=".spec.json.sig")
+            if web_util.url_exists(signed_url):
+                self.remote_spec_url = signed_url
+                self.has_signed = True
+            self._checked_signed = True
+
+        if not self.has_signed and not self._checked_unsigned:
+            unsigned_url = self._get_spec_url(self.spec, self.mirror_url, ext=".spec.json")
+            if web_util.url_exists(unsigned_url):
+                self.remote_spec_url = unsigned_url
+                self.has_unsigned = True
+            self._checked_unsigned = True
+
+    def exists(self, components: List[BuildcacheComponent]) -> bool:
+        if not self.spec:
+            return False
+
+        if (
+            len(components) != 2
+            or BuildcacheComponent.SPEC not in components
+            or BuildcacheComponent.TARBALL not in components
+        ):
+            return False
+
+        self._check_metadata_exists()
+        if not self.has_signed and not self.has_unsigned:
+            return False
+
+        if not web_util.url_exists(self._get_tarball_url(self.spec, self.mirror_url)):
+            return False
+
+        return True
+
+    def fetch_metadata(self) -> dict:
+        """Retrieve the v2 specfile for the spec, returns the validated spec dict"""
+        if self.spec_dict:
+            # Only fetch the metadata once
+            return self.spec_dict
+
+        self._check_metadata_exists()
+
+        if not self.remote_spec_url:
+            raise BuildcacheEntryError(f"Mirror {self.mirror_url} does not have metadata for spec")
+
+        if not self.allow_unsigned and self.has_unsigned:
+            raise BuildcacheEntryError(
+                f"Mirror {self.mirror_url} does not have signed metadata for spec"
+            )
+
+        self.spec_stage = spack.stage.Stage(self.remote_spec_url)
+
+        # Fetch the spec file, or else cleanup and exit early
+        try:
+            self.spec_stage.create()
+            self.spec_stage.fetch()
+        except spack.error.FetchError as e:
+            self.destroy()
+            raise BuildcacheEntryError(
+                f"Unable to fetch metadata from {self.remote_spec_url}"
+            ) from e
+
+        self.local_specfile_path = self.spec_stage.save_filename
+
+        if not self.allow_unsigned and not try_verify(self.local_specfile_path):
+            raise NoVerifyException(f"Signature on {self.remote_spec_url} could not be verified")
+
+        # Check spec file for validity and read it, or else cleanup and exit early
+        try:
+            spec_dict, _ = get_valid_spec_file(self.local_specfile_path, self.get_layout_version())
+        except InvalidMetadataFile as e:
+            self.destroy()
+            raise BuildcacheEntryError("Buildcache entry does not have valid metadata file") from e
+
+        try:
+            self.spec = spack.spec.Spec.from_dict(spec_dict)
+        except Exception as err:
+            raise BuildcacheEntryError("Fetched spec dict does not contain valid spec") from err
+
+        self.spec_dict = spec_dict
+
+        # Retrieve the alg and hash from the spec dict, use them to build the path to
+        # the tarball.
+        if "binary_cache_checksum" not in self.spec_dict:
+            raise BuildcacheEntryError("Provided spec dict must contain 'binary_cache_checksum'")
+
+        bchecksum = self.spec_dict["binary_cache_checksum"]
+
+        if "hash_algorithm" not in bchecksum or "hash" not in bchecksum:
+            raise BuildcacheEntryError(
+                "Provided spec dict contains invalid 'binary_cache_checksum'"
+            )
+
+        self.remote_archive_checksum_algorithm = bchecksum["hash_algorithm"]
+        self.remote_archive_checksum_hash = bchecksum["hash"]
+        self.remote_archive_url = self._get_tarball_url(self.spec, self.mirror_url)
+
+        return self.spec_dict
+
+    def fetch_archive(self) -> str:
+        self.fetch_metadata()
+
+        # Destroying the spec stage here lets us avoid passing a dictionary of
+        # stages around the install logic; the new (v3) approach avoids fetching
+        # the metadata at this point entirely.
+        if self.spec_stage:
+            self.spec_stage.destroy()
+            self.spec_stage = None
+
+        self.archive_stage = spack.stage.Stage(self.remote_archive_url)
+
+        # Fetch the archive file, or else cleanup and exit early
+        try:
+            self.archive_stage.create()
+            self.archive_stage.fetch()
+        except spack.error.FetchError as e:
+            self.destroy()
+            raise BuildcacheEntryError(
+                f"Unable to fetch archive from {self.remote_archive_url}"
+            ) from e
+
+        self.local_archive_path = self.archive_stage.save_filename
+
+        # Raises if checksum does not match expected
+        validate_checksum(
+            self.local_archive_path,
+            self.remote_archive_checksum_algorithm,
+            self.remote_archive_checksum_hash,
+        )
+
+        return self.local_archive_path
+
+    def get_archive_stage(self) -> Optional[spack.stage.Stage]:
+        return self.archive_stage
+
+    @classmethod
+    def get_manifest_filename(cls, spec: spack.spec.Spec) -> str:
+        raise BuildcacheEntryError("v2 buildcache entries do not have a manifest file")
+
+    @classmethod
+    def get_manifest_url(cls, spec: spack.spec.Spec, mirror_url: str) -> str:
+        raise BuildcacheEntryError("v2 buildcache entries do not have a manifest url")
+
+    def read_manifest(self, manifest_url: Optional[str] = None) -> BuildcacheManifest:
+        raise BuildcacheEntryError("v2 buildcache entries do not have a manifest file")
+
+    def remove(self):
+        raise BuildcacheEntryError("Spack cannot delete v2 buildcache entries")
+
+    def get_blob_record(self, blob_type: BuildcacheComponent) -> BlobRecord:
+        raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs")
+
+    def check_blob_exists(self, record: BlobRecord) -> bool:
+        raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs")
+
+    @classmethod
+    def get_blob_path_components(cls, record: BlobRecord) -> List[str]:
+        raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs")
+
+    @classmethod
+    def get_blob_url(cls, mirror_url: str, record: BlobRecord) -> str:
+        raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs")
+
+    def fetch_blob(self, record: BlobRecord) -> str:
+        raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs")
+
+    def get_staged_blob_path(self, record: BlobRecord) -> str:
+        raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs")
+
+    @classmethod
+    def verify_and_extract_manifest(cls, manifest_contents: str, verify: bool = False) -> dict:
+        raise BuildcacheEntryError("v2 buildcache entries do not have a manifest file")
+
+    @classmethod
+    def push_blob(cls, mirror_url: str, blob_path: str, record: BlobRecord) -> None:
+        raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs")
+
+    @classmethod
+    def push_manifest(
+        cls,
+        mirror_url: str,
+        manifest_name: str,
+        manifest: BuildcacheManifest,
+        tmpdir: str,
+        component_type: BuildcacheComponent = BuildcacheComponent.SPEC,
+        signing_key: Optional[str] = None,
+    ) -> None:
+        raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs")
+
+    @classmethod
+    def push_local_file_as_blob(
+        cls,
+        local_file_path: str,
+        mirror_url: str,
+        manifest_name: str,
+        component_type: BuildcacheComponent,
+        compression: str = "none",
+    ) -> None:
+        raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs")
+
+    def push_binary_package(
+        self,
+        spec: spack.spec.Spec,
+        tarball_path: str,
+        checksum_algorithm: str,
+        tarball_checksum: str,
+        tmpdir: str,
+        signing_key: Optional[str],
+    ) -> None:
BuildcacheEntryError("Spack can no longer push v2 buildcache entries") + + def destroy(self): + if self.archive_stage: + self.archive_stage.destroy() + self.archive_stage = None + if self.spec_stage: + self.spec_stage.destroy() + self.spec_stage = None + + +def get_url_buildcache_class( + layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION, +) -> Type[URLBuildcacheEntry]: + """Given a layout version, return the class responsible for managing access + to buildcache entries of that version""" + if layout_version == 2: + return URLBuildcacheEntryV2 + elif layout_version == 3: + return URLBuildcacheEntry + else: + raise UnknownBuildcacheLayoutError( + f"Cannot create buildcache class for unknown layout version {layout_version}" + ) + + +def check_mirror_for_layout(mirror: spack.mirrors.mirror.Mirror): + """Check specified mirror, and warn if missing layout.json""" + cache_class = get_url_buildcache_class() + if not cache_class.check_layout_json_exists(mirror.fetch_url): + msg = ( + f"Configured mirror {mirror.name} is missing layout.json and has either \n" + " never been pushed or is of an old layout version. If it's the latter, \n" + " consider running 'spack buildcache migrate' or rebuilding the specs in \n" + " in this mirror." + ) + tty.warn(msg) + + +def validate_checksum(file_path, checksum_algorithm, expected_checksum) -> None: + """Compute the checksum of the given file and raise if invalid""" + local_checksum = spack.util.crypto.checksum(hash_fun_for_algo(checksum_algorithm), file_path) + + if local_checksum != expected_checksum: + size, contents = fsys.filesummary(file_path) + raise spack.error.NoChecksumException( + file_path, size, contents, checksum_algorithm, expected_checksum, local_checksum + ) + + +def _get_compressor(compression: str, writable: io.BufferedIOBase) -> io.BufferedIOBase: + if compression == "gzip": + return gzip.GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=writable) + elif compression == "none": + return writable + else: + raise BuildcacheEntryError(f"Unknown compression type: {compression}") + + +@contextmanager +def compression_writer(output_path: str, compression: str, checksum_algo: str): + """Create and return a writer capable of writing compressed data. Available + options for compression are "gzip" or "none", checksum_algo is used to pick + the checksum algorithm used by the ChecksumWriter. 
+
+    Yields a tuple containing:
+        io.IOBase: writer that can compress (or not) as it writes
+        ChecksumWriter: provides checksum and length of written data
+    """
+    with open(output_path, "wb") as writer, ChecksumWriter(
+        fileobj=writer, algorithm=hash_fun_for_algo(checksum_algo)
+    ) as checksum_writer, closing(
+        _get_compressor(compression, checksum_writer)
+    ) as compress_writer:
+        yield compress_writer, checksum_writer
+
+
+def compressed_json_from_dict(
+    output_path: str, spec_dict: dict, checksum_algo: str
+) -> Tuple[str, int]:
+    """Compress the spec dict and write it to the given path
+
+    Return the checksum (using the given algorithm) and size on disk of the file
+    """
+    with compression_writer(output_path, "gzip", checksum_algo) as (
+        f_bin,
+        checker,
+    ), io.TextIOWrapper(f_bin, encoding="utf-8") as f_txt:
+        json.dump(spec_dict, f_txt, separators=(",", ":"))
+
+    return checker.hexdigest(), checker.length
+
+
+def get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, int]:
+    """Read and validate a spec file, returning the spec dict with its layout version, or raising
+    InvalidMetadataFile if invalid."""
+    try:
+        with open(path, "rb") as f:
+            binary_content = f.read()
+    except OSError as e:
+        raise InvalidMetadataFile(f"No such file: {path}") from e
+
+    # Decompress spec file if necessary
+    if binary_content[:2] == b"\x1f\x8b":
+        binary_content = gzip.decompress(binary_content)
+
+    try:
+        as_string = binary_content.decode("utf-8")
+        if path.endswith(".json.sig"):
+            spec_dict = spack.spec.Spec.extract_json_from_clearsig(as_string)
+        else:
+            spec_dict = json.loads(as_string)
+    except Exception as e:
+        raise InvalidMetadataFile(f"Could not parse {path} due to: {e}") from e
+
+    # Ensure this version is not too new.
+    try:
+        layout_version = int(spec_dict.get("buildcache_layout_version", 0))
+    except ValueError as e:
+        raise InvalidMetadataFile("Could not parse layout version") from e
+
+    if layout_version > max_supported_layout:
+        raise InvalidMetadataFile(
+            f"Layout version {layout_version} is too new for this version of Spack"
+        )
+
+    return spec_dict, layout_version
+
+
+def sign_file(key: str, file_path: str) -> str:
+    """sign and return the path to the signed file"""
+    signed_file_path = f"{file_path}.sig"
+    spack.util.gpg.sign(key, file_path, signed_file_path, clearsign=True)
+    return signed_file_path
+
+
+def try_verify(specfile_path):
+    """Utility function to attempt to verify a local file. Assumes the
+    file is a clearsigned signature file.
+
+    Args:
+        specfile_path (str): Path to file to be verified.
+
+    Returns:
+        ``True`` if the signature could be verified, ``False`` otherwise.
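+
+    Example (hypothetical path, for illustration)::
+
+        if try_verify("/tmp/stage/foo-1.0.spec.json.sig"):
+            tty.debug("clearsig signature verified")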
+    """
+    suppress = config.get("config:suppress_gpg_warnings", False)
+
+    try:
+        spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
+    except Exception:
+        return False
+
+    return True
+
+
+class MirrorURLAndVersion:
+    """Simple class to hold a mirror url and a buildcache layout version
+
+    This class is used by BinaryCacheIndex to produce a key used to keep
+    track of downloaded/processed buildcache index files from remote mirrors
+    at a given layout version."""
+
+    url: str
+    version: int
+
+    def __init__(self, url: str, version: int):
+        self.url = url
+        self.version = version
+
+    def __str__(self):
+        return f"{self.url}__v{self.version}"
+
+    def __eq__(self, other):
+        if isinstance(other, MirrorURLAndVersion):
+            return self.url == other.url and self.version == other.version
+        return False
+
+    def __hash__(self):
+        return hash((self.url, self.version))
+
+    @classmethod
+    def from_string(cls, s: str):
+        parts = s.split("__v")
+        return cls(parts[0], int(parts[1]))
+
+
+class MirrorForSpec:
+    """Simple holder for a mirror (represented by a url and a layout version) and
+    an associated concrete spec"""
+
+    url_and_version: MirrorURLAndVersion
+    spec: spack.spec.Spec
+
+    def __init__(self, url_and_version: MirrorURLAndVersion, spec: spack.spec.Spec):
+        self.url_and_version = url_and_version
+        self.spec = spec
+
+
+class InvalidMetadataFile(spack.error.SpackError):
+    """Raised when spack encounters a spec file it cannot understand or process"""
+
+    pass
+
+
+class BuildcacheEntryError(spack.error.SpackError):
+    """Raised for problems finding or accessing binary cache entry on mirror"""
+
+    pass
+
+
+class NoSuchBlobException(spack.error.SpackError):
+    """Raised when a manifest does not have a blob of the requested type"""
+
+    pass
+
+
+class NoVerifyException(BuildcacheEntryError):
+    """Raised if file fails signature verification"""
+
+    pass
+
+
+class UnknownBuildcacheLayoutError(BuildcacheEntryError):
+    """Raised when unrecognized buildcache layout version is encountered"""
+
+    pass
diff --git a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml
index e3679f87493..6959764bcf5 100644
--- a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml
+++ b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml
@@ -42,13 +42,22 @@ ci:
         aud: "${OIDC_TOKEN_AUDIENCE}"
   - signing-job:
-      image: { "name": "ghcr.io/spack/notary:0.0.1", "entrypoint": [""] }
+      image:
+        name: ghcr.io/spack/notary@sha256:d5a183b090602dea5dc89d5023fe777d1e64d9a7ddcb6cc9ec58a79bb410c168
+        entrypoint: [""]
      tags: ["aws"]
      script:
-        - - aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_BUILDCACHE_DESTINATION}/build_cache /tmp
+        - - export BUILD_CACHE="${SPACK_BUILDCACHE_DESTINATION}/${SPACK_BUILDCACHE_RELATIVE_SPECS_URL}"
+          - mkdir -p /tmp/input /tmp/output
+          - aws s3 sync --exclude "*" --include "*spec.manifest.json" ${BUILD_CACHE} /tmp/input
          - /sign.sh
-          - aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_BUILDCACHE_DESTINATION}/build_cache
-          - aws s3 cp /tmp/public_keys ${SPACK_BUILDCACHE_DESTINATION}/build_cache/_pgp --recursive --exclude "*" --include "*.pub"
+          - aws s3 sync --exclude "*" --include "*spec.manifest.json" /tmp/output ${BUILD_CACHE}
+          - |+
+            for keyfile in $( find /tmp/public_keys -type f );
+            do
+              spack gpg trust $keyfile
+            done
+          - spack gpg publish --update-index --mirror-url ${SPACK_BUILDCACHE_DESTINATION}
      id_tokens:
        GITLAB_OIDC_TOKEN:
          aud: "${OIDC_TOKEN_AUDIENCE}"
@@ -62,10 +71,14 @@ ci:
      - export 
SPACK_COPY_ONLY_SOURCE=${SPACK_BUILDCACHE_SOURCE//SPACK_REPLACE_VERSION/${SPACK_REPLACE_VERSION}} script: - - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR} + # TODO: remove this when we stop getting windows config includes added to the environment + - spack config add 'config:build_stage:["$tempdir/$user/spack-stage", "$user_cache_path/stage"]' + - spack config blame config - echo Copying environment specs from ${SPACK_COPY_ONLY_SOURCE} to ${SPACK_COPY_ONLY_DESTINATION} - spack buildcache sync "${SPACK_COPY_ONLY_SOURCE}" "${SPACK_COPY_ONLY_DESTINATION}" - curl -fLsS https://spack.github.io/keys/spack-public-binary-key.pub -o /tmp/spack-public-binary-key.pub - - aws s3 cp /tmp/spack-public-binary-key.pub "${SPACK_COPY_ONLY_DESTINATION}/build_cache/_pgp/spack-public-binary-key.pub" + - spack gpg trust /tmp/spack-public-binary-key.pub + - spack gpg publish --mirror-url "${SPACK_COPY_ONLY_DESTINATION}" - spack buildcache update-index --keys "${SPACK_COPY_ONLY_DESTINATION}" when: "always" retry: diff --git a/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in index 0ad46d5fc90..e224ed0eb6d 100644 --- a/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in +++ b/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in @@ -9,8 +9,3 @@ mirrors: push: ${PR_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} source: False binary: True - buildcache-shared: - fetch: ${PR_MIRROR_FETCH_DOMAIN}/shared_pr_mirror/${SPACK_CI_STACK_NAME} - push: ${PR_MIRROR_PUSH_DOMAIN}/shared_pr_mirror/${SPACK_CI_STACK_NAME} - source: False - binary: True diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml index 4bd9a184c18..83632dc95a4 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml @@ -29,8 +29,9 @@ spack: - signing-job: before_script: # Do not distribute Intel & ARM binaries - - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done + - - export SPECS_PATH=${SPACK_BUILDCACHE_RELATIVE_SPECS_PATH} + - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/ | grep intel-oneapi | awk '{print $4}' | sed -e "s?^.*${SPECS_PATH}/??g"); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/$i; done + - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/ | grep armpl | awk '{print $4}' | sed -e "s?^.*${SPECS_PATH}/??g"); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/$i; done cdash: build-group: AWS Packages diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-x86_64_v4/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-x86_64_v4/spack.yaml index 4597d187b26..75ef28076c0 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-x86_64_v4/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-x86_64_v4/spack.yaml @@ -33,8 +33,9 @@ spack: - signing-job: 
before_script: # Do not distribute Intel & ARM binaries - - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done + - - export SPECS_PATH=${SPACK_BUILDCACHE_RELATIVE_SPECS_PATH} + - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/ | grep intel-oneapi | awk '{print $4}' | sed -e "s?^.*${SPECS_PATH}/??g"); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/$i; done + - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/ | grep armpl | awk '{print $4}' | sed -e "s?^.*${SPECS_PATH}/??g"); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/$i; done cdash: build-group: AWS Packages diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 94d4457d393..ef2328868b2 100644 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -563,7 +563,7 @@ _spack_buildcache() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="push create install list keys check download save-specfile sync update-index rebuild-index" + SPACK_COMPREPLY="push create install list keys check download save-specfile sync update-index rebuild-index migrate" fi } @@ -651,6 +651,15 @@ _spack_buildcache_rebuild_index() { fi } +_spack_buildcache_migrate() { + if $list_options + then + SPACK_COMPREPLY="-h --help -u --unsigned -d --delete-existing -y --yes-to-all" + else + _mirrors + fi +} + _spack_cd() { if $list_options then diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 560f47193f5..a0640f70c4e 100644 --- a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -697,6 +697,7 @@ complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a save-sp complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a sync -d 'sync binaries (and associated metadata) from one mirror to another' complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a update-index -d 'update a buildcache index' complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a rebuild-index -d 'update a buildcache index' +complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a migrate -d 'perform in-place binary mirror migration (2 to 3)' complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -d 'show this help message and exit' @@ -861,6 +862,18 @@ complete -c spack -n '__fish_spack_using_command buildcache rebuild-index' -s h complete -c spack -n '__fish_spack_using_command buildcache rebuild-index' -s k -l keys -f -a keys complete -c spack -n '__fish_spack_using_command buildcache rebuild-index' -s k -l keys -d 'if provided, key index will be updated as well as package index' +# spack buildcache migrate +set -g __fish_spack_optspecs_spack_buildcache_migrate h/help u/unsigned d/delete-existing y/yes-to-all + +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s h -l help -d 'show this help message and exit' +complete -c 
spack -n '__fish_spack_using_command buildcache migrate' -s u -l unsigned -f -a unsigned +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s u -l unsigned -d 'Ignore signatures and do not resign, default is False' +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s d -l delete-existing -f -a delete_existing +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s d -l delete-existing -d 'Delete the previous layout, the default is to keep it.' +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s y -l yes-to-all -f -a yes_to_all +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s y -l yes-to-all -d 'assume "yes" is the answer to every confirmation request' + # spack cd set -g __fish_spack_optspecs_spack_cd h/help m/module-dir r/spack-root i/install-dir p/package-dir P/packages s/stage-dir S/stages c/source-dir b/build-dir e/env= first complete -c spack -n '__fish_spack_using_command_pos_remainder 0 cd' -f -k -a '(__fish_spack_specs)' From a3a49daf8f753462ad29c5bdab47f9193467d30a Mon Sep 17 00:00:00 2001 From: Greg Sjaardema Date: Tue, 6 May 2025 21:47:39 -0600 Subject: [PATCH 04/57] seacas: new version, change in name handling defaults (#50324) --- var/spack/repos/spack_repo/builtin/packages/seacas/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/seacas/package.py b/var/spack/repos/spack_repo/builtin/packages/seacas/package.py index 8cb6287724d..1c5fa0b4450 100644 --- a/var/spack/repos/spack_repo/builtin/packages/seacas/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/seacas/package.py @@ -34,6 +34,9 @@ class Seacas(CMakePackage): # ###################### Versions ########################## version("master", branch="master") + version( + "2025-05-05", sha256="9e9872cee4223482d74918b0f50cc3ec77791f79330915611a0e1d5691c15184" + ) version( "2025-04-29", sha256="2a3a1533a1fbff8e8b78814a3a45f6fadfb3f05b5d9d10a4f0452c7bb4d1aa2f" ) From 931d034da4b7ff1affa296b05ccd42186e9237c9 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Tue, 6 May 2025 23:07:11 -0700 Subject: [PATCH 05/57] test/patch.py: switch tests to use mock_packages (#50337) --- lib/spack/spack/test/patch.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/test/patch.py b/lib/spack/spack/test/patch.py index 4d0d4a15cb4..7afdd4b5f96 100644 --- a/lib/spack/spack/test/patch.py +++ b/lib/spack/spack/test/patch.py @@ -89,7 +89,7 @@ def mock_patch_stage(tmpdir_factory, monkeypatch): (os.path.join(data_path, "foo.patch"), platform_url_sha, None), ], ) -def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256, config): +def test_url_patch(mock_packages, mock_patch_stage, filename, sha256, archive_sha256, config): # Make a patch object url = url_util.path_to_file_url(filename) s = spack.concretize.concretize_one("patch") @@ -466,7 +466,7 @@ def test_equality(): assert patch1 != "not a patch" -def test_sha256_setter(mock_patch_stage, config): +def test_sha256_setter(mock_packages, mock_patch_stage, config): path = os.path.join(data_path, "foo.patch") s = spack.concretize.concretize_one("patch") patch = spack.patch.FilePatch(s.package, path, level=1, working_dir=".") From c88e7bc49281fb06181bd269453959c3c2450a21 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Tue, 6 May 2025 23:09:14 -0700 Subject: [PATCH 06/57] test/ci.py: remove 
redundant mock_packages fixture use (#50331) --- lib/spack/spack/test/ci.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/test/ci.py b/lib/spack/spack/test/ci.py index a5ca0273d5d..f2a922ae97b 100644 --- a/lib/spack/spack/test/ci.py +++ b/lib/spack/spack/test/ci.py @@ -414,7 +414,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo): @pytest.mark.regression("29947") -def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_packages): +def test_affected_specs_on_first_concretization(mutable_mock_env_path): e = ev.create("first_concretization") e.add("mpileaks~shared") e.add("mpileaks+shared") @@ -444,7 +444,7 @@ def _fail(self, args): ci.process_command("help", [], str(repro_dir)) -def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch): +def test_ci_create_buildcache(tmpdir, working_env, config, monkeypatch): """Test that create_buildcache returns a list of objects with the correct keys and types.""" monkeypatch.setattr(ci, "push_to_build_cache", lambda a, b, c: True) @@ -483,7 +483,7 @@ def test_ci_run_standalone_tests_missing_requirements( @pytest.mark.not_on_windows("Reliance on bash script not supported on Windows") def test_ci_run_standalone_tests_not_installed_junit( - tmp_path, repro_dir, working_env, mock_test_stage, capfd, mock_packages + tmp_path, repro_dir, working_env, mock_test_stage, capfd ): log_file = tmp_path / "junit.xml" args = { @@ -501,7 +501,7 @@ def test_ci_run_standalone_tests_not_installed_junit( @pytest.mark.not_on_windows("Reliance on bash script not supported on Windows") def test_ci_run_standalone_tests_not_installed_cdash( - tmp_path, repro_dir, working_env, mock_test_stage, capfd, mock_packages + tmp_path, repro_dir, working_env, mock_test_stage, capfd ): """Test run_standalone_tests with cdash and related options.""" log_file = tmp_path / "junit.xml" @@ -537,7 +537,7 @@ def test_ci_run_standalone_tests_not_installed_cdash( assert "No such file or directory" in err -def test_ci_skipped_report(tmpdir, mock_packages, config): +def test_ci_skipped_report(tmpdir, config): """Test explicit skipping of report as well as CI's 'package' arg.""" pkg = "trivial-smoke-test" spec = spack.concretize.concretize_one(pkg) From 3d8f9a7b228427f766f7c2f2169b4d721833d971 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Wed, 7 May 2025 08:33:05 +0200 Subject: [PATCH 07/57] Make target constraints stronger in public pipelines (#50297) Signed-off-by: Massimiliano Culpo --- .../aws-pcluster-neoverse_v1/spack.yaml | 16 ++++++++-- .../bootstrap-aarch64-darwin/spack.yaml | 1 + .../bootstrap-x86_64-linux-gnu/spack.yaml | 1 + .../stacks/build_systems/spack.yaml | 1 + .../stacks/data-vis-sdk/spack.yaml | 17 +++++++++-- .../stacks/e4s-cray-rhel/spack.yaml | 5 +++- .../stacks/e4s-neoverse-v2/spack.yaml | 4 +++ .../cloud_pipelines/stacks/hep/spack.yaml | 29 ++++++++++++++----- .../stacks/ml-darwin-aarch64-mps/spack.yaml | 5 +++- .../stacks/ml-linux-aarch64-cpu/spack.yaml | 1 + .../stacks/ml-linux-aarch64-cuda/spack.yaml | 5 +++- .../stacks/ml-linux-x86_64-cpu/spack.yaml | 1 + .../stacks/ml-linux-x86_64-cuda/spack.yaml | 5 +++- .../stacks/ml-linux-x86_64-rocm/spack.yaml | 1 + .../stacks/radiuss-aws-aarch64/spack.yaml | 19 ++++++------ .../cloud_pipelines/stacks/radiuss/spack.yaml | 1 + 16 files changed, 87 insertions(+), 25 deletions(-) diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml 
b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml index 83632dc95a4..5351e6feb7d 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml @@ -57,6 +57,7 @@ spack: require: - gromacs@2024.3 ^armpl-gcc ^openmpi - "%gcc" + - target=neoverse_v1 libfabric: buildable: true externals: @@ -64,33 +65,42 @@ spack: spec: libfabric@1.17.0 require: - fabrics=shm,efa + - target=neoverse_v1 llvm: variants: ~lldb mpas-model: require: - precision=single ^parallelio+pnetcdf - "%gcc" + - target=neoverse_v1 mpich: require: - mpich pmi=pmi2 device=ch4 netmod=ofi +slurm + - target=neoverse_v1 nvhpc: require: - "target=aarch64" openfoam: require: - openfoam ^scotch@6.0.9 + - target=neoverse_v1 openmpi: variants: ~atomics ~cuda ~cxx ~cxx_exceptions ~internal-hwloc ~java +legacylaunchers ~lustre ~memchecker +pmi +romio ~singularity +vt +wrapper-rpath fabrics=ofi schedulers=slurm - require: '@4:' + require: + - '@4:' + - target=neoverse_v1 # Palace does not build correctly with armpl until https://github.com/awslabs/palace/pull/207 is merged into a version. # palace: # require: # - one_of: ["palace cxxflags=\"-include cstdint\" ^fmt@9.1.0"] pmix: - require: "pmix@3:" + require: + - "pmix@3:" + - target=neoverse_v1 quantum-espresso: require: - quantum-espresso@6.6 %gcc ^armpl-gcc + - target=neoverse_v1 slurm: buildable: false externals: @@ -99,8 +109,10 @@ spack: require: - "+pmix" all: + target: ["neoverse_v1"] require: - "%gcc" + - "target=neoverse_v1" providers: blas: [armpl-gcc, openblas] fftw-api: [armpl-gcc, fftw] diff --git a/share/spack/gitlab/cloud_pipelines/stacks/bootstrap-aarch64-darwin/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/bootstrap-aarch64-darwin/spack.yaml index ebf597acb27..01d7c2a8cfa 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/bootstrap-aarch64-darwin/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/bootstrap-aarch64-darwin/spack.yaml @@ -3,6 +3,7 @@ spack: packages: all: + target: ["aarch64"] require: target=aarch64 config: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/bootstrap-x86_64-linux-gnu/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/bootstrap-x86_64-linux-gnu/spack.yaml index 67d5b20fa32..aa1b011719a 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/bootstrap-x86_64-linux-gnu/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/bootstrap-x86_64-linux-gnu/spack.yaml @@ -3,6 +3,7 @@ spack: packages: all: + target: ["x86_64_v3"] require: target=x86_64_v3 config: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml index 2f5177af30d..8639cd195f3 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/build_systems/spack.yaml @@ -2,6 +2,7 @@ spack: view: false packages: all: + target: ["x86_64_v3"] require: - target=x86_64_v3 c: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml index a85e004104e..55c96cabff2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/data-vis-sdk/spack.yaml @@ -2,32 +2,43 @@ spack: view: false packages: all: - require: target=x86_64_v3 + target: ["x86_64_v3"] + require: + - target=x86_64_v3 cmake: variants: ~ownlibs 
ecp-data-vis-sdk: require: - "+ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf +sensei +sz +unifyfs +veloc +vtkm +zfp" + - target=x86_64_v3 hdf5: require: - "@1.14" + - target=x86_64_v3 mesa: require: - "+glx +osmesa +opengl ~opengles +llvm" + - target=x86_64_v3 libglx: require: "mesa +glx" ospray: require: - "@2.8.0" - "+denoiser +mpi" + - target=x86_64_v3 llvm: - require: ["@14:"] + require: + - "@14:" + - target=x86_64_v3 # Minimize LLVM variants: ~lldb~lld~libomptarget~polly~gold libunwind=none compiler-rt=none libllvm: require: ["llvm"] visit: - require: ["@3.4.1"] + require: + - "@3.4.1" + - target=x86_64_v3 + concretizer: unify: when_possible diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml index a9ff3d5afdd..857886f6b23 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml @@ -44,7 +44,9 @@ spack: - netlib-scalapack ncurses: - require: +termlib ldflags=-Wl,--undefined-version + require: + - +termlib ldflags=-Wl,--undefined-version + - target=x86_64_v3 tbb: require: "intel-tbb" binutils: @@ -78,6 +80,7 @@ spack: petsc: require: - "+batch" + - target=x86_64_v3 trilinos: require: - one_of: [+amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse-v2/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse-v2/spack.yaml index a2acd99dbf9..c1aa18a45b2 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse-v2/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-neoverse-v2/spack.yaml @@ -10,12 +10,16 @@ spack: require: - "%gcc" - target=neoverse_v2 + target: ["neoverse_v2"] providers: blas: [openblas] mpi: [mpich] variants: +mpi binutils: variants: +ld +gold +headers +libiberty ~nls + blas: + require: + - openblas hdf5: variants: +fortran +hl +shared libfabric: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/hep/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/hep/spack.yaml index 3027875ed7b..ffd722564cf 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/hep/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/hep/spack.yaml @@ -8,6 +8,7 @@ spack: packages: all: + target: ["x86_64_v3"] require: - "%gcc" - target=x86_64_v3 @@ -17,20 +18,34 @@ spack: tbb: [intel-tbb] variants: +mpi acts: - require: +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python +svg +tgeo cxxstd=20 + require: + - +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python +svg +tgeo cxxstd=20 + - target=x86_64_v3 celeritas: - require: +geant4 +hepmc3 +root +shared cxxstd=20 + require: + - +geant4 +hepmc3 +root +shared cxxstd=20 + - target=x86_64_v3 geant4: - require: +opengl +qt +threads +x11 + require: + - +opengl +qt +threads +x11 + - target=x86_64_v3 hip: - require: '@5.7.1 +rocm' + require: + - '@5.7.1 +rocm' + - target=x86_64_v3 rivet: - require: hepmc=3 + require: + - hepmc=3 + - target=x86_64_v3 root: - require: +arrow ~daos +davix +dcache +emacs +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +tmva-cpu +unuran +vc +vdt +veccore +webgui +x +xml +xrootd # cxxstd=20 + require: + - +arrow ~daos +davix +dcache +emacs +examples +fftw +fits +fortran +gdml +graphviz +gsl +http 
+math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +tmva-cpu +unuran +vc +vdt +veccore +webgui +x +xml +xrootd # cxxstd=20 # note: root cxxstd=20 not concretizable within sherpa + - target=x86_64_v3 vecgeom: - require: +gdml +geant4 +root +shared cxxstd=20 + require: + - +gdml +geant4 +root +shared cxxstd=20 + - target=x86_64_v3 # Mark geant4 data as external to prevent wasting bandwidth on GB-scale files geant4-data: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml index c0a63b995e3..253e1a00ba0 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-darwin-aarch64-mps/spack.yaml @@ -3,6 +3,7 @@ spack: packages: all: + target: ["aarch64"] require: - target=aarch64 - +mps @@ -11,7 +12,9 @@ spack: mpi: require: mpich openblas: - require: ~fortran + require: + - ~fortran + - target=aarch64 specs: # Horovod diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-aarch64-cpu/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-aarch64-cpu/spack.yaml index 23ed6aa665e..544e097849e 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-aarch64-cpu/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-aarch64-cpu/spack.yaml @@ -2,6 +2,7 @@ spack: view: false packages: all: + target: ["aarch64"] require: - target=aarch64 - ~cuda diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-aarch64-cuda/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-aarch64-cuda/spack.yaml index d37f8ec7d27..563ec83dc2c 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-aarch64-cuda/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-aarch64-cuda/spack.yaml @@ -2,6 +2,7 @@ spack: view: false packages: all: + target: ["aarch64"] require: - target=aarch64 - ~rocm @@ -9,7 +10,9 @@ spack: - cuda_arch=80 llvm: # https://github.com/spack/spack/issues/27999 - require: ~cuda + require: + - ~cuda + - target=aarch64 mpi: require: openmpi py-torch: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml index 31ca52dd394..10668744aae 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cpu/spack.yaml @@ -2,6 +2,7 @@ spack: view: false packages: all: + target: ["x86_64_v3"] require: - target=x86_64_v3 - ~cuda diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml index 7b508debd37..d4248acd34c 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-cuda/spack.yaml @@ -2,6 +2,7 @@ spack: view: false packages: all: + target: ["x86_64_v3"] require: - target=x86_64_v3 - ~rocm @@ -9,7 +10,9 @@ spack: - cuda_arch=80 llvm: # https://github.com/spack/spack/issues/27999 - require: ~cuda + require: + - ~cuda + - target=x86_64_v3 mpi: require: openmpi py-torch: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml index a285c131f7c..54a082376ab 100644 --- 
a/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/ml-linux-x86_64-rocm/spack.yaml @@ -2,6 +2,7 @@ spack: view: false packages: all: + target: ["x86_64_v3"] require: - target=x86_64_v3 - ~cuda diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml index c5a1259ba62..e2102e4d2c7 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss-aws-aarch64/spack.yaml @@ -2,15 +2,16 @@ spack: view: false packages: all: - providers: - blas: - - openblas - mkl: - - intel-oneapi-mkl - mpi: - - openmpi - - mpich - variants: +mpi + target: ["target=aarch64"] + require: + - "target=aarch64" + - "+mpi" + blas: + require: + - openblas + mpi: + require: + - openmpi definitions: - radiuss: diff --git a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml index 867c2ae9a62..f707fdc01ef 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/radiuss/spack.yaml @@ -2,6 +2,7 @@ spack: view: false packages: all: + target: [ "x86_64_v3" ] require: - target=x86_64_v3 - '%gcc@7.5.0' From 99d849b2e60a09bf1600f1bf63e289869c42f29e Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Tue, 6 May 2025 23:59:16 -0700 Subject: [PATCH 08/57] test/spec_semantics: add mock_packages to test_intersects_and_satisfies (#50336) --- lib/spack/spack/test/spec_semantics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 28788aadf62..70bbb7fb078 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -1837,7 +1837,7 @@ def test_abstract_contains_semantic(lhs, rhs, expected, mock_packages): (Spec, "mpileaks ^callpath %gcc@5", "mpileaks ^callpath %gcc@5.4", (True, False, True)), ], ) -def test_intersects_and_satisfies(factory, lhs_str, rhs_str, results): +def test_intersects_and_satisfies(mock_packages, factory, lhs_str, rhs_str, results): lhs = factory(lhs_str) rhs = factory(rhs_str) From f151bc65f7756cf605c438b67be44f1d6d281dd0 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Wed, 7 May 2025 00:02:10 -0700 Subject: [PATCH 09/57] test/env: switch to mock packages (#50335) --- lib/spack/spack/test/env.py | 57 ++++++++++++++++++------------------- 1 file changed, 28 insertions(+), 29 deletions(-) diff --git a/lib/spack/spack/test/env.py b/lib/spack/spack/test/env.py index 64476624d0d..5fd9d4c24f8 100644 --- a/lib/spack/spack/test/env.py +++ b/lib/spack/spack/test/env.py @@ -22,7 +22,10 @@ ) from spack.environment.list import UndefinedReferenceError -pytestmark = pytest.mark.not_on_windows("Envs are not supported on windows") +pytestmark = [ + pytest.mark.not_on_windows("Envs are not supported on windows"), + pytest.mark.usefixtures("mock_packages"), +] class TestDirectoryInitialization: @@ -35,7 +38,7 @@ def test_environment_dir_from_name(self, mutable_mock_env_path): ev.environment_dir_from_name("test", exists_ok=False) -def test_hash_change_no_rehash_concrete(tmp_path, mock_packages, config): +def test_hash_change_no_rehash_concrete(tmp_path, config): # create an environment env_path = tmp_path / 
"env_dir" env_path.mkdir(exist_ok=False) @@ -65,7 +68,7 @@ def test_hash_change_no_rehash_concrete(tmp_path, mock_packages, config): assert read_in.specs_by_hash[read_in.concretized_order[0]]._hash == new_hash -def test_env_change_spec(tmp_path, mock_packages, config): +def test_env_change_spec(tmp_path, config): env_path = tmp_path / "env_dir" env_path.mkdir(exist_ok=False) env = ev.create_in_dir(env_path) @@ -98,7 +101,7 @@ def test_env_change_spec(tmp_path, mock_packages, config): """ -def test_env_change_spec_in_definition(tmp_path, mock_packages, mutable_mock_env_path): +def test_env_change_spec_in_definition(tmp_path, mutable_mock_env_path): manifest_file = tmp_path / ev.manifest_name manifest_file.write_text(_test_matrix_yaml) e = ev.create("test", manifest_file) @@ -121,7 +124,7 @@ def test_env_change_spec_in_definition(tmp_path, mock_packages, mutable_mock_env assert not any(x.intersects("mpileaks@2.1%gcc") for x in e.user_specs) -def test_env_change_spec_in_matrix_raises_error(tmp_path, mock_packages, mutable_mock_env_path): +def test_env_change_spec_in_matrix_raises_error(tmp_path, mutable_mock_env_path): manifest_file = tmp_path / ev.manifest_name manifest_file.write_text(_test_matrix_yaml) e = ev.create("test", manifest_file) @@ -202,7 +205,7 @@ def test_environment_cant_modify_environments_root(tmpdir): unify: false""" ], ) -def test_roundtrip_spack_yaml_with_comments(original_content, mock_packages, config, tmp_path): +def test_roundtrip_spack_yaml_with_comments(original_content, config, tmp_path): """Ensure that round-tripping a spack.yaml file doesn't change its content.""" spack_yaml = tmp_path / "spack.yaml" spack_yaml.write_text(original_content) @@ -242,7 +245,7 @@ def test_removing_from_non_existing_list_fails(tmp_path): (False, False), ], ) -def test_update_default_view(init_view, update_value, tmp_path, mock_packages, config): +def test_update_default_view(init_view, update_value, tmp_path, config): """Tests updating the default view with different values.""" env = ev.create_in_dir(tmp_path, with_view=init_view) env.update_default_view(update_value) @@ -291,7 +294,7 @@ def test_update_default_view(init_view, update_value, tmp_path, mock_packages, c ], ) def test_update_default_complex_view( - initial_content, update_value, expected_view, tmp_path, mock_packages, config + initial_content, update_value, expected_view, tmp_path, config ): spack_yaml = tmp_path / "spack.yaml" spack_yaml.write_text(initial_content) @@ -366,7 +369,7 @@ def test_error_on_nonempty_view_dir(tmpdir): _error_on_nonempty_view_dir("file") -def test_can_add_specs_to_environment_without_specs_attribute(tmp_path, mock_packages, config): +def test_can_add_specs_to_environment_without_specs_attribute(tmp_path, config): """Sometimes users have template manifest files, and save one line in the YAML file by removing the empty 'specs: []' attribute. 
This test ensures that adding a spec to an environment without the 'specs' attribute, creates the attribute first instead of returning @@ -397,12 +400,12 @@ def test_can_add_specs_to_environment_without_specs_attribute(tmp_path, mock_pac # baz - zlib """, - "libpng", + "libdwarf", """spack: specs: # baz - zlib - - libpng + - libdwarf """, ) ], @@ -572,7 +575,7 @@ def test_environment_config_scheme_used(tmp_path, unify_in_config): ], ) def test_conflicts_with_packages_that_are_not_dependencies( - spec_str, expected_raise, expected_spec, tmp_path, mock_packages, config + spec_str, expected_raise, expected_spec, tmp_path, config ): """Tests that we cannot concretize two specs together, if one conflicts with the other, even though they don't have a dependency relation. @@ -601,9 +604,7 @@ def test_conflicts_with_packages_that_are_not_dependencies( @pytest.mark.parametrize( "possible_mpi_spec,unify", [("mpich", False), ("mpich", True), ("zmpi", False), ("zmpi", True)] ) -def test_requires_on_virtual_and_potential_providers( - possible_mpi_spec, unify, tmp_path, mock_packages, config -): +def test_requires_on_virtual_and_potential_providers(possible_mpi_spec, unify, tmp_path, config): """Tests that in an environment we can add packages explicitly, even though they provide a virtual package, and we require the provider of the same virtual to be another package, if they are added explicitly by their name. @@ -698,7 +699,7 @@ def test_removing_spec_from_manifest_with_exact_duplicates( @pytest.mark.regression("35298") -def test_variant_propagation_with_unify_false(tmp_path, mock_packages, config): +def test_variant_propagation_with_unify_false(tmp_path, config): """Spack distributes concretizations to different processes, when unify:false is selected and the number of roots is 2 or more. When that happens, the specs to be concretized need to be properly reconstructed on the worker process, if variant propagation was requested. @@ -722,7 +723,7 @@ def test_variant_propagation_with_unify_false(tmp_path, mock_packages, config): assert node.satisfies("+foo") -def test_env_with_include_defs(mutable_mock_env_path, mock_packages): +def test_env_with_include_defs(mutable_mock_env_path): """Test environment with included definitions file.""" env_path = mutable_mock_env_path env_path.mkdir() @@ -756,7 +757,7 @@ def test_env_with_include_defs(mutable_mock_env_path, mock_packages): e.concretize() -def test_env_with_include_def_missing(mutable_mock_env_path, mock_packages): +def test_env_with_include_def_missing(mutable_mock_env_path): """Test environment with included definitions file that is missing a definition.""" env_path = mutable_mock_env_path env_path.mkdir() @@ -782,7 +783,7 @@ def test_env_with_include_def_missing(mutable_mock_env_path, mock_packages): @pytest.mark.regression("41292") -def test_deconcretize_then_concretize_does_not_error(mutable_mock_env_path, mock_packages): +def test_deconcretize_then_concretize_does_not_error(mutable_mock_env_path): """Tests that, after having deconcretized a spec, we can reconcretize an environment which has 2 or more user specs mapping to the same concrete spec. 
""" @@ -811,7 +812,7 @@ def test_deconcretize_then_concretize_does_not_error(mutable_mock_env_path, mock @pytest.mark.regression("44216") -def test_root_version_weights_for_old_versions(mutable_mock_env_path, mock_packages): +def test_root_version_weights_for_old_versions(mutable_mock_env_path): """Tests that, when we select two old versions of root specs that have the same version optimization penalty, both are considered. """ @@ -839,7 +840,7 @@ def test_root_version_weights_for_old_versions(mutable_mock_env_path, mock_packa assert gcc.satisfies("@=1.0") -def test_env_view_on_empty_dir_is_fine(tmp_path, config, mock_packages, temporary_store): +def test_env_view_on_empty_dir_is_fine(tmp_path, config, temporary_store): """Tests that creating a view pointing to an empty dir is not an error.""" view_dir = tmp_path / "view" view_dir.mkdir() @@ -851,7 +852,7 @@ def test_env_view_on_empty_dir_is_fine(tmp_path, config, mock_packages, temporar assert view_dir.is_symlink() -def test_env_view_on_non_empty_dir_errors(tmp_path, config, mock_packages, temporary_store): +def test_env_view_on_non_empty_dir_errors(tmp_path, config, temporary_store): """Tests that creating a view pointing to a non-empty dir errors.""" view_dir = tmp_path / "view" view_dir.mkdir() @@ -868,7 +869,7 @@ def test_env_view_on_non_empty_dir_errors(tmp_path, config, mock_packages, tempo "matrix_line", [("^zmpi", "^mpich"), ("~shared", "+shared"), ("shared=False", "+shared-libs")] ) @pytest.mark.regression("40791") -def test_stack_enforcement_is_strict(tmp_path, matrix_line, config, mock_packages): +def test_stack_enforcement_is_strict(tmp_path, matrix_line, config): """Ensure that constraints in matrices are applied strictly after expansion, to avoid inconsistencies between abstract user specs and concrete specs. """ @@ -894,7 +895,7 @@ def test_stack_enforcement_is_strict(tmp_path, matrix_line, config, mock_package e.concretize() -def test_only_roots_are_explicitly_installed(tmp_path, mock_packages, config, temporary_store): +def test_only_roots_are_explicitly_installed(tmp_path, config, temporary_store): """When installing specific non-root specs from an environment, we continue to mark them as implicitly installed. What makes installs explicit is that they are root of the env.""" env = ev.create_in_dir(tmp_path) @@ -908,7 +909,7 @@ def test_only_roots_are_explicitly_installed(tmp_path, mock_packages, config, te assert temporary_store.db.query(explicit=True) == [mpileaks] -def test_environment_from_name_or_dir(mock_packages, mutable_mock_env_path, tmp_path): +def test_environment_from_name_or_dir(mutable_mock_env_path, tmp_path): test_env = ev.create("test") name_env = ev.environment_from_name_or_dir(test_env.name) @@ -923,7 +924,7 @@ def test_environment_from_name_or_dir(mock_packages, mutable_mock_env_path, tmp_ _ = ev.environment_from_name_or_dir("fake-env") -def test_env_include_configs(mutable_mock_env_path, mock_packages): +def test_env_include_configs(mutable_mock_env_path): """check config and package values using new include schema""" env_path = mutable_mock_env_path env_path.mkdir() @@ -970,9 +971,7 @@ def test_env_include_configs(mutable_mock_env_path, mock_packages): assert req_specs == set(["@3.11:"]) -def test_using_multiple_compilers_on_a_node_is_discouraged( - tmp_path, mutable_config, mock_packages -): +def test_using_multiple_compilers_on_a_node_is_discouraged(tmp_path, mutable_config): """Tests that when we specify % Spack tries to use that compiler for all the languages needed by that node. 
""" From f485a622c8867bce347c4cd44eddd31b1e1e9d68 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Wed, 7 May 2025 00:03:00 -0700 Subject: [PATCH 10/57] test/cmd/maintainers.py: cleanup of mock_packages use (#50334) --- lib/spack/spack/test/cmd/maintainers.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/test/cmd/maintainers.py b/lib/spack/spack/test/cmd/maintainers.py index 88f5f7842e2..eea680f9779 100644 --- a/lib/spack/spack/test/cmd/maintainers.py +++ b/lib/spack/spack/test/cmd/maintainers.py @@ -9,6 +9,8 @@ import spack.main import spack.repo +pytestmark = [pytest.mark.usefixtures("mock_packages")] + maintainers = spack.main.SpackCommand("maintainers") MAINTAINED_PACKAGES = [ @@ -26,17 +28,17 @@ def split(output): return re.split(r"\s+", output) if output else [] -def test_maintained(mock_packages): +def test_maintained(): out = split(maintainers("--maintained")) assert out == MAINTAINED_PACKAGES -def test_unmaintained(mock_packages): +def test_unmaintained(): out = split(maintainers("--unmaintained")) assert out == sorted(set(spack.repo.all_package_names()) - set(MAINTAINED_PACKAGES)) -def test_all(mock_packages, capfd): +def test_all(capfd): with capfd.disabled(): out = split(maintainers("--all")) assert out == [ @@ -63,7 +65,7 @@ def test_all(mock_packages, capfd): assert out == ["maintainers-1:", "user1,", "user2"] -def test_all_by_user(mock_packages, capfd): +def test_all_by_user(capfd): with capfd.disabled(): out = split(maintainers("--all", "--by-user")) assert out == [ @@ -100,22 +102,22 @@ def test_all_by_user(mock_packages, capfd): ] -def test_no_args(mock_packages): +def test_no_args(): with pytest.raises(spack.main.SpackCommandError): maintainers() -def test_no_args_by_user(mock_packages): +def test_no_args_by_user(): with pytest.raises(spack.main.SpackCommandError): maintainers("--by-user") -def test_mutex_args_fail(mock_packages): +def test_mutex_args_fail(): with pytest.raises(SystemExit): maintainers("--maintained", "--unmaintained") -def test_maintainers_list_packages(mock_packages, capfd): +def test_maintainers_list_packages(capfd): with capfd.disabled(): out = split(maintainers("maintainers-1")) assert out == ["user1", "user2"] @@ -129,13 +131,13 @@ def test_maintainers_list_packages(mock_packages, capfd): assert out == ["user2", "user3"] -def test_maintainers_list_fails(mock_packages, capfd): +def test_maintainers_list_fails(capfd): out = maintainers("pkg-a", fail_on_error=False) assert not out assert maintainers.returncode == 1 -def test_maintainers_list_by_user(mock_packages, capfd): +def test_maintainers_list_by_user(capfd): with capfd.disabled(): out = split(maintainers("--by-user", "user1")) assert out == ["maintainers-1", "maintainers-3", "py-extension1"] From eff4c14a094a8fd69b4b0278678d69c7580907c4 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Wed, 7 May 2025 00:04:06 -0700 Subject: [PATCH 11/57] test/providers: switch to mock packages (#50333) --- lib/spack/spack/test/cmd/providers.py | 30 ++++++++++++++++++++------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/test/cmd/providers.py b/lib/spack/spack/test/cmd/providers.py index b9fab73f432..00e4f226457 100644 --- a/lib/spack/spack/test/cmd/providers.py +++ b/lib/spack/spack/test/cmd/providers.py @@ -7,6 +7,8 @@ from spack.main import SpackCommand +pytestmark = [pytest.mark.usefixtures("mock_packages")] + 
providers = SpackCommand("providers") @@ -24,16 +26,28 @@ def test_it_just_runs(pkg): ( ("mpi",), [ - "mpich", - "mpilander", - "mvapich2", - "openmpi", - "openmpi@1.7.5:", - "openmpi@2.0.0:", - "spectrum-mpi", + "intel-parallel-studio", + "low-priority-provider", + "mpich@3:", + "mpich2", + "multi-provider-mpi@1.10.0", + "multi-provider-mpi@2.0.0", + "zmpi", ], ), - (("D", "awk"), ["ldc", "gawk", "mawk"]), # Call 2 virtual packages at once + ( + ("lapack", "something"), + [ + "intel-parallel-studio", + "low-priority-provider", + "netlib-lapack", + "openblas-with-lapack", + "simple-inheritance", + "splice-a", + "splice-h", + "splice-vh", + ], + ), # Call 2 virtual packages at once ], ) def test_provider_lists(vpkg, provider_list): From 0b4477c0df7b892282796e1f1f89a3031d36b450 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Wed, 7 May 2025 00:06:34 -0700 Subject: [PATCH 12/57] test/cmd/unit_test: have test_list_with* ignore missing repo warning (#50332) --- lib/spack/spack/test/cmd/unit_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/test/cmd/unit_test.py b/lib/spack/spack/test/cmd/unit_test.py index 06ad0e5e744..cc48c6330c9 100644 --- a/lib/spack/spack/test/cmd/unit_test.py +++ b/lib/spack/spack/test/cmd/unit_test.py @@ -19,7 +19,7 @@ def test_list(): def test_list_with_pytest_arg(): output = spack_test("--list", cmd_test_py) - assert output.strip() == cmd_test_py + assert cmd_test_py in output.strip() def test_list_with_keywords(): @@ -27,7 +27,7 @@ def test_list_with_keywords(): # since the behavior is inconsistent across different pytest # versions, see https://stackoverflow.com/a/48814787/771663 output = spack_test("--list", "-k", "unit_test.py") - assert output.strip() == cmd_test_py + assert cmd_test_py in output.strip() def test_list_long(capsys): From fc268b0945ae1f965af3fd9064a1f79b9540a598 Mon Sep 17 00:00:00 2001 From: Simon Pintarelli <1237199+simonpintarelli@users.noreply.github.com> Date: Wed, 7 May 2025 09:35:47 +0200 Subject: [PATCH 13/57] cosma: add v2.7.0 (#50320) --- var/spack/repos/spack_repo/builtin/packages/cosma/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/cosma/package.py b/var/spack/repos/spack_repo/builtin/packages/cosma/package.py index 9cc7a8b7e5b..af0d4921ef8 100644 --- a/var/spack/repos/spack_repo/builtin/packages/cosma/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/cosma/package.py @@ -21,6 +21,7 @@ class Cosma(CMakePackage): # note: The default archives produced with github do not have the archives # of the submodules. 
version("master", branch="master", submodules=False) + version("2.7.0", sha256="f4775d18379539d7bb5053bff8acb4e13d6ed31a9677f498d9099a7500488789") version("2.6.6", sha256="1604be101e77192fbcc5551236bc87888d336e402f5409bbdd9dea900401cc37") version("2.6.5", sha256="10d9b7ecc1ce44ec5b9e0c0bf89278a63029912ec3ea99661be8576b553ececf") version("2.6.4", sha256="6d7bd5e3005874af9542a329c93e7ccd29ca1a5573dae27618fac2704fa2b6ab") From 97edcb5acc364c8c61936d86ffb63c84c1d56af0 Mon Sep 17 00:00:00 2001 From: Simon Pintarelli <1237199+simonpintarelli@users.noreply.github.com> Date: Wed, 7 May 2025 09:37:28 +0200 Subject: [PATCH 14/57] tiled-mm v2.3.2 (#50329) --- var/spack/repos/spack_repo/builtin/packages/tiled_mm/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/tiled_mm/package.py b/var/spack/repos/spack_repo/builtin/packages/tiled_mm/package.py index a3b133e7e23..b406bd833b5 100644 --- a/var/spack/repos/spack_repo/builtin/packages/tiled_mm/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/tiled_mm/package.py @@ -19,6 +19,7 @@ class TiledMm(CMakePackage, CudaPackage, ROCmPackage): version("master", branch="master") + version("2.3.2", sha256="1f91ca02f6ee8e400835fa90630618baf86a7b425b4bbbb4151068f72658b858") version("2.3.1", sha256="68914a483e62f796b790ea428210b1d5ef5943d6289e53d1aa62f56a20fbccc8") version("2.3", sha256="504c6201f5a9be9741c55036bf8e2656ae3f4bc19996295b264ee5e303c9253c") version("2.2", sha256="6d0b49c9588ece744166822fd44a7bc5bec3dc666b836de8bf4bf1a7bb675aac") From 43596b4e232b8bd0c9d4d517a8ae7c5242844ba4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David--Cl=C3=A9ris=20Timoth=C3=A9e?= Date: Wed, 7 May 2025 14:05:12 +0200 Subject: [PATCH 15/57] Shamrock: new package (#50293) Co-authored-by: tdavidcl --- .../builtin/packages/shamrock/package.py | 118 ++++++++++++++++++ 1 file changed, 118 insertions(+) create mode 100644 var/spack/repos/spack_repo/builtin/packages/shamrock/package.py diff --git a/var/spack/repos/spack_repo/builtin/packages/shamrock/package.py b/var/spack/repos/spack_repo/builtin/packages/shamrock/package.py new file mode 100644 index 00000000000..d8feebbd692 --- /dev/null +++ b/var/spack/repos/spack_repo/builtin/packages/shamrock/package.py @@ -0,0 +1,118 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import glob + +from spack.package import * + + +class Shamrock(CMakePackage): + """The Shamrock exascale framework for astrophysics""" + + homepage = "https://shamrock-code.github.io/" + url = "https://github.com/Shamrock-code/Shamrock/releases/download/v2025.05.0/shamrock-2025.05.0.tar" + git = "https://github.com/Shamrock-code/Shamrock.git" + + maintainers("tdavidcl") + + license("CeCILL Free Software License Agreement v2.1", checked_by="tdavidcl") + + version("main", branch="main", submodules=True) + + version("2025.05.0", sha256="59d5652467fd9453a65ae7b48e0c9b7d4162edc8df92e09d08dcc5275407a897") + + depends_on("c", type="build") + depends_on("cxx", type="build") + + variant("shared", default=True, description="Enables the build of shared libraries") + variant("testing", default=True, description="Enables the build of shared libraries") + variant("pybindings", default=True, description="Install python bindings") + + generator("ninja") + + depends_on("sycl") + requires( + "^[virtuals=sycl] intel-oneapi", + "^[virtuals=sycl] hipsycl", + policy="one_of", + msg="sycl provider must be one of intel-oneapi or hipsycl", + ) + + depends_on("mpi") + depends_on("python") + + extends("python", when="+pybindings") + + def cmake_args(self): + + spec = self.spec + + args = [ + self.define("SHAMROCK_ENABLE_BACKEND", "SYCL"), + self.define("PYTHON_EXECUTABLE", spec["python"].command.path), + self.define_from_variant("BUILD_TEST", "testing"), + ] + + # switch based on SYCL provider + sycl_spec = self.spec["sycl"] + if sycl_spec.satisfies("intel-oneapi"): + args += [ + self.define("SYCL_IMPLEMENTATION", "IntelLLVM"), + self.define("CMAKE_CXX_COMPILER", "icpx"), + self.define("INTEL_LLVM_PATH", self.spec["intel-oneapi"].prefix), + ] + elif sycl_spec.satisfies("hipsycl"): + args += [self.define("SYCL_IMPLEMENTATION", "ACPPDirect")] + + if sycl_spec.satisfies("hipsycl@:0.9.4"): + args += [self.define("CMAKE_CXX_COMPILER", "syclcc")] + else: + args += [self.define("CMAKE_CXX_COMPILER", "acpp")] + + args += [self.define("ACPP_PATH", self.spec["hipsycl"].prefix)] + + return args + + @run_after("install") + def install_python_bindigs(self): + """Copy the .so files to the python site-packages directory""" + + spec = self.spec + libdir = spec.prefix.lib + + if self.spec.satisfies("+pybindings"): + # move shamrock python bindings into expected place + site_packages = join_path(python_platlib, "shamrock") + mkdirp(site_packages) + + # Find all .so files in the build directory + so_files = glob.glob(join_path(libdir, "*.so")) + + # Install each .so file to the install directory + for _f in so_files: + install(_f, site_packages) + + # Python need a __init__.py file to import properly the .so + raw_string = "from .shamrock import *\n" + filename = "__init__.py" + filepath = join_path(site_packages, filename) + with open(filepath, "w") as f: + f.write(raw_string) + + def test_install(self): + """Test the install (executable, python bindings)""" + + shamrock = Executable(self.prefix.bin.shamrock) + + shamrock("--help") + shamrock("--smi") + shamrock("--smi", "--sycl-cfg", "0:0") + + python( + "-c", + "import shamrock;" + "shamrock.change_loglevel(125);" + "shamrock.sys.init('0:0');" + "shamrock.sys.close()", + ) From 21d8c09c5e0c453e97226e6b99c9344dfb40981d Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 7 May 2025 15:45:15 +0200 Subject: [PATCH 16/57] builtin: fix various type/correctness issues (#50340) --- lib/spack/spack/build_systems/compiler.py | 2 +- 
lib/spack/spack/package.py | 2 ++ .../builtin/packages/alps/package.py | 22 ++++++++--------- .../builtin/packages/amber/package.py | 19 +++++++-------- .../builtin/packages/aotriton/package.py | 4 +++- .../builtin/packages/bash/package.py | 14 ++++------- .../builtin/packages/corenlp/package.py | 23 +++++++----------- .../builtin/packages/cudnn/package.py | 11 +++++---- .../builtin/packages/esmf/package.py | 2 +- .../builtin/packages/fabtests/package.py | 8 +++---- .../builtin/packages/gasnet/package.py | 9 ++++--- .../builtin/packages/geant4/package.py | 19 ++++++++------- .../builtin/packages/git/package.py | 4 +++- .../builtin/packages/hermes/package.py | 2 +- .../builtin/packages/justbuild/package.py | 14 +++++------ .../builtin/packages/lua/package.py | 2 +- .../builtin/packages/madis/package.py | 4 +++- .../builtin/packages/met/package.py | 2 +- .../builtin/packages/mpfr/package.py | 24 ++++++++----------- .../builtin/packages/paraview/package.py | 2 +- .../builtin/packages/py_waves/package.py | 2 +- .../spack_repo/builtin/packages/r/package.py | 3 +-- .../builtin/packages/readline/package.py | 14 ++++------- .../builtin/packages/salmon/package.py | 10 ++++---- .../builtin/packages/vecgeom/package.py | 19 ++++++++------- 25 files changed, 110 insertions(+), 127 deletions(-) diff --git a/lib/spack/spack/build_systems/compiler.py b/lib/spack/spack/build_systems/compiler.py index 8a3bc033bc1..b4f457bfc6a 100644 --- a/lib/spack/spack/build_systems/compiler.py +++ b/lib/spack/spack/build_systems/compiler.py @@ -36,7 +36,7 @@ class CompilerPackage(spack.package_base.PackageBase): #: Compiler argument(s) that produces version information #: If multiple arguments, the earlier arguments must produce errors when invalid - compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion" + compiler_version_argument: Union[str, Tuple[str, ...]] = "-dumpversion" #: Regex used to extract version from compiler's output compiler_version_regex: str = "(.*)" diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index c3846e5349d..e73bfb7090b 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -172,3 +172,5 @@ class tty: spack_cxx: str spack_f77: str spack_fc: str +prefix: Prefix +dso_suffix: str diff --git a/var/spack/repos/spack_repo/builtin/packages/alps/package.py b/var/spack/repos/spack_repo/builtin/packages/alps/package.py index 78e2254fc96..fb78d2642f9 100644 --- a/var/spack/repos/spack_repo/builtin/packages/alps/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/alps/package.py @@ -50,19 +50,17 @@ class Alps(CMakePackage): # See https://github.com/ALPSim/ALPS/issues/6#issuecomment-2604912169 # for why this is needed - resources = { + for boost_version, boost_checksum in ( # boost version, shasum - "1.87.0": "af57be25cb4c4f4b413ed692fe378affb4352ea50fbe294a11ef548f4d527d89", - "1.86.0": "1bed88e40401b2cb7a1f76d4bab499e352fa4d0c5f31c0dbae64e24d34d7513b", - "1.85.0": "7009fe1faa1697476bdc7027703a2badb84e849b7b0baad5086b087b971f8617", - "1.84.0": "cc4b893acf645c9d4b698e9a0f08ca8846aa5d6c68275c14c3e7949c24109454", - "1.83.0": "6478edfe2f3305127cffe8caf73ea0176c53769f4bf1585be237eb30798c3b8e", - "1.82.0": "a6e1ab9b0860e6a2881dd7b21fe9f737a095e5f33a3a874afc6a345228597ee6", - "1.81.0": "71feeed900fbccca04a3b4f2f84a7c217186f28a940ed8b7ed4725986baf99fa", - "1.80.0": "1e19565d82e43bc59209a168f5ac899d3ba471d55c7610c677d4ccf2c9c500c0", - } - - for boost_version, boost_checksum in resources.items(): + ("1.87.0", 
"af57be25cb4c4f4b413ed692fe378affb4352ea50fbe294a11ef548f4d527d89"), + ("1.86.0", "1bed88e40401b2cb7a1f76d4bab499e352fa4d0c5f31c0dbae64e24d34d7513b"), + ("1.85.0", "7009fe1faa1697476bdc7027703a2badb84e849b7b0baad5086b087b971f8617"), + ("1.84.0", "cc4b893acf645c9d4b698e9a0f08ca8846aa5d6c68275c14c3e7949c24109454"), + ("1.83.0", "6478edfe2f3305127cffe8caf73ea0176c53769f4bf1585be237eb30798c3b8e"), + ("1.82.0", "a6e1ab9b0860e6a2881dd7b21fe9f737a095e5f33a3a874afc6a345228597ee6"), + ("1.81.0", "71feeed900fbccca04a3b4f2f84a7c217186f28a940ed8b7ed4725986baf99fa"), + ("1.80.0", "1e19565d82e43bc59209a168f5ac899d3ba471d55c7610c677d4ccf2c9c500c0"), + ): resource( when="^boost@{0}".format(boost_version), name="boost_source_files", diff --git a/var/spack/repos/spack_repo/builtin/packages/amber/package.py b/var/spack/repos/spack_repo/builtin/packages/amber/package.py index 760a50329dd..27bec4bf2ee 100644 --- a/var/spack/repos/spack_repo/builtin/packages/amber/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/amber/package.py @@ -35,14 +35,12 @@ class Amber(Package, CudaPackage): deprecated=True, ) - resources = { - # [version amber, version ambertools , sha256sum] - "20": ("21", "f55fa930598d5a8e9749e8a22d1f25cab7fcf911d98570e35365dd7f262aaafd"), - # '20': ('20', 'b1e1f8f277c54e88abc9f590e788bbb2f7a49bcff5e8d8a6eacfaf332a4890f9'), - "18": ("19", "0c86937904854b64e4831e047851f504ec45b42e593db4ded92c1bee5973e699"), - "16": ("16", "7b876afe566e9dd7eb6a5aa952a955649044360f15c1f5d4d91ba7f41f3105fa"), - } - for ver, (ambertools_ver, ambertools_checksum) in resources.items(): + for ver, ambertools_ver, ambertools_checksum in ( + # (version amber, version ambertools, sha256sum) + ("20", "21", "f55fa930598d5a8e9749e8a22d1f25cab7fcf911d98570e35365dd7f262aaafd"), + ("18", "19", "0c86937904854b64e4831e047851f504ec45b42e593db4ded92c1bee5973e699"), + ("16", "16", "7b876afe566e9dd7eb6a5aa952a955649044360f15c1f5d4d91ba7f41f3105fa"), + ): resource( when="@{0}".format(ver), name="AmberTools", @@ -52,7 +50,7 @@ class Amber(Package, CudaPackage): placement="ambertools_tmpdir", ) - patches = [ + for ver, num, checksum in ( ("20", "1", "10780cb91a022b49ffdd7b1e2bf4a572fa4edb7745f0fc4e5d93b158d6168e42"), ("20", "2", "9c973e3f8f33a271d60787e8862901e8f69e94e7d80cda1695f7fad7bc396093"), ("20", "3", "acb359dc9b1bcff7e0f1965baa9f3f3dc18eeae99c49f1103c1e2986c0bbeed8"), @@ -97,8 +95,7 @@ class Amber(Package, CudaPackage): ("16", "13", "5ce28e6e0118a4780ad72fc096e617c874cde7d140e15f87451babb25aaf2d8f"), ("16", "14", "93703e734e76da30a5e050189a66d5a4d6bec5885752503c4c798e2f44049080"), ("16", "15", "a156ec246cd06688043cefde24de0d715fd46b08f5c0235015c2c5c3c6e37488"), - ] - for ver, num, checksum in patches: + ): patch_url_str = "https://ambermd.org/bugfixes/{0}.0/update.{1}" patch(patch_url_str.format(ver, num), sha256=checksum, level=0, when="@{0}".format(ver)) diff --git a/var/spack/repos/spack_repo/builtin/packages/aotriton/package.py b/var/spack/repos/spack_repo/builtin/packages/aotriton/package.py index 0b700f200f5..bcdc0d1450f 100644 --- a/var/spack/repos/spack_repo/builtin/packages/aotriton/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/aotriton/package.py @@ -58,7 +58,9 @@ def patch(self): def setup_build_environment(self, env: EnvironmentModifications) -> None: """Set environment variables used to control the build""" if self.spec.satisfies("%clang"): - env.set("TRITON_HIP_LLD_PATH", self.spec["llvm-amdgpu"].prefix / bin / ld.lld) + env.set( + "TRITON_HIP_LLD_PATH", 
join_path(self.spec["llvm-amdgpu"].prefix, "bin", "ld.lld") + ) def cmake_args(self): args = [] diff --git a/var/spack/repos/spack_repo/builtin/packages/bash/package.py b/var/spack/repos/spack_repo/builtin/packages/bash/package.py index 2f46b4fec71..862b0da18c0 100644 --- a/var/spack/repos/spack_repo/builtin/packages/bash/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/bash/package.py @@ -33,7 +33,8 @@ class Bash(AutotoolsPackage, GNUMirrorPackage): depends_on("iconv") depends_on("gettext") - patches = [ + # TODO: patches below are not managed by the GNUMirrorPackage base class + for verstr, num, checksum in ( ("5.2", "001", "f42f2fee923bc2209f406a1892772121c467f44533bedfe00a176139da5d310a"), ("5.2", "002", "45cc5e1b876550eee96f95bffb36c41b6cb7c07d33f671db5634405cd00fd7b8"), ("5.2", "003", "6a090cdbd334306fceacd0e4a1b9e0b0678efdbbdedbd1f5842035990c8abaff"), @@ -176,17 +177,12 @@ class Bash(AutotoolsPackage, GNUMirrorPackage): ("4.3", "046", "b3b456a6b690cd293353f17e22d92a202b3c8bce587ae5f2667c20c9ab6f688f"), ("4.3", "047", "c69248de7e78ba6b92f118fe1ef47bc86479d5040fe0b1f908ace1c9e3c67c4a"), ("4.3", "048", "5b8215451c5d096ca1e115307ffe6613553551a70369525a0778f216c3a4dfa2"), - ] - - # TODO: patches below are not managed by the GNUMirrorPackage base class - for verstr, num, checksum in patches: + ): ver = Version(verstr) patch( - "https://ftpmirror.gnu.org/bash/bash-{0}-patches/bash{1}-{2}".format( - ver, ver.joined, num - ), + f"https://ftpmirror.gnu.org/bash/bash-{ver}-patches/bash{ver.joined}-{num}", level=0, - when="@{0}".format(ver), + when=f"@{ver}", sha256=checksum, ) diff --git a/var/spack/repos/spack_repo/builtin/packages/corenlp/package.py b/var/spack/repos/spack_repo/builtin/packages/corenlp/package.py index 0f43520841f..a9cd5b0c691 100644 --- a/var/spack/repos/spack_repo/builtin/packages/corenlp/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/corenlp/package.py @@ -24,20 +24,15 @@ class Corenlp(Package): version("4.0.0", sha256="07195eed46dd39bdc364d3988da8ec6a5fc9fed8c17613cfe5a8b84d649c8f0f") - resources = [("4.0.0", "f45bde062fb368d72f7d3c7ac1ddc6cfb61d3badc1152572bde17f1a5ed9ec94")] - for ver, checksum in resources: - jarfile = "stanford-corenlp-{0}-models.jar".format(ver) - resource( - when="@{0}".format(ver), - name=jarfile, - url="https://repo1.maven.org/maven2/edu/stanford/nlp/stanford-corenlp/{0}/{1}".format( - ver, jarfile - ), - expand=False, - destination="", - placement=jarfile, - sha256=checksum, - ) + resource( + when="@4.0.0", + name="stanford-corenlp-4.0.0-models.jar", + url="https://repo1.maven.org/maven2/edu/stanford/nlp/stanford-corenlp/4.0.0/stanford-corenlp-4.0.0-models.jar", + expand=False, + destination="", + placement="stanford-corenlp-4.0.0-models.jar", + sha256="f45bde062fb368d72f7d3c7ac1ddc6cfb61d3badc1152572bde17f1a5ed9ec94", + ) depends_on("ant", type="build") diff --git a/var/spack/repos/spack_repo/builtin/packages/cudnn/package.py b/var/spack/repos/spack_repo/builtin/packages/cudnn/package.py index b4049083426..70a43f026b6 100644 --- a/var/spack/repos/spack_repo/builtin/packages/cudnn/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/cudnn/package.py @@ -355,16 +355,17 @@ class Cudnn(Package): for ver, packages in _versions.items(): key = "{0}-{1}".format(platform.system(), platform.machine()) pkg = packages.get(key) - cudnn_ver, cuda_ver = ver.split("-") - long_ver = "{0}-{1}".format(cudnn_ver, cuda_ver) + cudnn_ver, cuda_ver_str = ver.split("-") + cuda_ver = Version(cuda_ver_str) + long_ver = 
f"{cudnn_ver}-{cuda_ver}" if pkg: version(long_ver, sha256=pkg) # Add constraints matching CUDA version to cuDNN version # cuDNN builds for CUDA 11.x are compatible with all CUDA 11.x: # https://docs.nvidia.com/deeplearning/cudnn/support-matrix/index.html#fntarg_2 - if Version(cuda_ver) >= Version("11"): - cuda_ver = Version(cuda_ver).up_to(1) - depends_on("cuda@{}".format(cuda_ver), when="@{}".format(long_ver)) + if cuda_ver >= Version("11"): + cuda_ver = cuda_ver.up_to(1) + depends_on(f"cuda@{cuda_ver}", when=f"@{long_ver}") def url_for_version(self, version): # Get the system and machine arch for building the file path diff --git a/var/spack/repos/spack_repo/builtin/packages/esmf/package.py b/var/spack/repos/spack_repo/builtin/packages/esmf/package.py index ba293ace6c7..e7fd46ba251 100644 --- a/var/spack/repos/spack_repo/builtin/packages/esmf/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/esmf/package.py @@ -367,7 +367,7 @@ def setup_build_environment(self, env: EnvironmentModifications) -> None: env.set("ESMF_NFCONFIG", "nf-config") netcdfc = spec["netcdf-c"] if netcdfc.satisfies("~shared"): - nc_config = which(os.path.join(netcdfc.prefix.bin, "nc-config")) + nc_config = which(os.path.join(netcdfc.prefix.bin, "nc-config"), required=True) nc_flags = nc_config("--static", "--libs", output=str).strip() env.set("ESMF_NETCDF_LIBS", nc_flags) diff --git a/var/spack/repos/spack_repo/builtin/packages/fabtests/package.py b/var/spack/repos/spack_repo/builtin/packages/fabtests/package.py index 039819a2828..21d9e3fbec3 100644 --- a/var/spack/repos/spack_repo/builtin/packages/fabtests/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/fabtests/package.py @@ -42,7 +42,7 @@ class Fabtests(AutotoolsPackage): depends_on("c", type="build") # generated - versions = [ + for v in ( "1.21.0", "1.20.2", "1.20.1", @@ -66,10 +66,8 @@ class Fabtests(AutotoolsPackage): "1.5.3", "1.5.0", "1.4.2", - ] - - for v in versions: - depends_on("libfabric@{0}".format(v), when="@{0}".format(v)) + ): + depends_on(f"libfabric@{v}", when=f"@{v}") def url_for_version(self, version): if version >= Version("1.8.1"): diff --git a/var/spack/repos/spack_repo/builtin/packages/gasnet/package.py b/var/spack/repos/spack_repo/builtin/packages/gasnet/package.py index 12743a9465c..035457d837a 100644 --- a/var/spack/repos/spack_repo/builtin/packages/gasnet/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/gasnet/package.py @@ -41,8 +41,7 @@ class Gasnet(Package, CudaPackage, ROCmPackage): version("2025.2.0-snapshot", commit="e2fdece76d86d7b4fa090fbff9b46eb98ce97177") # Versions fetched from git require a Bootstrap step - def bootstrap_version(): - return "@master:,2025.2.0-snapshot" + bootstrap_version = "@master:,2025.2.0-snapshot" version("2024.5.0", sha256="f945e80f71d340664766b66290496d230e021df5e5cd88f404d101258446daa9") version("2023.9.0", sha256="2d9f15a794e10683579ce494cd458b0dd97e2d3327c4d17e1fea79bd95576ce6") @@ -144,8 +143,8 @@ def bootstrap_version(): depends_on("mpi", when="conduits=mpi") depends_on("libfabric", when="conduits=ofi") - depends_on("autoconf@2.69", type="build", when=bootstrap_version()) - depends_on("automake@1.16:", type="build", when=bootstrap_version()) + depends_on("autoconf@2.69", type="build", when=bootstrap_version) + depends_on("automake@1.16:", type="build", when=bootstrap_version) conflicts("^hip@:4.4.0", when="+rocm") @@ -154,7 +153,7 @@ def bootstrap_version(): depends_on("oneapi-level-zero@1.8.0:", when="+level_zero") def install(self, spec, prefix): - if 
spec.satisfies(Gasnet.bootstrap_version()): + if spec.satisfies(self.bootstrap_version): bootstrapsh = Executable("./Bootstrap") bootstrapsh() # Record git-describe when fetched from git: diff --git a/var/spack/repos/spack_repo/builtin/packages/geant4/package.py b/var/spack/repos/spack_repo/builtin/packages/geant4/package.py index 94ea15b60b6..767cbdaec0a 100644 --- a/var/spack/repos/spack_repo/builtin/packages/geant4/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/geant4/package.py @@ -6,6 +6,15 @@ from spack.variant import ConditionalVariantValues +def _std_when(values): + for v in values: + if isinstance(v, ConditionalVariantValues): + for c in v: + yield (c.value, c.when) + else: + yield (v, "") + + class Geant4(CMakePackage): """Geant4 is a toolkit for the simulation of the passage of particles through matter. Its areas of application include high energy, nuclear @@ -173,15 +182,7 @@ class Geant4(CMakePackage): with when("+hdf5"): depends_on("hdf5 +threadsafe") - def std_when(values): - for v in values: - if isinstance(v, ConditionalVariantValues): - for c in v: - yield (c.value, c.when) - else: - yield (v, "") - - for _std, _when in std_when(_cxxstd_values): + for _std, _when in _std_when(_cxxstd_values): depends_on(f"clhep cxxstd={_std}", when=f"{_when} cxxstd={_std}") depends_on(f"vecgeom cxxstd={_std}", when=f"{_when} +vecgeom cxxstd={_std}") diff --git a/var/spack/repos/spack_repo/builtin/packages/git/package.py b/var/spack/repos/spack_repo/builtin/packages/git/package.py index 5a43e4e330a..22613668f62 100644 --- a/var/spack/repos/spack_repo/builtin/packages/git/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/git/package.py @@ -217,7 +217,9 @@ def setup_build_environment(self, env: EnvironmentModifications) -> None: env.append_flags("EXTLIBS", " ".join(extlib_bits)) if not self.spec["curl"].satisfies("libs=shared"): - curlconfig = which(os.path.join(self.spec["curl"].prefix.bin, "curl-config")) + curlconfig = which( + os.path.join(self.spec["curl"].prefix.bin, "curl-config"), required=True + ) # For configure step: env.append_flags("LIBS", curlconfig("--static-libs", output=str).strip()) # For build step: diff --git a/var/spack/repos/spack_repo/builtin/packages/hermes/package.py b/var/spack/repos/spack_repo/builtin/packages/hermes/package.py index bf0fce1b2c7..96386c44185 100644 --- a/var/spack/repos/spack_repo/builtin/packages/hermes/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/hermes/package.py @@ -96,7 +96,7 @@ def set_lib(self, env: EnvironmentModifications, path: str): env.append_flags("LDFLAGS", "-L{}".format(path)) env.prepend_path("PYTHONPATH", "{}".format(path)) - def set_flags(self, env: EnvironmentModifications, path: str): + def set_flags(self, env: EnvironmentModifications): self.set_include(env, "{}/include".format(self.prefix)) self.set_include(env, "{}/include".format(self.prefix)) self.set_lib(env, "{}/lib".format(self.prefix)) diff --git a/var/spack/repos/spack_repo/builtin/packages/justbuild/package.py b/var/spack/repos/spack_repo/builtin/packages/justbuild/package.py index 1640745f70c..212da964c29 100644 --- a/var/spack/repos/spack_repo/builtin/packages/justbuild/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/justbuild/package.py @@ -49,14 +49,14 @@ class Justbuild(Package): sanity_check_is_file = [join_path("bin", "just"), join_path("bin", "just-mr")] def setup_build_environment(self, env: EnvironmentModifications) -> None: - ar = which("ar") + ar = which("ar", required=True) if self.spec.version < 
Version("1.2.1"): family = ', "COMPILER_FAMILY":"unknown"' else: family = ', "TOOLCHAIN_CONFIG": {"FAMILY": "unknown"}' if self.spec.satisfies("%gcc@10:"): - gcc = which("gcc") - gpp = which("g++") + gcc = which("gcc", required=True) + gpp = which("g++", required=True) env.set( "JUST_BUILD_CONF", " {" @@ -69,9 +69,9 @@ def setup_build_environment(self, env: EnvironmentModifications) -> None: + " }" + "}", ) - elif self.spec.satisfies("%clang@11:") or spec.satisfies("%apple-clang@11:"): - clang = which("clang") - clangpp = which("clang++") + elif self.spec.satisfies("%clang@11:") or self.spec.satisfies("%apple-clang@11:"): + clang = which("clang", required=True) + clangpp = which("clang++", required=True) env.set( "JUST_BUILD_CONF", " {" @@ -88,7 +88,7 @@ def setup_build_environment(self, env: EnvironmentModifications) -> None: raise InstallError("please use gcc >= 10 or clang >= 11") def install(self, spec, prefix): - python = which("python3") + python = which("python3", required=True) python(os.path.join("bin", "bootstrap.py"), ".", prefix) mkdirp(prefix.bin) install(os.path.join(prefix, "out", "bin", "just"), prefix.bin) diff --git a/var/spack/repos/spack_repo/builtin/packages/lua/package.py b/var/spack/repos/spack_repo/builtin/packages/lua/package.py index a65ee27d585..39698c0e459 100644 --- a/var/spack/repos/spack_repo/builtin/packages/lua/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/lua/package.py @@ -43,7 +43,7 @@ class LuaImplPackage(MakefilePackage): description="Fetcher to use in the LuaRocks package manager", ) - lua_version_override = None + lua_version_override: Optional[str] = None def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/var/spack/repos/spack_repo/builtin/packages/madis/package.py b/var/spack/repos/spack_repo/builtin/packages/madis/package.py index d0b910f5b74..9c06b05e4d0 100644 --- a/var/spack/repos/spack_repo/builtin/packages/madis/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/madis/package.py @@ -48,7 +48,9 @@ def setup_build_environment(self, env: EnvironmentModifications) -> None: ldflags.append(pnetcdf.libs.ld_flags) libs.append(pnetcdf.libs.link_flags) - nfconfig = which(os.path.join(self.spec["netcdf-fortran"].prefix.bin, "nf-config")) + nfconfig = which( + os.path.join(self.spec["netcdf-fortran"].prefix.bin, "nf-config"), required=True + ) ldflags.append(nfconfig("--flibs", output=str).strip()) netcdf_f = self.spec["netcdf-fortran"] env.set("NETCDF_INC", netcdf_f.prefix.include) diff --git a/var/spack/repos/spack_repo/builtin/packages/met/package.py b/var/spack/repos/spack_repo/builtin/packages/met/package.py index 8b48266353e..da6b32dd805 100644 --- a/var/spack/repos/spack_repo/builtin/packages/met/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/met/package.py @@ -107,7 +107,7 @@ def setup_build_environment(self, env: EnvironmentModifications) -> None: ldflags.append("-L" + netcdfc.prefix.lib) libs.append(netcdfc.libs.link_flags) else: - nc_config = which(os.path.join(netcdfc.prefix.bin, "nc-config")) + nc_config = which(os.path.join(netcdfc.prefix.bin, "nc-config"), required=True) cppflags.append(nc_config("--cflags", output=str).strip()) ldflags.append(nc_config("--libs", "--static", output=str).strip()) libs.append(nc_config("--libs", "--static", output=str).strip()) diff --git a/var/spack/repos/spack_repo/builtin/packages/mpfr/package.py b/var/spack/repos/spack_repo/builtin/packages/mpfr/package.py index 10cda306c91..9aee6befd9e 100644 --- 
a/var/spack/repos/spack_repo/builtin/packages/mpfr/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/mpfr/package.py
@@ -54,20 +54,16 @@ class Mpfr(AutotoolsPackage, GNUMirrorPackage):
 
     # Check the Bugs section of old release pages for patches.
     # https://www.mpfr.org/mpfr-X.Y.Z/#bugs
-    patches = {
-        "4.0.2": "3f80b836948aa96f8d1cb9cc7f3f55973f19285482a96f9a4e1623d460bcccf0",
-        "4.0.1": "5230aab653fa8675fc05b5bdd3890e071e8df49a92a9d58c4284024affd27739",
-        "3.1.6": "7a6dd71bcda4803d6b89612706a17b8816e1acd5dd9bf1bec29cf748f3b60008",
-        "3.1.5": "1ae14fb3a54ae8e0faed20801970255b279eee9e5ac624891ab5d29727f0bc04",
-        "3.1.4": "113705d5333ef0d0ad3eb136a85404ba6bd1cc524dece5ce902c536aa2e29903",
-        "3.1.3": "4152a780b3cc6e9643283e59093b43460196d0fea9302d8c93b2496f6679f4e4",
-        "3.1.2": "1b9fdb515efb09a506a01e1eb307b1464455f5ca63d6c193db3a3da371ab3220",
-    }
-
-    for ver, checksum in patches.items():
-        patch(
-            "https://www.mpfr.org/mpfr-{0}/allpatches".format(ver), when="@" + ver, sha256=checksum
-        )
+    for ver, checksum in (
+        ("4.0.2", "3f80b836948aa96f8d1cb9cc7f3f55973f19285482a96f9a4e1623d460bcccf0"),
+        ("4.0.1", "5230aab653fa8675fc05b5bdd3890e071e8df49a92a9d58c4284024affd27739"),
+        ("3.1.6", "7a6dd71bcda4803d6b89612706a17b8816e1acd5dd9bf1bec29cf748f3b60008"),
+        ("3.1.5", "1ae14fb3a54ae8e0faed20801970255b279eee9e5ac624891ab5d29727f0bc04"),
+        ("3.1.4", "113705d5333ef0d0ad3eb136a85404ba6bd1cc524dece5ce902c536aa2e29903"),
+        ("3.1.3", "4152a780b3cc6e9643283e59093b43460196d0fea9302d8c93b2496f6679f4e4"),
+        ("3.1.2", "1b9fdb515efb09a506a01e1eb307b1464455f5ca63d6c193db3a3da371ab3220"),
+    ):
+        patch(f"https://www.mpfr.org/mpfr-{ver}/allpatches", when=f"@{ver}", sha256=checksum)
 
     def flag_handler(self, name, flags):
         # Work around macOS Catalina / Xcode 11 code generation bug
diff --git a/var/spack/repos/spack_repo/builtin/packages/paraview/package.py b/var/spack/repos/spack_repo/builtin/packages/paraview/package.py
index 9d48ade9ae7..9b4767e53cc 100644
--- a/var/spack/repos/spack_repo/builtin/packages/paraview/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/paraview/package.py
@@ -181,7 +181,7 @@ class Paraview(CMakePackage, CudaPackage, ROCmPackage):
     }
 
     # VTK-m, and transitively ParaView, do not support Tesla Arch
-    for _arch in range(10, 14):
+    for _arch in ("10", "11", "12", "13"):
         conflicts(f"cuda_arch={_arch}", when="+cuda", msg="ParaView requires cuda_arch >= 20")
 
     # Starting from cmake@3.18, CUDA architecture management can be delegated to CMake.
diff --git a/var/spack/repos/spack_repo/builtin/packages/py_waves/package.py b/var/spack/repos/spack_repo/builtin/packages/py_waves/package.py index 7617e15f38d..dac567d149f 100644 --- a/var/spack/repos/spack_repo/builtin/packages/py_waves/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/py_waves/package.py @@ -76,7 +76,7 @@ class PyWaves(PythonPackage): def setup_build_environment(self, env: EnvironmentModifications) -> None: env.set("PREFIX", self.prefix) - env.set("PKG_NAME", "waves"), + env.set("PKG_NAME", "waves") if not self.spec.version.isdevelop(): env.set("SETUPTOOLS_SCM_PRETEND_VERSION", self.version) diff --git a/var/spack/repos/spack_repo/builtin/packages/r/package.py b/var/spack/repos/spack_repo/builtin/packages/r/package.py index f6bfd4b2291..bc86eb8b0de 100644 --- a/var/spack/repos/spack_repo/builtin/packages/r/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/r/package.py @@ -279,8 +279,7 @@ def setup_dependent_build_environment( if d.package.extends(self.spec): r_libs_path.append(join_path(d.prefix, self.r_lib_dir)) - r_libs_path = ":".join(r_libs_path) - env.set("R_LIBS", r_libs_path) + env.set("R_LIBS", ":".join(r_libs_path)) # R_LIBS_USER gets set to a directory in HOME/R if it is not set, such as # during package installation with the --vanilla flag. Set it to null # to ensure that it does not point to a directory that may contain R diff --git a/var/spack/repos/spack_repo/builtin/packages/readline/package.py b/var/spack/repos/spack_repo/builtin/packages/readline/package.py index 5409d676e6d..c09ceb0fc72 100644 --- a/var/spack/repos/spack_repo/builtin/packages/readline/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/readline/package.py @@ -29,7 +29,8 @@ class Readline(AutotoolsPackage, GNUMirrorPackage): depends_on("ncurses") - patches = [ + # TODO: patches below are not managed by the GNUMirrorPackage base class + for verstr, num, checksum in [ ("8.2", "001", "bbf97f1ec40a929edab5aa81998c1e2ef435436c597754916e6a5868f273aff7"), ("8.2", "002", "e06503822c62f7bc0d9f387d4c78c09e0ce56e53872011363c74786c7cd4c053"), ("8.2", "003", "24f587ba46b46ed2b1868ccaf9947504feba154bb8faabd4adaea63ef7e6acb0"), @@ -62,17 +63,12 @@ class Readline(AutotoolsPackage, GNUMirrorPackage): ("6.3", "006", "5c237ab3c6c97c23cf52b2a118adc265b7fb411b57c93a5f7c221d50fafbe556"), ("6.3", "007", "4d79b5a2adec3c2e8114cbd3d63c1771f7c6cf64035368624903d257014f5bea"), ("6.3", "008", "3bc093cf526ceac23eb80256b0ec87fa1735540d659742107b6284d635c43787"), - ] - - # TODO: patches below are not managed by the GNUMirrorPackage base class - for verstr, num, checksum in patches: + ]: ver = Version(verstr) patch( - "https://ftpmirror.gnu.org/readline/readline-{0}-patches/readline{1}-{2}".format( - ver, ver.joined, num - ), + f"https://ftpmirror.gnu.org/readline/readline-{ver}-patches/readline{ver.joined}-{num}", level=0, - when="@{0}".format(ver), + when=f"@{ver}", sha256=checksum, ) diff --git a/var/spack/repos/spack_repo/builtin/packages/salmon/package.py b/var/spack/repos/spack_repo/builtin/packages/salmon/package.py index f542d7ecba8..3251111c54c 100644 --- a/var/spack/repos/spack_repo/builtin/packages/salmon/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/salmon/package.py @@ -69,7 +69,7 @@ class Salmon(CMakePackage): conflicts("%gcc@:5.1", when="@0.14.1:") - resources = [ + for ver, repo, checksum in [ ( "1.10.3", "pufferfish", @@ -94,16 +94,14 @@ class Salmon(CMakePackage): ("0.12.0", "RapMap", "05102c0bbc8a0c0056a01cd0e8788fa5b504aee58ac226ab8c0e3ffec8019790"), 
("0.9.1", "RapMap", "8975e5a1ed61ed9354ba776272927545f417ecdce95823e71ba1e7b61de7d380"), ("0.8.2", "RapMap", "1691f4bca2b604f05f36772ae45faf0842ab4809843df770bd10366a5cfd6822"), - ] - - for ver, repo, checksum in resources: + ]: resource( name=repo, - url="https://github.com/COMBINE-lab/{0}/archive/salmon-v{1}.zip".format(repo, ver), + url=f"https://github.com/COMBINE-lab/{repo}/archive/salmon-v{ver}.zip", sha256=checksum, placement="external", expand=False, - when="@{0}".format(ver), + when=f"@{ver}", ) # `%gcc13:` requires `` to be manually included. Fixed upstream, diff --git a/var/spack/repos/spack_repo/builtin/packages/vecgeom/package.py b/var/spack/repos/spack_repo/builtin/packages/vecgeom/package.py index cc6375b0af9..76e64f43266 100644 --- a/var/spack/repos/spack_repo/builtin/packages/vecgeom/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/vecgeom/package.py @@ -7,6 +7,15 @@ from spack.variant import ConditionalVariantValues +def _std_when(values): + for v in values: + if isinstance(v, ConditionalVariantValues): + for c in v: + yield (c.value, c.when) + else: + yield (v, "") + + class Vecgeom(CMakePackage, CudaPackage): """The vectorized geometry library for particle-detector simulation (toolkits).""" @@ -130,15 +139,7 @@ class Vecgeom(CMakePackage, CudaPackage): when="@:1.2.10 ^apple-clang@17:", ) - def std_when(values): - for v in values: - if isinstance(v, ConditionalVariantValues): - for c in v: - yield (c.value, c.when) - else: - yield (v, "") - - for _std, _when in std_when(_cxxstd_values): + for _std, _when in _std_when(_cxxstd_values): depends_on(f"geant4 cxxstd={_std}", when=f"{_when} +geant4 cxxstd={_std}") depends_on(f"root cxxstd={_std}", when=f"{_when} +root cxxstd={_std}") depends_on(f"xerces-c cxxstd={_std}", when=f"{_when} +gdml cxxstd={_std}") From 12a7e8d73a1f9581ae031ca749833519b2e2044f Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 7 May 2025 16:23:34 +0200 Subject: [PATCH 17/57] bohrium: don't create python module (#50342) --- .../repos/spack_repo/builtin/packages/bohrium/package.py | 4 ++-- .../builtin/packages/bohrium/{pyadd.py => pyadd.test} | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename var/spack/repos/spack_repo/builtin/packages/bohrium/{pyadd.py => pyadd.test} (100%) diff --git a/var/spack/repos/spack_repo/builtin/packages/bohrium/package.py b/var/spack/repos/spack_repo/builtin/packages/bohrium/package.py index 956d5286e45..dbccd598bdd 100644 --- a/var/spack/repos/spack_repo/builtin/packages/bohrium/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/bohrium/package.py @@ -253,7 +253,7 @@ def check_install(self): ] # Compile C++ test program - file_cxxadd = join_path(os.path.dirname(self.module.__file__), "cxxadd.cpp") + file_cxxadd = join_path(os.path.dirname(__file__), "cxxadd.cpp") cxx("-o", "test_cxxadd", file_cxxadd, *cxx_flags) test_cxxadd = Executable("./test_cxxadd") @@ -267,6 +267,6 @@ def check_install(self): # Python test (if +python) if spec.satisfies("+python"): - file_pyadd = join_path(os.path.dirname(self.module.__file__), "pyadd.py") + file_pyadd = join_path(os.path.dirname(__file__), "pyadd.test") py_output = python(file_pyadd, output=str, env=test_env) compare_output(py_output, "Success!\n") diff --git a/var/spack/repos/spack_repo/builtin/packages/bohrium/pyadd.py b/var/spack/repos/spack_repo/builtin/packages/bohrium/pyadd.test similarity index 100% rename from var/spack/repos/spack_repo/builtin/packages/bohrium/pyadd.py rename to var/spack/repos/spack_repo/builtin/packages/bohrium/pyadd.test 
From 4f2a1806f99772e8cfa4de307250531e2bee8173 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Wed, 7 May 2025 16:40:41 +0200 Subject: [PATCH 18/57] pyproject.toml: format (#50339) --- pyproject.toml | 210 +++++++++++++++++++++++-------------------------- 1 file changed, 98 insertions(+), 112 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 031ef1ba815..e5f3978308b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,8 @@ [project] -name="spack" -description="The spack package manager" -requires-python=">=3.6" -dependencies=[ - "clingo", - "setuptools", -] +name = "spack" +description = "The spack package manager" +requires-python = ">=3.6" +dependencies = ["clingo", "setuptools"] dynamic = ["version"] [project.scripts] @@ -21,16 +18,13 @@ dev = [ "pytest-xdist", "setuptools", "click", - 'black', + "black", "mypy", "isort", "flake8", "vermin", ] -ci = [ - "pytest-cov", - "codecov[toml]", -] +ci = ["pytest-cov", "codecov[toml]"] [build-system] requires = ["hatchling"] @@ -53,9 +47,7 @@ include = [ ] [tool.hatch.envs.default] -features = [ - "dev", -] +features = ["dev"] [tool.hatch.envs.default.scripts] spack = "./bin/spack" @@ -63,10 +55,7 @@ style = "./bin/spack style" test = "./bin/spack unit-test" [tool.hatch.envs.ci] -features = [ - "dev", - "ci", -] +features = ["dev", "ci"] [tool.ruff] line-length = 99 @@ -83,14 +72,14 @@ ignore = ["E731", "E203"] [tool.ruff.lint.isort] split-on-trailing-comma = false section-order = [ - "future", - "standard-library", - "third-party", - "archspec", - "llnl", - "spack", - "first-party", - "local-folder", + "future", + "standard-library", + "third-party", + "archspec", + "llnl", + "spack", + "first-party", + "local-folder", ] [tool.ruff.lint.isort.sections] @@ -104,8 +93,8 @@ llnl = ["llnl"] [tool.black] line-length = 99 -include = '(lib/spack|var/spack/repos|var/spack/test_repos)/.*\.pyi?$|bin/spack$' -extend-exclude = 'lib/spack/external' +include = "(lib/spack|var/spack/repos|var/spack/test_repos)/.*\\.pyi?$|bin/spack$" +extend-exclude = "lib/spack/external" skip_magic_trailing_comma = true [tool.isort] @@ -115,7 +104,9 @@ sections = [ "FUTURE", "STDLIB", "THIRDPARTY", - "ARCHSPEC", "LLNL", "FIRSTPARTY", + "ARCHSPEC", + "LLNL", + "FIRSTPARTY", "LOCALFOLDER", ] known_first_party = "spack" @@ -129,13 +120,9 @@ honor_noqa = true files = [ "lib/spack/llnl/**/*.py", "lib/spack/spack/**/*.py", - "var/spack/repos/spack_repo/builtin/packages/*/package.py" -] -mypy_path = [ - "lib/spack", - "lib/spack/external", - "var/spack/repos", + "var/spack/repos/spack_repo/builtin/packages/*/package.py", ] +mypy_path = ["lib/spack", "lib/spack/external", "var/spack/repos"] allow_redefinition = true # This and a generated import file allows supporting packages @@ -146,68 +133,68 @@ namespace_packages = true ignore_errors = true ignore_missing_imports = true - [[tool.mypy.overrides]] - module = 'spack.*' - ignore_errors = false - ignore_missing_imports = false +[[tool.mypy.overrides]] +module = "spack.*" +ignore_errors = false +ignore_missing_imports = false - [[tool.mypy.overrides]] - module = 'spack_repo.*' - ignore_errors = false - ignore_missing_imports = false - # we can't do this here, not a module scope option, in spack style instead - # disable_error_code = 'no-redef' +[[tool.mypy.overrides]] +module = "spack_repo.*" +ignore_errors = false +ignore_missing_imports = false +# we can't do this here, not a module scope option, in spack style instead +# disable_error_code = 'no-redef' - [[tool.mypy.overrides]] - module = 'llnl.*' - 
ignore_errors = false - ignore_missing_imports = false +[[tool.mypy.overrides]] +module = "llnl.*" +ignore_errors = false +ignore_missing_imports = false - [[tool.mypy.overrides]] - module = 'spack.test.packages' - ignore_errors = true +[[tool.mypy.overrides]] +module = "spack.test.packages" +ignore_errors = true - # ignore errors in fake import path for packages - [[tool.mypy.overrides]] - module = 'spack.pkg.*' - ignore_errors = true - ignore_missing_imports = true +# ignore errors in fake import path for packages +[[tool.mypy.overrides]] +module = "spack.pkg.*" +ignore_errors = true +ignore_missing_imports = true - # Spack imports a number of external packages, and they *may* require Python 3.8 or - # higher in recent versions. This can cause mypy to fail because we check for 3.7 - # compatibility. We could restrict mypy to run for the oldest supported version (3.7), - # but that means most developers won't be able to run mypy, which means it'll fail - # more in CI. Instead, we exclude these imported packages from mypy checking. - [[tool.mypy.overrides]] - module = [ - 'IPython', - 'altgraph', - 'attr', - 'boto3', - 'botocore', - 'distro', - 'importlib.metadata', - 'jinja2', - 'jsonschema', - 'macholib', - 'markupsafe', - 'numpy', - 'pkg_resources', - 'pyristent', - 'pytest', - 'ruamel.yaml', - 'six', - ] - follow_imports = 'skip' - follow_imports_for_stubs = true +# Spack imports a number of external packages, and they *may* require Python 3.8 or +# higher in recent versions. This can cause mypy to fail because we check for 3.7 +# compatibility. We could restrict mypy to run for the oldest supported version (3.7), +# but that means most developers won't be able to run mypy, which means it'll fail +# more in CI. Instead, we exclude these imported packages from mypy checking. 
+[[tool.mypy.overrides]] +module = [ + "IPython", + "altgraph", + "attr", + "boto3", + "botocore", + "distro", + "importlib.metadata", + "jinja2", + "jsonschema", + "macholib", + "markupsafe", + "numpy", + "pkg_resources", + "pyristent", + "pytest", + "ruamel.yaml", + "six", +] +follow_imports = "skip" +follow_imports_for_stubs = true [tool.pyright] useLibraryCodeForTypes = true reportMissingImports = true reportWildcardImportFromLibrary = false -include = ['lib/spack', 'var/spack/repos', 'var/spack/test_repos'] -ignore = ['lib/spack/external'] -extraPaths = ['lib/spack', 'lib/spack/external'] +include = ["lib/spack", "var/spack/repos", "var/spack/test_repos"] +ignore = ["lib/spack/external"] +extraPaths = ["lib/spack", "lib/spack/external"] [tool.coverage.run] @@ -217,39 +204,39 @@ branch = true source = ["bin", "lib"] data_file = "./tests-coverage/.coverage" omit = [ - 'lib/spack/spack/test/*', - 'lib/spack/docs/*', - 'lib/spack/external/*', - 'share/spack/qa/*', + "lib/spack/spack/test/*", + "lib/spack/docs/*", + "lib/spack/external/*", + "share/spack/qa/*", ] [tool.coverage.report] # Regexes for lines to exclude from consideration exclude_lines = [ - # Have to re-enable the standard pragma - 'pragma: no cover', + # Have to re-enable the standard pragma + "pragma: no cover", - # Don't complain about missing debug-only code: - 'def __repr__', - 'if self\.debug', + # Don't complain about missing debug-only code: + "def __repr__", + "if self\\.debug", - # Don't complain if tests don't hit defensive assertion code: - 'raise AssertionError', - 'raise NotImplementedError', + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", - # Don't complain if non-runnable code isn't run: - 'if 0:', - 'if False:', - 'if __name__ == .__main__.:', + # Don't complain if non-runnable code isn't run: + "if 0:", + "if False:", + "if __name__ == .__main__.:", ] ignore_errors = true [tool.coverage.paths] source = [ - ".", - "/Users/runner/work/spack/spack", - "/System/Volumes/Data/home/runner/work/spack/spack", - "D:\\a\\spack\\spack", + ".", + "/Users/runner/work/spack/spack", + "/System/Volumes/Data/home/runner/work/spack/spack", + "D:\\a\\spack\\spack", ] [tool.coverage.html] @@ -264,8 +251,7 @@ protected-files = ["__init__.py", "README.rst", "vendor.txt"] patches-dir = "lib/spack/external/patches" [tool.vendoring.transformations] -substitute = [ -] +substitute = [] drop = [ # contains unnecessary scripts "bin/", @@ -278,12 +264,12 @@ drop = [ "pkg_resources/extern/", # trim vendored pygments styles and lexers "pygments/styles/[!_]*.py", - '^pygments/lexers/(?!python|__init__|_mapping).*\.py$', + "^pygments/lexers/(?!python|__init__|_mapping).*\\.py$", # trim rich's markdown support "rich/markdown.py", # ruamel.yaml installs unneded files "ruamel.*.pth", - "pvectorc.*.so" + "pvectorc.*.so", ] [tool.vendoring.typing-stubs] From 63fe6fc8932c1601587e04d87623fca1617dc0ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=90=B4=E5=9D=8E?= Date: Wed, 7 May 2025 22:50:34 +0800 Subject: [PATCH 19/57] Update package.py (#50341) --- var/spack/repos/spack_repo/builtin/packages/cutlass/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/cutlass/package.py b/var/spack/repos/spack_repo/builtin/packages/cutlass/package.py index b2196eb41d9..4053e9e3dd5 100644 --- a/var/spack/repos/spack_repo/builtin/packages/cutlass/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/cutlass/package.py @@ -14,6 +14,7 @@ 
class Cutlass(CMakePackage, CudaPackage): version("main", branch="main") version("master", branch="master") + version("3.9.2", sha256="4b97bd6cece9701664eec3a634a1f2f2061d85bf76d843fa5799e1a692b4db0d") version("3.9.1", sha256="7ffed3d7363a485c7d8ade63b3944c0a1e3e4cf2f22007f6d1cc3849c96bdc88") version("3.9.0", sha256="0ea98a598d1f77fade5187ff6ec6d9e6ef3acd267ee68850aae6e800dcbd69c7") version("3.8.0", sha256="14a5e6314f23e41295d8377b6fa6028b35392757a0ee4538a4eacaaa5d7eee37") From 527d723db01da6bf8e53c3a9d8d062bb36633109 Mon Sep 17 00:00:00 2001 From: Sinan Date: Wed, 7 May 2025 09:50:58 -0700 Subject: [PATCH 20/57] package_qgis add new versions (#50328) * package_qgis add new versions * restore deprecated version --------- Co-authored-by: sbulut --- .../spack_repo/builtin/packages/qgis/package.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/qgis/package.py b/var/spack/repos/spack_repo/builtin/packages/qgis/package.py index b2de2802f89..84f65a7c291 100644 --- a/var/spack/repos/spack_repo/builtin/packages/qgis/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/qgis/package.py @@ -17,6 +17,13 @@ class Qgis(CMakePackage): maintainers("adamjstewart", "Sinan81") license("GPL-2.0-or-later") + + # Prefer latest LTR + version( + "3.40.6", + sha256="dd68d39a2a29326031195bed2125e8b0fa7112fe9ee74d5f9850d06b02cef6a8", + preferred=True, + ) version("3.40.1", sha256="53110464c9f5ba5562c437e1563ab36dad2f218e6e7d1c0cfbe5b6effe241c8e") # version 3.36 isn't building right now. version( @@ -24,12 +31,8 @@ class Qgis(CMakePackage): sha256="1b64bc92660bf07edc6b6478fc6a13656149e87d92eabe5c3db9493072506e2c", deprecated=True, ) - # Prefer latest LTR - version( - "3.34.13", - sha256="a8873ca9bae346bae48ef3fe3eed702ef1f06d951201464464a64019302ba50b", - preferred=True, - ) + version("3.34.15", sha256="afb0bed05ffbc7bcb6d27dd1a8644b1e63ac2cb322baa058ff65b848c760efc2") + version("3.34.13", sha256="a8873ca9bae346bae48ef3fe3eed702ef1f06d951201464464a64019302ba50b") version("3.34.4", sha256="7d1c5fafff13f508a9bcf6244c9666f891351deb1ace2aedcc63504f070c5ce4") version("3.34.0", sha256="348a2df4c4520813a319b7f72546b3823e044cacd28646ba189b56a49c7d1b5f") version("3.28.15", sha256="217342ba2232cc8fe5bf8f3671c2b3d6daf5504c33006b67424373e70d568dfa") From eb95390ce7dae4f9cc3e8dd935a6a607094c4c79 Mon Sep 17 00:00:00 2001 From: Sinan Date: Wed, 7 May 2025 10:45:14 -0700 Subject: [PATCH 21/57] package/qscintilla: fix build issue (#50317) * package/qscintilla: fix build issue * add maintainer * package/qscintilla: fix build issue * add maintainer --------- Co-authored-by: sbulut --- .../repos/spack_repo/builtin/packages/qscintilla/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/qscintilla/package.py b/var/spack/repos/spack_repo/builtin/packages/qscintilla/package.py index f9896de217b..66f937f1659 100644 --- a/var/spack/repos/spack_repo/builtin/packages/qscintilla/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/qscintilla/package.py @@ -15,6 +15,8 @@ class Qscintilla(QMakePackage): homepage = "https://www.riverbankcomputing.com/software/qscintilla/intro" url = "https://www.riverbankcomputing.com/static/Downloads/QScintilla/2.12.0/QScintilla_src-2.12.0.tar.gz" + maintainers("Sinan81") + license("GPL-3.0-only") version("2.14.1", sha256="dfe13c6acc9d85dfcba76ccc8061e71a223957a6c02f3c343b30a9d43a4cdd4d") @@ -38,7 +40,7 @@ class Qscintilla(QMakePackage): 
depends_on("py-pyqt5", type=("build", "run"), when="+python ^qt@5") depends_on("python", type=("build", "run"), when="+python") # adter install inquires py-sip variant : so we need to have it - depends_on("py-sip", type="build", when="~python") + depends_on("py-sip", type="build", when="+python") extends("python", when="+python") @@ -118,7 +120,7 @@ def make_qsci_python(self): mkdirp(os.path.join(self.prefix.share.sip, pyqtx)) - sip_build = Executable(self.spec["py-sip"].prefix.bin.join("sip-build")) + sip_build = Executable(self["py-sip"].prefix.bin.join("sip-build")) sip_build( "--target-dir=" + python_platlib, "--qsci-include-dir=" + self.spec.prefix.include, From 9c4207a5514edb0c6ed5aed59ff4197844f66c53 Mon Sep 17 00:00:00 2001 From: Veselin Dobrev Date: Wed, 7 May 2025 12:09:23 -0700 Subject: [PATCH 22/57] mesa: add the latest v24.* and v25.* versions (#47642) * [mesa] Add latest version: 24.2.7 * Fix the llvm build for @18: when libunwind is disabled * [mesa] Updaing to the latest 24.* and 25.* versions * Add libshmfence dependency --------- Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> --- .../builtin/packages/llvm/package.py | 1 + .../builtin/packages/mesa/package.py | 22 ++++++++++++++----- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/llvm/package.py b/var/spack/repos/spack_repo/builtin/packages/llvm/package.py index bed952d1a7a..943b1027606 100644 --- a/var/spack/repos/spack_repo/builtin/packages/llvm/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/llvm/package.py @@ -1212,6 +1212,7 @@ def cmake_args(self): [ define("LLVM_ENABLE_RUNTIMES", runtimes), define("RUNTIMES_CMAKE_ARGS", runtime_cmake_args), + define("LIBCXXABI_USE_LLVM_UNWINDER", not spec.satisfies("libunwind=none")), ] ) diff --git a/var/spack/repos/spack_repo/builtin/packages/mesa/package.py b/var/spack/repos/spack_repo/builtin/packages/mesa/package.py index 7aaeaa7ddec..eb2bfaea561 100644 --- a/var/spack/repos/spack_repo/builtin/packages/mesa/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/mesa/package.py @@ -22,10 +22,12 @@ class Mesa(MesonPackage): version("main", branch="main") version( - "23.3.6", - sha256="cd3d6c60121dea73abbae99d399dc2facaecde1a8c6bd647e6d85410ff4b577b", + "25.0.5", + sha256="c0d245dea0aa4b49f74b3d474b16542e4a8799791cd33d676c69f650ad4378d0", preferred=True, ) + version("24.3.4", sha256="e641ae27191d387599219694560d221b7feaa91c900bcec46bf444218ed66025") + version("23.3.6", sha256="cd3d6c60121dea73abbae99d399dc2facaecde1a8c6bd647e6d85410ff4b577b") version("23.3.3", sha256="518307c0057fa3cee8b58df78be431d4df5aafa7edc60d09278b2d7a0a80f3b4") version("23.2.1", sha256="64de0616fc2d801f929ab1ac2a4f16b3e2783c4309a724c8a259b20df8bbc1cc") version("23.1.9", sha256="295ba27c28146ed09214e8ce79afa1659edf9d142decc3c91f804552d64f7510") @@ -66,6 +68,7 @@ class Mesa(MesonPackage): depends_on("python@:3.11", when="@:23.2", type="build") depends_on("py-packaging", type="build", when="^python@3.12:") depends_on("py-mako@0.8.0:", type="build") + depends_on("py-pyyaml", when="@24.2:", type="build") depends_on("unwind") depends_on("expat") depends_on("zlib-api") @@ -126,6 +129,9 @@ class Mesa(MesonPackage): depends_on("libxt") depends_on("xrandr") depends_on("glproto@1.4.14:") + # In @24.3:, "libxshmfence@1.1:" is needed when: + # (with_dri_platform == 'drm') or (with_any_vk), see mesa's meson.build. 
+ depends_on("libxshmfence@1.1:", when="@24.3:") # version specific issue # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=96130 @@ -198,7 +204,6 @@ def meson_args(self): args = [ "-Dvulkan-drivers=", "-Dgallium-vdpau=disabled", - "-Dgallium-omx=disabled", "-Dgallium-va=disabled", "-Dgallium-xa=disabled", "-Dgallium-nine=false", @@ -209,6 +214,9 @@ def meson_args(self): # gallium-xvmc was removed in @main and @2.23: if self.spec.satisfies("@:22.2"): args.append("-Dgallium-xvmc=disabled") + # the option 'gallium-omx' is present in @24.2.4 and removed in @main + if spec.satisfies("@:24.2.4"): + args.append("-Dgallium-omx=disabled") args_platforms = [] args_gallium_drivers = ["swrast"] @@ -247,10 +255,14 @@ def meson_args(self): if "+egl" in spec: num_frontends += 1 - args.extend(["-Degl=enabled", "-Dgbm=enabled", "-Ddri3=enabled"]) + args.extend(["-Degl=enabled", "-Dgbm=enabled"]) + if spec.satisfies("@:24.2.4"): + args.extend(["-Ddri3=enabled"]) args_platforms.append("surfaceless") else: - args.extend(["-Degl=disabled", "-Dgbm=disabled", "-Ddri3=disabled"]) + args.extend(["-Degl=disabled", "-Dgbm=disabled"]) + if spec.satisfies("@:24.2.4"): + args.extend(["-Ddri3=disabled"]) args.append(opt_bool("+opengl" in spec, "opengl")) args.append(opt_enable("+opengles" in spec, "gles1")) From ef0599b53c433cccef11f770181e4eb894ad188f Mon Sep 17 00:00:00 2001 From: snehring <7978778+snehring@users.noreply.github.com> Date: Wed, 7 May 2025 14:43:37 -0500 Subject: [PATCH 23/57] cryodrgn: adding v3.4.3 (#48804) Signed-off-by: Shane Nehring --- .../spack_repo/builtin/packages/cryodrgn/package.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/cryodrgn/package.py b/var/spack/repos/spack_repo/builtin/packages/cryodrgn/package.py index 87ed39ff51f..8f8daa5da20 100644 --- a/var/spack/repos/spack_repo/builtin/packages/cryodrgn/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/cryodrgn/package.py @@ -15,23 +15,26 @@ class Cryodrgn(PythonPackage): license("GPL-3.0-only", checked_by="A-N-Other") + version("3.4.3", sha256="eadc41190d3c6abe983164db299ebb0d7340840281774eaaea1a12627a80dc10") version("2.3.0", sha256="9dd75967fddfa56d6b2fbfc56933c50c9fb994326112513f223e8296adbf0afc") - depends_on("python@3.7:", type=("build", "run")) + depends_on("python@3.9:3.11", type=("build", "run"), when="@3.4.3:") + depends_on("python@3.7:3.11", type=("build", "run"), when="@2.3.0") depends_on("py-setuptools@61:", type="build") depends_on("py-setuptools-scm@6.2:", type="build") depends_on("py-torch@1:", type=("build", "run")) depends_on("py-pandas@:1", type=("build", "run")) - depends_on("py-numpy", type=("build", "run")) - depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-numpy@:1.26", type=("build", "run")) + depends_on("py-matplotlib@:3.6", type=("build", "run")) depends_on("py-pyyaml", type=("build", "run")) depends_on("py-scipy@1.3.1:", type=("build", "run")) depends_on("py-scikit-learn", type=("build", "run")) depends_on("py-seaborn@:0.11", type=("build", "run")) depends_on("py-cufflinks", type=("build", "run")) depends_on("py-jupyterlab", type=("build", "run")) + depends_on("py-notebook@:6", type=("build", "run"), when="@3.4.3:") depends_on("py-umap-learn", type=("build", "run")) depends_on("py-ipywidgets@:7", type=("build", "run")) depends_on("py-healpy", type=("build", "run")) From 59339be48fc90a02244663a3a3f5ff8581a3d58d Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Wed, 
7 May 2025 23:33:56 -0700
Subject: [PATCH 24/57] test/cmd/find.py: switch to use mock_packages (#50358)

---
 lib/spack/spack/test/cmd/find.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py
index 5c543b15f65..f01ec5666b7 100644
--- a/lib/spack/spack/test/cmd/find.py
+++ b/lib/spack/spack/test/cmd/find.py
@@ -448,7 +448,7 @@ def test_find_loaded(database, working_env):
 
 
 @pytest.mark.regression("37712")
-def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path):
+def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path, mock_packages):
     """Tests that having an active environment with a root spec containing a compiler
     constrained by a version range (i.e. @X.Y rather than the single version @=X.Y)
     doesn't result in an error when invoking "spack find".

From 83f115894b6b58cf3ab5fd0c13eab79a910ababc Mon Sep 17 00:00:00 2001
From: Caetano Melone
Date: Thu, 8 May 2025 02:27:07 -0500
Subject: [PATCH 25/57] glib: add preferred version 2.78.3 (#50356)

Versions of glib above 2.78.3 don't build
(https://github.com/spack/spack/issues/49358). Until this is fixed, mark a
version that is confirmed to build as preferred, per
https://github.com/spack/spack/issues/49358#issuecomment-2706251681.
---
 var/spack/repos/spack_repo/builtin/packages/glib/package.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/var/spack/repos/spack_repo/builtin/packages/glib/package.py b/var/spack/repos/spack_repo/builtin/packages/glib/package.py
index 67af4ae5311..f4dd15228f9 100644
--- a/var/spack/repos/spack_repo/builtin/packages/glib/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/glib/package.py
@@ -30,7 +30,11 @@ class Glib(MesonPackage):
     # Even minor versions are stable, odd minor versions are development, only add even numbers
     version("2.82.5", sha256="05c2031f9bdf6b5aba7a06ca84f0b4aced28b19bf1b50c6ab25cc675277cbc3f")
     version("2.82.2", sha256="ab45f5a323048b1659ee0fbda5cecd94b099ab3e4b9abf26ae06aeb3e781fd63")
-    version("2.78.3", sha256="609801dd373796e515972bf95fc0b2daa44545481ee2f465c4f204d224b2bc21")
+    version(
+        "2.78.3",
+        sha256="609801dd373796e515972bf95fc0b2daa44545481ee2f465c4f204d224b2bc21",
+        preferred=True,
+    )
     version("2.78.0", sha256="44eaab8b720877ce303c5540b657b126f12dc94972d9880b52959f43fb537b30")
     version("2.76.6", sha256="1136ae6987dcbb64e0be3197a80190520f7acab81e2bfb937dc85c11c8aa9f04")
     version("2.76.4", sha256="5a5a191c96836e166a7771f7ea6ca2b0069c603c7da3cba1cd38d1694a395dda")

From 98c08ce5c69bd6d69689f47d6f8e1945a44cb755 Mon Sep 17 00:00:00 2001
From: Harmen Stoppels
Date: Thu, 8 May 2025 13:42:20 +0200
Subject: [PATCH 26/57] repo.py: enable search paths when spack.repo.PATH is assigned (#50370)

Fixes a bug where `custom_repo.get_pkg_class("foo")` failed while executing
`import spack_repo.builtin`, even when the builtin repo was configured
globally. Instead of assigning `spack.repo.PATH` directly, the new
`spack.repo.enable_repo` function both assigns and enables the repo, which
also updates the Python module search paths.
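
A minimal sketch of the call-site change this implies (mirroring the
`lib/spack/spack/main.py` hunk in the diff below):

    import spack.config
    import spack.repo

    # before: rebinding the global left sys.path untouched, so package
    # modules under the configured repos could not be imported
    spack.repo.PATH = spack.repo.create(spack.config.CONFIG)

    # after: enable_repo() assigns spack.repo.PATH *and* prepends each
    # repo's python path to sys.path, so `import spack_repo.builtin` works
    spack.repo.enable_repo(spack.repo.create(spack.config.CONFIG))
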
--- lib/spack/spack/main.py | 3 +- lib/spack/spack/repo.py | 101 ++++++++++---------------- lib/spack/spack/subprocess_context.py | 3 +- lib/spack/spack/test/cmd/ci.py | 35 ++++----- lib/spack/spack/test/repo.py | 1 - share/spack/qa/run-unit-tests | 4 +- 6 files changed, 60 insertions(+), 87 deletions(-) diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 458ef7d2671..d55f5ec9c4c 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -550,7 +550,6 @@ def setup_main_options(args): spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict( [(key, [spack.paths.mock_packages_path])] ) - spack.repo.PATH = spack.repo.create(spack.config.CONFIG) # If the user asked for it, don't check ssl certs. if args.insecure: @@ -561,6 +560,8 @@ def setup_main_options(args): for config_var in args.config_vars or []: spack.config.add(fullpath=config_var, scope="command_line") + spack.repo.enable_repo(spack.repo.create(spack.config.CONFIG)) + # On Windows10 console handling for ASCI/VT100 sequences is not # on by default. Turn on before we try to write to console # with color diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index e82f59e648a..580d1ab2412 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -91,29 +91,8 @@ class ReposFinder: Returns a loader based on the inspection of the current repository list. """ - def __init__(self): - self._repo_init = _path - self._repo: Optional[RepoType] = None - - @property - def current_repository(self): - if self._repo is None: - self._repo = self._repo_init() - return self._repo - - @current_repository.setter - def current_repository(self, value): - self._repo = value - - @contextlib.contextmanager - def switch_repo(self, substitute: "RepoType"): - """Switch the current repository list for the duration of the context manager.""" - old = self._repo - try: - self._repo = substitute - yield - finally: - self._repo = old + #: The current list of repositories. + repo_path: "RepoPath" def find_spec(self, fullname, python_path, target=None): # "target" is not None only when calling importlib.reload() @@ -134,14 +113,11 @@ def compute_loader(self, fullname: str): namespace, dot, module_name = fullname.rpartition(".") # If it's a module in some repo, or if it is the repo's namespace, let the repo handle it. - current_repo = self.current_repository - is_repo_path = isinstance(current_repo, RepoPath) - if is_repo_path: - repos = current_repo.repos - else: - repos = [current_repo] - for repo in repos: + if not hasattr(self, "repo_path"): + return None + + for repo in self.repo_path.repos: # We are using the namespace of the repo and the repo contains the package if namespace == repo.full_namespace: # With 2 nested conditionals we can call "repo.real_name" only once @@ -156,9 +132,7 @@ def compute_loader(self, fullname: str): # No repo provides the namespace, but it is a valid prefix of # something in the RepoPath. 
- if is_repo_path and current_repo.by_namespace.is_prefix( - fullname[len(PKG_MODULE_PREFIX_V1) :] - ): + if self.repo_path.by_namespace.is_prefix(fullname[len(PKG_MODULE_PREFIX_V1) :]): return SpackNamespaceLoader() return None @@ -662,7 +636,6 @@ def __init__( if isinstance(repo, str): assert cache is not None, "cache must hold a value, when repo is a string" repo = Repo(repo, cache=cache, overrides=overrides) - repo.finder(self) self.put_last(repo) except RepoError as e: tty.warn( @@ -672,6 +645,20 @@ def __init__( f" spack repo rm {repo}", ) + def enable(self) -> None: + """Set the relevant search paths for package module loading""" + REPOS_FINDER.repo_path = self + for p in reversed(self.python_paths()): + if p not in sys.path: + sys.path.insert(0, p) + + def disable(self) -> None: + """Disable the search paths for package module loading""" + del REPOS_FINDER.repo_path + for p in self.python_paths(): + if p in sys.path: + sys.path.remove(p) + def ensure_unwrapped(self) -> "RepoPath": """Ensure we unwrap this object from any dynamic wrapper (like Singleton)""" return self @@ -845,9 +832,6 @@ def python_paths(self) -> List[str]: def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]: """Find a class for the spec's package and return the class object.""" - for p in self.python_paths(): - if p not in sys.path: - sys.path.insert(0, p) return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name) @autospec @@ -1094,9 +1078,6 @@ def check(condition, msg): # Class attribute overrides by package name self.overrides = overrides or {} - # Optional reference to a RepoPath to influence module import from spack.pkg - self._finder: Optional[RepoPath] = None - # Maps that goes from package name to corresponding file stat self._fast_package_checker: Optional[FastPackageChecker] = None @@ -1108,9 +1089,6 @@ def check(condition, msg): def package_api_str(self) -> str: return f"v{self.package_api[0]}.{self.package_api[1]}" - def finder(self, value: RepoPath) -> None: - self._finder = value - def real_name(self, import_name: str) -> Optional[str]: """Allow users to import Spack packages using Python identifiers. @@ -1363,11 +1341,9 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"] fullname += ".package" class_name = nm.pkg_name_to_class_name(pkg_name) - if self.python_path and self.python_path not in sys.path: - sys.path.insert(0, self.python_path) + try: - with REPOS_FINDER.switch_repo(self._finder or self): - module = importlib.import_module(fullname) + module = importlib.import_module(fullname) except ImportError as e: raise UnknownPackageError(fullname) from e except Exception as e: @@ -1560,12 +1536,6 @@ def create_or_construct( return from_path(repo_yaml_dir) -def _path(configuration=None): - """Get the singleton RepoPath instance for Spack.""" - configuration = configuration or spack.config.CONFIG - return create(configuration=configuration) - - def create(configuration: spack.config.Configuration) -> RepoPath: """Create a RepoPath from a configuration object. @@ -1588,8 +1558,10 @@ def create(configuration: spack.config.Configuration) -> RepoPath: return RepoPath(*repo_dirs, cache=spack.caches.MISC_CACHE, overrides=overrides) -#: Singleton repo path instance -PATH: RepoPath = llnl.util.lang.Singleton(_path) # type: ignore +#: Global package repository instance. 
+PATH: RepoPath = llnl.util.lang.Singleton( + lambda: create(configuration=spack.config.CONFIG) +) # type: ignore[assignment] # Add the finder to sys.meta_path REPOS_FINDER = ReposFinder() @@ -1615,20 +1587,27 @@ def use_repositories( Returns: Corresponding RepoPath object """ - global PATH paths = [getattr(x, "root", x) for x in paths_and_repos] - scope_name = "use-repo-{}".format(uuid.uuid4()) + scope_name = f"use-repo-{uuid.uuid4()}" repos_key = "repos:" if override else "repos" spack.config.CONFIG.push_scope( spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths}) ) - PATH, saved = create(configuration=spack.config.CONFIG), PATH + old_repo, new_repo = PATH, create(configuration=spack.config.CONFIG) + old_repo.disable() + enable_repo(new_repo) try: - with REPOS_FINDER.switch_repo(PATH): # type: ignore - yield PATH + yield new_repo finally: spack.config.CONFIG.remove_scope(scope_name=scope_name) - PATH = saved + enable_repo(old_repo) + + +def enable_repo(repo_path: RepoPath) -> None: + """Set the global package repository and make it available in module search paths.""" + global PATH + PATH = repo_path + PATH.enable() class MockRepositoryBuilder: diff --git a/lib/spack/spack/subprocess_context.py b/lib/spack/spack/subprocess_context.py index 1463cfab7a3..02c135f3469 100644 --- a/lib/spack/spack/subprocess_context.py +++ b/lib/spack/spack/subprocess_context.py @@ -106,7 +106,7 @@ def __init__(self): def restore(self): spack.config.CONFIG = self.config - spack.repo.PATH = spack.repo.create(self.config) + spack.repo.enable_repo(spack.repo.create(self.config)) spack.platforms.host = self.platform spack.store.STORE = self.store self.test_patches.restore() @@ -129,7 +129,6 @@ def restore(self): def store_patches(): - global patches module_patches = list() class_patches = list() if not patches: diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 07c3d7bdf04..d2894a08a79 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -2028,13 +2028,12 @@ def test_ci_verify_versions_valid( tmpdir, ): repo, _, commits = mock_git_package_changes - spack.repo.PATH.put_first(repo) + with spack.repo.use_repositories(repo): + monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo) - monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo) - - out = ci_cmd("verify-versions", commits[-1], commits[-3]) - assert "Validated diff-test@2.1.5" in out - assert "Validated diff-test@2.1.6" in out + out = ci_cmd("verify-versions", commits[-1], commits[-3]) + assert "Validated diff-test@2.1.5" in out + assert "Validated diff-test@2.1.6" in out def test_ci_verify_versions_standard_invalid( @@ -2045,23 +2044,21 @@ def test_ci_verify_versions_standard_invalid( verify_git_versions_invalid, ): repo, _, commits = mock_git_package_changes - spack.repo.PATH.put_first(repo) + with spack.repo.use_repositories(repo): + monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo) - monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo) - - out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False) - assert "Invalid checksum found diff-test@2.1.5" in out - assert "Invalid commit for diff-test@2.1.6" in out + out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False) + assert "Invalid checksum found diff-test@2.1.5" in out + assert "Invalid commit for diff-test@2.1.6" in out def test_ci_verify_versions_manual_package(monkeypatch, mock_packages, mock_git_package_changes): repo, _, commits =
mock_git_package_changes - spack.repo.PATH.put_first(repo) + with spack.repo.use_repositories(repo): + monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo) - monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo) + pkg_class = spack.spec.Spec("diff-test").package_class + monkeypatch.setattr(pkg_class, "manual_download", True) - pkg_class = spack.spec.Spec("diff-test").package_class - monkeypatch.setattr(pkg_class, "manual_download", True) - - out = ci_cmd("verify-versions", commits[-1], commits[-2]) - assert "Skipping manual download package: diff-test" in out + out = ci_cmd("verify-versions", commits[-1], commits[-2]) + assert "Skipping manual download package: diff-test" in out diff --git a/lib/spack/spack/test/repo.py b/lib/spack/spack/test/repo.py index eb89ae9c1cb..13a3cb63de1 100644 --- a/lib/spack/spack/test/repo.py +++ b/lib/spack/spack/test/repo.py @@ -312,7 +312,6 @@ class TestRepoPath: def test_creation_from_string(self, mock_test_cache): repo = spack.repo.RepoPath(spack.paths.mock_packages_path, cache=mock_test_cache) assert len(repo.repos) == 1 - assert repo.repos[0]._finder is repo assert repo.by_namespace["builtin.mock"] is repo.repos[0] def test_get_repo(self, mock_test_cache): diff --git a/share/spack/qa/run-unit-tests b/share/spack/qa/run-unit-tests index 66cca0e2a7c..60775a82862 100755 --- a/share/spack/qa/run-unit-tests +++ b/share/spack/qa/run-unit-tests @@ -40,9 +40,7 @@ spack -p --lines 20 spec mpileaks%gcc $coverage_run $(which spack) bootstrap status --dev --optional # Check that we can import Spack packages directly as a first import -# TODO: this check is disabled, because sys.path is only updated once -# spack.repo.PATH.get_pkg_class is called. -# $coverage_run $(which spack) python -c "import spack.pkg.builtin.mpileaks; repr(spack.pkg.builtin.mpileaks.Mpileaks)" +$coverage_run $(which spack) python -c "from spack_repo.builtin.packages.mpileaks.package import Mpileaks" #----------------------------------------------------------- # Run unit tests with code coverage From 9fa2bb375ce4ca3507a46ff30f10d10fa64a77e9 Mon Sep 17 00:00:00 2001 From: Mikael Simberg Date: Thu, 8 May 2025 13:54:54 +0200 Subject: [PATCH 27/57] fmt: add v11.2.0 (#50343) --- var/spack/repos/spack_repo/builtin/packages/fmt/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/fmt/package.py b/var/spack/repos/spack_repo/builtin/packages/fmt/package.py index c1737e406d7..7c269b55efb 100644 --- a/var/spack/repos/spack_repo/builtin/packages/fmt/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/fmt/package.py @@ -17,6 +17,7 @@ class Fmt(CMakePackage): license("MIT") + version("11.2.0", sha256="203eb4e8aa0d746c62d8f903df58e0419e3751591bb53ff971096eaa0ebd4ec3") version("11.1.4", sha256="49b039601196e1a765e81c5c9a05a61ed3d33f23b3961323d7322e4fe213d3e6") version("11.1.3", sha256="7df2fd3426b18d552840c071c977dc891efe274051d2e7c47e2c83c3918ba6df") version("11.1.2", sha256="ef54df1d4ba28519e31bf179f6a4fb5851d684c328ca051ce5da1b52bf8b1641") From 6f2393a34514a9930838b6c2981f627e853804e7 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 8 May 2025 18:31:11 +0200 Subject: [PATCH 28/57] builtin: delete spack.store import (#50383) --- .../builtin/packages/cbtf_krell/package.py | 49 ------------------- .../builtin/packages/openspeedshop/package.py | 41 ---------------- .../packages/openspeedshop_utils/package.py | 40 --------------- 3 files changed, 130 deletions(-) diff --git 
a/var/spack/repos/spack_repo/builtin/packages/cbtf_krell/package.py b/var/spack/repos/spack_repo/builtin/packages/cbtf_krell/package.py index aa9e001c046..303c8e745e7 100644 --- a/var/spack/repos/spack_repo/builtin/packages/cbtf_krell/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/cbtf_krell/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import spack.store from spack.package import * @@ -41,11 +40,6 @@ class CbtfKrell(CMakePackage): description="The build type to build", values=("Debug", "Release", "RelWithDebInfo"), ) - variant( - "crayfe", - default=False, - description="build only the FE tool using the runtime_dir to point to target build.", - ) # Fix build errors with gcc >= 10 patch( @@ -147,44 +141,6 @@ def set_mpi_cmake_options(self, spec, cmake_options): cmake_options.extend(mpi_options) - def set_cray_login_node_cmake_options(self, spec, cmake_options): - # Appends to cmake_options the options that will enable - # the appropriate Cray login node libraries - - cray_login_node_options = [] - rt_platform = "cray" - # How do we get the compute node (CNL) cbtf package - # install directory path. spec['cbtf'].prefix is the - # login node path for this build, as we are building - # the login node components with this spack invocation. We - # need these paths to be the ones created in the CNL - # spack invocation. - be_cbtf = spack.store.db.query_one("cbtf arch=cray-CNL-haswell") - be_cbtfk = spack.store.db.query_one("cbtf-krell arch=cray-CNL-haswell") - be_papi = spack.store.db.query_one("papi arch=cray-CNL-haswell") - be_boost = spack.store.db.query_one("boost arch=cray-CNL-haswell") - be_mont = spack.store.db.query_one("libmonitor arch=cray-CNL-haswell") - be_unw = spack.store.db.query_one("libunwind arch=cray-CNL-haswell") - be_xer = spack.store.db.query_one("xerces-c arch=cray-CNL-haswell") - be_dyn = spack.store.db.query_one("dyninst arch=cray-CNL-haswell") - be_mrnet = spack.store.db.query_one("mrnet arch=cray-CNL-haswell") - - cray_login_node_options.append("-DCN_RUNTIME_PLATFORM=%s" % rt_platform) - - # Use install directories as CMAKE args for the building - # of login cbtf-krell - cray_login_node_options.append("-DCBTF_CN_RUNTIME_DIR=%s" % be_cbtf.prefix) - cray_login_node_options.append("-DCBTF_KRELL_CN_RUNTIME_DIR=%s" % be_cbtfk.prefix) - cray_login_node_options.append("-DPAPI_CN_RUNTIME_DIR=%s" % be_papi.prefix) - cray_login_node_options.append("-DBOOST_CN_RUNTIME_DIR=%s" % be_boost.prefix) - cray_login_node_options.append("-DLIBMONITOR_CN_RUNTIME_DIR=%s" % be_mont.prefix) - cray_login_node_options.append("-DLIBUNWIND_CN_RUNTIME_DIR=%s" % be_unw.prefix) - cray_login_node_options.append("-DXERCESC_CN_RUNTIME_DIR=%s" % be_xer.prefix) - cray_login_node_options.append("-DDYNINST_CN_RUNTIME_DIR=%s" % be_dyn.prefix) - cray_login_node_options.append("-DMRNET_CN_RUNTIME_DIR=%s" % be_mrnet.prefix) - - cmake_options.extend(cray_login_node_options) - def cmake_args(self): spec = self.spec @@ -218,11 +174,6 @@ def cmake_args(self): # Add any MPI implementations coming from variant settings self.set_mpi_cmake_options(spec, cmake_args) - if self.spec.satisfies("+crayfe"): - # We need to build target/compute node components/libraries first - # then pass those libraries to the cbtf-krell login node build - self.set_cray_login_node_cmake_options(spec, cmake_args) - return cmake_args def setup_run_environment(self, env: EnvironmentModifications) -> None: diff --git a/var/spack/repos/spack_repo/builtin/packages/openspeedshop/package.py 
b/var/spack/repos/spack_repo/builtin/packages/openspeedshop/package.py index 6c9d6df6372..58356140a16 100644 --- a/var/spack/repos/spack_repo/builtin/packages/openspeedshop/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/openspeedshop/package.py @@ -4,7 +4,6 @@ import os -import spack.store from spack.package import * from ..boost.package import Boost @@ -38,11 +37,6 @@ class Openspeedshop(CMakePackage): variant( "runtime", default=False, description="build only the runtime libraries and collectors." ) - variant( - "crayfe", - default=False, - description="build only the FE tool using the runtime_dir to point to target build.", - ) variant("cuda", default=False, description="build with cuda packages included.") variant( @@ -123,11 +117,6 @@ class Openspeedshop(CMakePackage): depends_on("cbtf-krell@develop", when="@develop", type=("build", "link", "run")) depends_on("cbtf-krell@1.9.3:9999", when="@2.4.0:9999", type=("build", "link", "run")) - depends_on("cbtf-krell@develop+crayfe", when="@develop+crayfe", type=("build", "link", "run")) - depends_on( - "cbtf-krell@1.9.3:9999+crayfe", when="@2.4.0:9999+crayfe", type=("build", "link", "run") - ) - depends_on("cbtf-krell@develop+mpich2", when="@develop+mpich2", type=("build", "link", "run")) depends_on( "cbtf-krell@1.9.3:9999+mpich2", when="@2.4.0:9999+mpich2", type=("build", "link", "run") @@ -164,29 +153,6 @@ class Openspeedshop(CMakePackage): build_directory = "build_openspeedshop" - def set_cray_login_node_cmake_options(self, spec, cmake_options): - # Appends to cmake_options the options that will enable the appropriate - # Cray login node libraries - - cray_login_node_options = [] - rt_platform = "cray" - - # How do we get the compute node (CNL) cbtf package install - # directory path? - # spec['cbtf'].prefix is the login node value for this build, as - # we only get here when building the login node components and - # that is all that is known to spack. - store = spack.store - be_ck = store.db.query_one("cbtf-krell arch=cray-CNL-haswell") - - # Equivalent to install-tool cmake arg: - # '-DCBTF_KRELL_CN_RUNTIME_DIR=%s' - # % /cbtf_v2.4.0.release/compute) - cray_login_node_options.append("-DCBTF_KRELL_CN_RUNTIME_DIR=%s" % be_ck.prefix) - cray_login_node_options.append("-DRUNTIME_PLATFORM=%s" % rt_platform) - - cmake_options.extend(cray_login_node_options) - def cmake_args(self): spec = self.spec @@ -240,13 +206,6 @@ def cmake_args(self): if spec.satisfies("+cuda"): cmake_args.extend(["-DCBTF_ARGONAVIS_DIR=%s" % spec["cbtf-argonavis"].prefix]) - if spec.satisfies("+crayfe"): - # We need to build target/compute node - # components/libraries first then pass - # those libraries to the openspeedshop - # login node build - self.set_cray_login_node_cmake_options(spec, cmake_args) - return cmake_args def set_defaultbase_cmake_options(self, spec, cmake_options): diff --git a/var/spack/repos/spack_repo/builtin/packages/openspeedshop_utils/package.py b/var/spack/repos/spack_repo/builtin/packages/openspeedshop_utils/package.py index 77cc536a056..95245ac3516 100644 --- a/var/spack/repos/spack_repo/builtin/packages/openspeedshop_utils/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/openspeedshop_utils/package.py @@ -4,7 +4,6 @@ import os -import spack.store from spack.package import * from ..boost.package import Boost @@ -41,11 +40,6 @@ class OpenspeedshopUtils(CMakePackage): variant( "runtime", default=False, description="build only the runtime libraries and collectors." 
) - variant( - "crayfe", - default=False, - description="build only the FE tool using the runtime_dir to point to target build.", - ) variant("cuda", default=False, description="build with cuda packages included.") variant( @@ -117,11 +111,6 @@ class OpenspeedshopUtils(CMakePackage): depends_on("cbtf-krell@develop", when="@develop", type=("build", "link", "run")) depends_on("cbtf-krell@1.9.3:9999", when="@2.4.0:9999", type=("build", "link", "run")) - depends_on("cbtf-krell@develop+crayfe", when="@develop+crayfe", type=("build", "link", "run")) - depends_on( - "cbtf-krell@1.9.3:9999+crayfe", when="@2.4.0:9999+crayfe", type=("build", "link", "run") - ) - depends_on("cbtf-krell@develop+mpich2", when="@develop+mpich2", type=("build", "link", "run")) depends_on( "cbtf-krell@1.9.3:9999+mpich2", when="@2.4.0:9999+mpich2", type=("build", "link", "run") @@ -158,28 +147,6 @@ class OpenspeedshopUtils(CMakePackage): build_directory = "build_openspeedshop" - def set_cray_login_node_cmake_options(self, spec, cmake_options): - # Appends to cmake_options the options that will enable the appropriate - # Cray login node libraries - - cray_login_node_options = [] - rt_platform = "cray" - - # How do we get the compute node (CNL) cbtf package install - # directory path? - # spec['cbtf'].prefix is the login node value for this build, as - # we only get here when building the login node components and - # that is all that is known to spack. - be_ck = spack.store.db.query_one("cbtf-krell arch=cray-CNL-haswell") - - # Equivalent to install-tool cmake arg: - # '-DCBTF_KRELL_CN_RUNTIME_DIR=%s' - # % /cbtf_v2.4.0elease/compute) - cray_login_node_options.append("-DCBTF_KRELL_CN_RUNTIME_DIR=%s" % be_ck.prefix) - cray_login_node_options.append("-DRUNTIME_PLATFORM=%s" % rt_platform) - - cmake_options.extend(cray_login_node_options) - def cmake_args(self): # Appends base options to cmake_args spec = self.spec @@ -220,13 +187,6 @@ def cmake_args(self): ] ) - if spec.satisfies("+crayfe"): - # We need to build target/compute node - # components/libraries first then pass - # those libraries to the openspeedshop - # login node build - self.set_cray_login_node_cmake_options(spec, cmake_args) - cmake_args.extend(["-DBUILD_QT3_GUI=FALSE"]) return cmake_args From a95fa26857d1826285e8318854d83a9cff88c259 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 8 May 2025 18:37:45 +0200 Subject: [PATCH 29/57] docs/comments: fix typo with wildcard import (#50379) --- lib/spack/docs/packaging_guide.rst | 6 +++--- lib/spack/spack/cmd/style.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 055f3479270..9d28d820285 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -497,7 +497,7 @@ extends Spack's ``Package`` class. For example, here is .. code-block:: python :linenos: - from spack import * + from spack.package import * class Libelf(Package): """ ... description ... """ @@ -1089,7 +1089,7 @@ You've already seen the ``homepage`` and ``url`` package attributes: .. code-block:: python :linenos: - from spack import * + from spack.package import * class Mpich(Package): @@ -6183,7 +6183,7 @@ running: .. code-block:: python - from spack import * + from spack.package import * This is already part of the boilerplate for packages created with ``spack create``. 
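For reference, a minimal sketch of the boilerplate the corrected import produces. The ``Example`` package name, URL, and checksum below are hypothetical placeholders, while ``Package``, ``version``, ``depends_on``, and ``mkdirp`` are names the ``spack.package`` wildcard really exports:

    from spack.package import *


    class Example(Package):
        """Hypothetical package illustrating the documented boilerplate."""

        homepage = "https://example.com"
        url = "https://example.com/example-1.0.tar.gz"

        version("1.0", sha256="0" * 64)  # placeholder checksum
        depends_on("cmake", type="build")

        def install(self, spec, prefix):
            # a real package would configure, build, and install into prefix here
            mkdirp(prefix.bin)
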
diff --git a/lib/spack/spack/cmd/style.py b/lib/spack/spack/cmd/style.py index ce870f267f6..d6a5e91a15a 100644 --- a/lib/spack/spack/cmd/style.py +++ b/lib/spack/spack/cmd/style.py @@ -56,7 +56,7 @@ def is_package(f): """Whether flake8 should consider a file as a core file or a package. We run flake8 with different exceptions for the core and for - packages, since we allow `from spack import *` and poking globals + packages, since we allow `from spack.package import *` and poking globals into packages. """ return f.startswith("var/spack/") and f.endswith("package.py") From 7b93d01a681359a6ada6745cf1f63f08efb6d353 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 8 May 2025 18:38:18 +0200 Subject: [PATCH 30/57] builtin: remove various redundant wildcard imports (#50380) --- var/spack/repos/spack_repo/builtin/packages/paraconf/package.py | 1 - var/spack/repos/spack_repo/builtin/packages/pdi/package.py | 1 - .../spack_repo/builtin/packages/pdiplugin_decl_hdf5/package.py | 1 - .../spack_repo/builtin/packages/pdiplugin_decl_netcdf/package.py | 1 - .../repos/spack_repo/builtin/packages/pdiplugin_mpi/package.py | 1 - .../spack_repo/builtin/packages/pdiplugin_pycall/package.py | 1 - .../spack_repo/builtin/packages/pdiplugin_serialize/package.py | 1 - .../spack_repo/builtin/packages/pdiplugin_set_value/package.py | 1 - .../repos/spack_repo/builtin/packages/pdiplugin_trace/package.py | 1 - .../spack_repo/builtin/packages/pdiplugin_user_code/package.py | 1 - var/spack/repos/spack_repo/builtin/packages/zpp/package.py | 1 - 11 files changed, 11 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/paraconf/package.py b/var/spack/repos/spack_repo/builtin/packages/paraconf/package.py index 3cab4c098e4..93ca6aa8c2d 100644 --- a/var/spack/repos/spack_repo/builtin/packages/paraconf/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/paraconf/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * diff --git a/var/spack/repos/spack_repo/builtin/packages/pdi/package.py b/var/spack/repos/spack_repo/builtin/packages/pdi/package.py index 06bf5a06b42..07aebed5cfd 100644 --- a/var/spack/repos/spack_repo/builtin/packages/pdi/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/pdi/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.hooks.sbang import sbang_shebang_line from spack.package import * diff --git a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_decl_hdf5/package.py b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_decl_hdf5/package.py index 94c748d0671..d8976845ec8 100644 --- a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_decl_hdf5/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_decl_hdf5/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * from ..pdi.package import Pdi diff --git a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_decl_netcdf/package.py b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_decl_netcdf/package.py index 808fbceb0ae..43f4a8bbe71 100644 --- a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_decl_netcdf/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_decl_netcdf/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * from ..pdi.package import Pdi diff --git 
a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_mpi/package.py b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_mpi/package.py index 2433101d0f1..f10b918243b 100644 --- a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_mpi/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_mpi/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * from ..pdi.package import Pdi diff --git a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_pycall/package.py b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_pycall/package.py index 46a8c537bab..d994c7282f5 100644 --- a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_pycall/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_pycall/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * from ..pdi.package import Pdi diff --git a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_serialize/package.py b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_serialize/package.py index c105b17c157..c026ccf0fa7 100644 --- a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_serialize/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_serialize/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * from ..pdi.package import Pdi diff --git a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_set_value/package.py b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_set_value/package.py index e30bbd18342..4c77dbf0fb0 100644 --- a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_set_value/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_set_value/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * from ..pdi.package import Pdi diff --git a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_trace/package.py b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_trace/package.py index ba52ba9fec5..b25e2a4fa24 100644 --- a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_trace/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_trace/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * from ..pdi.package import Pdi diff --git a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_user_code/package.py b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_user_code/package.py index 4a5ed153606..0ce9b828a4c 100644 --- a/var/spack/repos/spack_repo/builtin/packages/pdiplugin_user_code/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/pdiplugin_user_code/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * from ..pdi.package import Pdi diff --git a/var/spack/repos/spack_repo/builtin/packages/zpp/package.py b/var/spack/repos/spack_repo/builtin/packages/zpp/package.py index f829a87f576..8197daec20a 100644 --- a/var/spack/repos/spack_repo/builtin/packages/zpp/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/zpp/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * from spack.package import * From 7bbf581169996514f2de319ed3d9e9e7740bc311 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 8 May 2025 18:42:06 +0200 Subject: [PATCH 
31/57] singularity-eos: remove conditional depends_on (#50381) --- .../spack_repo/builtin/packages/singularity_eos/package.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/singularity_eos/package.py b/var/spack/repos/spack_repo/builtin/packages/singularity_eos/package.py index ffd264c3254..d42f2d08a65 100644 --- a/var/spack/repos/spack_repo/builtin/packages/singularity_eos/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/singularity_eos/package.py @@ -4,7 +4,6 @@ import os -import spack from spack.package import * @@ -115,9 +114,7 @@ class SingularityEos(CMakePackage, CudaPackage): # specfic specs when using GPU/cuda offloading depends_on("kokkos +wrapper+cuda_lambda", when="+cuda+kokkos") - # fix for older spacks - if Version(spack.spack_version) >= Version("0.17"): - depends_on("kokkos-kernels ~shared", when="+kokkos-kernels") + depends_on("kokkos-kernels ~shared", when="+kokkos-kernels") for _flag in list(CudaPackage.cuda_arch_values): depends_on("kokkos cuda_arch=" + _flag, when="+cuda+kokkos cuda_arch=" + _flag) From b0b316c6462d740565b9be62dcd796870b84fcb9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 8 May 2025 18:45:09 +0200 Subject: [PATCH 32/57] builtin: add a few missing __init__.py (#50374) --- lib/spack/spack/audit.py | 2 +- lib/spack/spack/repo.py | 2 ++ var/spack/repos/spack_repo/builtin/__init__.py | 3 +++ var/spack/repos/spack_repo/builtin/packages/__init__.py | 3 +++ 4 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/spack_repo/builtin/__init__.py create mode 100644 var/spack/repos/spack_repo/builtin/packages/__init__.py diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py index a93e151636e..68ed19602b2 100644 --- a/lib/spack/spack/audit.py +++ b/lib/spack/spack/audit.py @@ -350,7 +350,7 @@ def _ensure_no_folders_without_package_py(error_cls): for repository in spack.repo.PATH.repos: missing = [] for entry in os.scandir(repository.packages_path): - if not entry.is_dir(): + if not entry.is_dir() or entry.name == "__pycache__": continue package_py = pathlib.Path(entry.path) / spack.repo.package_file_name if not package_py.exists(): diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 580d1ab2412..394aaf6b404 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -232,6 +232,8 @@ def get_all_package_diffs(type: str, repo: "Repo", rev1="HEAD^1", rev2="HEAD") - changed: Set[str] = set() for path in lines: dir_name, _, _ = path.partition("/") + if not nm.valid_module_name(dir_name, repo.package_api): + continue pkg_name = nm.pkg_dir_to_pkg_name(dir_name, repo.package_api) if pkg_name not in added and pkg_name not in removed: changed.add(pkg_name) diff --git a/var/spack/repos/spack_repo/builtin/__init__.py b/var/spack/repos/spack_repo/builtin/__init__.py new file mode 100644 index 00000000000..c4ecc87fb8a --- /dev/null +++ b/var/spack/repos/spack_repo/builtin/__init__.py @@ -0,0 +1,3 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/var/spack/repos/spack_repo/builtin/packages/__init__.py b/var/spack/repos/spack_repo/builtin/packages/__init__.py new file mode 100644 index 00000000000..c4ecc87fb8a --- /dev/null +++ b/var/spack/repos/spack_repo/builtin/packages/__init__.py @@ -0,0 +1,3 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) From cd75e52ba2709e439a51cdad6970e49be99ea5ae Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 8 May 2025 18:52:35 +0200 Subject: [PATCH 33/57] yaml_cpp: do not import spack.spec (#50382) --- .../builtin/packages/yaml_cpp/package.py | 21 ++++++------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/yaml_cpp/package.py b/var/spack/repos/spack_repo/builtin/packages/yaml_cpp/package.py index 930240bf931..cd8b3f2de89 100644 --- a/var/spack/repos/spack_repo/builtin/packages/yaml_cpp/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/yaml_cpp/package.py @@ -3,7 +3,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * -from spack.spec import ConflictsInSpecError from ..boost.package import Boost @@ -57,20 +56,12 @@ def flag_handler(self, name, flags): # the user can add arbitrary strings to the flags. Here we can at least # fail early. # We'll include cppflags in case users mistakenly put c++ flags there. - spec = self.spec - if name in ("cxxflags", "cppflags") and spec.satisfies("+tests"): - if "-stdlib=libc++" in flags: - raise ConflictsInSpecError( - spec, - [ - ( - spec, - spec.compiler_flags[name], - spec.variants["tests"], - yaml_cpp_tests_libcxx_error_msg, - ) - ], - ) + if ( + name in ("cxxflags", "cppflags") + and self.spec.satisfies("+tests") + and "-stdlib=libc++" in flags + ): + raise InstallError(yaml_cpp_tests_libcxx_error_msg) return (flags, None, None) def cmake_args(self): From b3772f8bb6388b175f783adff697ea977a3f2683 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Thu, 8 May 2025 19:27:24 +0200 Subject: [PATCH 34/57] builtin: remove unused imports from build_systems (#50385) --- var/spack/repos/spack_repo/builtin/packages/libmng/package.py | 1 - var/spack/repos/spack_repo/builtin/packages/libuv/package.py | 1 - var/spack/repos/spack_repo/builtin/packages/sccache/package.py | 1 - 3 files changed, 3 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/libmng/package.py b/var/spack/repos/spack_repo/builtin/packages/libmng/package.py index ef3242cca9f..0b6c22ea679 100644 --- a/var/spack/repos/spack_repo/builtin/packages/libmng/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/libmng/package.py @@ -2,7 +2,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import spack.build_systems import spack.build_systems.autotools import spack.build_systems.cmake from spack.package import * diff --git a/var/spack/repos/spack_repo/builtin/packages/libuv/package.py b/var/spack/repos/spack_repo/builtin/packages/libuv/package.py index ef0a7c09256..093896d9c95 100644 --- a/var/spack/repos/spack_repo/builtin/packages/libuv/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/libuv/package.py @@ -3,7 +3,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import sys -import spack.build_systems import spack.build_systems.autotools from spack.package import * diff --git a/var/spack/repos/spack_repo/builtin/packages/sccache/package.py b/var/spack/repos/spack_repo/builtin/packages/sccache/package.py index 438040f51bf..6037b26e5bf 100644 --- a/var/spack/repos/spack_repo/builtin/packages/sccache/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/sccache/package.py @@ -5,7 +5,6 @@ import os import re -import spack.build_systems import spack.build_systems.cargo from spack.package import * From 60f2698a4aba0de4fd2ebb2aff5853af8e4d0661 Mon Sep 17 00:00:00 2001 From: Robert Maaskant Date: Thu, 8 May 2025 
20:07:04 +0200 Subject: [PATCH 35/57] trivy: add v0.62.0 and v0.62.1 (#50344) --- var/spack/repos/spack_repo/builtin/packages/trivy/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/trivy/package.py b/var/spack/repos/spack_repo/builtin/packages/trivy/package.py index 2bf541a6095..64cfed2118f 100644 --- a/var/spack/repos/spack_repo/builtin/packages/trivy/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/trivy/package.py @@ -15,9 +15,12 @@ class Trivy(GoPackage): license("Apache-2.0", checked_by="RobertMaaskant") + version("0.62.1", sha256="1b8000f08876dd02203021414581275daa69db00fab731351dbcf2a008ebe82a") + version("0.62.0", sha256="2b0b4df4bbfebde00a14a0616f5013db4cbba0f021a780a7e3b717a2c2978493") version("0.61.1", sha256="f6ad43e008c008d67842c9e2b4af80c2e96854db8009fba48fc37b4f9b15f59b") version("0.61.0", sha256="1e97b1b67a4c3aee9c567534e60355033a58ce43a3705bdf198d7449d53b6979") + depends_on("go@1.24.2:", type="build", when="@0.62:") depends_on("go@1.24:", type="build") build_directory = "cmd/trivy" From 1ba40b99eebe690e30ec20e3baa8c581dd4ee82d Mon Sep 17 00:00:00 2001 From: Robert Maaskant Date: Thu, 8 May 2025 20:07:22 +0200 Subject: [PATCH 36/57] yq: add v4.45.2 (#50345) --- var/spack/repos/spack_repo/builtin/packages/yq/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/yq/package.py b/var/spack/repos/spack_repo/builtin/packages/yq/package.py index af065fbd161..25eeff9ac67 100644 --- a/var/spack/repos/spack_repo/builtin/packages/yq/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/yq/package.py @@ -16,6 +16,7 @@ class Yq(GoPackage): license("MIT", checked_by="teaguesterling") + version("4.45.2", sha256="7ae8f8a4acc78dba5ab3a4bb004d390bbf6fe1cd1fc5746ff7db19f8e627b84f") version("4.45.1", sha256="074a21a002c32a1db3850064ad1fc420083d037951c8102adecfea6c5fd96427") version("4.44.6", sha256="c0acef928168e5fdb26cd7e8320eddde822f30cf1942817f3f6b854dd721653f") version("4.44.5", sha256="1505367f4a6c0c4f3b91c6197ffed4112d29ef97c48d0b5e66530cfa851d3f0e") @@ -26,5 +27,6 @@ class Yq(GoPackage): version("4.35.2", sha256="8b17d710c56f764e9beff06d7a7b1c77d87c4ba4219ce4ce67e7ee29670f4f13") # from go.mod + depends_on("go@1.23:", type="build", when="@4.45.2:") depends_on("go@1.21:", type="build", when="@4.40:") depends_on("go@1.20:", type="build", when="@4.31:") From a82e21e82f0b18a579b343bc34529a72b287001c Mon Sep 17 00:00:00 2001 From: Chris Marsh Date: Thu, 8 May 2025 12:24:13 -0600 Subject: [PATCH 37/57] add 0.61.2 and fix numpy version constraints (#50352) --- .../spack_repo/builtin/packages/py_numba/package.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/py_numba/package.py b/var/spack/repos/spack_repo/builtin/packages/py_numba/package.py index afa9553bdec..a4b92439031 100644 --- a/var/spack/repos/spack_repo/builtin/packages/py_numba/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/py_numba/package.py @@ -16,6 +16,7 @@ class PyNumba(PythonPackage): license("BSD-2-Clause") + version("0.61.2", sha256="8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d") version("0.61.0", sha256="888d2e89b8160899e19591467e8fdd4970e07606e1fbc248f239c89818d5f925") version("0.60.0", sha256="5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16") version("0.59.1", sha256="76f69132b96028d2774ed20415e8c528a34e3299a40581bae178f0994a2f370b") @@ -57,7 +58,13 @@ class 
PyNumba(PythonPackage): depends_on("python@3.6:3.9", when="@0.53", type=("build", "run")) depends_on("python@3.6:3.8", when="@0.52", type=("build", "run")) depends_on("python@3.6:3.8", when="@0.48:0.51", type=("build", "run")) - depends_on("py-numpy@2.0:2.2", when="@0.60:", type=("build", "run")) + + # max_numpy_run_version in setup.py is a non inclusive upper bound + # min_numpy_run_version < min_numpy_build_version and these ranges use + # min_numpy_build_version + depends_on("py-numpy@2.0:2.2", when="@0.61.2", type=("build", "run")) + depends_on("py-numpy@2.0:2.1", when="@0.61.0", type=("build", "run")) + depends_on("py-numpy@2.0", when="@0.60", type=("build", "run")) depends_on("py-numpy@1.22:1.26", when="@0.58.1:0.59", type=("build", "run")) depends_on("py-numpy@1.21:1.25", when="@0.58.0", type=("build", "run")) depends_on("py-numpy@1.21:1.24", when="@0.57", type=("build", "run")) From f45e312f810b9d9a5254b85589ab383ccb0d28b3 Mon Sep 17 00:00:00 2001 From: "Victor A. P. Magri" <50467563+victorapm@users.noreply.github.com> Date: Thu, 8 May 2025 11:40:56 -0700 Subject: [PATCH 38/57] raja: add gpu-profiling variant (#50354) --- .../spack_repo/builtin/packages/raja/package.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/raja/package.py b/var/spack/repos/spack_repo/builtin/packages/raja/package.py index e582e5fb5c3..28b1786a4e1 100644 --- a/var/spack/repos/spack_repo/builtin/packages/raja/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/raja/package.py @@ -195,6 +195,7 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage): ) variant("omptarget", default=False, description="Build OpenMP on target device support") variant("sycl", default=False, description="Build sycl backend") + variant("gpu-profiling", default=False, description="Enable GPU profiling") variant("plugins", default=False, description="Enable runtime plugins") variant("examples", default=True, description="Build examples.") @@ -267,6 +268,10 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage): for sm_ in CudaPackage.cuda_arch_values: depends_on("camp +cuda cuda_arch={0}".format(sm_), when="cuda_arch={0}".format(sm_)) + conflicts("+gpu-profiling", when="~cuda~rocm", msg="GPU profiling requires CUDA or ROCm") + conflicts("+gpu-profiling +cuda", when="@:2022.02.99") + conflicts("+gpu-profiling +rocm", when="@:2022.02.99") + conflicts("+omptarget +rocm") conflicts("+sycl +omptarget") conflicts("+sycl +rocm") @@ -323,11 +328,7 @@ def initconfig_hardware_entries(self): entries.append("#------------------{0}\n".format("-" * 30)) entries.append(cmake_cache_option("ENABLE_OPENMP", spec.satisfies("+openmp"))) - - if spec.satisfies("+cuda"): - entries.append(cmake_cache_option("ENABLE_CUDA", True)) - else: - entries.append(cmake_cache_option("ENABLE_CUDA", False)) + entries.append(cmake_cache_option("ENABLE_CUDA", spec.satisfies("+cuda"))) if spec.satisfies("+rocm"): entries.append(cmake_cache_option("ENABLE_HIP", True)) @@ -376,6 +377,12 @@ def initconfig_package_entries(self): ) entries.append(cmake_cache_option("RAJA_ENABLE_SYCL", spec.satisfies("+sycl"))) + entries.append( + cmake_cache_option("RAJA_ENABLE_NV_TOOLS_EXT", spec.satisfies("+gpu-profiling +cuda")) + ) + entries.append( + cmake_cache_option("RAJA_ENABLE_ROCTX", spec.satisfies("+gpu-profiling +rocm")) + ) if spec.satisfies("+lowopttest"): entries.append(cmake_cache_string("CMAKE_CXX_FLAGS_RELEASE", "-O1")) From 7e3af5d42d551adcf24aeab6deaa9aeabe521c81 Mon Sep 17 
00:00:00 2001 From: ShujieL Date: Thu, 8 May 2025 11:52:57 -0700 Subject: [PATCH 39/57] dd4hep: add v1.32 (#50359) * Update package.py for dd4hep 1.32 * Update package.py to fix the podio-dd4hep version * fix the dd4hep 1.32 hash Co-authored-by: Sakib Rahman --------- Co-authored-by: Sakib Rahman --- var/spack/repos/spack_repo/builtin/packages/dd4hep/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/dd4hep/package.py b/var/spack/repos/spack_repo/builtin/packages/dd4hep/package.py index 51386e589e7..cafc82c85c3 100644 --- a/var/spack/repos/spack_repo/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/dd4hep/package.py @@ -25,6 +25,7 @@ class Dd4hep(CMakePackage): license("LGPL-3.0-or-later") version("master", branch="master") + version("1.32", sha256="8bde4eab9af9841e040447282ea7df3a16e4bcec587c3a1e32f41987da9b1b4d") version("1.31", sha256="9c06a1b4462fc1b51161404889c74b37350162d0b0ac2154db27e3f102670bd1") version("1.30", sha256="02de46151e945eff58cffd84b4b86d35051f4436608199c3efb4d2e1183889fe") version("1.29", sha256="435d25a7ef093d8bf660f288b5a89b98556b4c1c293c55b93bf641fb4cba77e9") From 0f7c1b5e38c64616002a783a15790d7b8267524e Mon Sep 17 00:00:00 2001 From: Robert Maaskant Date: Thu, 8 May 2025 20:57:00 +0200 Subject: [PATCH 40/57] go: add v1.23.9 and v1.24.3 (#50346) --- var/spack/repos/spack_repo/builtin/packages/go/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/go/package.py b/var/spack/repos/spack_repo/builtin/packages/go/package.py index d4cd1e4c99c..fbb0bd427b8 100644 --- a/var/spack/repos/spack_repo/builtin/packages/go/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/go/package.py @@ -39,9 +39,11 @@ class Go(Package): license("BSD-3-Clause") + version("1.24.3", sha256="229c08b600b1446798109fae1f569228102c8473caba8104b6418cb5bc032878") version("1.24.2", sha256="9dc77ffadc16d837a1bf32d99c624cb4df0647cee7b119edd9e7b1bcc05f2e00") version("1.24.1", sha256="8244ebf46c65607db10222b5806aeb31c1fcf8979c1b6b12f60c677e9a3c0656") version("1.24.0", sha256="d14120614acb29d12bcab72bd689f257eb4be9e0b6f88a8fb7e41ac65f8556e5") + version("1.23.9", sha256="08f6419547563ed9e7037d12b9c8909677c72f75f62ef85887ed9dbf49b8d2dd") version("1.23.8", sha256="0ca1f1e37ea255e3ce283af3f4e628502fb444587da987a5bb96d6c6f15930d4") version("1.23.7", sha256="7cfabd46b73eb4c26b19d69515dd043d7183a6559acccd5cfdb25eb6b266a458") version("1.23.6", sha256="039c5b04e65279daceee8a6f71e70bd05cf5b801782b6f77c6e19e2ed0511222") From 1a379215da4fd26f52ead9b5e4be2d5617f14ab5 Mon Sep 17 00:00:00 2001 From: jgraciahlrs Date: Thu, 8 May 2025 21:23:02 +0200 Subject: [PATCH 41/57] Allow usage of config variables and env variables with `include_concrete` (#45871) * Allow usage of spack config vars in concrete env path * Update docs on usage of spack config vars in concrete env path --- lib/spack/docs/environments.rst | 6 ++++-- lib/spack/spack/environment/environment.py | 6 +++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/spack/docs/environments.rst b/lib/spack/docs/environments.rst index 27f9a772e23..d37bf98d7cd 100644 --- a/lib/spack/docs/environments.rst +++ b/lib/spack/docs/environments.rst @@ -539,7 +539,9 @@ from the command line. You can also include an environment directly in the ``spack.yaml`` file. It involves adding the ``include_concrete`` heading in the yaml followed by the -absolute path to the independent environments. 
+absolute path to the independent environments. Note that you may use Spack +config variables such as ``$spack`` or environment variables as long as the +expression expands to an absolute path. .. code-block:: yaml @@ -549,7 +551,7 @@ absolute path to the independent environments. unify: true include_concrete: - /absolute/path/to/environment1 - - /absolute/path/to/environment2 + - $spack/../path/to/environment2 Once the ``spack.yaml`` has been updated you must concretize the environment to diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py index 95476677986..38c54b0c23d 100644 --- a/lib/spack/spack/environment/environment.py +++ b/lib/spack/spack/environment/environment.py @@ -1049,7 +1049,11 @@ def add_view(name, values): def _process_concrete_includes(self): """Extract and load into memory included concrete spec data.""" - self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, []) + _included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, []) + # Expand config and environment variables + self.included_concrete_envs = [ + spack.util.path.canonicalize_path(_env) for _env in _included_concrete_envs + ] if self.included_concrete_envs: if os.path.exists(self.lock_path): From 31c2897fd85d0fbb7ab1490232b4943161259a25 Mon Sep 17 00:00:00 2001 From: Kyle Shores Date: Thu, 8 May 2025 14:41:33 -0500 Subject: [PATCH 42/57] musica: adding a netcdf-fortran dependency (#50252) --- var/spack/repos/spack_repo/builtin/packages/musica/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/musica/package.py b/var/spack/repos/spack_repo/builtin/packages/musica/package.py index 94de144f787..56a3007365f 100644 --- a/var/spack/repos/spack_repo/builtin/packages/musica/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/musica/package.py @@ -40,6 +40,7 @@ class Musica(CMakePackage): depends_on("cxx", type="build") depends_on("fortran", type="build") depends_on("mpi", when="+mpi") + depends_on("netcdf-fortran", when="+tuvx") def cmake_args(self): args = [ From f6da037129bdeeccc0c2230d0e0e739ff7f6a669 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Thu, 8 May 2025 13:54:43 -0600 Subject: [PATCH 43/57] binary_distribution: Handle fetch error during rebuild-index (#50387) Allow rebuild-index to continue if fetching some specs fails for any reason, and issue a warning indicating which manifest is associated with the failed fetch.
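With this change a corrupt or unreadable manifest is skipped with a warning roughly like the following (the path and error are illustrative placeholders; the wording comes from the tty.warn call below) while the remaining manifests are still indexed:

    ==> Warning: Unable to fetch spec for manifest v3/manifests/spec/<name>.spec.manifest.json due to: <error>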
--- lib/spack/spack/binary_distribution.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index a46b2fcb938..90a54f49390 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -717,7 +717,11 @@ def _read_specs_and_push_index( temp_dir: Location to write index.json and hash for pushing """ for file in file_list: - fetched_spec = spack.spec.Spec.from_dict(read_method(file)) + try: + fetched_spec = spack.spec.Spec.from_dict(read_method(file)) + except Exception as e: + tty.warn(f"Unable to fetch spec for manifest {file} due to: {e}") + continue db.add(fetched_spec) db.mark(fetched_spec, "in_buildcache", True) From 03cb30cb961fa2b74bb0b16071cd06d7a2940756 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Thu, 8 May 2025 17:25:35 -0600 Subject: [PATCH 44/57] binary_distribution: Do not look in sub-mirrors when indexing (#50389) When indexing top level specs, e.g., in s3://spack-binaries/develop, do not sync manifests from all the stacks. Instead, add the path to the spec manifests to the url to sync, so that only items in s3://spack-binaries/develop/v3/manifests/spec are copied to the local system. --- lib/spack/spack/binary_distribution.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 90a54f49390..6d9d2a9f83c 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -754,6 +754,7 @@ def file_read_method(manifest_path): cache_entry.destroy() return spec_dict + url_to_list = url_util.join(url, buildcache_relative_specs_url()) sync_command_args = [ "s3", "sync", @@ -761,11 +762,11 @@ def file_read_method(manifest_path): "*", "--include", "*.spec.manifest.json", - url, + url_to_list, tmpspecsdir, ] - tty.debug(f"Using aws s3 sync to download manifests from {url} to {tmpspecsdir}") + tty.debug(f"Using aws s3 sync to download manifests from {url_to_list} to {tmpspecsdir}") try: aws(*sync_command_args, output=os.devnull, error=os.devnull) From f30d8ea2a54a6ac673874643e49c5977666a68c5 Mon Sep 17 00:00:00 2001 From: Sinan Date: Thu, 8 May 2025 16:44:23 -0700 Subject: [PATCH 45/57] package/lemon,qjson,qtkeychain: fix C compiler dependency (#50311) * package/lemon,qjson,qtkeychain: fix C compiler dependency * remove generated --------- Co-authored-by: sbulut --- var/spack/repos/spack_repo/builtin/packages/lemon/package.py | 3 ++- var/spack/repos/spack_repo/builtin/packages/qjson/package.py | 3 ++- .../repos/spack_repo/builtin/packages/qtkeychain/package.py | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/lemon/package.py b/var/spack/repos/spack_repo/builtin/packages/lemon/package.py index 2dd5c3f87d9..1b442a7d972 100644 --- a/var/spack/repos/spack_repo/builtin/packages/lemon/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/lemon/package.py @@ -22,7 +22,8 @@ class Lemon(CMakePackage): # soplex not mentioned in docs but shown in cmakecache # variant("soplex", default=False, description="Enable SOPLEX solver backend") #TODO - depends_on("cxx", type="build") # generated + depends_on("c", type="build") + depends_on("cxx", type="build") depends_on("glpk", when="+glpk") depends_on("cplex", when="+ilog") diff --git a/var/spack/repos/spack_repo/builtin/packages/qjson/package.py b/var/spack/repos/spack_repo/builtin/packages/qjson/package.py
index 3a7c09b8a2b..23f45a0dadc 100644 --- a/var/spack/repos/spack_repo/builtin/packages/qjson/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/qjson/package.py @@ -16,7 +16,8 @@ class Qjson(CMakePackage): version("0.9.0", sha256="e812617477f3c2bb990561767a4cd8b1d3803a52018d4878da302529552610d4") - depends_on("cxx", type="build") # generated + depends_on("c", type="build") + depends_on("cxx", type="build") depends_on("qt") diff --git a/var/spack/repos/spack_repo/builtin/packages/qtkeychain/package.py b/var/spack/repos/spack_repo/builtin/packages/qtkeychain/package.py index e1228f36b4b..5c7e9ee6eff 100644 --- a/var/spack/repos/spack_repo/builtin/packages/qtkeychain/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/qtkeychain/package.py @@ -15,7 +15,8 @@ class Qtkeychain(CMakePackage): version("0.9.1", sha256="9c2762d9d0759a65cdb80106d547db83c6e9fdea66f1973c6e9014f867c6f28e") - depends_on("cxx", type="build") # generated + depends_on("c", type="build") + depends_on("cxx", type="build") depends_on("qt+dbus") depends_on("libsecret") From c21dc1a27a1c8e3375beea044c04afadeaf76bf0 Mon Sep 17 00:00:00 2001 From: Chris Green Date: Fri, 9 May 2025 00:23:45 -0500 Subject: [PATCH 46/57] jsonnet: Support CMake builds with external `nlohmann-json` (#49284) * jsonnet: Support CMake builds with external `nlohmann-json` * New version 0.21.0 --- .../builtin/packages/jsonnet/package.py | 52 ++++++++++++++++--- 1 file changed, 44 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/jsonnet/package.py b/var/spack/repos/spack_repo/builtin/packages/jsonnet/package.py index f59c61bc837..de7562fa4ab 100644 --- a/var/spack/repos/spack_repo/builtin/packages/jsonnet/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/jsonnet/package.py @@ -3,41 +3,77 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack.build_systems.python import PythonPipBuilder +import spack.build_systems.python +from spack.build_systems import cmake, makefile from spack.package import * -class Jsonnet(MakefilePackage): +class Jsonnet(MakefilePackage, CMakePackage): """A data templating language for app and tool developers based on JSON""" homepage = "https://jsonnet.org/" git = "https://github.com/google/jsonnet.git" url = "https://github.com/google/jsonnet/archive/refs/tags/v0.18.0.tar.gz" - maintainers("jcpunk") + maintainers("greenc-FNAL", "gartung", "jcpunk", "marcmengel", "marcpaterno") license("Apache-2.0") version("master", branch="master") + version("0.21.0", sha256="a12ebca72e43e7061ffe4ef910e572b95edd7778a543d6bf85f6355bd290300e") + version("0.20.0", sha256="77bd269073807731f6b11ff8d7c03e9065aafb8e4d038935deb388325e52511b") + version("0.19.1", sha256="f5a20f2dc98fdebd5d42a45365f52fa59a7e6b174e43970fea4f9718a914e887") version("0.18.0", sha256="85c240c4740f0c788c4d49f9c9c0942f5a2d1c2ae58b2c71068107bc80a3ced4") version("0.17.0", sha256="076b52edf888c01097010ad4299e3b2e7a72b60a41abbc65af364af1ed3c8dbe") + variant("python", default=False, description="Provide Python bindings for jsonnet") + + build_system("makefile", conditional("cmake", when="@0.21.0:"), default="makefile") + conflicts("%gcc@:5.4.99", when="@0.18.0:") - depends_on("c", type="build") # generated - depends_on("cxx", type="build") # generated + depends_on("c", type="build") + depends_on("cxx", type="build") + + with when("build_system=cmake"): + depends_on("nlohmann-json@3.6.1:") - variant("python", default=False, description="Provide Python bindings for jsonnet") extends("python", when="+python") 
depends_on("py-setuptools", type=("build",), when="+python") depends_on("py-pip", type=("build",), when="+python") depends_on("py-wheel", type=("build",), when="+python") + +class MakefileBuilder(makefile.MakefileBuilder): + @property def install_targets(self): return ["PREFIX={0}".format(self.prefix), "install"] @run_after("install") def python_install(self): - if "+python" in self.spec: - pip(*PythonPipBuilder.std_args(self), f"--prefix={self.prefix}", ".") + if self.pkg.spec.satisfies("+python"): + pip( + *spack.build_systems.python.PythonPipBuilder.std_args(self.pkg), + f"--prefix={self.pkg.prefix}", + ".", + ) + + +class CMakeBuilder(cmake.CMakeBuilder): + + def cmake_args(self): + return [ + self.define("USE_SYSTEM_JSON", True), + self.define("BUILD_SHARED_BINARIES", True), + self.define("BUILD_TESTS", self.pkg.run_tests), + ] + + @run_after("install") + def python_install(self): + if self.pkg.spec.satisfies("+python"): + pip( + *spack.build_systems.python.PythonPipBuilder.std_args(self.pkg), + f"--prefix={self.pkg.prefix}", + ".", + ) From d7e740defa100d138d9ca71f64af3e3bdc4786ed Mon Sep 17 00:00:00 2001 From: Jonas Eschle Date: Fri, 9 May 2025 07:29:02 +0200 Subject: [PATCH 47/57] py-hepstats: new package (#43697) * enh: add py-hepstats package * fix: version * fix: update pypi version * fix: update hash * fix: use github package * fix: allow download from pypi * chore: remove unused Bazel, cleanup imports * enh: add 0.9.2 version * fix: update dependencies for version 0.9.0 and adjust build system * chore: move to new builtin directory --------- Co-authored-by: jonas-eschle --- .../builtin/packages/py_hepstats/package.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 var/spack/repos/spack_repo/builtin/packages/py_hepstats/package.py diff --git a/var/spack/repos/spack_repo/builtin/packages/py_hepstats/package.py b/var/spack/repos/spack_repo/builtin/packages/py_hepstats/package.py new file mode 100644 index 00000000000..6bbe00381c2 --- /dev/null +++ b/var/spack/repos/spack_repo/builtin/packages/py_hepstats/package.py @@ -0,0 +1,43 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyHepstats(PythonPackage): + """hepstats is a library for statistical inference aiming + to cover the needs in High Energy Physics. + It is part of the Scikit-HEP project. 
+ """ + + homepage = "https://github.com/scikit-hep/hepstats" + pypi = "hepstats/hepstats-0.8.1.tar.gz" + maintainers("jonas-eschle") + + license("BSD-3-Clause", checked_by="jonas-eschle") + + tags = ["likelihood", "statistics", "inference", "fitting", "hep"] + + version("0.9.2", sha256="cf929871d45e338492eef585faaaa23eff93b200b4787d6b6181dc81f2607be7") + version("0.8.1", sha256="ebb890496d7aebbf1d717de15d073be31d6775065308a4e0f263ed4051992b3f") + + depends_on("python@3.9:", type=("build", "run"), when="@0.8:") + # Build system changed from setuptools to hatch with v0.9.0 + depends_on("py-setuptools@42:", type="build", when="@:0.8.1") + depends_on("py-setuptools-scm@3.4:+toml", type="build", when="@:0.8.1") + depends_on("py-hatchling", type="build", when="@0.9.0:") + depends_on("py-hatch-vcs", type="build", when="@0.9.0:") + + variant("zfit", default=False, description="Allows to use improved tools from zfit.") + + with default_args(type=("build", "run")): + depends_on("py-pandas") + depends_on("py-numpy") + depends_on("py-asdf") + depends_on("py-scipy") + depends_on("py-tqdm") + depends_on("py-uhi") + + with when("+zfit"): + depends_on("py-zfit@0.20:", when="@0.8:") From 18ea8f813e9b0bb5874a7e2761bea26f04293da5 Mon Sep 17 00:00:00 2001 From: Dave Keeshan <96727608+davekeeshan@users.noreply.github.com> Date: Fri, 9 May 2025 06:34:23 +0100 Subject: [PATCH 48/57] yosys: add v0.53 (#50372) --- var/spack/repos/spack_repo/builtin/packages/yosys/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/spack_repo/builtin/packages/yosys/package.py b/var/spack/repos/spack_repo/builtin/packages/yosys/package.py index a17b8e59c01..fc8df8ee26f 100644 --- a/var/spack/repos/spack_repo/builtin/packages/yosys/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/yosys/package.py @@ -28,6 +28,7 @@ class Yosys(MakefilePackage): version("master", branch="master") + version("0.53", commit="53c22ab7c0ced80861c7536c5dae682c30fb5834", submodules=True) version("0.52", commit="fee39a3284c90249e1d9684cf6944ffbbcbb8f90", submodules=True) version("0.51", commit="c4b5190229616f7ebf8197f43990b4429de3e420", submodules=True) version("0.50", commit="b5170e1394f602c607e75bdbb1a2b637118f2086", submodules=True) From f039b2209382910629d76a80f29285adb173cec1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=90=B4=E5=9D=8E?= Date: Fri, 9 May 2025 13:34:41 +0800 Subject: [PATCH 49/57] Update package.py (#50378) --- .../builtin/packages/apache_tvm/package.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/spack_repo/builtin/packages/apache_tvm/package.py b/var/spack/repos/spack_repo/builtin/packages/apache_tvm/package.py index f162e268836..e3b64731dd8 100644 --- a/var/spack/repos/spack_repo/builtin/packages/apache_tvm/package.py +++ b/var/spack/repos/spack_repo/builtin/packages/apache_tvm/package.py @@ -13,13 +13,15 @@ class ApacheTvm(CMakePackage, CudaPackage): hardware backend.""" homepage = "https://tvm.apache.org/" - url = "https://dlcdn.apache.org/tvm/tvm-v0.16.0/apache-tvm-src-v0.16.0.tar.gz" license("Apache-2.0", checked_by="alex391") + url = "https://github.com/apache/tvm/releases/download/v0.19.0/apache-tvm-src-v0.19.0.tar.gz" + version("0.19.0", sha256="13fd707eae37b9b2b77bccd39668764f61ae6824d50cd1ab8164df1c75565be1") version( "0.16.0", sha256="55e2629c39248ef3b1ee280e34a960182bd17bea7ae0d0fa132bbdaaf5aba1ac", + url="https://dlcdn.apache.org/tvm/tvm-v0.16.0/apache-tvm-src-v0.16.0.tar.gz", deprecated=True, ) @@ -28,10 +30,16 @@ class 
From 18ea8f813e9b0bb5874a7e2761bea26f04293da5 Mon Sep 17 00:00:00 2001
From: Dave Keeshan <96727608+davekeeshan@users.noreply.github.com>
Date: Fri, 9 May 2025 06:34:23 +0100
Subject: [PATCH 48/57] yosys: add v0.53 (#50372)

---
 var/spack/repos/spack_repo/builtin/packages/yosys/package.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/var/spack/repos/spack_repo/builtin/packages/yosys/package.py b/var/spack/repos/spack_repo/builtin/packages/yosys/package.py
index a17b8e59c01..fc8df8ee26f 100644
--- a/var/spack/repos/spack_repo/builtin/packages/yosys/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/yosys/package.py
@@ -28,6 +28,7 @@ class Yosys(MakefilePackage):
 
     version("master", branch="master")
 
+    version("0.53", commit="53c22ab7c0ced80861c7536c5dae682c30fb5834", submodules=True)
     version("0.52", commit="fee39a3284c90249e1d9684cf6944ffbbcbb8f90", submodules=True)
    version("0.51", commit="c4b5190229616f7ebf8197f43990b4429de3e420", submodules=True)
     version("0.50", commit="b5170e1394f602c607e75bdbb1a2b637118f2086", submodules=True)

From f039b2209382910629d76a80f29285adb173cec1 Mon Sep 17 00:00:00 2001
From: 吴坎
Date: Fri, 9 May 2025 13:34:41 +0800
Subject: [PATCH 49/57] Update package.py (#50378)

---
 .../builtin/packages/apache_tvm/package.py    | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/var/spack/repos/spack_repo/builtin/packages/apache_tvm/package.py b/var/spack/repos/spack_repo/builtin/packages/apache_tvm/package.py
index f162e268836..e3b64731dd8 100644
--- a/var/spack/repos/spack_repo/builtin/packages/apache_tvm/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/apache_tvm/package.py
@@ -13,13 +13,15 @@ class ApacheTvm(CMakePackage, CudaPackage):
     hardware backend."""
 
     homepage = "https://tvm.apache.org/"
-    url = "https://dlcdn.apache.org/tvm/tvm-v0.16.0/apache-tvm-src-v0.16.0.tar.gz"
 
     license("Apache-2.0", checked_by="alex391")
 
+    url = "https://github.com/apache/tvm/releases/download/v0.19.0/apache-tvm-src-v0.19.0.tar.gz"
+    version("0.19.0", sha256="13fd707eae37b9b2b77bccd39668764f61ae6824d50cd1ab8164df1c75565be1")
     version(
         "0.16.0",
         sha256="55e2629c39248ef3b1ee280e34a960182bd17bea7ae0d0fa132bbdaaf5aba1ac",
+        url="https://dlcdn.apache.org/tvm/tvm-v0.16.0/apache-tvm-src-v0.16.0.tar.gz",
         deprecated=True,
     )
 
@@ -28,10 +30,16 @@ class ApacheTvm(CMakePackage, CudaPackage):
     depends_on("c", type="build")
     depends_on("cxx", type="build")
     depends_on("cmake@3.18:", type="build")
-    depends_on("python@3.7:3.8", type=("build", "run"))
+
+    depends_on("python@3.7:", type=("build", "run"))
+    conflicts("^python@3.9.0:", when="@:0.16")
+
     depends_on("zlib-api", type=("link", "run"))
     depends_on("ncurses", type=("link", "run"))
-    depends_on("llvm@4:18.1.8", type="build", when="+llvm")
+
+    depends_on("llvm@4:", type="build", when="+llvm")
+    conflicts("^llvm@19.0.0:", when="@:0.16+llvm")
+
    depends_on("cuda@8:", when="+cuda")
 
     def cmake_args(self):

From a505fb1f373e47415a18fff3615693fd1c7522de Mon Sep 17 00:00:00 2001
From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com>
Date: Thu, 8 May 2025 22:45:49 -0700
Subject: [PATCH 50/57] unit tests: switch TestSpecList to use mock packages (#50353)

---
 lib/spack/spack/test/spec_list.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/lib/spack/spack/test/spec_list.py b/lib/spack/spack/test/spec_list.py
index fdafdbf05d7..e8068643a6c 100644
--- a/lib/spack/spack/test/spec_list.py
+++ b/lib/spack/spack/test/spec_list.py
@@ -55,6 +55,7 @@ def parser_and_speclist():
     return parser, result
 
 
+@pytest.mark.usefixtures("mock_packages")
 class TestSpecList:
     @pytest.mark.regression("28749")
     @pytest.mark.parametrize(
@@ -83,8 +84,8 @@ class TestSpecList:
             ),
             # A constraint affects both the root and a dependency
             (
-                [{"matrix": [["gromacs"], ["%gcc"], ["+plumed ^plumed%gcc"]]}],
-                ["gromacs+plumed%gcc ^plumed%gcc"],
+                [{"matrix": [["version-test-root"], ["%gcc"], ["^version-test-pkg%gcc"]]}],
+                ["version-test-root%gcc ^version-test-pkg%gcc"],
             ),
         ],
     )
@@ -158,7 +159,7 @@ def test_spec_list_recursion_specs_as_constraints(self):
         assert result.specs == DEFAULT_SPECS
 
     @pytest.mark.regression("16841")
-    def test_spec_list_matrix_exclude(self, mock_packages):
+    def test_spec_list_matrix_exclude(self):
         parser = SpecListParser()
         result = parser.parse_user_specs(
             name="specs",
@@ -171,7 +172,7 @@ def test_spec_list_matrix_exclude(self):
         )
         assert len(result.specs) == 1
 
-    def test_spec_list_exclude_with_abstract_hashes(self, mock_packages, install_mockery):
+    def test_spec_list_exclude_with_abstract_hashes(self, install_mockery):
         # Put mpich in the database so it can be referred to by hash.
         mpich_1 = spack.concretize.concretize_one("mpich+debug")
         mpich_2 = spack.concretize.concretize_one("mpich~debug")
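The class-level `usefixtures` marker in the patch above is what lets the individual test methods drop `mock_packages` from their signatures. In miniature, with the fixture body and tests as illustrative stand-ins rather than Spack's real fixture:

# Hedged sketch of the class-level fixture pattern applied in the patch.
import pytest


@pytest.fixture
def mock_packages():
    yield  # Spack's real fixture swaps in a small mock package repository


@pytest.mark.usefixtures("mock_packages")
class TestExample:
    def test_one(self):
        pass  # mock_packages is active here without being a parameter

    def test_two(self, tmp_path):
        pass  # per-test fixtures can still be requested alongside it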
From ce700d69d7980dbfccde8d62b6e9f014ac9f7d55 Mon Sep 17 00:00:00 2001
From: "Marc T. Henry de Frahan"
Date: Fri, 9 May 2025 00:10:53 -0600
Subject: [PATCH 51/57] Add amr-wind versions (#50373)

---
 .../repos/spack_repo/builtin/packages/amr_wind/package.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/var/spack/repos/spack_repo/builtin/packages/amr_wind/package.py b/var/spack/repos/spack_repo/builtin/packages/amr_wind/package.py
index 4d21bc545fc..9431ca17b86 100644
--- a/var/spack/repos/spack_repo/builtin/packages/amr_wind/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/amr_wind/package.py
@@ -20,6 +20,12 @@ class AmrWind(CMakePackage, CudaPackage, ROCmPackage):
     license("BSD-3-Clause")
 
     version("main", branch="main", submodules=True)
+    version(
+        "3.4.2", tag="v3.4.2", commit="ed475a0533dfacf1fdff0b707518ccf99040d9f9", submodules=True
+    )
+    version(
+        "3.4.1", tag="v3.4.1", commit="effe63ca9061e6d2bd5c5e84b690d29c0869f029", submodules=True
+    )
     version(
         "3.4.0", tag="v3.4.0", commit="38d1b9fd0b70aab4a01fd507f039750c2508bd1c", submodules=True
     )
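Each amr-wind release above records both a tag and the commit it must resolve to: the commit stays authoritative even if the tag is later moved, and `submodules=True` fetches the submodules that generated tarballs would omit. Schematically, with a fabricated repository, tag, and commit:

# Hedged sketch of git-based version pinning with tag + commit.
class Demo(CMakePackage):
    git = "https://example.org/demo.git"  # hypothetical repository

    version("main", branch="main", submodules=True)
    version(
        "1.2.0", tag="v1.2.0", commit="0123456789abcdef0123456789abcdef01234567", submodules=True
    )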
From 89a79d3df0781e5074a346bac6004ca099b736cf Mon Sep 17 00:00:00 2001
From: Patrick Diehl
Date: Fri, 9 May 2025 01:28:30 -0600
Subject: [PATCH 52/57] hpx: add fetching APEX and specify develop (#50289)

* Add fetching APEX and specify develop
* Using the spack package
* [@spackbot] updating style on behalf of diehlpk
* Add restrictions for 1.5

---
 .../builtin/packages/hpx/package.py           | 20 +++++--------------
 1 file changed, 5 insertions(+), 15 deletions(-)

diff --git a/var/spack/repos/spack_repo/builtin/packages/hpx/package.py b/var/spack/repos/spack_repo/builtin/packages/hpx/package.py
index 9bc4b27b14f..7fa83be0381 100644
--- a/var/spack/repos/spack_repo/builtin/packages/hpx/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/hpx/package.py
@@ -66,7 +66,7 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage):
         values=lambda x: isinstance(x, str) and (x.isdigit() or x == "auto"),
     )
 
-    instrumentation_values = ("apex", "google_perftools", "papi", "valgrind", "thread_debug")
+    instrumentation_values = ("google_perftools", "papi", "valgrind", "thread_debug")
     variant(
         "instrumentation",
         values=any_combination_of(*instrumentation_values),
@@ -93,10 +93,11 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage):
     variant("examples", default=False, description="Build examples")
     variant("async_mpi", default=False, description="Enable MPI Futures.")
     variant("async_cuda", default=False, description="Enable CUDA Futures.")
+    variant("apex", default=False, description="Enable APEX support")
 
     # Build dependencies
     depends_on("cxx", type="build")
-
+    depends_on("apex", when="+apex")
     depends_on("python", type=("build", "test", "run"))
     depends_on("git", type="build")
     depends_on("cmake", type="build")
@@ -122,7 +123,6 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage):
 
     depends_on("cuda", when="+async_cuda")
 
-    depends_on("otf2", when="instrumentation=apex")
     depends_on("gperftools", when="instrumentation=google_perftools")
     depends_on("papi", when="instrumentation=papi")
     depends_on("valgrind", when="instrumentation=valgrind")
@@ -155,6 +155,7 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage):
 
     # Restrictions for 1.5.x
     conflicts("cxxstd=11", when="@1.5:")
+    depends_on("apex@2.3:", when="@1.5")
 
     # Restrictions for 1.2.X
     with when("@:1.2.1"):
@@ -211,8 +212,6 @@ class Hpx(CMakePackage, CudaPackage, ROCmPackage):
     conflicts("~generic_coroutines", when="target=aarch64:", msg=_msg_generic_coroutines_target)
     conflicts("~generic_coroutines", when="target=arm:", msg=_msg_generic_coroutines_target)
 
-    # Patches APEX
-    patch("git_external.patch", when="@1.3.0 instrumentation=apex")
     patch("mimalloc_no_version_requirement.patch", when="@:1.8.0 malloc=mimalloc")
 
     def url_for_version(self, version):
@@ -242,6 +241,7 @@ def cmake_args(self):
             self.define_from_variant("HPX_WITH_EXAMPLES", "examples"),
             self.define_from_variant("HPX_WITH_ASYNC_MPI", "async_mpi"),
             self.define_from_variant("HPX_WITH_ASYNC_CUDA", "async_cuda"),
+            self.define_from_variant("HPX_WITH_APEX", "apex"),
             self.define("HPX_WITH_TESTS", self.run_tests),
             self.define("HPX_WITH_NETWORKING", "networking=none" not in spec),
             self.define("HPX_WITH_PARCELPORT_TCP", spec.satisfies("networking=tcp")),
@@ -278,14 +278,4 @@ def cmake_args(self):
             self.define("HPX_WITH_LOGGING", True),
         ]
 
-        if spec.satisfies("instrumentation=apex"):
-            args += [
-                self.define("APEX_WITH_OTF2", True),
-                self.define("OTF2_ROOT", spec["otf2"].prefix),
-            ]
-
-        # it seems like there was a bug in the default version of APEX in 1.5.x
-        if spec.satisfies("@1.5"):
-            args += [self.define("HPX_WITH_APEX_TAG", "v2.3.0")]
-
         return args
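With APEX support reduced to a boolean `apex` variant, the hand-written CMake wiring above collapses into a single `define_from_variant` call. Its effect, shown on a hypothetical package (the CMake option name is illustrative, not HPX's):

# Hedged sketch of define_from_variant: a boolean variant maps straight
# to a -DNAME:BOOL=ON/OFF CMake flag, replacing manual flag plumbing.
class Demo(CMakePackage):
    variant("apex", default=False, description="Enable APEX support")
    depends_on("apex", when="+apex")

    def cmake_args(self):
        # Yields -DDEMO_WITH_APEX:BOOL=ON for +apex, OFF for ~apex.
        return [self.define_from_variant("DEMO_WITH_APEX", "apex")]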
From 1a26ec7b8bec655320e59c939b4440098674ead6 Mon Sep 17 00:00:00 2001
From: G-Ragghianti <33492707+G-Ragghianti@users.noreply.github.com>
Date: Fri, 9 May 2025 03:29:27 -0400
Subject: [PATCH 53/57] parsec: new version and compiler dependency (#50292)

---
 var/spack/repos/spack_repo/builtin/packages/parsec/package.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/var/spack/repos/spack_repo/builtin/packages/parsec/package.py b/var/spack/repos/spack_repo/builtin/packages/parsec/package.py
index 522297a4ee3..aa76f04af4d 100644
--- a/var/spack/repos/spack_repo/builtin/packages/parsec/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/parsec/package.py
@@ -26,6 +26,7 @@ class Parsec(CMakePackage, CudaPackage):
     license("BSD-3-Clause-Open-MPI")
 
     version("master", branch="master")
+    version("4.0.2411", sha256="3f5750565b9f673626284dd0ba835dadea3633577fee50ac217baf43a335f2ef")
     version("3.0.2209", sha256="67d383d076991484cb2a265f56420abdea7cc1f329c63ac65a3e96fbfb6cc295")
     version("3.0.2012", sha256="7a8403ca67305738f3974cbc7a51b64c4ec353ae9170f2468262a9a52035eff6")
     version(
@@ -59,6 +60,8 @@ class Parsec(CMakePackage, CudaPackage):
     # TODO: Spack does not handle cross-compilation atm
     # variant('xcompile', default=False, description='Cross compile')
 
+    depends_on("c", type="build")
+
     depends_on("cmake@3.18:", type="build")
     depends_on("python", type="build")
     depends_on("flex", type="build")

From 2491a9abff2e4d5a098adfe91a5958e47adea8f2 Mon Sep 17 00:00:00 2001
From: Rémi Lacroix
Date: Fri, 9 May 2025 09:58:45 +0200
Subject: [PATCH 54/57] conquest: explicitly configure the MPI compilers. (#50287)

---
 var/spack/repos/spack_repo/builtin/packages/conquest/package.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/var/spack/repos/spack_repo/builtin/packages/conquest/package.py b/var/spack/repos/spack_repo/builtin/packages/conquest/package.py
index 5a1208acc8f..ae1e5aa2711 100644
--- a/var/spack/repos/spack_repo/builtin/packages/conquest/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/conquest/package.py
@@ -87,6 +87,8 @@ def edit(self, spec, prefix):
         else:
             defs_file = FileFilter("./src/system.make")
 
+        defs_file.filter(".*FC=.*", f"FC={spec['mpi'].mpifc}")
+        defs_file.filter(".*F77=.*", f"F77={spec['mpi'].mpif77}")
         defs_file.filter(".*COMPFLAGS=.*", f"COMPFLAGS= {fflags}")
         defs_file.filter(".*LINKFLAGS=.*", f"LINKFLAGS= {ldflags}")
         defs_file.filter(".*BLAS=.*", f"BLAS= {lapack_ld} {blas_ld}")
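FileFilter, used in the conquest hunk above, is Spack's small regex search-and-replace helper for editing build files in place; here it pins the MPI compiler wrappers in a hand-maintained makefile rather than trusting its defaults. Reduced to the essentials (the file name and variables below are placeholders, not from conquest):

# Hedged sketch of FileFilter-based makefile editing inside an edit() phase.
def edit(self, spec, prefix):
    defs_file = FileFilter("./make.inc")  # hypothetical makefile fragment
    # filter() rewrites every line matching the regex with the replacement.
    defs_file.filter(".*FC=.*", f"FC={spec['mpi'].mpifc}")
    defs_file.filter(".*CC=.*", f"CC={spec['mpi'].mpicc}")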
From 33c8f518ae087b2abe7b5c781eb6a0ae26a1c264 Mon Sep 17 00:00:00 2001
From: Thomas-Ulrich
Date: Fri, 9 May 2025 10:01:00 +0200
Subject: [PATCH 55/57] seissol: fix build by adding language dependance (#50302)

---
 .../spack_repo/builtin/packages/seissol/package.py | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/var/spack/repos/spack_repo/builtin/packages/seissol/package.py b/var/spack/repos/spack_repo/builtin/packages/seissol/package.py
index 700cca006e0..3927d7e14c1 100644
--- a/var/spack/repos/spack_repo/builtin/packages/seissol/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/seissol/package.py
@@ -41,6 +41,10 @@ class Seissol(CMakePackage, CudaPackage, ROCmPackage):
 
     maintainers("Thomas-Ulrich", "davschneller", "vikaskurapati")
 
+    depends_on("cxx", type="build")
+    depends_on("c", type="build")
+    depends_on("fortran", type="build", when="equations=poroelastic")
+
     variant("asagi", default=True, description="Use ASAGI for material input")
     variant(
         "convergence_order",
@@ -206,8 +210,10 @@ class Seissol(CMakePackage, CudaPackage, ROCmPackage):
 
     depends_on("asagi +mpi +mpi3", when="+asagi")
 
-    depends_on("easi ~asagi jit=impalajit,lua", when="~asagi")
-    depends_on("easi +asagi jit=impalajit,lua", when="+asagi")
+    depends_on("asagi@:1.0.1", when="@:1.3.1 +asagi")
+
+    depends_on("easi ~asagi jit=lua", when="~asagi")
+    depends_on("easi +asagi jit=lua", when="+asagi")
 
     depends_on("intel-oneapi-mkl threads=none", when="gemm_tools_list=MKL")
     depends_on("blis threads=none", when="gemm_tools_list=BLIS")
@@ -250,7 +256,6 @@ def cmake_args(self):
             self.define_from_variant("EQUATIONS", "equations"),
             self.define_from_variant("NETCDF", "netcdf"),
         ]
-
         gemm_tools_list = ",".join(self.spec.variants["gemm_tools_list"].value)
         args.append(f"-DGEMM_TOOLS_LIST={gemm_tools_list}")
 
@@ -361,7 +366,7 @@ def cmake_args(self):
 
         args.append(f"-DHOST_ARCH={hostarch}")
 
-        args.append(self.define("PYTHON_EXECUTABLE", self.spec["python"].command.path))
+        args.append(self.define("Python3_EXECUTABLE", self.spec["python"].command.path))
 
         return args

From c45e02d58f001be94e7b055042d28f86c4356ef3 Mon Sep 17 00:00:00 2001
From: "John W. Parent" <45471568+johnwparent@users.noreply.github.com>
Date: Fri, 9 May 2025 03:04:46 -0500
Subject: [PATCH 56/57] mpilander: conflict with Windows (#49733)

---
 .../repos/spack_repo/builtin/packages/mpilander/package.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/var/spack/repos/spack_repo/builtin/packages/mpilander/package.py b/var/spack/repos/spack_repo/builtin/packages/mpilander/package.py
index 24ccfbc72bb..5c523c745d5 100644
--- a/var/spack/repos/spack_repo/builtin/packages/mpilander/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/mpilander/package.py
@@ -38,6 +38,8 @@ class Mpilander(CMakePackage):
     conflicts("%apple-clang@:7.4")
     conflicts("%intel@:16")
 
+    conflicts("platform=windows")
+
     def cmake_args(self):
         args = [
             # tests and examples

From c9ab0d8fcbf8c37360e53b6219f96e56b0824f58 Mon Sep 17 00:00:00 2001
From: Sergey Kosukhin
Date: Fri, 9 May 2025 10:22:08 +0200
Subject: [PATCH 57/57] icon: add version 2025.04 (#50245)

---
 .../spack_repo/builtin/packages/icon/package.py | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/var/spack/repos/spack_repo/builtin/packages/icon/package.py b/var/spack/repos/spack_repo/builtin/packages/icon/package.py
index 7714cbd2a3c..e7b1c48f4cb 100644
--- a/var/spack/repos/spack_repo/builtin/packages/icon/package.py
+++ b/var/spack/repos/spack_repo/builtin/packages/icon/package.py
@@ -14,11 +14,16 @@ class Icon(AutotoolsPackage):
 
     homepage = "https://www.icon-model.org"
     url = "https://gitlab.dkrz.de/icon/icon-model/-/archive/icon-2024.01-public/icon-model-icon-2024.01-public.tar.gz"
+    git = "https://gitlab.dkrz.de/icon/icon-model.git"
+    submodules = True
 
     maintainers("skosukhin", "Try2Code")
 
     license("BSD-3-Clause", checked_by="skosukhin")
 
+    version(
+        "2025.04", tag="icon-2025.04-public", commit="1be2ca66ea0de149971d2e77e88a9f11c764bd22"
+    )
     version("2024.10", sha256="5c461c783eb577c97accd632b18140c3da91c1853d836ca2385f376532e9bad1")
     version("2024.07", sha256="f53043ba1b36b8c19d0d2617ab601c3b9138b90f8ff8ca6db0fd079665eb5efa")
     version("2024.01-1", sha256="3e57608b7e1e3cf2f4cb318cfe2fdb39678bd53ca093955d99570bd6d7544184")
@@ -95,10 +100,9 @@ class Icon(AutotoolsPackage):
     # Optimization Features:
     variant("mixed-precision", default=False, description="Enable mixed-precision dynamical core")
 
-    depends_on("c", type="build")  # generated
-    depends_on("cxx", type="build")  # generated
-    depends_on("fortran", type="build")  # generated
-
+    depends_on("c", type="build")
+    depends_on("cxx", type="build")
+    depends_on("fortran", type="build")
     depends_on("python", type="build")
     depends_on("perl", type="build")
     depends_on("cmake@3.18:", type="build")
@@ -205,11 +209,13 @@ def configure_args(self):
                     "-arch=sm_{0}".format(self.nvidia_targets[gpu]),
                     "-ccbin={0}".format(spack_cxx),
                 ]
-                flags["ICON_LDFLAGS"].extend(self.compiler.stdcxx_libs)
                 libs += self.spec["cuda"].libs
             else:
                 args.append("--disable-gpu")
 
+        if gpu in self.nvidia_targets or "+comin" in self.spec:
+            flags["ICON_LDFLAGS"].extend(self.compiler.stdcxx_libs)
+
         if self.compiler.name == "gcc":
             flags["CFLAGS"].append("-g")
             flags["ICON_CFLAGS"].append("-O3")
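A thread running through patches 53, 55, and 57 is the explicit declaration of toolchain languages as build dependencies, with patch 56 adding the companion idiom of ruling out unsupported platforms at concretization time rather than mid-build. Both combined in one hedged, hypothetical recipe:

# Illustrative only: language build dependencies plus an early platform
# conflict, so unsupported configurations fail before any compile starts.
class Demo(CMakePackage):
    variant("fortran", default=False, description="Build Fortran bindings")

    depends_on("c", type="build")
    depends_on("cxx", type="build")
    depends_on("fortran", type="build", when="+fortran")

    conflicts("platform=windows")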