From 2c05ce3607bb346897c24a7d0486c08de1f7fea9 Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Tue, 6 May 2025 12:32:15 -0600 Subject: [PATCH] binary_distribution: content addressable tarballs (#48713) binary_distribution: content addressable url buildcache Change how binary mirrors are laid out, adopting content addressing for every piece of data spack stores in a binary mirror. Items (e.g. tarballs, specfiles, public keys, indices, etc) are now discoverable via manifest files which give the size, checksum, compression type, etc of the stored item. The information in the manifest, in turn, is used to find the actual data, which is stored by its content address in the blobs directory. Additionally, signing is now applied to the manifest files, rather than to the spec files themselves. --- lib/spack/docs/binary_caches.rst | 205 ++- lib/spack/docs/signing.rst | 140 +- lib/spack/spack/binary_distribution.py | 1549 ++++++++--------- lib/spack/spack/buildcache_migrate.py | 351 ++++ lib/spack/spack/ci/__init__.py | 51 +- lib/spack/spack/ci/common.py | 36 +- lib/spack/spack/ci/gitlab.py | 22 +- lib/spack/spack/cmd/bootstrap.py | 9 +- lib/spack/spack/cmd/buildcache.py | 249 ++- lib/spack/spack/cmd/ci.py | 2 +- lib/spack/spack/error.py | 13 + lib/spack/spack/installer.py | 9 +- lib/spack/spack/schema/buildcache_spec.py | 6 +- .../spack/schema/url_buildcache_manifest.py | 45 + lib/spack/spack/test/bindist.py | 467 ++++- lib/spack/spack/test/build_distribution.py | 9 +- lib/spack/spack/test/cmd/buildcache.py | 362 +++- lib/spack/spack/test/cmd/ci.py | 30 +- lib/spack/spack/test/cmd/gpg.py | 19 +- lib/spack/spack/test/cmd/mirror.py | 7 +- lib/spack/spack/test/conftest.py | 4 - ...l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json | 54 - ...2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spack | Bin 10240 -> 0 bytes ...8A04B882E19D85FD36EE069565D80B055C92FF.pub | 29 + ...9D4971F0097B1E7A3EB57371B484802E78D7CD.pub | 29 + .../signed/build_cache/_pgp/index.json | 1 + 
.../v2_layout/signed/build_cache/index.json | 1 + .../signed/build_cache/index.json.hash | 1 + ...qqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig | 124 ++ ...vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig | 72 + ...729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack | Bin 0 -> 4099 bytes ....13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack | Bin 0 -> 3633 bytes ...3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig | 429 +++++ ...ipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig | 317 ++++ ...cxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig | 99 ++ ...pzeljwairalfjm3k6fntbb64nt6n.spec.json.sig | 151 ++ ...729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack | Bin 0 -> 9063 bytes ....13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack | Bin 0 -> 8625 bytes ...1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack | Bin 0 -> 5589 bytes ...2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack | Bin 0 -> 5077 bytes .../v2_layout/unsigned/build_cache/index.json | 1 + .../unsigned/build_cache/index.json.hash | 1 + ...sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json | 105 ++ ...rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json | 53 + ...729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack | Bin 0 -> 4092 bytes ....13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack | Bin 0 -> 3622 bytes ...u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json | 410 +++++ ...jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json | 298 ++++ ...qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json | 80 + ...izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json | 132 ++ ...729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack | Bin 0 -> 9058 bytes ....13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack | Bin 0 -> 8627 bytes ...1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack | Bin 0 -> 5596 bytes ...2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack | Bin 0 -> 5070 bytes lib/spack/spack/url_buildcache.py | 1239 +++++++++++++ .../gitlab/cloud_pipelines/configs/ci.yaml | 23 +- .../configs/multi-src-mirrors.yaml.in | 5 - .../aws-pcluster-neoverse_v1/spack.yaml | 5 +- .../stacks/aws-pcluster-x86_64_v4/spack.yaml | 5 +- share/spack/spack-completion.bash | 11 +- share/spack/spack-completion.fish | 
13 + 61 files changed, 5940 insertions(+), 1333 deletions(-) create mode 100644 lib/spack/spack/buildcache_migrate.py create mode 100644 lib/spack/spack/schema/url_buildcache_manifest.py delete mode 100644 lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json delete mode 100644 lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2/gcc-4.5.0/archive-files-2.0/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/A98A04B882E19D85FD36EE069565D80B055C92FF.pub create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/E89D4971F0097B1E7A3EB57371B484802E78D7CD.pub create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/index.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json.hash create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libdwarf-20130729/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack create mode 100644 
lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json.sig create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/compiler-wrapper-1.0/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/gcc-runtime-10.2.1/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/index.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/index.json.hash create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json create mode 100644 
lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2/gcc-10.2.1/libdwarf-20130729/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack create mode 100644 lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/none-none/compiler-wrapper-1.0/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack create mode 100644 
lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/none-none/gcc-runtime-10.2.1/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack create mode 100644 lib/spack/spack/url_buildcache.py diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst index 35f7441fe1f..f736fab75f0 100644 --- a/lib/spack/docs/binary_caches.rst +++ b/lib/spack/docs/binary_caches.rst @@ -45,10 +45,14 @@ provided binary cache, which can be a local directory or a remote URL. Here is an example where a build cache is created in a local directory named "spack-cache", to which we push the "ninja" spec: +ninja-1.12.1-vmvycib6vmiofkdqgrblo7zsvp7odwut + .. code-block:: console $ spack buildcache push ./spack-cache ninja - ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache + ==> Selected 30 specs to push to file:///home/spackuser/spack/spack-cache + ... + ==> [30/30] Pushed ninja@1.12.1/ngldn2k Note that ``ninja`` must be installed locally for this to work. @@ -98,9 +102,10 @@ Now you can use list: .. code-block:: console $ spack buildcache list - ==> 1 cached build. - -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------ - ninja@1.10.2 + ==> 24 cached builds. + -- linux-ubuntu22.04-sapphirerapids / gcc@12.3.0 ---------------- + [ ... ] + ninja@1.12.1 With ``mymirror`` configured and an index available, Spack will automatically use it during concretization and installation. That means that you can expect @@ -111,17 +116,17 @@ verify by re-installing ninja: $ spack uninstall ninja $ spack install ninja - ==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz - ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig - gpg: Signature made Do 12 Jan 2023 16:01:04 CET - gpg: using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6 + [ ... 
] + ==> Installing ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh [24/24] + gpg: Signature made Thu 06 Mar 2025 10:03:38 AM MST + gpg: using RSA key 75BC0528114909C076E2607418010FFAD73C9B07 gpg: Good signature from "example (GPG created for Spack) " [ultimate] - ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack - ==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache - ==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz - Search: 0.00s. Fetch: 0.17s. Install: 0.12s. Total: 0.29s - [+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz - + ==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/f0/f08eb62661ad159d2d258890127fc6053f5302a2f490c1c7f7bd677721010ee0 + ==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/c7/c79ac6e40dfdd01ac499b020e52e57aa91151febaea3ad183f90c0f78b64a31a + ==> Extracting ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh from binary cache + ==> ninja: Successfully installed ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh + Search: 0.00s. Fetch: 0.11s. Install: 0.11s. Extract: 0.10s. Relocate: 0.00s. Total: 0.22s + [+] /home/spackuser/spack/opt/spack/linux-ubuntu22.04-sapphirerapids/gcc-12.3.0/ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh It worked! You've just completed a full example of creating a build cache with a spec of interest, adding it as a mirror, updating its index, listing the contents, @@ -344,19 +349,18 @@ which lets you get started quickly. See the following resources for more informa ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Create tarball of installed Spack package and all dependencies. -Tarballs are checksummed and signed if gpg2 is available. -Places them in a directory ``build_cache`` that can be copied to a mirror. 
-Commands like ``spack buildcache install`` will search Spack mirrors for build_cache to get the list of build caches. +Tarballs and specfiles are compressed and checksummed, manifests are signed if gpg2 is available. +Commands like ``spack buildcache install`` will search Spack mirrors to get the list of build caches. ============== ======================================================================================================================== Arguments Description ============== ======================================================================================================================== ```` list of partial specs or hashes with a leading ``/`` to match from installed packages and used for creating build caches -``-d `` directory in which ``build_cache`` directory is created, defaults to ``.`` -``-f`` overwrite ``.spack`` file in ``build_cache`` directory if it exists +``-d `` directory in which ``v3`` and ``blobs`` directories are created, defaults to ``.`` +``-f`` overwrite compressed tarball and spec metadata files if they already exist ``-k `` the key to sign package with. In the case where multiple keys exist, the package will be unsigned unless ``-k`` is used. ``-r`` make paths in binaries relative before creating tarball -``-y`` answer yes to all create unsigned ``build_cache`` questions +``-y`` answer yes to all questions about creating unsigned build caches ============== ======================================================================================================================== ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -397,6 +401,165 @@ List public keys available on Spack mirror. ========= ============================================== Arguments Description ========= ============================================== -``-i`` trust the keys downloaded with prompt for each +``-it`` trust the keys downloaded with prompt for each ``-y`` answer yes to all trust all keys downloaded ========= ============================================== + +.. 
_build_cache_layout: + +------------------ +Build Cache Layout +------------------ + +This section describes the structure and content of URL-style build caches, as +distinguished from OCI-style build caches. + +The entry point for a binary package is a manifest json file that points to at +least two other files stored as content-addressed blobs. These files include a spec +metadata file, as well as the installation directory of the package stored as +a compressed archive file. Binary package manifest files are named to indicate +the package name and version, as well as the hash of the concrete spec. For +example:: + + gcc-runtime-12.3.0-qyu2lvgt3nxh7izxycugdbgf5gsdpkjt.spec.manifest.json + +would contain the manifest for a binary package of ``gcc-runtime@12.3.0``. +The id of the built package is defined to be the DAG hash of the concrete spec, +and exists in the name of the file as well. The id distinguishes a particular +binary package from all other binary packages with the same package name and +version. Below is an example binary package manifest file. Such a file would +live in the versioned spec manifests directory of a binary mirror, for example +``v3/manifests/spec/``:: + + { + "version": 3, + "data": [ + { + "contentLength": 10731083, + "mediaType": "application/vnd.spack.install.v2.tar+gzip", + "compression": "gzip", + "checksumAlgorithm": "sha256", + "checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210" + }, + { + "contentLength": 1000, + "mediaType": "application/vnd.spack.spec.v5+json", + "compression": "gzip", + "checksumAlgorithm": "sha256", + "checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041" + } + ] + } + +The manifest points to both the compressed tar file as well as the compressed +spec metadata file, and contains the checksum of each. This checksum +is also used as the address of the associated file, and hence, must be +known in order to locate the tarball or spec file within the mirror. 
Once the +tarball or spec metadata file is downloaded, the checksum should be computed locally +and compared to the checksum in the manifest to ensure the contents have not changed +since the binary package was pushed. Spack stores all data files (including compressed +tar files, spec metadata, indices, public keys, etc) within a ``blobs//`` +directory, using the first two characters of the checksum as a sub-directory +to reduce the number files in a single folder. Here is a depiction of the +organization of binary mirror contents:: + + mirror_directory/ + v3/ + layout.json + manifests/ + spec/ + gcc-runtime/ + gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json + gmake/ + gmake-4.4.1-lpr4j77rcgkg5536tmiuzwzlcjsiomph.spec.manifest.json + compiler-wrapper/ + compiler-wrapper-1.0-s7ieuyievp57vwhthczhaq2ogowf3ohe.spec.manifest.json + index/ + index.manifest.json + key/ + 75BC0528114909C076E2607418010FFAD73C9B07.key.manifest.json + keys.manifest.json + blobs/ + sha256/ + 0f/ + 0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210 + fb/ + fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041 + 2a/ + 2a21836d206ccf0df780ab0be63fdf76d24501375306a35daa6683c409b7922f + ... + +Files within the ``manifests`` directory are organized into subdirectories by +the type of entity they represent. Binary package manifests live in the ``spec/`` +directory, binary cache index manifests live in the ``index/`` directory, and +manifests for public keys and their indices live in the ``key/`` subdirectory. +Regardless of the type of entity they represent, all manifest files are named +with an extension ``.manifest.json``. + +Every manifest contains a ``data`` array, each element of which refers to an +associated file stored a content-addressed blob. 
Considering the example spec +manifest shown above, the compressed installation archive can be found by +picking out the data blob with the appropriate ``mediaType``, which in this +case would be ``application/vnd.spack.install.v1.tar+gzip``. The associated +file is found by looking in the blobs directory under ``blobs/sha256/fb/`` for +the file named with the complete checksum value. + +As mentioned above, every entity in a binary mirror (aka build cache) is stored +as a content-addressed blob pointed to by a manifest. While an example spec +manifest (i.e. a manifest for a binary package) is shown above, here is what +the manifest of a build cache index looks like:: + + { + "version": 3, + "data": [ + { + "contentLength": 6411, + "mediaType": "application/vnd.spack.db.v8+json", + "compression": "none", + "checksumAlgorithm": "sha256", + "checksum": "225a3e9da24d201fdf9d8247d66217f5b3f4d0fc160db1498afd998bfd115234" + } + ] + } + +Some things to note about this manifest are that it points to a blob that is not +compressed (``compression: "none"``), and that the ``mediaType`` is one we have +not seen yet, ``application/vnd.spack.db.v8+json``. The decision not to compress +build cache indices stems from the fact that spack does not yet sign build cache +index manifests. Once that changes, you may start to see these indices stored as +compressed blobs. + +For completeness, here are examples of manifests for the other two types of entities +you might find in a spack build cache. First a public key manifest:: + + { + "version": 3, + "data": [ + { + "contentLength": 2472, + "mediaType": "application/pgp-keys", + "compression": "none", + "checksumAlgorithm": "sha256", + "checksum": "9fc18374aebc84deb2f27898da77d4d4410e5fb44c60c6238cb57fb36147e5c7" + } + ] + } + +Note the ``mediaType`` of ``application/pgp-keys``. 
Finally, a public key index manifest:: + + { + "version": 3, + "data": [ + { + "contentLength": 56, + "mediaType": "application/vnd.spack.keyindex.v1+json", + "compression": "none", + "checksumAlgorithm": "sha256", + "checksum": "29b3a0eb6064fd588543bc43ac7d42d708a69058dafe4be0859e3200091a9a1c" + } + ] + } + +Again note the ``mediaType`` of ``application/vnd.spack.keyindex.v1+json``. Also note +that both the above manifest examples refer to uncompressed blobs, this is for the same +reason spack does not yet compress build cache index blobs. diff --git a/lib/spack/docs/signing.rst b/lib/spack/docs/signing.rst index 5a34305cbdd..2ef0429711b 100644 --- a/lib/spack/docs/signing.rst +++ b/lib/spack/docs/signing.rst @@ -176,92 +176,72 @@ community without needing deep familiarity with GnuPG or Public Key Infrastructure. -.. _build_cache_format: +.. _build_cache_signing: ------------------- -Build Cache Format ------------------- +------------------- +Build Cache Signing +------------------- -A binary package consists of a metadata file unambiguously defining the -built package (and including other details such as how to relocate it) -and the installation directory of the package stored as a compressed -archive file. The metadata files can either be unsigned, in which case -the contents are simply the json-serialized concrete spec plus metadata, -or they can be signed, in which case the json-serialized concrete spec -plus metadata is wrapped in a gpg cleartext signature. Built package -metadata files are named to indicate the operating system and -architecture for which the package was built as well as the compiler -used to build it and the packages name and version. For example:: +For an in-depth description of the layout of a binary mirror, see +the :ref:`documentation` covering binary caches. The +key takeaway from that discussion that applies here is that the entry point +to a binary package is it's manifest. 
The manifest refers unambiguously to the +spec metadata and compressed archive, which are stored as content-addressed +blobs. - linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig - -would contain the concrete spec and binary metadata for a binary package -of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell`` -architecture. The id of the built package exists in the name of the file -as well (after the package name and version) and in this case begins -with ``llv2ys``. The id distinguishes a particular built package from all -other built packages with the same os/arch, compiler, name, and version. -Below is an example of a signed binary package metadata file. Such a -file would live in the ``build_cache`` directory of a binary mirror:: +The manifest files can either be signed or unsigned, but are always given +a name ending with ``.spec.manifest.json`` regardless. The difference between +signed and unsigned manifests is simply that the signed version is wrapped in +a gpg cleartext signature, as illustrated below:: -----BEGIN PGP SIGNED MESSAGE----- Hash: SHA512 { - "spec": { - - }, - - "buildcache_layout_version": 1, - "binary_cache_checksum": { - "hash_algorithm": "sha256", - "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423" - } + "version": 3, + "data": [ + { + "contentLength": 10731083, + "mediaType": "application/vnd.spack.install.v2.tar+gzip", + "compression": "gzip", + "checksumAlgorithm": "sha256", + "checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210" + }, + { + "contentLength": 1000, + "mediaType": "application/vnd.spack.spec.v5+json", + "compression": "gzip", + "checksumAlgorithm": "sha256", + "checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041" + } + ] } - -----BEGIN PGP SIGNATURE----- - iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc - 
ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn - 4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+ - QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b - 887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6 - 4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH - qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7 - QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi - Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx - j3DXty57 - =3gvm + + iQGzBAEBCgAdFiEEdbwFKBFJCcB24mB0GAEP+tc8mwcFAmf2rr4ACgkQGAEP+tc8 + mwfefwv+KJs8MsQ5ovFaBdmyx5H/3k4rO4QHBzuSPOB6UaxErA9IyOB31iP6vNTU + HzYpxz6F5dJCJWmmNEMN/0+vjhMHEOkqd7M1l5reVcxduTF2yc4tBZUO2gienEHL + W0e+SnUznl1yc/aVpChUiahO2zToCsI8HZRNT4tu6iCnE/OpghqjsSdBOZHmSNDD + 5wuuCxfDUyWI6ZlLclaaB7RdbCUUJf/iqi711J+wubvnDFhc6Ynwm1xai5laJ1bD + ev3NrSb2AAroeNFVo4iECA0fZC1OZQYzaRmAEhBXtCideGJ5Zf2Cp9hmCwNK8Hq6 + bNt94JP9LqC3FCCJJOMsPyOOhMSA5MU44zyyzloRwEQpHHLuFzVdbTHA3dmTc18n + HxNLkZoEMYRc8zNr40g0yb2lCbc+P11TtL1E+5NlE34MX15mPewRCiIFTMwhCnE3 + gFSKtW1MKustZE35/RUwd2mpJRf+mSRVCl1f1RiFjktLjz7vWQq7imIUSam0fPDr + XD4aDogm + =RrFX -----END PGP SIGNATURE----- If a user has trusted the public key associated with the private key -used to sign the above spec file, the signature can be verified with +used to sign the above manifest file, the signature can be verified with gpg, as follows:: - $ gpg –verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig + $ gpg --verify gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json -The metadata (regardless whether signed or unsigned) contains the checksum -of the ``.spack`` file containing the actual installation. The checksum should -be compared to a checksum computed locally on the ``.spack`` file to ensure the -contents have not changed since the binary spec plus metadata were signed. 
The -``.spack`` files are actually tarballs containing the compressed archive of the -install tree. These files, along with the metadata files, live within the -``build_cache`` directory of the mirror, and together are organized as follows:: - - build_cache/ - # unsigned metadata (for indexing, contains sha256 of .spack file) - ----24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json - # clearsigned metadata (same as above, but signed) - ----24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig - / - / - -/ - # tar.gz-compressed prefix (may support more compression formats later) - ----24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack - -Uncompressing and extracting the ``.spack`` file results in the install tree. -This is in contrast to previous versions of spack, where the ``.spack`` file -contained a (duplicated) metadata file, a signature file and a nested tarball -containing the install tree. +When attempting to install a binary package that has been signed, spack will +attempt to verify the signature with one of the trusted keys in its keyring, +and will fail if unable to do so. While not recommended, it is possible to +force installation of a signed package without verification by providing the +``--no-check-signature`` argument to ``spack install ...``. .. _internal_implementation: @@ -320,10 +300,10 @@ the following way: Reputational Public Key are imported into a keyring by the ``spack gpg …`` sub-command. This is initiated by the job’s build script which is created by the generate job at the beginning of the pipeline. -4. Assuming the package has dependencies those specs are verified using +4. Assuming the package has dependencies those spec manifests are verified using the keyring. -5. The package is built and the spec.json is generated -6. The spec.json is signed by the keyring and uploaded to the mirror’s +5. The package is built and the spec manifest is generated +6. The spec manifest is signed by the keyring and uploaded to the mirror’s build cache. 
**Reputational Key** @@ -376,24 +356,24 @@ following way: 4. In addition to the secret, the runner creates a tmpfs memory mounted directory where the GnuPG keyring will be created to verify, and then resign the package specs. -5. The job script syncs all spec.json.sig files from the build cache to +5. The job script syncs all spec manifest files from the build cache to a working directory in the job’s execution environment. 6. The job script then runs the ``sign.sh`` script built into the notary Docker image. 7. The ``sign.sh`` script imports the public components of the Reputational and Intermediate CI Keys and uses them to verify good - signatures on the spec.json.sig files. If any signed spec does not - verify the job immediately fails. -8. Assuming all specs are verified, the ``sign.sh`` script then unpacks - the spec json data from the signed file in preparation for being + signatures on the spec.manifest.json files. If any signed manifest + does not verify, the job immediately fails. +8. Assuming all manifests are verified, the ``sign.sh`` script then unpacks + the manifest json data from the signed file in preparation for being re-signed with the Reputational Key. 9. The private components of the Reputational Key are decrypted to standard out using ``aws-encryption-cli`` directly into a ``gpg –import …`` statement which imports the key into the keyring mounted in-memory. -10. The private key is then used to sign each of the json specs and the +10. The private key is then used to sign each of the manifests and the keyring is removed from disk. -11. The re-signed json specs are resynced to the AWS S3 Mirror and the +11. The re-signed manifests are resynced to the AWS S3 Mirror and the public signing of the packages for the develop or release pipeline that created them is complete. 
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 167e2129a97..a46b2fcb938 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -7,6 +7,7 @@ import concurrent.futures import contextlib import copy +import datetime import hashlib import io import itertools @@ -24,7 +25,7 @@ import urllib.request import warnings from contextlib import closing -from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union +from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Union import llnl.util.filesystem as fsys import llnl.util.lang @@ -32,7 +33,7 @@ from llnl.util.filesystem import mkdirp import spack.caches -import spack.config as config +import spack.config import spack.database as spack_db import spack.deptypes as dt import spack.error @@ -83,16 +84,20 @@ from spack.util.executable import which from .enums import InstallRecordStatus - -BUILD_CACHE_RELATIVE_PATH = "build_cache" -BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp" - -#: The build cache layout version that this version of Spack creates. 
-#: Version 2: includes parent directories of the package prefix in the tarball -CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2 - - -INDEX_HASH_FILE = "index.json.hash" +from .url_buildcache import ( + CURRENT_BUILD_CACHE_LAYOUT_VERSION, + SUPPORTED_LAYOUT_VERSIONS, + BlobRecord, + BuildcacheComponent, + BuildcacheEntryError, + BuildcacheManifest, + InvalidMetadataFile, + MirrorForSpec, + MirrorURLAndVersion, + URLBuildcacheEntry, + get_url_buildcache_class, + get_valid_spec_file, +) class BuildCacheDatabase(spack_db.Database): @@ -166,10 +171,11 @@ def __init__(self, cache_root: Optional[str] = None): self._index_contents_key = "contents.json" # a FileCache instance storing copies of remote binary cache indices - self._index_file_cache: Optional[file_cache.FileCache] = None + self._index_file_cache: file_cache.FileCache = file_cache.FileCache(self._index_cache_root) + self._index_file_cache_initialized = False - # stores a map of mirror URL to index hash and cache key (index path) - self._local_index_cache: Optional[dict] = None + # stores a map of mirror URL and version layout to index hash and cache key (index path) + self._local_index_cache: dict[str, dict] = {} # hashes of remote indices already ingested into the concrete spec # cache (_mirrors_for_spec) @@ -177,22 +183,15 @@ def __init__(self, cache_root: Optional[str] = None): # mapping from mirror urls to the time.time() of the last index fetch and a bool indicating # whether the fetch succeeded or not. - self._last_fetch_times: Dict[str, float] = {} + self._last_fetch_times: Dict[MirrorURLAndVersion, float] = {} # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of # entries indicating mirrors where that concrete spec can be found. 
- # Each entry is a dictionary consisting of: - # - # - the mirror where the spec is, keyed by ``mirror_url`` - # - the concrete spec itself, keyed by ``spec`` (including the - # full hash, since the dag hash may match but we want to - # use the updated source if available) - self._mirrors_for_spec: Dict[str, dict] = {} + # Each entry is a MirrorURLAndVersion. + self._mirrors_for_spec: Dict[str, List[MirrorForSpec]] = {} def _init_local_index_cache(self): - if not self._index_file_cache: - self._index_file_cache = file_cache.FileCache(self._index_cache_root) - + if not self._index_file_cache_initialized: cache_key = self._index_contents_key self._index_file_cache.init_entry(cache_key) @@ -203,13 +202,15 @@ def _init_local_index_cache(self): with self._index_file_cache.read_transaction(cache_key) as cache_file: self._local_index_cache = json.load(cache_file) + self._index_file_cache_initialized = True + def clear(self): """For testing purposes we need to be able to empty the cache and clear associated data structures.""" if self._index_file_cache: self._index_file_cache.destroy() - self._index_file_cache = None - self._local_index_cache = None + self._index_file_cache = file_cache.FileCache(self._index_cache_root) + self._local_index_cache = {} self._specs_already_associated = set() self._last_fetch_times = {} self._mirrors_for_spec = {} @@ -231,18 +232,21 @@ def regenerate_spec_cache(self, clear_existing=False): self._specs_already_associated = set() self._mirrors_for_spec = {} - for mirror_url in self._local_index_cache: - cache_entry = self._local_index_cache[mirror_url] + for url_and_version in self._local_index_cache: + cache_entry = self._local_index_cache[url_and_version] cached_index_path = cache_entry["index_path"] cached_index_hash = cache_entry["index_hash"] if cached_index_hash not in self._specs_already_associated: - self._associate_built_specs_with_mirror(cached_index_path, mirror_url) + self._associate_built_specs_with_mirror( + cached_index_path, 
MirrorURLAndVersion.from_string(url_and_version) + ) self._specs_already_associated.add(cached_index_hash) - def _associate_built_specs_with_mirror(self, cache_key, mirror_url): - tmpdir = tempfile.mkdtemp() + def _associate_built_specs_with_mirror(self, cache_key, url_and_version: MirrorURLAndVersion): + mirror_url = url_and_version.url + layout_version = url_and_version.version - try: + with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: db = BuildCacheDatabase(tmpdir) try: @@ -252,8 +256,9 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url): db._read_from_file(pathlib.Path(cache_path)) except spack_db.InvalidDatabaseVersionError as e: tty.warn( - f"you need a newer Spack version to read the buildcache index for the " - f"following mirror: '{mirror_url}'. {e.database_version_message}" + "you need a newer Spack version to read the buildcache index " + f"for the following v{layout_version} mirror: '{mirror_url}'. " + f"{e.database_version_message}" ) return @@ -272,15 +277,16 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url): for entry in self._mirrors_for_spec[dag_hash]: # A binary mirror can only have one spec per DAG hash, so # if we already have an entry under this DAG hash for this - # mirror url, we're done. - if entry["mirror_url"] == mirror_url: + # mirror url/layout version, we're done. + if ( + entry.url_and_version.url == mirror_url + and entry.url_and_version.version == layout_version + ): break else: self._mirrors_for_spec[dag_hash].append( - {"mirror_url": mirror_url, "spec": indexed_spec} + MirrorForSpec(url_and_version, indexed_spec) ) - finally: - shutil.rmtree(tmpdir) def get_all_built_specs(self): spec_list = [] @@ -289,7 +295,7 @@ def get_all_built_specs(self): # with the same DAG hash are equivalent, so we can just # return the first one in the list. 
if len(self._mirrors_for_spec[dag_hash]) > 0: - spec_list.append(self._mirrors_for_spec[dag_hash][0]["spec"]) + spec_list.append(self._mirrors_for_spec[dag_hash][0].spec) return spec_list @@ -340,9 +346,9 @@ def find_by_hash(self, find_hash, mirrors_to_check=None): if not mirrors_to_check: return results mirror_urls = mirrors_to_check.values() - return [r for r in results if r["mirror_url"] in mirror_urls] + return [r for r in results if r.url_and_version.url in mirror_urls] - def update_spec(self, spec, found_list): + def update_spec(self, spec: spack.spec.Spec, found_list: List[MirrorForSpec]): """ Take list of {'mirror_url': m, 'spec': s} objects and update the local built_spec_cache @@ -355,13 +361,11 @@ def update_spec(self, spec, found_list): current_list = self._mirrors_for_spec[spec_dag_hash] for new_entry in found_list: for cur_entry in current_list: - if new_entry["mirror_url"] == cur_entry["mirror_url"]: - cur_entry["spec"] = new_entry["spec"] + if new_entry.url_and_version == cur_entry.url_and_version: + cur_entry.spec = new_entry.spec break else: - current_list.append( - {"mirror_url": new_entry["mirror_url"], "spec": new_entry["spec"]} - ) + current_list.append(MirrorForSpec(new_entry.url_and_version, new_entry.spec)) def update(self, with_cooldown=False): """Make sure local cache of buildcache index files is up to date. 
@@ -373,8 +377,10 @@ def update(self, with_cooldown=False): from each configured mirror and stored locally (both in memory and on disk under ``_index_cache_root``).""" self._init_local_index_cache() - configured_mirror_urls = [ - m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values() + configured_mirrors = [ + MirrorURLAndVersion(m.fetch_url, layout_version) + for layout_version in SUPPORTED_LAYOUT_VERSIONS + for m in spack.mirrors.mirror.MirrorCollection(binary=True).values() ] items_to_remove = [] spec_cache_clear_needed = False @@ -408,34 +414,36 @@ def update(self, with_cooldown=False): ttl = spack.config.get("config:binary_index_ttl", 600) now = time.time() - for cached_mirror_url in self._local_index_cache: - cache_entry = self._local_index_cache[cached_mirror_url] + for local_index_cache_key in self._local_index_cache: + urlAndVersion = MirrorURLAndVersion.from_string(local_index_cache_key) + cached_mirror_url = urlAndVersion.url + cache_entry = self._local_index_cache[local_index_cache_key] cached_index_path = cache_entry["index_path"] - if cached_mirror_url in configured_mirror_urls: + if urlAndVersion in configured_mirrors: # Only do a fetch if the last fetch was longer than TTL ago if ( with_cooldown and ttl > 0 and cached_mirror_url in self._last_fetch_times - and now - self._last_fetch_times[cached_mirror_url][0] < ttl + and now - self._last_fetch_times[urlAndVersion][0] < ttl ): # We're in the cooldown period, don't try to fetch again # If the fetch succeeded last time, consider this update a success, otherwise # re-report the error here - if self._last_fetch_times[cached_mirror_url][1]: + if self._last_fetch_times[urlAndVersion][1]: all_methods_failed = False else: # May need to fetch the index and update the local caches try: needs_regen = self._fetch_and_cache_index( - cached_mirror_url, cache_entry=cache_entry + urlAndVersion, cache_entry=cache_entry ) - self._last_fetch_times[cached_mirror_url] = (now, True) + 
self._last_fetch_times[urlAndVersion] = (now, True) all_methods_failed = False except FetchIndexError as e: needs_regen = False fetch_errors.append(e) - self._last_fetch_times[cached_mirror_url] = (now, False) + self._last_fetch_times[urlAndVersion] = (now, False) # The need to regenerate implies a need to clear as well. spec_cache_clear_needed |= needs_regen spec_cache_regenerate_needed |= needs_regen @@ -443,12 +451,12 @@ def update(self, with_cooldown=False): # No longer have this mirror, cached index should be removed items_to_remove.append( { - "url": cached_mirror_url, + "url": local_index_cache_key, "cache_key": os.path.join(self._index_cache_root, cached_index_path), } ) - if cached_mirror_url in self._last_fetch_times: - del self._last_fetch_times[cached_mirror_url] + if urlAndVersion in self._last_fetch_times: + del self._last_fetch_times[urlAndVersion] spec_cache_clear_needed = True spec_cache_regenerate_needed = True @@ -462,19 +470,19 @@ def update(self, with_cooldown=False): # Iterate the configured mirrors now. Any mirror urls we do not # already have in our cache must be fetched, stored, and represented # locally. - for mirror_url in configured_mirror_urls: - if mirror_url in self._local_index_cache: + for urlAndVersion in configured_mirrors: + if str(urlAndVersion) in self._local_index_cache: continue # Need to fetch the index and update the local caches try: - needs_regen = self._fetch_and_cache_index(mirror_url) - self._last_fetch_times[mirror_url] = (now, True) + needs_regen = self._fetch_and_cache_index(urlAndVersion) + self._last_fetch_times[urlAndVersion] = (now, True) all_methods_failed = False except FetchIndexError as e: fetch_errors.append(e) needs_regen = False - self._last_fetch_times[mirror_url] = (now, False) + self._last_fetch_times[urlAndVersion] = (now, False) # Generally speaking, a new mirror wouldn't imply the need to # clear the spec cache, so leave it as is. 
if needs_regen: @@ -482,7 +490,7 @@ def update(self, with_cooldown=False): self._write_local_index_cache() - if configured_mirror_urls and all_methods_failed: + if configured_mirrors and all_methods_failed: raise FetchCacheError(fetch_errors) if fetch_errors: tty.warn( @@ -492,14 +500,14 @@ def update(self, with_cooldown=False): if spec_cache_regenerate_needed: self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed) - def _fetch_and_cache_index(self, mirror_url, cache_entry={}): + def _fetch_and_cache_index(self, url_and_version: MirrorURLAndVersion, cache_entry={}): """Fetch a buildcache index file from a remote mirror and cache it. If we already have a cached index from this mirror, then we first check if the hash has changed, and we avoid fetching it if not. Args: - mirror_url (str): Base url of mirror + url_and_version: Contains mirror base url and target binary cache layout version cache_entry (dict): Old cache metadata with keys ``index_hash``, ``index_path``, ``etag`` @@ -509,24 +517,18 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}): Throws: FetchIndexError """ + mirror_url = url_and_version.url + layout_version = url_and_version.version + # TODO: get rid of this request, handle 404 better scheme = urllib.parse.urlparse(mirror_url).scheme - if scheme != "oci" and not web_util.url_exists( - url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE) - ): - return False - - if scheme == "oci": - # TODO: Actually etag and OCI are not mutually exclusive... 
- fetcher = OCIIndexFetcher(mirror_url, cache_entry.get("index_hash", None)) - elif cache_entry.get("etag"): - fetcher = EtagIndexFetcher(mirror_url, cache_entry["etag"]) - else: - fetcher = DefaultIndexFetcher( - mirror_url, local_hash=cache_entry.get("index_hash", None) - ) + if scheme != "oci": + cache_class = get_url_buildcache_class(layout_version=layout_version) + if not web_util.url_exists(cache_class.get_index_url(mirror_url)): + return False + fetcher: IndexFetcher = get_index_fetcher(scheme, url_and_version, cache_entry) result = fetcher.conditional_fetch() # Nothing to do @@ -534,13 +536,13 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}): return False # Persist new index.json - url_hash = compute_hash(mirror_url) + url_hash = compute_hash(f"{mirror_url}/v{layout_version}") cache_key = "{}_{}.json".format(url_hash[:10], result.hash[:10]) self._index_file_cache.init_entry(cache_key) with self._index_file_cache.write_transaction(cache_key) as (old, new): new.write(result.data) - self._local_index_cache[mirror_url] = { + self._local_index_cache[str(url_and_version)] = { "index_hash": result.hash, "index_path": cache_key, "etag": result.etag, @@ -572,18 +574,6 @@ def compute_hash(data): return hashlib.sha256(data).hexdigest() -def build_cache_relative_path(): - return BUILD_CACHE_RELATIVE_PATH - - -def build_cache_keys_relative_path(): - return BUILD_CACHE_KEYS_RELATIVE_PATH - - -def build_cache_prefix(prefix): - return os.path.join(prefix, build_cache_relative_path()) - - def buildinfo_file_name(prefix): """Filename of the binary package meta-data file""" return os.path.join(prefix, ".spack", "binary_distribution") @@ -631,31 +621,55 @@ def get_buildinfo_dict(spec): } -def tarball_directory_name(spec): - """ - Return name of the tarball directory according to the convention - -//-/ - """ - return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}") +def buildcache_relative_keys_path(layout_version: int = 
CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return os.path.join(*cache_class.get_relative_path_components(BuildcacheComponent.KEY)) -def tarball_name(spec, ext): - """ - Return the name of the tarfile according to the convention - --- - """ - spec_formatted = spec.format_path( - "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}" +def buildcache_relative_keys_url(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return url_util.join(*cache_class.get_relative_path_components(BuildcacheComponent.KEY)) + + +def buildcache_relative_specs_path(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return os.path.join(*cache_class.get_relative_path_components(BuildcacheComponent.SPEC)) + + +def buildcache_relative_specs_url(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return url_util.join(*cache_class.get_relative_path_components(BuildcacheComponent.SPEC)) + + +def buildcache_relative_blobs_path(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return os.path.join(*cache_class.get_relative_path_components(BuildcacheComponent.BLOB)) + + +def buildcache_relative_blobs_url(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return url_util.join(*cache_class.get_relative_path_components(BuildcacheComponent.BLOB)) + + +def buildcache_relative_index_path(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return os.path.join(*cache_class.get_relative_path_components(BuildcacheComponent.INDEX)) + + +def 
buildcache_relative_index_url(layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION): + cache_class = get_url_buildcache_class(layout_version=layout_version) + return url_util.join(*cache_class.get_relative_path_components(BuildcacheComponent.INDEX)) + + +@llnl.util.lang.memoized +def warn_v2_layout(mirror_url: str, action: str) -> bool: + tty.warn( + f"{action} from a v2 binary mirror layout, located at \n" + f" {mirror_url} is deprecated. Support for this will be \n" + " removed in a future version of spack. Please consider running `spack \n" + " buildcache migrate' or rebuilding the specs in this mirror." ) - return f"{spec_formatted}{ext}" - - -def tarball_path_name(spec, ext): - """ - Return the full path+name for a given spec according to the convention - / - """ - return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext)) + return True def select_signing_key() -> str: @@ -672,11 +686,17 @@ def select_signing_key() -> str: return keys[0] -def sign_specfile(key: str, specfile_path: str) -> str: - """sign and return the path to the signed specfile""" - signed_specfile_path = f"{specfile_path}.sig" - spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True) - return signed_specfile_path +def _push_index(db: BuildCacheDatabase, temp_dir: str, cache_prefix: str): + """Generate the index, compute its hash, and push the files to the mirror""" + index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE) + with open(index_json_path, "w", encoding="utf-8") as f: + db._write_to_file(f) + + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_class.push_local_file_as_blob( + index_json_path, cache_prefix, "index", BuildcacheComponent.INDEX, compression="none" + ) + cache_class.maybe_push_layout_json(cache_prefix) def _read_specs_and_push_index( @@ -685,10 +705,8 @@ def _read_specs_and_push_index( cache_prefix: str, db: BuildCacheDatabase, temp_dir: str, - concurrency: int, ): - 
"""Read all the specs listed in the provided list, using thread given thread parallelism, - generate the index, and push it to the mirror. + """Read listed specs, generate the index, and push it to the mirror. Args: file_list: List of urls or file paths pointing at spec files to read @@ -697,60 +715,21 @@ def _read_specs_and_push_index( cache_prefix: prefix of the build cache on s3 where index should be pushed. db: A spack database used for adding specs and then writing the index. temp_dir: Location to write index.json and hash for pushing - concurrency: Number of parallel processes to use when fetching """ for file in file_list: - contents = read_method(file) - # Need full spec.json name or this gets confused with index.json. - if file.endswith(".json.sig"): - specfile_json = spack.spec.Spec.extract_json_from_clearsig(contents) - fetched_spec = spack.spec.Spec.from_dict(specfile_json) - elif file.endswith(".json"): - fetched_spec = spack.spec.Spec.from_json(contents) - else: - continue - + fetched_spec = spack.spec.Spec.from_dict(read_method(file)) db.add(fetched_spec) db.mark(fetched_spec, "in_buildcache", True) - # Now generate the index, compute its hash, and push the two files to - # the mirror. 
- index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE) - with open(index_json_path, "w", encoding="utf-8") as f: - db._write_to_file(f) - - # Read the index back in and compute its hash - with open(index_json_path, encoding="utf-8") as f: - index_string = f.read() - index_hash = compute_hash(index_string) - - # Write the hash out to a local file - index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE) - with open(index_hash_path, "w", encoding="utf-8") as f: - f.write(index_hash) - - # Push the index itself - web_util.push_to_url( - index_json_path, - url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE), - keep_original=False, - extra_args={"ContentType": "application/json", "CacheControl": "no-cache"}, - ) - - # Push the hash - web_util.push_to_url( - index_hash_path, - url_util.join(cache_prefix, INDEX_HASH_FILE), - keep_original=False, - extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"}, - ) + _push_index(db, temp_dir, cache_prefix) -def _specs_from_cache_aws_cli(cache_prefix): +def _specs_from_cache_aws_cli(url: str, tmpspecsdir: str): """Use aws cli to sync all the specs into a local temporary directory. Args: - cache_prefix (str): prefix of the build cache on s3 + url: prefix of the build cache on s3 + tmpspecsdir: path to temporary directory to use for writing files Return: List of the local file paths and a function that can read each one from the file system. 
@@ -759,39 +738,42 @@ def _specs_from_cache_aws_cli(cache_prefix): file_list = None aws = which("aws") - def file_read_method(file_path): - with open(file_path, encoding="utf-8") as fd: - return fd.read() + if not aws: + tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch") + return file_list, read_fn + + def file_read_method(manifest_path): + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_entry = cache_class(url, allow_unsigned=True) + cache_entry.read_manifest(manifest_url=f"file://{manifest_path}") + spec_dict = cache_entry.fetch_metadata() + cache_entry.destroy() + return spec_dict - tmpspecsdir = tempfile.mkdtemp() sync_command_args = [ "s3", "sync", "--exclude", "*", "--include", - "*.spec.json.sig", - "--include", - "*.spec.json", - cache_prefix, + "*.spec.manifest.json", + url, tmpspecsdir, ] + tty.debug(f"Using aws s3 sync to download manifests from {url} to {tmpspecsdir}") + try: - tty.debug( - "Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir) - ) aws(*sync_command_args, output=os.devnull, error=os.devnull) - file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json"]) + file_list = fsys.find(tmpspecsdir, ["*.spec.manifest.json"]) read_fn = file_read_method except Exception: tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch") - shutil.rmtree(tmpspecsdir) return file_list, read_fn -def _specs_from_cache_fallback(url: str): +def _specs_from_cache_fallback(url: str, tmpspecsdir: str): """Use spack.util.web module to get a list of all the specs at the remote url. 
Args: @@ -804,20 +786,20 @@ def _specs_from_cache_fallback(url: str): read_fn = None file_list = None - def url_read_method(url): - contents = None - try: - _, _, spec_file = web_util.read_from_url(url) - contents = codecs.getreader("utf-8")(spec_file).read() - except (web_util.SpackWebError, OSError) as e: - tty.error(f"Error reading specfile: {url}: {e}") - return contents + def url_read_method(manifest_url): + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_entry = cache_class(url, allow_unsigned=True) + cache_entry.read_manifest(manifest_url) + spec_dict = cache_entry.fetch_metadata() + cache_entry.destroy() + return spec_dict try: + url_to_list = url_util.join(url, buildcache_relative_specs_url()) file_list = [ - url_util.join(url, entry) - for entry in web_util.list_url(url) - if entry.endswith("spec.json") or entry.endswith("spec.json.sig") + url_util.join(url_to_list, entry) + for entry in web_util.list_url(url_to_list, recursive=True) + if entry.endswith("spec.manifest.json") ] read_fn = url_read_method except Exception as err: @@ -828,12 +810,13 @@ def url_read_method(url): return file_list, read_fn -def _spec_files_from_cache(url: str): +def _spec_files_from_cache(url: str, tmpspecsdir: str): """Get a list of all the spec files in the mirror and a function to read them. 
Args: url: Base url of mirror (location of spec files) + tmpspecsdir: Temporary location for writing files Return: A tuple where the first item is a list of absolute file paths or @@ -848,59 +831,59 @@ def _spec_files_from_cache(url: str): callbacks.append(_specs_from_cache_fallback) for specs_from_cache_fn in callbacks: - file_list, read_fn = specs_from_cache_fn(url) + file_list, read_fn = specs_from_cache_fn(url, tmpspecsdir) if file_list: return file_list, read_fn raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(url)) -def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32): +def _url_generate_package_index(url: str, tmpdir: str): """Create or replace the build cache index on the given mirror. The buildcache index contains an entry for each binary package under the cache_prefix. Args: url: Base url of binary mirror. - concurrency: The desired threading concurrency to use when fetching the spec files from - the mirror. Return: None """ - url = url_util.join(url, build_cache_relative_path()) - try: - file_list, read_fn = _spec_files_from_cache(url) - except ListMirrorSpecsError as e: - raise GenerateIndexError(f"Unable to generate package index: {e}") from e + with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpspecsdir: + try: + file_list, read_fn = _spec_files_from_cache(url, tmpspecsdir) + except ListMirrorSpecsError as e: + raise GenerateIndexError(f"Unable to generate package index: {e}") from e - tty.debug(f"Retrieving spec descriptor files from {url} to build index") + tty.debug(f"Retrieving spec descriptor files from {url} to build index") - db = BuildCacheDatabase(tmpdir) - db._write() + db = BuildCacheDatabase(tmpdir) + db._write() - try: - _read_specs_and_push_index( - file_list, read_fn, url, db, str(db.database_directory), concurrency - ) - except Exception as e: - raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e + try: + 
_read_specs_and_push_index(file_list, read_fn, url, db, str(db.database_directory)) + except Exception as e: + raise GenerateIndexError( + f"Encountered problem pushing package index to {url}: {e}" + ) from e -def generate_key_index(key_prefix: str, tmpdir: str) -> None: +def generate_key_index(mirror_url: str, tmpdir: str) -> None: """Create the key index page. - Creates (or replaces) the "index.json" page at the location given in key_prefix. This page - contains an entry for each key (.pub) under key_prefix. + Creates (or replaces) the "index.json" page at the location given in mirror_url. This page + contains an entry for each key under mirror_url. """ - tty.debug(f"Retrieving key.pub files from {url_util.format(key_prefix)} to build key index") + tty.debug(f"Retrieving key.pub files from {url_util.format(mirror_url)} to build key index") + + key_prefix = url_util.join(mirror_url, buildcache_relative_keys_url()) try: fingerprints = ( - entry[:-4] + entry[:-18] for entry in web_util.list_url(key_prefix, recursive=False) - if entry.endswith(".pub") + if entry.endswith(".key.manifest.json") ) except Exception as e: raise CannotListKeys(f"Encountered problem listing keys at {key_prefix}: {e}") from e @@ -911,13 +894,17 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None: with open(target, "w", encoding="utf-8") as f: sjson.dump(index, f) + cache_class = get_url_buildcache_class() + try: - web_util.push_to_url( - target, - url_util.join(key_prefix, "index.json"), - keep_original=False, - extra_args={"ContentType": "application/json"}, + cache_class.push_local_file_as_blob( + local_file_path=target, + mirror_url=mirror_url, + manifest_name="keys", + component_type=BuildcacheComponent.KEY_INDEX, + compression="none", ) + cache_class.maybe_push_layout_json(mirror_url) except Exception as e: raise GenerateIndexError( f"Encountered problem pushing key index to {key_prefix}: {e}" @@ -1069,51 +1056,13 @@ def _do_create_tarball( return 
tar_gz_checksum.hexdigest(), tar_checksum.hexdigest() -class ExistsInBuildcache(NamedTuple): - signed: bool - unsigned: bool - tarball: bool - - -class BuildcacheFiles: - def __init__(self, spec: spack.spec.Spec, local: str, remote: str): - """ - Args: - spec: The spec whose tarball and specfile are being managed. - local: The local path to the buildcache. - remote: The remote URL to the buildcache. - """ - self.local = local - self.remote = remote - self.spec = spec - - def remote_specfile(self, signed: bool) -> str: - return url_util.join( - self.remote, - build_cache_relative_path(), - tarball_name(self.spec, ".spec.json.sig" if signed else ".spec.json"), - ) - - def remote_tarball(self) -> str: - return url_util.join( - self.remote, build_cache_relative_path(), tarball_path_name(self.spec, ".spack") - ) - - def local_specfile(self) -> str: - return os.path.join(self.local, f"{self.spec.dag_hash()}.spec.json") - - def local_tarball(self) -> str: - return os.path.join(self.local, f"{self.spec.dag_hash()}.tar.gz") - - -def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache: - """returns a tuple of bools (signed, unsigned, tarball) indicating whether specfiles/tarballs - exist in the buildcache""" - files = BuildcacheFiles(spec, tmpdir, out_url) - signed = web_util.url_exists(files.remote_specfile(signed=True)) - unsigned = web_util.url_exists(files.remote_specfile(signed=False)) - tarball = web_util.url_exists(files.remote_tarball()) - return ExistsInBuildcache(signed, unsigned, tarball) +def _exists_in_buildcache( + spec: spack.spec.Spec, out_url: str, allow_unsigned: bool = False +) -> URLBuildcacheEntry: + """creates and returns (after checking existence) a URLBuildcacheEntry""" + cache_type = get_url_buildcache_class(CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_entry = cache_type(out_url, spec, allow_unsigned=allow_unsigned) + return cache_entry def prefixes_to_relocate(spec): @@ -1124,42 +1073,12 @@ def 
prefixes_to_relocate(spec): def _url_upload_tarball_and_specfile( - spec: spack.spec.Spec, - tmpdir: str, - out_url: str, - exists: ExistsInBuildcache, - signing_key: Optional[str], + spec: spack.spec.Spec, tmpdir: str, cache_entry: URLBuildcacheEntry, signing_key: Optional[str] ): - files = BuildcacheFiles(spec, tmpdir, out_url) - tarball = files.local_tarball() + tarball = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz") checksum, _ = create_tarball(spec, tarball) - spec_dict = spec.to_dict(hash=ht.dag_hash) - spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION - spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum} - if exists.tarball: - web_util.remove_url(files.remote_tarball()) - if exists.signed: - web_util.remove_url(files.remote_specfile(signed=True)) - if exists.unsigned: - web_util.remove_url(files.remote_specfile(signed=False)) - web_util.push_to_url(tarball, files.remote_tarball(), keep_original=False) - - specfile = files.local_specfile() - with open(specfile, "w", encoding="utf-8") as f: - # Note: when using gpg clear sign, we need to avoid long lines (19995 chars). - # If lines are longer, they are truncated without error. Thanks GPG! - # So, here we still add newlines, but no indent, so save on file size and - # line length. 
- json.dump(spec_dict, f, indent=0, separators=(",", ":")) - - # sign the tarball and spec file with gpg - if signing_key: - specfile = sign_specfile(signing_key, specfile) - - web_util.push_to_url( - specfile, files.remote_specfile(signed=bool(signing_key)), keep_original=False - ) + cache_entry.push_binary_package(spec, tarball, "sha256", checksum, tmpdir, signing_key) class Uploader: @@ -1357,10 +1276,13 @@ def _url_push( errors: List[Tuple[spack.spec.Spec, BaseException]] = [] exists_futures = [ - executor.submit(_exists_in_buildcache, spec, tmpdir, out_url) for spec in specs + executor.submit( + _exists_in_buildcache, spec, out_url, allow_unsigned=False if signing_key else True + ) + for spec in specs ] - exists = { + cache_entries = { spec.dag_hash(): exists_future.result() for spec, exists_future in zip(specs, exists_futures) } @@ -1369,8 +1291,9 @@ def _url_push( specs_to_upload = [] for spec in specs: - signed, unsigned, tarball = exists[spec.dag_hash()] - if (signed or unsigned) and tarball: + if cache_entries[spec.dag_hash()].exists( + [BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL] + ): skipped.append(spec) else: specs_to_upload.append(spec) @@ -1390,8 +1313,7 @@ def _url_push( _url_upload_tarball_and_specfile, spec, tmpdir, - out_url, - exists[spec.dag_hash()], + cache_entries[spec.dag_hash()], signing_key, ) for spec in specs_to_upload @@ -1414,6 +1336,10 @@ def _url_push( if not uploaded_any: return skipped, errors + # If the layout.json doesn't yet exist on this mirror, push it + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_class.maybe_push_layout_json(out_url) + if signing_key: keys_tmpdir = os.path.join(tmpdir, "keys") os.mkdir(keys_tmpdir) @@ -1730,6 +1656,9 @@ def extra_config(spec: spack.spec.Spec): "hash_algorithm": "sha256", "hash": checksums[spec.dag_hash()].compressed_digest.digest, } + spec_dict["archive_size"] = checksums[spec.dag_hash()].size + spec_dict["archive_timestamp"] = 
datetime.datetime.now().astimezone().isoformat() + spec_dict["archive_compression"] = "gzip" return spec_dict # Upload manifests @@ -1842,26 +1771,6 @@ def _oci_update_index( upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest) -def try_verify(specfile_path): - """Utility function to attempt to verify a local file. Assumes the - file is a clearsigned signature file. - - Args: - specfile_path (str): Path to file to be verified. - - Returns: - ``True`` if the signature could be verified, ``False`` otherwise. - """ - suppress = config.get("config:suppress_gpg_warnings", False) - - try: - spack.util.gpg.verify(specfile_path, suppress_warnings=suppress) - except Exception: - return False - - return True - - def try_fetch(url_to_fetch): """Utility function to try and fetch a file from a url, stage it locally, and return the path to the staged file. @@ -1884,55 +1793,13 @@ def try_fetch(url_to_fetch): return stage -def _delete_staged_downloads(download_result): - """Clean up stages used to download tarball and specfile""" - download_result["tarball_stage"].destroy() - download_result["specfile_stage"].destroy() - - -def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, int]: - """Read and validate a spec file, returning the spec dict with its layout version, or raising - InvalidMetadataFile if invalid.""" - try: - with open(path, "rb") as f: - binary_content = f.read() - except OSError: - raise InvalidMetadataFile(f"No such file: {path}") - - # In the future we may support transparently decompressing compressed spec files. 
- if binary_content[:2] == b"\x1f\x8b": - raise InvalidMetadataFile("Compressed spec files are not supported") - - try: - as_string = binary_content.decode("utf-8") - if path.endswith(".json.sig"): - spec_dict = spack.spec.Spec.extract_json_from_clearsig(as_string) - else: - spec_dict = json.loads(as_string) - except Exception as e: - raise InvalidMetadataFile(f"Could not parse {path} due to: {e}") from e - - # Ensure this version is not too new. - try: - layout_version = int(spec_dict.get("buildcache_layout_version", 0)) - except ValueError as e: - raise InvalidMetadataFile("Could not parse layout version") from e - - if layout_version > max_supported_layout: - raise InvalidMetadataFile( - f"Layout version {layout_version} is too new for this version of Spack" - ) - - return spec_dict, layout_version - - -def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=None): - """ - Download binary tarball for given package into stage area, returning - path to downloaded tarball if successful, None otherwise. +def download_tarball( + spec: spack.spec.Spec, unsigned: Optional[bool] = False, mirrors_for_spec=None +) -> Optional[spack.stage.Stage]: + """Download binary tarball for given package Args: - spec (spack.spec.Spec): Concrete spec + spec: a concrete spec unsigned: if ``True`` or ``False`` override the mirror signature verification defaults mirrors_for_spec (list): Optional list of concrete specs and mirrors obtained by calling binary_distribution.get_mirrors_for_spec(). @@ -1940,19 +1807,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No configured mirrors. Returns: - ``None`` if the tarball could not be downloaded (maybe also verified, - depending on whether new-style signed binary packages were found). - Otherwise, return an object indicating the path to the downloaded - tarball, the path to the downloaded specfile (in the case of new-style - buildcache), and whether or not the tarball is already verified. 
- - .. code-block:: JSON - - { - "tarball_path": "path-to-locally-saved-tarfile", - "specfile_path": "none-or-path-to-locally-saved-specfile", - "signature_verified": "true-if-binary-pkg-was-already-verified" - } + ``None`` if the tarball could not be downloaded, the signature verified + (if required), and its checksum validated. Otherwise, return the stage + containing the downloaded tarball. """ configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = ( spack.mirrors.mirror.MirrorCollection(binary=True).values() @@ -1960,9 +1817,6 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No if not configured_mirrors: tty.die("Please add a spack mirror to allow download of pre-compiled packages.") - tarball = tarball_path_name(spec, ".spack") - specfile_prefix = tarball_name(spec, ".spec") - # Note on try_first and try_next: # mirrors_for_spec mostly likely came from spack caching remote # mirror indices locally and adding their specs to a local data @@ -1972,113 +1826,67 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No # look in all configured mirrors if needed, as maybe the spec # we need was in an un-indexed mirror. No need to check any # mirror for the spec twice though. - try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else [] - try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first] - mirror_urls = try_first + try_next + try_first = [i.url_and_version for i in mirrors_for_spec] if mirrors_for_spec else [] + + try_next = [] + for try_layout in SUPPORTED_LAYOUT_VERSIONS: + try_next.extend([MirrorURLAndVersion(i.fetch_url, try_layout) for i in configured_mirrors]) + urls_and_versions = try_first + [uv for uv in try_next if uv not in try_first] # TODO: turn `mirrors_for_spec` into a list of Mirror instances, instead of doing that here. 
- def fetch_url_to_mirror(url): + def fetch_url_to_mirror(url_and_version): + url = url_and_version.url + layout_version = url_and_version.version for mirror in configured_mirrors: if mirror.fetch_url == url: - return mirror - return spack.mirrors.mirror.Mirror(url) + return mirror, layout_version + return spack.mirrors.mirror.Mirror(url), layout_version - mirrors = [fetch_url_to_mirror(url) for url in mirror_urls] + mirrors = [fetch_url_to_mirror(url_and_version) for url_and_version in urls_and_versions] - tried_to_verify_sigs = [] + for mirror, layout_version in mirrors: + # Override mirror's default if + currently_unsigned = unsigned if unsigned is not None else not mirror.signed - # Assumes we care more about finding a spec file by preferred ext - # than by mirrory priority. This can be made less complicated as - # we remove support for deprecated spec formats and buildcache layouts. - for try_signed in (True, False): - for mirror in mirrors: - # Override mirror's default if - currently_unsigned = unsigned if unsigned is not None else not mirror.signed + # If it's an OCI index, do things differently, since we cannot compose URLs. + fetch_url = mirror.fetch_url - # If it's an OCI index, do things differently, since we cannot compose URLs. - fetch_url = mirror.fetch_url + # TODO: refactor this to some "nice" place. + if fetch_url.startswith("oci://"): + ref = spack.oci.image.ImageReference.from_string(fetch_url[len("oci://") :]).with_tag( + _oci_default_tag(spec) + ) - # TODO: refactor this to some "nice" place. 
- if fetch_url.startswith("oci://"): - ref = spack.oci.image.ImageReference.from_string( - fetch_url[len("oci://") :] - ).with_tag(_oci_default_tag(spec)) - - # Fetch the manifest - try: - response = spack.oci.opener.urlopen( - urllib.request.Request( - url=ref.manifest_url(), - headers={"Accept": ", ".join(spack.oci.oci.manifest_content_type)}, - ) + # Fetch the manifest + try: + response = spack.oci.opener.urlopen( + urllib.request.Request( + url=ref.manifest_url(), + headers={"Accept": ", ".join(spack.oci.oci.manifest_content_type)}, ) - except Exception: - continue - - # Download the config = spec.json and the relevant tarball - try: - manifest = json.load(response) - spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"]) - tarball_digest = spack.oci.image.Digest.from_string( - manifest["layers"][-1]["digest"] - ) - except Exception: - continue - - with spack.oci.oci.make_stage( - ref.blob_url(spec_digest), spec_digest, keep=True - ) as local_specfile_stage: - try: - local_specfile_stage.fetch() - local_specfile_stage.check() - try: - _get_valid_spec_file( - local_specfile_stage.save_filename, - CURRENT_BUILD_CACHE_LAYOUT_VERSION, - ) - except InvalidMetadataFile as e: - tty.warn( - f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} " - f"from {fetch_url} due to invalid metadata file: {e}" - ) - local_specfile_stage.destroy() - continue - except Exception: - continue - local_specfile_stage.cache_local() - - with spack.oci.oci.make_stage( - ref.blob_url(tarball_digest), tarball_digest, keep=True - ) as tarball_stage: - try: - tarball_stage.fetch() - tarball_stage.check() - except Exception: - continue - tarball_stage.cache_local() - - return { - "tarball_stage": tarball_stage, - "specfile_stage": local_specfile_stage, - "signature_verified": False, - "signature_required": not currently_unsigned, - } - - else: - ext = "json.sig" if try_signed else "json" - specfile_path = url_util.join( - fetch_url, BUILD_CACHE_RELATIVE_PATH, 
specfile_prefix ) - specfile_url = f"{specfile_path}.{ext}" - spackfile_url = url_util.join(fetch_url, BUILD_CACHE_RELATIVE_PATH, tarball) - local_specfile_stage = try_fetch(specfile_url) - if local_specfile_stage: - local_specfile_path = local_specfile_stage.save_filename - signature_verified = False + except Exception: + continue + # Download the config = spec.json and the relevant tarball + try: + manifest = json.load(response) + spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"]) + tarball_digest = spack.oci.image.Digest.from_string( + manifest["layers"][-1]["digest"] + ) + except Exception: + continue + + with spack.oci.oci.make_stage( + ref.blob_url(spec_digest), spec_digest, keep=True + ) as local_specfile_stage: + try: + local_specfile_stage.fetch() + local_specfile_stage.check() try: - _get_valid_spec_file( - local_specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION + get_valid_spec_file( + local_specfile_stage.save_filename, CURRENT_BUILD_CACHE_LAYOUT_VERSION ) except InvalidMetadataFile as e: tty.warn( @@ -2087,59 +1895,48 @@ def fetch_url_to_mirror(url): ) local_specfile_stage.destroy() continue + except Exception: + continue + local_specfile_stage.cache_local() - if try_signed and not currently_unsigned: - # If we found a signed specfile at the root, try to verify - # the signature immediately. We will not download the - # tarball if we could not verify the signature. - tried_to_verify_sigs.append(specfile_url) - signature_verified = try_verify(local_specfile_path) - if not signature_verified: - tty.warn(f"Failed to verify: {specfile_url}") + local_specfile_stage.destroy() - if currently_unsigned or signature_verified or not try_signed: - # We will download the tarball in one of three cases: - # 1. user asked for --no-check-signature - # 2. user didn't ask for --no-check-signature, but we - # found a spec.json.sig and verified the signature already - # 3. 
neither of the first two cases are true, but this file - # is *not* a signed json (not a spec.json.sig file). That - # means we already looked at all the mirrors and either didn't - # find any .sig files or couldn't verify any of them. But it - # is still possible to find an old style binary package where - # the signature is a detached .asc file in the outer archive - # of the tarball, and in that case, the only way to know is to - # download the tarball. This is a deprecated use case, so if - # something goes wrong during the extraction process (can't - # verify signature, checksum doesn't match) we will fail at - # that point instead of trying to download more tarballs from - # the remaining mirrors, looking for one we can use. - tarball_stage = try_fetch(spackfile_url) - if tarball_stage: - return { - "tarball_stage": tarball_stage, - "specfile_stage": local_specfile_stage, - "signature_verified": signature_verified, - "signature_required": not currently_unsigned, - } + with spack.oci.oci.make_stage( + ref.blob_url(tarball_digest), tarball_digest, keep=True + ) as tarball_stage: + try: + tarball_stage.fetch() + tarball_stage.check() + except Exception: + continue + tarball_stage.cache_local() - local_specfile_stage.destroy() + return tarball_stage + else: + cache_type = get_url_buildcache_class(layout_version=layout_version) + cache_entry = cache_type(fetch_url, spec, allow_unsigned=currently_unsigned) + + try: + cache_entry.fetch_archive() + except Exception as e: + tty.debug( + f"Encountered error attempting to fetch archive for " + f"{spec.name}/{spec.dag_hash()[:7]} from {fetch_url} " + f"(v{layout_version}) due to {e}" + ) + cache_entry.destroy() + continue + + if layout_version == 2: + warn_v2_layout(fetch_url, "Installing a spec") + + return cache_entry.get_archive_stage() # Falling through the nested loops meeans we exhaustively searched # for all known kinds of spec files on all mirrors and did not find - # an acceptable one for which we could download 
a tarball. - - if tried_to_verify_sigs: - raise NoVerifyException( - ( - "Spack found new style signed binary packages, " - "but was unable to verify any of them. Please " - "obtain and trust the correct public key. If " - "these are public spack binaries, please see the " - "spack docs for locations where keys can be found." - ) - ) - + # an acceptable one for which we could download a tarball and (if + # needed) verify a signature. So at this point, we will proceed to + # install from source. return None @@ -2297,54 +2094,6 @@ def relocate_package(spec: spack.spec.Spec) -> None: os.unlink(install_manifest) -def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum): - stagepath = os.path.dirname(filename) - spackfile_name = tarball_name(spec, ".spack") - spackfile_path = os.path.join(stagepath, spackfile_name) - tarfile_name = tarball_name(spec, ".tar.gz") - tarfile_path = os.path.join(extract_to, tarfile_name) - json_name = tarball_name(spec, ".spec.json") - json_path = os.path.join(extract_to, json_name) - with closing(tarfile.open(spackfile_path, "r")) as tar: - tar.extractall(extract_to) - # some buildcache tarfiles use bzip2 compression - if not os.path.exists(tarfile_path): - tarfile_name = tarball_name(spec, ".tar.bz2") - tarfile_path = os.path.join(extract_to, tarfile_name) - - if os.path.exists(json_path): - specfile_path = json_path - else: - raise ValueError("Cannot find spec file for {0}.".format(extract_to)) - - if signature_required: - if os.path.exists("%s.asc" % specfile_path): - suppress = config.get("config:suppress_gpg_warnings", False) - try: - spack.util.gpg.verify("%s.asc" % specfile_path, specfile_path, suppress) - except Exception: - raise NoVerifyException( - "Spack was unable to verify package " - "signature, please obtain and trust the " - "correct public key." - ) - else: - raise UnsignedPackageException( - "To install unsigned packages, use the --no-check-signature option." 
- ) - - # compute the sha256 checksum of the tarball - local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path) - expected = remote_checksum["hash"] - - # if the checksums don't match don't install - if local_checksum != expected: - size, contents = fsys.filesummary(tarfile_path) - raise NoChecksumException(tarfile_path, size, contents, "sha256", expected, local_checksum) - - return tarfile_path - - def _tar_strip_component(tar: tarfile.TarFile, prefix: str): """Yield all members of tarfile that start with given prefix, and strip that prefix (including symlinks)""" @@ -2377,11 +2126,12 @@ def extract_buildcache_tarball(tarfile_path: str, destination: str) -> None: ) -def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER): +def extract_tarball(spec, tarball_stage: spack.stage.Stage, force=False, timer=timer.NULL_TIMER): """ extract binary tarball for given package into install area """ timer.start("extract") + if os.path.exists(spec.prefix): if force: shutil.rmtree(spec.prefix) @@ -2396,78 +2146,26 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER): default_perms="parents", ) - specfile_path = download_result["specfile_stage"].save_filename - spec_dict, layout_version = _get_valid_spec_file( - specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION - ) - bchecksum = spec_dict["binary_cache_checksum"] + tarfile_path = tarball_stage.save_filename - filename = download_result["tarball_stage"].save_filename - signature_verified: bool = download_result["signature_verified"] - signature_required: bool = download_result["signature_required"] - tmpdir = None - - if layout_version == 0: - # Handle the older buildcache layout where the .spack file - # contains a spec json, maybe an .asc file (signature), - # and another tarball containing the actual install tree. 
- tmpdir = tempfile.mkdtemp() - try: - tarfile_path = _extract_inner_tarball( - spec, filename, tmpdir, signature_required, bchecksum - ) - except Exception as e: - _delete_staged_downloads(download_result) - shutil.rmtree(tmpdir) - raise e - elif 1 <= layout_version <= 2: - # Newer buildcache layout: the .spack file contains just - # in the install tree, the signature, if it exists, is - # wrapped around the spec.json at the root. If sig verify - # was required, it was already done before downloading - # the tarball. - tarfile_path = filename - - if signature_required and not signature_verified: - raise UnsignedPackageException( - "To install unsigned packages, use the --no-check-signature option, " - "or configure the mirror with signed: false." - ) - - # compute the sha256 checksum of the tarball - local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path) - expected = bchecksum["hash"] - - # if the checksums don't match don't install - if local_checksum != expected: - size, contents = fsys.filesummary(tarfile_path) - _delete_staged_downloads(download_result) - raise NoChecksumException( - tarfile_path, size, contents, "sha256", expected, local_checksum - ) try: extract_buildcache_tarball(tarfile_path, destination=spec.prefix) except Exception: shutil.rmtree(spec.prefix, ignore_errors=True) - _delete_staged_downloads(download_result) + tarball_stage.destroy() raise - os.remove(tarfile_path) - os.remove(specfile_path) timer.stop("extract") - timer.start("relocate") + try: relocate_package(spec) except Exception as e: shutil.rmtree(spec.prefix, ignore_errors=True) raise e finally: - if tmpdir: - shutil.rmtree(tmpdir, ignore_errors=True) - if os.path.exists(filename): - os.remove(filename) - _delete_staged_downloads(download_result) + tarball_stage.destroy() + timer.stop("relocate") @@ -2543,28 +2241,15 @@ def install_root_node( warnings.warn("Package for spec {0} already installed.".format(spec.format())) return - download_result = 
download_tarball(spec.build_spec, unsigned) - if not download_result: + tarball_stage = download_tarball(spec.build_spec, unsigned) + if not tarball_stage: msg = 'download of binary cache file for spec "{0}" failed' raise RuntimeError(msg.format(spec.build_spec.format())) - if sha256: - checker = spack.util.crypto.Checker(sha256) - msg = 'cannot verify checksum for "{0}" [expected={1}]' - tarball_path = download_result["tarball_stage"].save_filename - msg = msg.format(tarball_path, sha256) - if not checker.check(tarball_path): - size, contents = fsys.filesummary(tarball_path) - _delete_staged_downloads(download_result) - raise NoChecksumException( - tarball_path, size, contents, checker.hash_name, sha256, checker.sum - ) - tty.debug("Verified SHA256 checksum of the build cache") - # don't print long padded paths while extracting/relocating binaries with spack.util.path.filter_padding(): tty.msg('Installing "{0}" from a buildcache'.format(spec.format())) - extract_tarball(spec, download_result, force) + extract_tarball(spec, tarball_stage, force) spec.package.windows_establish_runtime_linkage() spack.hooks.post_install(spec, False) spack.store.STORE.db.add(spec, allow_missing=allow_missing) @@ -2587,51 +2272,30 @@ def try_direct_fetch(spec, mirrors=None): """ Try to find the spec directly on the configured mirrors """ - specfile_name = tarball_name(spec, ".spec.json") - signed_specfile_name = tarball_name(spec, ".spec.json.sig") - specfile_is_signed = False - found_specs = [] - + found_specs: List[MirrorForSpec] = [] binary_mirrors = spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values() - for mirror in binary_mirrors: - buildcache_fetch_url_json = url_util.join( - mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_name - ) - buildcache_fetch_url_signed_json = url_util.join( - mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, signed_specfile_name - ) - try: - _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json) - specfile_contents 
= codecs.getreader("utf-8")(fs).read() - specfile_is_signed = True - except (web_util.SpackWebError, OSError) as e1: + for layout_version in SUPPORTED_LAYOUT_VERSIONS: + for mirror in binary_mirrors: + # layout_version could eventually come from the mirror config + cache_class = get_url_buildcache_class(layout_version=layout_version) + cache_entry = cache_class(mirror.fetch_url, spec) + try: - _, _, fs = web_util.read_from_url(buildcache_fetch_url_json) - specfile_contents = codecs.getreader("utf-8")(fs).read() - specfile_is_signed = False - except (web_util.SpackWebError, OSError) as e2: - tty.debug( - f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}", - e1, - level=2, - ) - tty.debug( - f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2 - ) + spec_dict = cache_entry.fetch_metadata() + except BuildcacheEntryError: continue + finally: + cache_entry.destroy() - # read the spec from the build cache file. All specs in build caches - # are concrete (as they are built) so we need to mark this spec - # concrete on read-in. - if specfile_is_signed: - specfile_json = spack.spec.Spec.extract_json_from_clearsig(specfile_contents) - fetched_spec = spack.spec.Spec.from_dict(specfile_json) - else: - fetched_spec = spack.spec.Spec.from_json(specfile_contents) - fetched_spec._mark_concrete() + # All specs in build caches are concrete (as they are built) so we need + # to mark this spec concrete on read-in. 
+ fetched_spec = spack.spec.Spec.from_dict(spec_dict) + fetched_spec._mark_concrete() - found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec}) + found_specs.append( + MirrorForSpec(MirrorURLAndVersion(mirror.fetch_url, layout_version), fetched_spec) + ) return found_specs @@ -2692,7 +2356,12 @@ def clear_spec_cache(): BINARY_INDEX.clear() -def get_keys(install=False, trust=False, force=False, mirrors=None): +def get_keys( + install: bool = False, + trust: bool = False, + force: bool = False, + mirrors: Optional[Dict[Any, spack.mirrors.mirror.Mirror]] = None, +): """Get pgp public keys available on mirror with suffix .pub""" mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True) @@ -2700,53 +2369,117 @@ def get_keys(install=False, trust=False, force=False, mirrors=None): tty.die("Please add a spack mirror to allow " + "download of build caches.") for mirror in mirror_collection.values(): - fetch_url = mirror.fetch_url - # TODO: oci:// does not support signing. - if fetch_url.startswith("oci://"): - continue - keys_url = url_util.join( - fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH - ) - keys_index = url_util.join(keys_url, "index.json") + for layout_version in SUPPORTED_LAYOUT_VERSIONS: + fetch_url = mirror.fetch_url + # TODO: oci:// does not support signing. 
+ if fetch_url.startswith("oci://"): + continue - tty.debug("Finding public keys in {0}".format(url_util.format(fetch_url))) + if layout_version == 2: + _get_keys_v2(fetch_url, install, trust, force) + else: + _get_keys(fetch_url, layout_version, install, trust, force) + +def _get_keys( + mirror_url: str, + layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION, + install: bool = False, + trust: bool = False, + force: bool = False, +) -> None: + cache_class = get_url_buildcache_class(layout_version=layout_version) + + tty.debug("Finding public keys in {0}".format(url_util.format(mirror_url))) + + keys_prefix = url_util.join( + mirror_url, *cache_class.get_relative_path_components(BuildcacheComponent.KEY) + ) + key_index_manifest_url = url_util.join(keys_prefix, "keys.manifest.json") + index_entry = cache_class(mirror_url, allow_unsigned=True) + + try: + index_manifest = index_entry.read_manifest(manifest_url=key_index_manifest_url) + index_blob_path = index_entry.fetch_blob(index_manifest.data[0]) + except BuildcacheEntryError as e: + tty.debug(f"Failed to fetch key index due to: {e}") + index_entry.destroy() + return + + with open(index_blob_path, encoding="utf-8") as fd: + json_index = json.load(fd) + index_entry.destroy() + + for fingerprint, _ in json_index["keys"].items(): + key_manifest_url = url_util.join(keys_prefix, f"{fingerprint}.key.manifest.json") + key_entry = cache_class(mirror_url, allow_unsigned=True) try: - _, _, json_file = web_util.read_from_url(keys_index) - json_index = sjson.load(json_file) - except (web_util.SpackWebError, OSError, ValueError) as url_err: - # TODO: avoid repeated request - if web_util.url_exists(keys_index): - tty.error( - f"Unable to find public keys in {url_util.format(fetch_url)}," - f" caught exception attempting to read from {url_util.format(keys_index)}." 
- ) - tty.debug(url_err) - + key_manifest = key_entry.read_manifest(manifest_url=key_manifest_url) + key_blob_path = key_entry.fetch_blob(key_manifest.data[0]) + except BuildcacheEntryError as e: + tty.debug(f"Failed to fetch key {fingerprint} due to: {e}") + key_entry.destroy() continue - for fingerprint, key_attributes in json_index["keys"].items(): - link = os.path.join(keys_url, fingerprint + ".pub") + tty.debug("Found key {0}".format(fingerprint)) + if install: + if trust: + spack.util.gpg.trust(key_blob_path) + tty.debug(f"Added {fingerprint} to trusted keys.") + else: + tty.debug( + "Will not add this key to trusted keys." + "Use -t to install all downloaded keys" + ) - with Stage(link, name="build_cache", keep=True) as stage: - if os.path.exists(stage.save_filename) and force: - os.remove(stage.save_filename) - if not os.path.exists(stage.save_filename): - try: - stage.fetch() - except spack.error.FetchError: - continue + key_entry.destroy() - tty.debug("Found key {0}".format(fingerprint)) - if install: - if trust: - spack.util.gpg.trust(stage.save_filename) - tty.debug("Added this key to trusted keys.") - else: - tty.debug( - "Will not add this key to trusted keys." 
- "Use -t to install all downloaded keys" - ) + +def _get_keys_v2(mirror_url, install=False, trust=False, force=False): + cache_class = get_url_buildcache_class(layout_version=2) + + keys_url = url_util.join( + mirror_url, *cache_class.get_relative_path_components(BuildcacheComponent.KEY) + ) + keys_index = url_util.join(keys_url, "index.json") + + tty.debug("Finding public keys in {0}".format(url_util.format(mirror_url))) + + try: + _, _, json_file = web_util.read_from_url(keys_index) + json_index = sjson.load(json_file) + except (web_util.SpackWebError, OSError, ValueError) as url_err: + # TODO: avoid repeated request + if web_util.url_exists(keys_index): + tty.error( + f"Unable to find public keys in {url_util.format(mirror_url)}," + f" caught exception attempting to read from {url_util.format(keys_index)}." + ) + tty.error(url_err) + return + + for fingerprint, key_attributes in json_index["keys"].items(): + link = os.path.join(keys_url, fingerprint + ".pub") + + with Stage(link, name="build_cache", keep=True) as stage: + if os.path.exists(stage.save_filename) and force: + os.remove(stage.save_filename) + if not os.path.exists(stage.save_filename): + try: + stage.fetch() + except spack.error.FetchError: + continue + + tty.debug("Found key {0}".format(fingerprint)) + if install: + if trust: + spack.util.gpg.trust(stage.save_filename) + tty.debug("Added this key to trusted keys.") + else: + tty.debug( + "Will not add this key to trusted keys." 
+ "Use -t to install all downloaded keys" + ) def _url_push_keys( @@ -2762,19 +2495,29 @@ def _url_push_keys( for key, file in zip(keys, files): spack.util.gpg.export_keys(file, [key]) + cache_class = get_url_buildcache_class() + for mirror in mirrors: push_url = mirror if isinstance(mirror, str) else mirror.push_url - keys_url = url_util.join( - push_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH - ) tty.debug(f"Pushing public keys to {url_util.format(push_url)}") + pushed_a_key = False for key, file in zip(keys, files): - web_util.push_to_url(file, url_util.join(keys_url, os.path.basename(file))) + cache_class.push_local_file_as_blob( + local_file_path=file, + mirror_url=push_url, + manifest_name=f"{key}.key", + component_type=BuildcacheComponent.KEY, + compression="none", + ) + pushed_a_key = True if update_index: - generate_key_index(keys_url, tmpdir=tmpdir) + generate_key_index(push_url, tmpdir=tmpdir) + + if pushed_a_key or update_index: + cache_class.maybe_push_layout_json(push_url) def needs_rebuild(spec, mirror_url): @@ -2791,14 +2534,10 @@ def needs_rebuild(spec, mirror_url): # Try to retrieve the specfile directly, based on the known # format of the name, in order to determine if the package # needs to be rebuilt. - cache_prefix = build_cache_prefix(mirror_url) - specfile_name = tarball_name(spec, ".spec.json") - specfile_path = os.path.join(cache_prefix, specfile_name) - - # Only check for the presence of the json version of the spec. If the - # mirror only has the json version, or doesn't have the spec at all, we - # need to rebuild. 
- return not web_util.url_exists(specfile_path) + cache_class = get_url_buildcache_class(layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION) + cache_entry = cache_class(mirror_url, spec, allow_unsigned=True) + exists = cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + return not exists def check_specs_against_mirrors(mirrors, specs, output_file=None): @@ -2840,48 +2579,12 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None): return 1 if rebuilds else 0 -def _download_buildcache_entry(mirror_root, descriptions): - for description in descriptions: - path = description["path"] - mkdirp(path) - fail_if_missing = description["required"] - for url in description["url"]: - description_url = os.path.join(mirror_root, url) - stage = Stage(description_url, name="build_cache", path=path, keep=True) - try: - stage.fetch() - break - except spack.error.FetchError as e: - tty.debug(e) - else: - if fail_if_missing: - tty.error("Failed to download required url {0}".format(description_url)) - return False - return True - - -def download_buildcache_entry(file_descriptions, mirror_url=None): - if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True): - tty.die( - "Please provide or add a spack mirror to allow " + "download of buildcache entries." 
- ) - - if mirror_url: - mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH) - return _download_buildcache_entry(mirror_root, file_descriptions) - - for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values(): - mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH) - - if _download_buildcache_entry(mirror_root, file_descriptions): - return True - else: - continue - - return False - - -def download_single_spec(concrete_spec, destination, mirror_url=None): +def download_single_spec( + concrete_spec, + destination, + mirror_url=None, + layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION, +): """Download the buildcache files for a single concrete spec. Args: @@ -2889,24 +2592,39 @@ def download_single_spec(concrete_spec, destination, mirror_url=None): destination (str): path where to put the downloaded buildcache mirror_url (str): url of the mirror from which to download """ - tarfile_name = tarball_name(concrete_spec, ".spack") - tarball_dir_name = tarball_directory_name(concrete_spec) - tarball_path_name = os.path.join(tarball_dir_name, tarfile_name) - local_tarball_path = os.path.join(destination, tarball_dir_name) + if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True): + tty.die( + "Please provide or add a spack mirror to allow " + "download of buildcache entries." 
+ ) - files_to_fetch = [ - {"url": [tarball_path_name], "path": local_tarball_path, "required": True}, - { - "url": [ - tarball_name(concrete_spec, ".spec.json.sig"), - tarball_name(concrete_spec, ".spec.json"), - ], - "path": destination, - "required": True, - }, - ] + urls = ( + [mirror_url] + if mirror_url + else [ + mirror.fetch_url + for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values() + ] + ) - return download_buildcache_entry(files_to_fetch, mirror_url) + mkdirp(destination) + + for url in urls: + cache_class = get_url_buildcache_class(layout_version=layout_version) + cache_entry = cache_class(url, concrete_spec, allow_unsigned=True) + + try: + cache_entry.fetch_metadata() + cache_entry.fetch_archive() + except BuildcacheEntryError as e: + tty.warn(f"Error downloading {concrete_spec.name}/{concrete_spec.dag_hash()[:7]}: {e}") + cache_entry.destroy() + continue + + shutil.move(cache_entry.get_local_spec_path(), destination) + shutil.move(cache_entry.get_local_archive_path(), destination) + return True + + return False class BinaryCacheQuery: @@ -2951,7 +2669,53 @@ class BuildcacheIndexError(spack.error.SpackError): FetchIndexResult = collections.namedtuple("FetchIndexResult", "etag hash data fresh") -class DefaultIndexFetcher: +class IndexFetcher: + def conditional_fetch(self) -> FetchIndexResult: + raise NotImplementedError(f"{self.__class__.__name__} is abstract") + + def get_index_manifest(self, manifest_response) -> BlobRecord: + """Read the response of the manifest request and return a BlobRecord""" + cache_class = get_url_buildcache_class(CURRENT_BUILD_CACHE_LAYOUT_VERSION) + try: + result = codecs.getreader("utf-8")(manifest_response).read() + except (ValueError, OSError) as e: + raise FetchIndexError(f"Remote index {manifest_response.url} is invalid", e) from e + + manifest = BuildcacheManifest.from_dict( + # Currently we do not sign buildcache index, but we could + cache_class.verify_and_extract_manifest(result, verify=False) + ) 
+ blob_record = manifest.get_blob_records( + cache_class.component_to_media_type(BuildcacheComponent.INDEX) + )[0] + return blob_record + + def fetch_index_blob( + self, cache_entry: URLBuildcacheEntry, blob_record: BlobRecord + ) -> Tuple[str, str]: + """Fetch the index blob indicated by the BlobRecord, and return the + (checksum, contents) of the blob""" + try: + staged_blob_path = cache_entry.fetch_blob(blob_record) + except BuildcacheEntryError as e: + cache_entry.destroy() + raise FetchIndexError( + f"Could not fetch index blob from {cache_entry.mirror_url}" + ) from e + + with open(staged_blob_path, encoding="utf-8") as fd: + blob_result = fd.read() + + computed_hash = compute_hash(blob_result) + + if computed_hash != blob_record.checksum: + cache_entry.destroy() + raise FetchIndexError(f"Remote index at {cache_entry.mirror_url} is invalid") + + return (computed_hash, blob_result) + + +class DefaultIndexFetcherV2(IndexFetcher): """Fetcher for index.json, using separate index.json.hash as cache invalidation strategy""" def __init__(self, url, local_hash, urlopen=web_util.urlopen): @@ -2962,7 +2726,7 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen): def get_remote_hash(self): # Failure to fetch index.json.hash is not fatal - url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE) + url_index_hash = url_util.join(self.url, "build_cache", "index.json.hash") try: response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers)) remote_hash = response.read(64) @@ -2983,7 +2747,7 @@ def conditional_fetch(self) -> FetchIndexResult: return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) # Otherwise, download index.json - url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE) + url_index = url_util.join(self.url, "build_cache", spack_db.INDEX_JSON_FILE) try: response = self.urlopen(urllib.request.Request(url_index, headers=self.headers)) @@ -3014,10 +2778,12 
@@ def conditional_fetch(self) -> FetchIndexResult: response.headers.get("Etag", None) or response.headers.get("etag", None) ) + warn_v2_layout(self.url, "Fetching an index") + return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False) -class EtagIndexFetcher: +class EtagIndexFetcherV2(IndexFetcher): """Fetcher for index.json, using ETags headers as cache invalidation strategy""" def __init__(self, url, etag, urlopen=web_util.urlopen): @@ -3027,7 +2793,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen): def conditional_fetch(self) -> FetchIndexResult: # Just do a conditional fetch immediately - url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE) + url = url_util.join(self.url, "build_cache", spack_db.INDEX_JSON_FILE) headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'} try: @@ -3045,6 +2811,8 @@ def conditional_fetch(self) -> FetchIndexResult: except (ValueError, OSError) as e: raise FetchIndexError(f"Remote index {url} is invalid", e) from e + warn_v2_layout(self.url, "Fetching an index") + headers = response.headers etag_header_value = headers.get("Etag", None) or headers.get("etag", None) return FetchIndexResult( @@ -3055,10 +2823,12 @@ def conditional_fetch(self) -> FetchIndexResult: ) -class OCIIndexFetcher: - def __init__(self, url: str, local_hash, urlopen=None) -> None: +class OCIIndexFetcher(IndexFetcher): + def __init__(self, url_and_version: MirrorURLAndVersion, local_hash, urlopen=None) -> None: self.local_hash = local_hash + url = url_and_version.url + # Remove oci:// prefix assert url.startswith("oci://") self.ref = spack.oci.image.ImageReference.from_string(url[6:]) @@ -3111,6 +2881,130 @@ def conditional_fetch(self) -> FetchIndexResult: return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False) +class DefaultIndexFetcher(IndexFetcher): + """Fetcher for buildcache index, cache invalidation via manifest contents""" + + def 
__init__(self, url_and_version: MirrorURLAndVersion, local_hash, urlopen=web_util.urlopen): + self.url = url_and_version.url + self.layout_version = url_and_version.version + self.local_hash = local_hash + self.urlopen = urlopen + self.headers = {"User-Agent": web_util.SPACK_USER_AGENT} + + def conditional_fetch(self) -> FetchIndexResult: + cache_class = get_url_buildcache_class(layout_version=self.layout_version) + url_index_manifest = cache_class.get_index_url(self.url) + + try: + response = self.urlopen( + urllib.request.Request(url_index_manifest, headers=self.headers) + ) + except OSError as e: + raise FetchIndexError( + f"Could not read index manifest from {url_index_manifest}" + ) from e + + index_blob_record = self.get_index_manifest(response) + + # Early exit if our cache is up to date. + if self.local_hash and self.local_hash == index_blob_record.checksum: + return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) + + # Otherwise, download the index blob + cache_entry = cache_class(self.url, allow_unsigned=True) + computed_hash, result = self.fetch_index_blob(cache_entry, index_blob_record) + cache_entry.destroy() + + # For now we only handle etags on http(s), since 304 error handling + # in s3:// is not there yet. + if urllib.parse.urlparse(self.url).scheme not in ("http", "https"): + etag = None + else: + etag = web_util.parse_etag( + response.headers.get("Etag", None) or response.headers.get("etag", None) + ) + + return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False) + + +class EtagIndexFetcher(IndexFetcher): + """Fetcher for buildcache index, cache invalidation via ETags headers + + This class differs from the DefaultIndexFetcher in the following ways: 1) It + is provided with an etag value on creation, rather than an index checksum + value. Note that since we never start out with an etag, the default fetcher + must have been used initially and determined that the etag approach is valid. 
+ 2) It provides this etag value in the 'If-None-Match' request header for the
+ index manifest. 3) It checks for special exception type and response code
+ indicating the index manifest is not modified, exiting early and returning
+ 'Fresh', if encountered. 4) If it needs to actually read the manifest, it
+ does not need to do any checks of the url scheme to determine whether an
+ etag should be included in the return value."""
+
+ def __init__(self, url_and_version: MirrorURLAndVersion, etag, urlopen=web_util.urlopen):
+ self.url = url_and_version.url
+ self.layout_version = url_and_version.version
+ self.etag = etag
+ self.urlopen = urlopen
+
+ def conditional_fetch(self) -> FetchIndexResult:
+ # Do a conditional fetch of the index manifest (i.e. using If-None-Match header)
+ cache_class = get_url_buildcache_class(layout_version=self.layout_version)
+ manifest_url = cache_class.get_index_url(self.url)
+ headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
+
+ try:
+ response = self.urlopen(urllib.request.Request(manifest_url, headers=headers))
+ except urllib.error.HTTPError as e:
+ if e.getcode() == 304:
+ # The remote manifest has not been modified, i.e. the index we
+ # already have is the freshest there is.
+ return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
+ raise FetchIndexError(f"Could not fetch index manifest {manifest_url}", e) from e
+ except OSError as e: # URLError, socket.timeout, etc. 
+ raise FetchIndexError(f"Could not fetch index manifest {manifest_url}", e) from e + + # We need to read the index manifest and fetch the associated blob + cache_entry = cache_class(self.url, allow_unsigned=True) + computed_hash, result = self.fetch_index_blob( + cache_entry, self.get_index_manifest(response) + ) + cache_entry.destroy() + + headers = response.headers + etag_header_value = headers.get("Etag", None) or headers.get("etag", None) + + return FetchIndexResult( + etag=web_util.parse_etag(etag_header_value), + hash=computed_hash, + data=result, + fresh=False, + ) + + +def get_index_fetcher( + scheme: str, url_and_version: MirrorURLAndVersion, cache_entry: Dict[str, str] +) -> IndexFetcher: + if scheme == "oci": + # TODO: Actually etag and OCI are not mutually exclusive... + return OCIIndexFetcher(url_and_version, cache_entry.get("index_hash", None)) + elif cache_entry.get("etag"): + if url_and_version.version < 3: + return EtagIndexFetcherV2(url_and_version.url, cache_entry["etag"]) + else: + return EtagIndexFetcher(url_and_version, cache_entry["etag"]) + + else: + if url_and_version.version < 3: + return DefaultIndexFetcherV2( + url_and_version.url, local_hash=cache_entry.get("index_hash", None) + ) + else: + return DefaultIndexFetcher( + url_and_version, local_hash=cache_entry.get("index_hash", None) + ) + + class NoOverwriteException(spack.error.SpackError): """Raised when a file would be overwritten""" @@ -3147,27 +3041,6 @@ def __init__(self, keys): super().__init__(err_msg) -class NoVerifyException(spack.error.SpackError): - """ - Raised if file fails signature verification. - """ - - pass - - -class NoChecksumException(spack.error.SpackError): - """ - Raised if file fails checksum verification. - """ - - def __init__(self, path, size, contents, algorithm, expected, computed): - super().__init__( - f"{algorithm} checksum failed for {path}", - f"Expected {expected} but got {computed}. " - f"File size = {size} bytes. 
Contents = {contents!r}", - ) - - class NewLayoutException(spack.error.SpackError): """ Raised if directory layout is different from buildcache. @@ -3177,10 +3050,6 @@ def __init__(self, msg): super().__init__(msg) -class InvalidMetadataFile(spack.error.SpackError): - pass - - class UnsignedPackageException(spack.error.SpackError): """ Raised if installation of unsigned package is attempted without diff --git a/lib/spack/spack/buildcache_migrate.py b/lib/spack/spack/buildcache_migrate.py new file mode 100644 index 00000000000..f5c6b0cf577 --- /dev/null +++ b/lib/spack/spack/buildcache_migrate.py @@ -0,0 +1,351 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import codecs +import json +import os +import pathlib +import tempfile +from typing import NamedTuple + +import llnl.util.tty as tty + +import spack.binary_distribution as bindist +import spack.database as spack_db +import spack.error +import spack.mirrors.mirror +import spack.spec +import spack.stage +import spack.util.crypto +import spack.util.parallel +import spack.util.url as url_util +import spack.util.web as web_util + +from .enums import InstallRecordStatus +from .url_buildcache import ( + BlobRecord, + BuildcacheComponent, + compressed_json_from_dict, + get_url_buildcache_class, + sign_file, + try_verify, +) + + +def v2_tarball_directory_name(spec): + """ + Return name of the tarball directory according to the convention + -//-/ + """ + return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}") + + +def v2_tarball_name(spec, ext): + """ + Return the name of the tarfile according to the convention + --- + """ + spec_formatted = spec.format_path( + "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}" + ) + return f"{spec_formatted}{ext}" + + +def v2_tarball_path_name(spec, ext): + """ + Return the full path+name for a given spec according to the convention + / + """ + 
return os.path.join(v2_tarball_directory_name(spec), v2_tarball_name(spec, ext)) + + +class MigrateSpecResult(NamedTuple): + success: bool + message: str + + +class MigrationException(spack.error.SpackError): + """ + Raised when migration fails irrevocably + """ + + def __init__(self, msg): + super().__init__(msg) + + +def _migrate_spec( + s: spack.spec.Spec, mirror_url: str, tmpdir: str, unsigned: bool = False, signing_key: str = "" +) -> MigrateSpecResult: + """Parallelizable function to migrate a single spec""" + print_spec = f"{s.name}/{s.dag_hash()[:7]}" + + # Check if the spec file exists in the new location and exit early if so + + v3_cache_class = get_url_buildcache_class(layout_version=3) + v3_cache_entry = v3_cache_class(mirror_url, s, allow_unsigned=unsigned) + exists = v3_cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + v3_cache_entry.destroy() + + if exists: + msg = f"No need to migrate {print_spec}" + return MigrateSpecResult(True, msg) + + # Try to fetch the spec metadata + v2_metadata_urls = [ + url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json.sig")) + ] + + if unsigned: + v2_metadata_urls.append( + url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json")) + ) + + spec_contents = None + + for meta_url in v2_metadata_urls: + try: + _, _, meta_file = web_util.read_from_url(meta_url) + spec_contents = codecs.getreader("utf-8")(meta_file).read() + v2_spec_url = meta_url + break + except (web_util.SpackWebError, OSError): + pass + else: + msg = f"Unable to read metadata for {print_spec}" + return MigrateSpecResult(False, msg) + + spec_dict = {} + + if unsigned: + # User asked for unsigned, if we found a signed specfile, just ignore + # the signature + if v2_spec_url.endswith(".sig"): + spec_dict = spack.spec.Spec.extract_json_from_clearsig(spec_contents) + else: + spec_dict = json.loads(spec_contents) + else: + # User asked for signed, we must successfully verify the signature + 
local_signed_pre_verify = os.path.join( + tmpdir, f"{s.name}_{s.dag_hash()}_verify.spec.json.sig" + ) + with open(local_signed_pre_verify, "w", encoding="utf-8") as fd: + fd.write(spec_contents) + if not try_verify(local_signed_pre_verify): + return MigrateSpecResult(False, f"Failed to verify signature of {print_spec}") + with open(local_signed_pre_verify, encoding="utf-8") as fd: + spec_dict = spack.spec.Spec.extract_json_from_clearsig(fd.read()) + + # Read out and remove the bits needed to rename and position the archive + bcc = spec_dict.pop("binary_cache_checksum", None) + if not bcc: + msg = "Cannot migrate a spec that does not have 'binary_cache_checksum'" + return MigrateSpecResult(False, msg) + + algorithm = bcc["hash_algorithm"] + checksum = bcc["hash"] + + # TODO: Remove this key once oci buildcache no longer uses it + spec_dict["buildcache_layout_version"] = 2 + + v2_archive_url = url_util.join(mirror_url, "build_cache", v2_tarball_path_name(s, ".spack")) + + # spacks web utilities do not include direct copying of s3 objects, so we + # need to download the archive locally, and then push it back to the target + # location + archive_stage_path = os.path.join(tmpdir, f"archive_stage_{s.name}_{s.dag_hash()}") + archive_stage = spack.stage.Stage(v2_archive_url, path=archive_stage_path) + + try: + archive_stage.create() + archive_stage.fetch() + except spack.error.FetchError: + return MigrateSpecResult(False, f"Unable to fetch archive for {print_spec}") + + local_tarfile_path = archive_stage.save_filename + + # As long as we have to download the tarball anyway, we might as well compute the + # checksum locally and check it against the expected value + local_checksum = spack.util.crypto.checksum( + spack.util.crypto.hash_fun_for_algo(algorithm), local_tarfile_path + ) + + if local_checksum != checksum: + return MigrateSpecResult( + False, f"Checksum mismatch for {print_spec}: expected {checksum}, got {local_checksum}" + ) + + spec_dict["archive_size"] = 
os.stat(local_tarfile_path).st_size + + # Compress the spec dict and compute its checksum + metadata_checksum_algo = "sha256" + spec_json_path = os.path.join(tmpdir, f"{s.name}_{s.dag_hash()}.spec.json") + metadata_checksum, metadata_size = compressed_json_from_dict( + spec_json_path, spec_dict, metadata_checksum_algo + ) + + tarball_blob_record = BlobRecord( + spec_dict["archive_size"], v3_cache_class.TARBALL_MEDIATYPE, "gzip", algorithm, checksum + ) + + metadata_blob_record = BlobRecord( + metadata_size, + v3_cache_class.SPEC_MEDIATYPE, + "gzip", + metadata_checksum_algo, + metadata_checksum, + ) + + # Compute the urls to the new blobs + v3_archive_url = v3_cache_class.get_blob_url(mirror_url, tarball_blob_record) + v3_spec_url = v3_cache_class.get_blob_url(mirror_url, metadata_blob_record) + + # First push the tarball + tty.debug(f"Pushing {local_tarfile_path} to {v3_archive_url}") + + try: + web_util.push_to_url(local_tarfile_path, v3_archive_url, keep_original=True) + except Exception: + return MigrateSpecResult(False, f"Failed to push archive for {print_spec}") + + # Then push the spec file + tty.debug(f"Pushing {spec_json_path} to {v3_spec_url}") + + try: + web_util.push_to_url(spec_json_path, v3_spec_url, keep_original=True) + except Exception: + return MigrateSpecResult(False, f"Failed to push spec metadata for {print_spec}") + + # Generate the manifest and write it to a temporary location + manifest = { + "version": v3_cache_class.get_layout_version(), + "data": [tarball_blob_record.to_dict(), metadata_blob_record.to_dict()], + } + + manifest_path = os.path.join(tmpdir, f"{s.dag_hash()}.manifest.json") + with open(manifest_path, "w", encoding="utf-8") as f: + json.dump(manifest, f, indent=0, separators=(",", ":")) + # Note: when using gpg clear sign, we need to avoid long lines (19995 + # chars). If lines are longer, they are truncated without error. So, + # here we still add newlines, but no indent, so save on file size and + # line length. 
+ + # Possibly sign the manifest + if not unsigned: + manifest_path = sign_file(signing_key, manifest_path) + + v3_manifest_url = v3_cache_class.get_manifest_url(s, mirror_url) + + # Push the manifest + try: + web_util.push_to_url(manifest_path, v3_manifest_url, keep_original=True) + except Exception: + return MigrateSpecResult(False, f"Failed to push manifest for {print_spec}") + + return MigrateSpecResult(True, f"Successfully migrated {print_spec}") + + +def migrate( + mirror: spack.mirrors.mirror.Mirror, unsigned: bool = False, delete_existing: bool = False +) -> None: + """Perform migration of the given mirror + + If unsigned is True, signatures on signed specs will be ignored, and specs + will not be re-signed before pushing to the new location. Otherwise, spack + will attempt to verify signatures and re-sign specs, and will fail if not + able to do so. If delete_existing is True, spack will delete the original + contents of the mirror once the migration is complete.""" + signing_key = "" + if not unsigned: + try: + signing_key = bindist.select_signing_key() + except (bindist.NoKeyException, bindist.PickKeyException): + raise MigrationException( + "Signed migration requires exactly one secret key in keychain" + ) + + delete_action = "deleting" if delete_existing else "keeping" + sign_action = "an unsigned" if unsigned else "a signed" + mirror_url = mirror.fetch_url + + tty.msg( + f"Performing {sign_action} migration of {mirror.push_url} " + f"and {delete_action} existing contents" + ) + + index_url = url_util.join(mirror_url, "build_cache", spack_db.INDEX_JSON_FILE) + contents = None + + try: + _, _, index_file = web_util.read_from_url(index_url) + contents = codecs.getreader("utf-8")(index_file).read() + except (web_util.SpackWebError, OSError): + raise MigrationException("Buildcache migration requires a buildcache index") + + with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: + index_path = os.path.join(tmpdir, "_tmp_index.json") + 
with open(index_path, "w", encoding="utf-8") as fd: + fd.write(contents) + + db = bindist.BuildCacheDatabase(tmpdir) + db._read_from_file(pathlib.Path(index_path)) + + specs_to_migrate = [ + s + for s in db.query_local(installed=InstallRecordStatus.ANY) + if not s.external and db.query_local_by_spec_hash(s.dag_hash()).in_buildcache + ] + + # Run the tasks in parallel if possible + executor = spack.util.parallel.make_concurrent_executor() + migrate_futures = [ + executor.submit(_migrate_spec, spec, mirror_url, tmpdir, unsigned, signing_key) + for spec in specs_to_migrate + ] + + success_count = 0 + + tty.msg("Migration summary:") + for spec, migrate_future in zip(specs_to_migrate, migrate_futures): + result = migrate_future.result() + msg = f" {spec.name}/{spec.dag_hash()[:7]}: {result.message}" + if result.success: + success_count += 1 + tty.msg(msg) + else: + tty.error(msg) + # The migrated index should have the same specs as the original index, + # modulo any specs that we failed to migrate for whatever reason. So + # to avoid having to re-fetch all the spec files now, just mark them + # appropriately in the existing database and push that. 
+ db.mark(spec, "in_buildcache", result.success) + + if success_count > 0: + tty.msg("Updating index and pushing keys") + + # If the layout.json doesn't yet exist on this mirror, push it + v3_cache_class = get_url_buildcache_class(layout_version=3) + v3_cache_class.maybe_push_layout_json(mirror_url) + + # Push the migrated mirror index + index_tmpdir = os.path.join(tmpdir, "rebuild_index") + os.mkdir(index_tmpdir) + bindist._push_index(db, index_tmpdir, mirror_url) + + # Push the public part of the signing key + if not unsigned: + keys_tmpdir = os.path.join(tmpdir, "keys") + os.mkdir(keys_tmpdir) + bindist._url_push_keys( + mirror_url, keys=[signing_key], update_index=True, tmpdir=keys_tmpdir + ) + else: + tty.warn("No specs migrated, did you mean to perform an unsigned migration instead?") + + # Delete the old layout if the user requested it + if delete_existing: + delete_prefix = url_util.join(mirror_url, "build_cache") + tty.msg(f"Recursively deleting {delete_prefix}") + web_util.remove_url(delete_prefix, recursive=True) + + tty.msg("Migration complete") diff --git a/lib/spack/spack/ci/__init__.py b/lib/spack/spack/ci/__init__.py index 9f3a1ca0d9c..7f786094b4a 100644 --- a/lib/spack/spack/ci/__init__.py +++ b/lib/spack/spack/ci/__init__.py @@ -33,6 +33,7 @@ import spack.paths import spack.repo import spack.spec +import spack.stage import spack.store import spack.util.git import spack.util.gpg as gpg_util @@ -245,7 +246,9 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision: if not spec_locations: return RebuildDecision(True, "not found anywhere") - urls = ",".join([loc["mirror_url"] for loc in spec_locations]) + urls = ",".join( + [f"{loc.url_and_version.url}@v{loc.url_and_version.version}" for loc in spec_locations] + ) message = f"up-to-date [{urls}]" return RebuildDecision(False, message) @@ -1242,33 +1245,31 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic """Given a url to write to and the details of the failed 
job, write an entry in the broken specs list. """ - tmpdir = tempfile.mkdtemp() - file_path = os.path.join(tmpdir, "broken.txt") + with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: + file_path = os.path.join(tmpdir, "broken.txt") - broken_spec_details = { - "broken-spec": { - "job-name": pkg_name, - "job-stack": stack_name, - "job-url": job_url, - "pipeline-url": pipeline_url, - "concrete-spec-dict": spec_dict, + broken_spec_details = { + "broken-spec": { + "job-name": pkg_name, + "job-stack": stack_name, + "job-url": job_url, + "pipeline-url": pipeline_url, + "concrete-spec-dict": spec_dict, + } } - } - try: - with open(file_path, "w", encoding="utf-8") as fd: - syaml.dump(broken_spec_details, fd) - web_util.push_to_url( - file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"} - ) - except Exception as err: - # If there is an S3 error (e.g., access denied or connection - # error), the first non boto-specific class in the exception - # hierarchy is Exception. Just print a warning and return - msg = f"Error writing to broken specs list {url}: {err}" - tty.warn(msg) - finally: - shutil.rmtree(tmpdir) + try: + with open(file_path, "w", encoding="utf-8") as fd: + syaml.dump(broken_spec_details, fd) + web_util.push_to_url( + file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"} + ) + except Exception as err: + # If there is an S3 error (e.g., access denied or connection + # error), the first non boto-specific class in the exception + # hierarchy is Exception. 
Just print a warning and return + msg = f"Error writing to broken specs list {url}: {err}" + tty.warn(msg) def read_broken_spec(broken_spec_url): diff --git a/lib/spack/spack/ci/common.py b/lib/spack/spack/ci/common.py index 78caafcb2ab..ecef706ad19 100644 --- a/lib/spack/spack/ci/common.py +++ b/lib/spack/spack/ci/common.py @@ -31,12 +31,12 @@ import spack.spec import spack.util.compression as compression import spack.util.spack_yaml as syaml -import spack.util.url as url_util import spack.util.web as web_util from spack import traverse from spack.reporters import CDash, CDashConfiguration from spack.reporters.cdash import SPACK_CDASH_TIMEOUT from spack.reporters.cdash import build_stamp as cdash_build_stamp +from spack.url_buildcache import get_url_buildcache_class IS_WINDOWS = sys.platform == "win32" SPACK_RESERVED_TAGS = ["public", "protected", "notary"] @@ -179,33 +179,13 @@ def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file): for release_spec in specs: release_spec_dag_hash = release_spec.dag_hash() - # TODO: This assumes signed version of the spec - buildcache_copies[release_spec_dag_hash] = [ - { - "src": url_util.join( - src_prefix, - bindist.build_cache_relative_path(), - bindist.tarball_name(release_spec, ".spec.json.sig"), - ), - "dest": url_util.join( - dest_prefix, - bindist.build_cache_relative_path(), - bindist.tarball_name(release_spec, ".spec.json.sig"), - ), - }, - { - "src": url_util.join( - src_prefix, - bindist.build_cache_relative_path(), - bindist.tarball_path_name(release_spec, ".spack"), - ), - "dest": url_util.join( - dest_prefix, - bindist.build_cache_relative_path(), - bindist.tarball_path_name(release_spec, ".spack"), - ), - }, - ] + cache_class = get_url_buildcache_class( + layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + buildcache_copies[release_spec_dag_hash] = { + "src": cache_class.get_manifest_url(release_spec, src_prefix), + "dest": cache_class.get_manifest_url(release_spec, dest_prefix), + } 
target_dir = os.path.dirname(output_file) diff --git a/lib/spack/spack/ci/gitlab.py b/lib/spack/spack/ci/gitlab.py index a69149dee8a..15d46ada01a 100644 --- a/lib/spack/spack/ci/gitlab.py +++ b/lib/spack/spack/ci/gitlab.py @@ -292,6 +292,9 @@ def main_script_replacements(cmd): ) maybe_generate_manifest(pipeline, options, manifest_path) + relative_specs_url = bindist.buildcache_relative_specs_url() + relative_keys_url = bindist.buildcache_relative_keys_url() + if options.pipeline_type == PipelineType.COPY_ONLY: stage_names.append("copy") sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"]) @@ -301,9 +304,12 @@ def main_script_replacements(cmd): if "variables" not in sync_job: sync_job["variables"] = {} - sync_job["variables"][ - "SPACK_COPY_ONLY_DESTINATION" - ] = options.buildcache_destination.fetch_url + sync_job["variables"].update( + { + "SPACK_COPY_ONLY_DESTINATION": options.buildcache_destination.fetch_url, + "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url, + } + ) pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True) if "buildcache-source" not in pipeline_mirrors: @@ -333,9 +339,13 @@ def main_script_replacements(cmd): signing_job["interruptible"] = True if "variables" not in signing_job: signing_job["variables"] = {} - signing_job["variables"][ - "SPACK_BUILDCACHE_DESTINATION" - ] = options.buildcache_destination.push_url + signing_job["variables"].update( + { + "SPACK_BUILDCACHE_DESTINATION": options.buildcache_destination.push_url, + "SPACK_BUILDCACHE_RELATIVE_SPECS_URL": relative_specs_url, + "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url, + } + ) signing_job["dependencies"] = [] output_object["sign-pkgs"] = signing_job diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py index 84aec74328f..d20750fa58e 100644 --- a/lib/spack/spack/cmd/bootstrap.py +++ b/lib/spack/spack/cmd/bootstrap.py @@ -2,6 +2,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +import pathlib 
import shutil import sys import tempfile @@ -28,7 +29,7 @@ # Tarball to be downloaded if binary packages are requested in a local mirror -BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz" +BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache-v3.tar.gz" #: Subdirectory where to create the mirror LOCAL_MIRROR_DIR = "bootstrap_cache" @@ -410,8 +411,9 @@ def _mirror(args): stage.create() stage.fetch() stage.expand_archive() - build_cache_dir = os.path.join(stage.source_path, "build_cache") - shutil.move(build_cache_dir, mirror_dir) + stage_dir = pathlib.Path(stage.source_path) + for entry in stage_dir.iterdir(): + shutil.move(str(entry), mirror_dir) llnl.util.tty.set_msg_enabled(True) def write_metadata(subdir, metadata): @@ -436,7 +438,6 @@ def write_metadata(subdir, metadata): shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory) shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory) instructions += cmd.format("local-binaries", rel_directory) - instructions += " % spack buildcache update-index /bootstrap_cache\n" print(instructions) diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index 80a579a3b71..7d1433f33c0 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -4,11 +4,9 @@ import argparse import glob import json -import os -import shutil import sys import tempfile -from typing import List, Tuple +from typing import List, Optional, Tuple import llnl.util.tty as tty from llnl.string import plural @@ -27,14 +25,21 @@ import spack.stage import spack.store import spack.util.parallel -import spack.util.url as url_util import spack.util.web as web_util from spack import traverse from spack.cmd import display_specs from spack.cmd.common import arguments from spack.spec import Spec, save_dependency_specfiles +from ..buildcache_migrate 
import migrate from ..enums import InstallRecordStatus +from ..url_buildcache import ( + BuildcacheComponent, + BuildcacheEntryError, + URLBuildcacheEntry, + check_mirror_for_layout, + get_url_buildcache_class, +) description = "create, download and install binary packages" section = "packaging" @@ -272,6 +277,27 @@ def setup_parser(subparser: argparse.ArgumentParser): ) update_index.set_defaults(func=update_index_fn) + # Migrate a buildcache from layout_version 2 to version 3 + migrate = subparsers.add_parser("migrate", help=migrate_fn.__doc__) + migrate.add_argument("mirror", type=arguments.mirror_name, help="name of a configured mirror") + migrate.add_argument( + "-u", + "--unsigned", + default=False, + action="store_true", + help="Ignore signatures and do not resign, default is False", + ) + migrate.add_argument( + "-d", + "--delete-existing", + default=False, + action="store_true", + help="Delete the previous layout, the default is to keep it.", + ) + arguments.add_common_arguments(migrate, ["yes_to_all"]) + # TODO: add -y argument to prompt if user really means to delete existing + migrate.set_defaults(func=migrate_fn) + def _matching_specs(specs: List[Spec]) -> List[Spec]: """Disambiguate specs and return a list of matching specs""" @@ -397,6 +423,10 @@ def push_fn(args): (s, PackageNotInstalledError("package not installed")) for s in not_installed ) + # Warn about possible old binary mirror layout + if not mirror.push_url.startswith("oci://"): + check_mirror_for_layout(mirror) + with bindist.make_uploader( mirror=mirror, force=args.force, @@ -527,8 +557,7 @@ def download_fn(args): if len(specs) != 1: tty.die("a single spec argument is required to download from a buildcache") - if not bindist.download_single_spec(specs[0], args.path): - sys.exit(1) + bindist.download_single_spec(specs[0], args.path) def save_specfile_fn(args): @@ -553,29 +582,78 @@ def save_specfile_fn(args): ) -def copy_buildcache_file(src_url, dest_url, local_path=None): - """Copy from 
source url to destination url""" - tmpdir = None +def copy_buildcache_entry(cache_entry: URLBuildcacheEntry, destination_url: str): + """Download buildcache entry and copy it to the destination_url""" + try: + spec_dict = cache_entry.fetch_metadata() + cache_entry.fetch_archive() + except bindist.BuildcacheEntryError as e: + tty.warn(f"Failed to retrieve buildcache for copying due to {e}") + cache_entry.destroy() + return - if not local_path: - tmpdir = tempfile.mkdtemp() - local_path = os.path.join(tmpdir, os.path.basename(src_url)) + spec_blob_record = cache_entry.get_blob_record(BuildcacheComponent.SPEC) + local_spec_path = cache_entry.get_local_spec_path() + tarball_blob_record = cache_entry.get_blob_record(BuildcacheComponent.TARBALL) + local_tarball_path = cache_entry.get_local_archive_path() + + target_spec = spack.spec.Spec.from_dict(spec_dict) + spec_label = f"{target_spec.name}/{target_spec.dag_hash()[:7]}" + + if not tarball_blob_record: + cache_entry.destroy() + raise BuildcacheEntryError(f"No source tarball blob record, failed to sync {spec_label}") + + # Try to push the tarball + tarball_dest_url = cache_entry.get_blob_url(destination_url, tarball_blob_record) try: - temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path)) - try: - temp_stage.create() - temp_stage.fetch() - web_util.push_to_url(local_path, dest_url, keep_original=True) - except spack.error.FetchError as e: - # Expected, since we have to try all the possible extensions - tty.debug("no such file: {0}".format(src_url)) - tty.debug(e) - finally: - temp_stage.destroy() - finally: - if tmpdir and os.path.exists(tmpdir): - shutil.rmtree(tmpdir) + web_util.push_to_url(local_tarball_path, tarball_dest_url, keep_original=True) + except Exception as e: + tty.warn(f"Failed to push {local_tarball_path} to {tarball_dest_url} due to {e}") + cache_entry.destroy() + return + + if not spec_blob_record: + cache_entry.destroy() + raise BuildcacheEntryError(f"No source spec blob record, 
failed to sync {spec_label}") + + # Try to push the spec file + spec_dest_url = cache_entry.get_blob_url(destination_url, spec_blob_record) + + try: + web_util.push_to_url(local_spec_path, spec_dest_url, keep_original=True) + except Exception as e: + tty.warn(f"Failed to push {local_spec_path} to {spec_dest_url} due to {e}") + cache_entry.destroy() + return + + # Stage the manifest locally, since if it's signed, we don't want to try to + # to reproduce that here. Instead just push the locally staged manifest to + # the expected path at the destination url. + manifest_src_url = cache_entry.remote_manifest_url + manifest_dest_url = cache_entry.get_manifest_url(target_spec, destination_url) + + manifest_stage = spack.stage.Stage(manifest_src_url) + + try: + manifest_stage.create() + manifest_stage.fetch() + except Exception as e: + tty.warn(f"Failed to fetch manifest from {manifest_src_url} due to {e}") + manifest_stage.destroy() + cache_entry.destroy() + return + + local_manifest_path = manifest_stage.save_filename + + try: + web_util.push_to_url(local_manifest_path, manifest_dest_url, keep_original=True) + except Exception as e: + tty.warn(f"Failed to push manifest to {manifest_dest_url} due to {e}") + + manifest_stage.destroy() + cache_entry.destroy() def sync_fn(args): @@ -615,37 +693,21 @@ def sync_fn(args): ) ) - build_cache_dir = bindist.build_cache_relative_path() - buildcache_rel_paths = [] - tty.debug("Syncing the following specs:") - for s in env.all_specs(): + specs_to_sync = [s for s in env.all_specs() if not s.external] + for s in specs_to_sync: tty.debug(" {0}{1}: {2}".format("* " if s in env.roots() else " ", s.name, s.dag_hash())) - - buildcache_rel_paths.extend( - [ - os.path.join(build_cache_dir, bindist.tarball_path_name(s, ".spack")), - os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json.sig")), - os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json")), - os.path.join(build_cache_dir, bindist.tarball_name(s, 
".spec.yaml")), - ] + cache_class = get_url_buildcache_class( + layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION ) - - tmpdir = tempfile.mkdtemp() - - try: - for rel_path in buildcache_rel_paths: - src_url = url_util.join(src_mirror_url, rel_path) - local_path = os.path.join(tmpdir, rel_path) - dest_url = url_util.join(dest_mirror_url, rel_path) - - tty.debug("Copying {0} to {1} via {2}".format(src_url, dest_url, local_path)) - copy_buildcache_file(src_url, dest_url, local_path=local_path) - finally: - shutil.rmtree(tmpdir) + src_cache_entry = cache_class(src_mirror_url, s, allow_unsigned=True) + src_cache_entry.read_manifest() + copy_buildcache_entry(src_cache_entry, dest_mirror_url) -def manifest_copy(manifest_file_list, dest_mirror=None): +def manifest_copy( + manifest_file_list: List[str], dest_mirror: Optional[spack.mirrors.mirror.Mirror] = None +): """Read manifest files containing information about specific specs to copy from source to destination, remove duplicates since any binary packge for a given hash should be the same as any other, and copy all files specified @@ -655,21 +717,24 @@ def manifest_copy(manifest_file_list, dest_mirror=None): for manifest_path in manifest_file_list: with open(manifest_path, encoding="utf-8") as fd: manifest = json.loads(fd.read()) - for spec_hash, copy_list in manifest.items(): + for spec_hash, copy_obj in manifest.items(): # Last duplicate hash wins - deduped_manifest[spec_hash] = copy_list + deduped_manifest[spec_hash] = copy_obj - build_cache_dir = bindist.build_cache_relative_path() - for spec_hash, copy_list in deduped_manifest.items(): - for copy_file in copy_list: - dest = copy_file["dest"] - if dest_mirror: - src_relative_path = os.path.join( - build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/") - ) - dest = url_util.join(dest_mirror.push_url, src_relative_path) - tty.debug("copying {0} to {1}".format(copy_file["src"], dest)) - copy_buildcache_file(copy_file["src"], dest) + for 
spec_hash, copy_obj in deduped_manifest.items(): + cache_class = get_url_buildcache_class( + layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + src_cache_entry = cache_class( + cache_class.get_base_url(copy_obj["src"]), allow_unsigned=True + ) + src_cache_entry.read_manifest(manifest_url=copy_obj["src"]) + if dest_mirror: + destination_url = dest_mirror.push_url + else: + destination_url = cache_class.get_base_url(copy_obj["dest"]) + tty.debug("copying {0} to {1}".format(copy_obj["src"], destination_url)) + copy_buildcache_entry(src_cache_entry, destination_url) def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False): @@ -693,13 +758,9 @@ def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False): bindist._url_generate_package_index(url, tmpdir) if update_keys: - keys_url = url_util.join( - url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path() - ) - try: with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: - bindist.generate_key_index(keys_url, tmpdir) + bindist.generate_key_index(url, tmpdir) except bindist.CannotListKeys as e: # Do not error out if listing keys went wrong. This usually means that the _gpg path # does not exist. TODO: distinguish between this and other errors. @@ -711,5 +772,53 @@ def update_index_fn(args): return update_index(args.mirror, update_keys=args.keys) +def migrate_fn(args): + """perform in-place binary mirror migration (2 to 3) + + A mirror can contain both layout version 2 and version 3 simultaneously without + interference. This command performs in-place migration of a binary mirror laid + out according to version 2, to a binary mirror laid out according to layout + version 3. Only indexed specs will be migrated, so consider updating the mirror + index before running this command. Re-run the command to migrate any missing + items. 
+ + The default mode of operation is to perform a signed migration, that is, spack + will attempt to verify the signatures on specs, and then re-sign them before + migration, using whatever keys are already installed in your key ring. You can + migrate a mirror of unsigned binaries (or convert a mirror of signed binaries + to unsigned) by providing the --unsigned argument. + + By default spack will leave the original mirror contents (in the old layout) in + place after migration. You can have spack remove the old contents by providing + the --delete-existing argument. Because migrating a mostly-already-migrated + mirror should be fast, consider a workflow where you perform a default migration, + (i.e. preserve the existing layout rather than deleting it) then evaluate the + state of the migrated mirror by attempting to install from it, and finally + running the migration again with --delete-existing.""" + target_mirror = args.mirror + unsigned = args.unsigned + assert isinstance(target_mirror, spack.mirrors.mirror.Mirror) + delete_existing = args.delete_existing + + proceed = True + if delete_existing and not args.yes_to_all: + msg = ( + "Using --delete-existing will delete the entire contents \n" + " of the old layout within the mirror. Because migrating a mirror \n" + " that has already been migrated should be fast, consider a workflow \n" + " where you perform a default migration (i.e. preserve the existing \n" + " layout rather than deleting it), then evaluate the state of the \n" + " migrated mirror by attempting to install from it, and finally, \n" + " run the migration again with --delete-existing." 
+ ) + tty.warn(msg) + proceed = tty.get_yes_or_no("Do you want to proceed?", default=False) + + if not proceed: + tty.die("Migration aborted.") + + migrate(target_mirror, unsigned=unsigned, delete_existing=delete_existing) + + def buildcache(parser, args): return args.func(args) diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py index 0a3b68af674..31bdfb04485 100644 --- a/lib/spack/spack/cmd/ci.py +++ b/lib/spack/spack/cmd/ci.py @@ -423,7 +423,7 @@ def ci_rebuild(args): # jobs in subsequent stages. tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name)) for match in matches: - tty.msg(" {0}".format(match["mirror_url"])) + tty.msg(" {0}".format(match.url_and_version.url)) # Now we are done and successful return 0 diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py index 92eb5f951ab..b04af6315b3 100644 --- a/lib/spack/spack/error.py +++ b/lib/spack/spack/error.py @@ -202,3 +202,16 @@ class MirrorError(SpackError): def __init__(self, msg, long_msg=None): super().__init__(msg, long_msg) + + +class NoChecksumException(SpackError): + """ + Raised if file fails checksum verification. + """ + + def __init__(self, path, size, contents, algorithm, expected, computed): + super().__init__( + f"{algorithm} checksum failed for {path}", + f"Expected {expected} but got {computed}. " + f"File size = {size} bytes. 
Contents = {contents!r}", + ) diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index c51ea278a05..5abd9ac5c56 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -65,6 +65,7 @@ import spack.util.executable import spack.util.path import spack.util.timer as timer +from spack.url_buildcache import BuildcacheEntryError from spack.util.environment import EnvironmentModifications, dump_environment from spack.util.executable import which @@ -449,17 +450,17 @@ def _process_binary_cache_tarball( else ``False`` """ with timer.measure("fetch"): - download_result = binary_distribution.download_tarball( + tarball_stage = binary_distribution.download_tarball( pkg.spec.build_spec, unsigned, mirrors_for_spec ) - if download_result is None: + if tarball_stage is None: return False tty.msg(f"Extracting {package_id(pkg.spec)} from binary cache") with timer.measure("install"), spack.util.path.filter_padding(): - binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer) + binary_distribution.extract_tarball(pkg.spec, tarball_stage, force=False, timer=timer) if pkg.spec.spliced: # overwrite old metadata with new spack.store.STORE.layout.write_spec( @@ -2177,7 +2178,7 @@ def install(self) -> None: ) raise - except binary_distribution.NoChecksumException as exc: + except BuildcacheEntryError as exc: if task.cache_only: raise diff --git a/lib/spack/spack/schema/buildcache_spec.py b/lib/spack/spack/schema/buildcache_spec.py index 6f560b4a5df..81cde38901a 100644 --- a/lib/spack/spack/schema/buildcache_spec.py +++ b/lib/spack/spack/schema/buildcache_spec.py @@ -19,10 +19,6 @@ "additionalProperties": True, "items": spack.schema.spec.properties, }, - "binary_cache_checksum": { - "type": "object", - "properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}}, - }, "buildcache_layout_version": {"type": "number"}, } @@ -30,6 +26,6 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": 
"Spack buildcache specfile schema", "type": "object", - "additionalProperties": False, + "additionalProperties": True, "properties": properties, } diff --git a/lib/spack/spack/schema/url_buildcache_manifest.py b/lib/spack/spack/schema/url_buildcache_manifest.py new file mode 100644 index 00000000000..e3dc4340fcb --- /dev/null +++ b/lib/spack/spack/schema/url_buildcache_manifest.py @@ -0,0 +1,45 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +"""Schema for buildcache entry manifest file + +.. literalinclude:: _spack_root/lib/spack/spack/schema/url_buildcache_manifest.py + :lines: 11- +""" +from typing import Any, Dict + +properties: Dict[str, Any] = { + "version": {"type": "integer"}, + "data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "contentLength", + "mediaType", + "compression", + "checksumAlgorithm", + "checksum", + ], + "properties": { + "contentLength": {"type": "integer"}, + "mediaType": {"type": "string"}, + "compression": {"type": "string"}, + "checksumAlgorithm": {"type": "string"}, + "checksum": {"type": "string"}, + }, + "additionalProperties": True, + }, + }, +} + +#: Full schema with metadata +schema = { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Buildcache manifest schema", + "type": "object", + "required": ["version", "data"], + "additionalProperties": True, + "properties": properties, +} diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index 9278d9bf1d8..0a27c18781d 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -17,11 +17,10 @@ import urllib.request import urllib.response from pathlib import Path, PurePath +from typing import Any, Callable, Dict, NamedTuple, Optional import pytest -import archspec.cpu - from llnl.util.filesystem import copy_tree, join_path from llnl.util.symlink import readlink @@ -38,16 +37,27 @@ import spack.paths import 
spack.repo import spack.spec +import spack.stage import spack.store import spack.util.gpg import spack.util.spack_yaml as syaml import spack.util.url as url_util import spack.util.web as web_util -from spack.binary_distribution import INDEX_HASH_FILE, CannotListKeys, GenerateIndexError +from spack.binary_distribution import CannotListKeys, GenerateIndexError from spack.database import INDEX_JSON_FILE from spack.installer import PackageInstaller from spack.paths import test_path from spack.spec import Spec +from spack.url_buildcache import ( + INDEX_MANIFEST_FILE, + BuildcacheComponent, + BuildcacheEntryError, + URLBuildcacheEntry, + URLBuildcacheEntryV2, + compression_writer, + get_url_buildcache_class, + get_valid_spec_file, +) pytestmark = pytest.mark.not_on_windows("does not run on windows") @@ -372,7 +382,7 @@ def test_built_spec_cache(temporary_mirror_dir): for s in [gspec, cspec]: results = bindist.get_mirrors_for_spec(s) - assert any([r["spec"] == s for r in results]) + assert any([r.spec == s for r in results]) def fake_dag_hash(spec, length=None): @@ -435,7 +445,11 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config): assert "libelf" in cache_list # Remove dependency from cache - libelf_files = glob.glob(os.path.join(mirror_dir.join("build_cache").strpath, "*libelf*")) + libelf_files = glob.glob( + os.path.join( + mirror_dir.join(bindist.buildcache_relative_specs_path()).strpath, "libelf", "*libelf*" + ) + ) os.remove(*libelf_files) # Update index @@ -480,8 +494,7 @@ def mock_list_url(url, recursive=False): assert ( "Warning: Encountered problem listing packages at " - f"{test_url}/{bindist.BUILD_CACHE_RELATIVE_PATH}: Some HTTP error" - in capfd.readouterr().err + f"{test_url}: Some HTTP error" in capfd.readouterr().err ) @@ -538,29 +551,6 @@ def test_update_sbang(tmp_path, temporary_mirror, mock_fetch, install_mockery): assert f.read() == new_contents -@pytest.mark.skipif( - str(archspec.cpu.host().family) != "x86_64", - reason="test 
data uses gcc 4.5.0 which does not support aarch64", -) -def test_install_legacy_buildcache_layout(mutable_config, compiler_factory, install_mockery): - """Legacy buildcache layout involved a nested archive structure - where the .spack file contained a repeated spec.json and another - compressed archive file containing the install tree. This test - makes sure we can still read that layout.""" - legacy_layout_dir = os.path.join(test_path, "data", "mirrors", "legacy_layout") - mirror_url = f"file://{legacy_layout_dir}" - filename = ( - "test-debian6-core2-gcc-4.5.0-archive-files-2.0-" - "l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json" - ) - spec_json_path = os.path.join(legacy_layout_dir, "build_cache", filename) - mirror_cmd("add", "--scope", "site", "test-legacy-layout", mirror_url) - output = install_cmd("--no-check-signature", "--cache-only", "-f", spec_json_path, output=str) - mirror_cmd("rm", "--scope=site", "test-legacy-layout") - expect_line = "Extracting archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk from binary cache" - assert expect_line in output - - def test_FetchCacheError_only_accepts_lists_of_errors(): with pytest.raises(TypeError, match="list"): bindist.FetchCacheError("error") @@ -600,7 +590,60 @@ def test_text_relocate_if_needed(install_mockery, temporary_store, mock_fetch, t assert join_path("bin", "secretexe") not in manifest["relocate_textfiles"] -def test_etag_fetching_304(): +def test_compression_writer(tmp_path): + text = "This is some text. We might or might not like to compress it as we write." 
+ checksum_algo = "sha256" + + # Write the data using gzip compression + compressed_output_path = str(tmp_path / "compressed_text") + with compression_writer(compressed_output_path, "gzip", checksum_algo) as ( + compressor, + checker, + ): + compressor.write(text.encode("utf-8")) + + compressed_size = checker.length + compressed_checksum = checker.hexdigest() + + with open(compressed_output_path, "rb") as f: + binary_content = f.read() + + assert bindist.compute_hash(binary_content) == compressed_checksum + assert os.stat(compressed_output_path).st_size == compressed_size + assert binary_content[:2] == b"\x1f\x8b" + decompressed_content = gzip.decompress(binary_content).decode("utf-8") + + assert decompressed_content == text + + # Write the data without compression + uncompressed_output_path = str(tmp_path / "uncompressed_text") + with compression_writer(uncompressed_output_path, "none", checksum_algo) as ( + compressor, + checker, + ): + compressor.write(text.encode("utf-8")) + + uncompressed_size = checker.length + uncompressed_checksum = checker.hexdigest() + + with open(uncompressed_output_path, "r", encoding="utf-8") as f: + content = f.read() + + assert bindist.compute_hash(content) == uncompressed_checksum + assert os.stat(uncompressed_output_path).st_size == uncompressed_size + assert content == text + + # Make sure we raise if requesting unknown compression type + nocare_output_path = str(tmp_path / "wontwrite") + with pytest.raises(BuildcacheEntryError, match="Unknown compression type"): + with compression_writer(nocare_output_path, "gsip", checksum_algo) as ( + compressor, + checker, + ): + compressor.write(text) + + +def test_v2_etag_fetching_304(): # Test conditional fetch with etags. If the remote hasn't modified the file # it returns 304, which is an HTTPError in urllib-land. That should be # handled as success, since it means the local cache is up-to-date. 
@@ -613,7 +656,7 @@ def response_304(request: urllib.request.Request): ) assert False, "Should not fetch {}".format(url) - fetcher = bindist.EtagIndexFetcher( + fetcher = bindist.EtagIndexFetcherV2( url="https://www.example.com", etag="112a8bbc1b3f7f185621c1ee335f0502", urlopen=response_304, @@ -624,7 +667,7 @@ def response_304(request: urllib.request.Request): assert result.fresh -def test_etag_fetching_200(): +def test_v2_etag_fetching_200(): # Test conditional fetch with etags. The remote has modified the file. def response_200(request: urllib.request.Request): url = request.get_full_url() @@ -638,7 +681,7 @@ def response_200(request: urllib.request.Request): ) assert False, "Should not fetch {}".format(url) - fetcher = bindist.EtagIndexFetcher( + fetcher = bindist.EtagIndexFetcherV2( url="https://www.example.com", etag="112a8bbc1b3f7f185621c1ee335f0502", urlopen=response_200, @@ -652,7 +695,7 @@ def response_200(request: urllib.request.Request): assert result.hash == bindist.compute_hash("Result") -def test_etag_fetching_404(): +def test_v2_etag_fetching_404(): # Test conditional fetch with etags. The remote has modified the file. 
def response_404(request: urllib.request.Request): raise urllib.error.HTTPError( @@ -663,7 +706,7 @@ def response_404(request: urllib.request.Request): fp=None, ) - fetcher = bindist.EtagIndexFetcher( + fetcher = bindist.EtagIndexFetcherV2( url="https://www.example.com", etag="112a8bbc1b3f7f185621c1ee335f0502", urlopen=response_404, @@ -673,13 +716,13 @@ def response_404(request: urllib.request.Request): fetcher.conditional_fetch() -def test_default_index_fetch_200(): +def test_v2_default_index_fetch_200(): index_json = '{"Hello": "World"}' index_json_hash = bindist.compute_hash(index_json) def urlopen(request: urllib.request.Request): url = request.get_full_url() - if url.endswith(INDEX_HASH_FILE): + if url.endswith("index.json.hash"): return urllib.response.addinfourl( # type: ignore[arg-type] io.BytesIO(index_json_hash.encode()), headers={}, # type: ignore[arg-type] @@ -697,7 +740,7 @@ def urlopen(request: urllib.request.Request): assert False, "Unexpected request {}".format(url) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash="outdated", urlopen=urlopen ) @@ -710,7 +753,7 @@ def urlopen(request: urllib.request.Request): assert result.hash == index_json_hash -def test_default_index_dont_fetch_index_json_hash_if_no_local_hash(): +def test_v2_default_index_dont_fetch_index_json_hash_if_no_local_hash(): # When we don't have local hash, we should not be fetching the # remote index.json.hash file, but only index.json. 
index_json = '{"Hello": "World"}' @@ -728,7 +771,7 @@ def urlopen(request: urllib.request.Request): assert False, "Unexpected request {}".format(url) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash=None, urlopen=urlopen ) @@ -741,13 +784,13 @@ def urlopen(request: urllib.request.Request): assert not result.fresh -def test_default_index_not_modified(): +def test_v2_default_index_not_modified(): index_json = '{"Hello": "World"}' index_json_hash = bindist.compute_hash(index_json) def urlopen(request: urllib.request.Request): url = request.get_full_url() - if url.endswith(INDEX_HASH_FILE): + if url.endswith("index.json.hash"): return urllib.response.addinfourl( io.BytesIO(index_json_hash.encode()), headers={}, # type: ignore[arg-type] @@ -758,7 +801,7 @@ def urlopen(request: urllib.request.Request): # No request to index.json should be made. assert False, "Unexpected request {}".format(url) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen ) @@ -766,7 +809,7 @@ def urlopen(request: urllib.request.Request): @pytest.mark.parametrize("index_json", [b"\xa9", b"!#%^"]) -def test_default_index_invalid_hash_file(index_json): +def test_v2_default_index_invalid_hash_file(index_json): # Test invalid unicode / invalid hash type index_json_hash = bindist.compute_hash(index_json) @@ -778,21 +821,21 @@ def urlopen(request: urllib.request.Request): code=200, ) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen ) assert fetcher.get_remote_hash() is None -def test_default_index_json_404(): +def test_v2_default_index_json_404(): # Test invalid unicode / invalid hash type index_json = '{"Hello": "World"}' index_json_hash = bindist.compute_hash(index_json) def urlopen(request: 
urllib.request.Request): url = request.get_full_url() - if url.endswith(INDEX_HASH_FILE): + if url.endswith("index.json.hash"): return urllib.response.addinfourl( io.BytesIO(index_json_hash.encode()), headers={}, # type: ignore[arg-type] @@ -811,7 +854,7 @@ def urlopen(request: urllib.request.Request): assert False, "Unexpected fetch {}".format(url) - fetcher = bindist.DefaultIndexFetcher( + fetcher = bindist.DefaultIndexFetcherV2( url="https://www.example.com", local_hash="invalid", urlopen=urlopen ) @@ -1097,9 +1140,7 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success): json.dump(spec_dict, f) try: - spec_dict_disk, layout_disk = bindist._get_valid_spec_file( - str(path), max_supported_layout=1 - ) + spec_dict_disk, layout_disk = get_valid_spec_file(str(path), max_supported_layout=1) assert expect_success assert spec_dict_disk == spec_dict assert layout_disk == effective_layout @@ -1109,51 +1150,66 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success): def test_get_valid_spec_file_doesnt_exist(tmp_path): with pytest.raises(bindist.InvalidMetadataFile, match="No such file"): - bindist._get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1) - - -def test_get_valid_spec_file_gzipped(tmp_path): - # Create a gzipped file, contents don't matter - path = tmp_path / "spec.json.gz" - with gzip.open(path, "wb") as f: - f.write(b"hello") - with pytest.raises( - bindist.InvalidMetadataFile, match="Compressed spec files are not supported" - ): - bindist._get_valid_spec_file(str(path), max_supported_layout=1) + get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1) @pytest.mark.parametrize("filename", ["spec.json", "spec.json.sig"]) def test_get_valid_spec_file_no_json(tmp_path, filename): tmp_path.joinpath(filename).write_text("not json") with pytest.raises(bindist.InvalidMetadataFile): - bindist._get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1) + get_valid_spec_file(str(tmp_path / 
filename), max_supported_layout=1) -def test_download_tarball_with_unsupported_layout_fails( - tmp_path, mock_packages, mutable_config, capsys -): - layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1 - spec = spack.concretize.concretize_one("pkg-c") - spec_dict = spec.to_dict() - spec_dict["buildcache_layout_version"] = layout_version +@pytest.mark.usefixtures("install_mockery", "mock_packages", "mock_fetch", "temporary_mirror") +def test_url_buildcache_entry_v3(monkeypatch, tmpdir): + """Make sure URLBuildcacheEntry behaves as expected""" - # Setup a basic local build cache structure - path = ( - tmp_path / bindist.build_cache_relative_path() / bindist.tarball_name(spec, ".spec.json") - ) - path.parent.mkdir(parents=True) - with open(path, "w", encoding="utf-8") as f: - json.dump(spec_dict, f) + # Create a temp mirror directory for buildcache usage + mirror_dir = tmpdir.join("mirror_dir") + mirror_url = url_util.path_to_file_url(mirror_dir.strpath) - # Configure as a mirror. - mirror_cmd("add", "test-mirror", str(tmp_path)) + s = Spec("libdwarf").concretized() - # Shouldn't be able "download" this. - assert bindist.download_tarball(spec, unsigned=True) is None + # Install libdwarf + install_cmd("--fake", s.name) - # And there should be a warning about an unsupported layout version. 
- assert f"Layout version {layout_version} is too new" in capsys.readouterr().err + # Push libdwarf to buildcache + buildcache_cmd("push", "-u", mirror_dir.strpath, s.name) + + cache_class = get_url_buildcache_class(bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION) + build_cache = cache_class(mirror_url, s, allow_unsigned=True) + + manifest = build_cache.read_manifest() + spec_dict = build_cache.fetch_metadata() + local_tarball_path = build_cache.fetch_archive() + + assert "spec" in spec_dict + + for blob_record in manifest.data: + blob_path = build_cache.get_staged_blob_path(blob_record) + assert os.path.exists(blob_path) + actual_blob_size = os.stat(blob_path).st_size + assert blob_record.content_length == actual_blob_size + + build_cache.destroy() + + assert not os.path.exists(local_tarball_path) + + +def test_relative_path_components(): + blobs_v3 = URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.BLOB) + assert len(blobs_v3) == 1 + assert "blobs" in blobs_v3 + + blobs_v2 = URLBuildcacheEntryV2.get_relative_path_components(BuildcacheComponent.BLOB) + assert len(blobs_v2) == 1 + assert "build_cache" in blobs_v2 + + v2_spec_url = "file:///home/me/mymirror/build_cache/linux-ubuntu22.04-sapphirerapids-gcc-12.3.0-gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.json.sig" + assert URLBuildcacheEntryV2.get_base_url(v2_spec_url) == "file:///home/me/mymirror" + + v3_manifest_url = "file:///home/me/mymirror/v3/manifests/gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.manifest.json" + assert URLBuildcacheEntry.get_base_url(v3_manifest_url) == "file:///home/me/mymirror" @pytest.mark.parametrize( @@ -1170,3 +1226,244 @@ def test_download_tarball_with_unsupported_layout_fails( def test_default_tag(spec: str): """Make sure that computed image tags are valid.""" assert re.fullmatch(spack.oci.image.tag, bindist._oci_default_tag(spack.spec.Spec(spec))) + + +class IndexInformation(NamedTuple): + manifest_contents: Dict[str, Any] + index_contents: str + index_hash: 
str + manifest_path: str + index_path: str + manifest_etag: str + fetched_blob: Callable[[], bool] + + +@pytest.fixture +def mock_index(tmp_path, monkeypatch) -> IndexInformation: + mirror_root = tmp_path / "mymirror" + index_json = '{"Hello": "World"}' + index_json_hash = bindist.compute_hash(index_json) + fetched = False + + cache_class = get_url_buildcache_class( + layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + + index_blob_path = os.path.join( + str(mirror_root), + *cache_class.get_relative_path_components(BuildcacheComponent.BLOB), + "sha256", + index_json_hash[:2], + index_json_hash, + ) + + os.makedirs(os.path.dirname(index_blob_path)) + with open(index_blob_path, "w", encoding="utf-8") as fd: + fd.write(index_json) + + index_blob_record = bindist.BlobRecord( + os.stat(index_blob_path).st_size, + cache_class.BUILDCACHE_INDEX_MEDIATYPE, + "none", + "sha256", + index_json_hash, + ) + + index_manifest = { + "version": cache_class.get_layout_version(), + "data": [index_blob_record.to_dict()], + } + + manifest_json_path = cache_class.get_index_url(str(mirror_root)) + + os.makedirs(os.path.dirname(manifest_json_path)) + + with open(manifest_json_path, "w", encoding="utf-8") as f: + json.dump(index_manifest, f) + + def fetch_patch(stage, mirror_only: bool = False, err_msg: Optional[str] = None): + nonlocal fetched + fetched = True + + @property # type: ignore + def save_filename_patch(stage): + return str(index_blob_path) + + monkeypatch.setattr(spack.stage.Stage, "fetch", fetch_patch) + monkeypatch.setattr(spack.stage.Stage, "save_filename", save_filename_patch) + + def get_did_fetch(): + # nonlocal fetched + return fetched + + return IndexInformation( + index_manifest, + index_json, + index_json_hash, + manifest_json_path, + index_blob_path, + "59bcc3ad6775562f845953cf01624225", + get_did_fetch, + ) + + +def test_etag_fetching_304(): + # Test conditional fetch with etags. 
If the remote hasn't modified the file + # it returns 304, which is an HTTPError in urllib-land. That should be + # handled as success, since it means the local cache is up-to-date. + def response_304(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith(INDEX_MANIFEST_FILE): + assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"' + raise urllib.error.HTTPError( + url, 304, "Not Modified", hdrs={}, fp=None # type: ignore[arg-type] + ) + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.EtagIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + etag="112a8bbc1b3f7f185621c1ee335f0502", + urlopen=response_304, + ) + + result = fetcher.conditional_fetch() + assert isinstance(result, bindist.FetchIndexResult) + assert result.fresh + + +def test_etag_fetching_200(mock_index): + # Test conditional fetch with etags. The remote has modified the file. 
+ def response_200(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith(INDEX_MANIFEST_FILE): + assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"' + return urllib.response.addinfourl( + io.BytesIO(json.dumps(mock_index.manifest_contents).encode()), + headers={"Etag": f'"{mock_index.manifest_etag}"'}, # type: ignore[arg-type] + url=url, + code=200, + ) + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.EtagIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + etag="112a8bbc1b3f7f185621c1ee335f0502", + urlopen=response_200, + ) + + result = fetcher.conditional_fetch() + assert isinstance(result, bindist.FetchIndexResult) + assert not result.fresh + assert mock_index.fetched_blob() + assert result.etag == mock_index.manifest_etag + assert result.data == mock_index.index_contents + assert result.hash == mock_index.index_hash + + +def test_etag_fetching_404(): + # Test conditional fetch with etags. The remote has modified the file. 
+ def response_404(request: urllib.request.Request): + raise urllib.error.HTTPError( + request.get_full_url(), + 404, + "Not found", + hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'}, # type: ignore[arg-type] + fp=None, + ) + + fetcher = bindist.EtagIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + etag="112a8bbc1b3f7f185621c1ee335f0502", + urlopen=response_404, + ) + + with pytest.raises(bindist.FetchIndexError): + fetcher.conditional_fetch() + + +def test_default_index_fetch_200(mock_index): + # We fetch the manifest and then the index blob if the hash is outdated + def urlopen(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith(INDEX_MANIFEST_FILE): + return urllib.response.addinfourl( # type: ignore[arg-type] + io.BytesIO(json.dumps(mock_index.manifest_contents).encode()), + headers={"Etag": f'"{mock_index.manifest_etag}"'}, # type: ignore[arg-type] + url=url, + code=200, + ) + + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.DefaultIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + local_hash="outdated", + urlopen=urlopen, + ) + + result = fetcher.conditional_fetch() + + assert isinstance(result, bindist.FetchIndexResult) + assert not result.fresh + assert mock_index.fetched_blob() + assert result.etag == mock_index.manifest_etag + assert result.data == mock_index.index_contents + assert result.hash == mock_index.index_hash + + +def test_default_index_404(): + # We get a fetch error if the index can't be fetched + def urlopen(request: urllib.request.Request): + raise urllib.error.HTTPError( + request.get_full_url(), + 404, + "Not found", + hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'}, # type: ignore[arg-type] + fp=None, + ) + + fetcher = bindist.DefaultIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", 
bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + local_hash=None, + urlopen=urlopen, + ) + + with pytest.raises(bindist.FetchIndexError): + fetcher.conditional_fetch() + + +def test_default_index_not_modified(mock_index): + # We don't fetch the index blob if hash didn't change + def urlopen(request: urllib.request.Request): + url = request.get_full_url() + if url.endswith(INDEX_MANIFEST_FILE): + return urllib.response.addinfourl( + io.BytesIO(json.dumps(mock_index.manifest_contents).encode()), + headers={}, # type: ignore[arg-type] + url=url, + code=200, + ) + + # No other request should be made. + assert False, "Unexpected request {}".format(url) + + fetcher = bindist.DefaultIndexFetcher( + bindist.MirrorURLAndVersion( + "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ), + local_hash=mock_index.index_hash, + urlopen=urlopen, + ) + + assert fetcher.conditional_fetch().fresh + assert not mock_index.fetched_blob() diff --git a/lib/spack/spack/test/build_distribution.py b/lib/spack/spack/test/build_distribution.py index 647c794c645..8db7058a1d2 100644 --- a/lib/spack/spack/test/build_distribution.py +++ b/lib/spack/spack/test/build_distribution.py @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import os +import shutil import pytest @@ -37,12 +37,7 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p assert not skipped # Remove the tarball, which should cause push to push. 
- os.remove( - tmp_path - / bd.BUILD_CACHE_RELATIVE_PATH - / bd.tarball_directory_name(spec) - / bd.tarball_name(spec, ".spack") - ) + shutil.rmtree(tmp_path / bd.buildcache_relative_blobs_path()) with bd.make_uploader(mirror) as uploader: skipped = uploader.push_or_raise(specs) diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py index f350874485d..daeca3a4011 100644 --- a/lib/spack/spack/test/cmd/buildcache.py +++ b/lib/spack/spack/test/cmd/buildcache.py @@ -5,12 +5,16 @@ import errno import json import os +import pathlib import shutil from typing import List import pytest +from llnl.util.filesystem import copy_tree, find + import spack.binary_distribution +import spack.buildcache_migrate as migrate import spack.cmd.buildcache import spack.concretize import spack.environment as ev @@ -18,8 +22,16 @@ import spack.main import spack.mirrors.mirror import spack.spec -import spack.util.url +import spack.util.url as url_util from spack.installer import PackageInstaller +from spack.paths import test_path +from spack.url_buildcache import ( + BuildcacheComponent, + URLBuildcacheEntry, + URLBuildcacheEntryV2, + check_mirror_for_layout, + get_url_buildcache_class, +) buildcache = spack.main.SpackCommand("buildcache") install = spack.main.SpackCommand("install") @@ -74,20 +86,6 @@ def test_buildcache_list_allarch(database, mock_get_specs_multiarch, capsys): assert output.count("mpileaks") == 2 -def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir): - """ "Ensure that buildcache create creates output files""" - pkg = "trivial-install-test-package" - install(pkg) - - buildcache("push", "--unsigned", str(tmpdir), pkg) - - spec = spack.concretize.concretize_one(pkg) - tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack") - tarball = spack.binary_distribution.tarball_name(spec, ".spec.json") - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path)) - assert 
os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball)) - - def tests_buildcache_create_env( install_mockery, mock_fetch, monkeypatch, tmpdir, mutable_mock_env_path ): @@ -102,10 +100,15 @@ def tests_buildcache_create_env( buildcache("push", "--unsigned", str(tmpdir)) spec = spack.concretize.concretize_one(pkg) - tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack") - tarball = spack.binary_distribution.tarball_name(spec, ".spec.json") - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path)) - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball)) + + mirror_url = f"file://{tmpdir.strpath}" + + cache_class = get_url_buildcache_class( + layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + cache_entry = cache_class(mirror_url, spec, allow_unsigned=True) + assert cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + cache_entry.destroy() def test_buildcache_create_fails_on_noargs(tmpdir): @@ -159,12 +162,14 @@ def test_update_key_index( # it causes the index to get update. 
buildcache("update-index", "--keys", mirror_dir.strpath) - key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp")) + key_dir_list = os.listdir( + os.path.join(mirror_dir.strpath, spack.binary_distribution.buildcache_relative_keys_path()) + ) uninstall("-y", s.name) mirror("rm", "test-mirror") - assert "index.json" in key_dir_list + assert "keys.manifest.json" in key_dir_list def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch): @@ -180,10 +185,14 @@ def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch): # Install and generate build cache index PackageInstaller([s.package], fake=True, explicit=True).install() - metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json") + assert s.name is not None + manifest_file = URLBuildcacheEntry.get_manifest_filename(s) + specs_dirs = os.path.join( + *URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.SPEC), s.name + ) - assert not (mirror_dir / "build_cache" / metadata_file).exists() - assert (mirror_autopush_dir / "build_cache" / metadata_file).exists() + assert not (mirror_dir / specs_dirs / manifest_file).exists() + assert (mirror_autopush_dir / specs_dirs / manifest_file).exists() def test_buildcache_sync( @@ -205,7 +214,11 @@ def test_buildcache_sync( out_env_pkg = "libdwarf" def verify_mirror_contents(): - dest_list = os.listdir(os.path.join(dest_mirror_dir, "build_cache")) + dest_list = os.listdir( + os.path.join( + dest_mirror_dir, spack.binary_distribution.buildcache_relative_specs_path() + ) + ) found_pkg = False @@ -252,33 +265,15 @@ def verify_mirror_contents(): verify_mirror_contents() shutil.rmtree(dest_mirror_dir) + cache_class = get_url_buildcache_class( + layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + def manifest_insert(manifest, spec, dest_url): - manifest[spec.dag_hash()] = [ - { - "src": spack.util.url.join( - src_mirror_url, - spack.binary_distribution.build_cache_relative_path(), - 
spack.binary_distribution.tarball_name(spec, ".spec.json"), - ), - "dest": spack.util.url.join( - dest_url, - spack.binary_distribution.build_cache_relative_path(), - spack.binary_distribution.tarball_name(spec, ".spec.json"), - ), - }, - { - "src": spack.util.url.join( - src_mirror_url, - spack.binary_distribution.build_cache_relative_path(), - spack.binary_distribution.tarball_path_name(spec, ".spack"), - ), - "dest": spack.util.url.join( - dest_url, - spack.binary_distribution.build_cache_relative_path(), - spack.binary_distribution.tarball_path_name(spec, ".spack"), - ), - }, - ] + manifest[spec.dag_hash()] = { + "src": cache_class.get_manifest_url(spec, src_mirror_url), + "dest": cache_class.get_manifest_url(spec, dest_url), + } manifest_file = os.path.join(tmpdir.strpath, "manifest_dest.json") with open(manifest_file, "w", encoding="utf-8") as fd: @@ -298,9 +293,7 @@ def manifest_insert(manifest, spec, dest_url): with open(manifest_file, "w", encoding="utf-8") as fd: manifest = {} for spec in test_env.specs_by_hash.values(): - manifest_insert( - manifest, spec, spack.util.url.join(dest_mirror_url, "invalid_path") - ) + manifest_insert(manifest, spec, url_util.join(dest_mirror_url, "invalid_path")) json.dump(manifest, fd) # Trigger the warning @@ -327,11 +320,37 @@ def test_buildcache_create_install( buildcache("push", "--unsigned", str(tmpdir), pkg) + mirror_url = f"file://{tmpdir.strpath}" + spec = spack.concretize.concretize_one(pkg) - tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack") - tarball = spack.binary_distribution.tarball_name(spec, ".spec.json") - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path)) - assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball)) + cache_class = get_url_buildcache_class( + layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION + ) + cache_entry = cache_class(mirror_url, spec, allow_unsigned=True) + assert spec.name is not None + 
manifest_path = os.path.join( + str(tmpdir), + *cache_class.get_relative_path_components(BuildcacheComponent.SPEC), + spec.name, + cache_class.get_manifest_filename(spec), + ) + + assert os.path.exists(manifest_path) + cache_entry.read_manifest() + spec_blob_record = cache_entry.get_blob_record(BuildcacheComponent.SPEC) + tarball_blob_record = cache_entry.get_blob_record(BuildcacheComponent.TARBALL) + + spec_blob_path = os.path.join( + tmpdir.strpath, *cache_class.get_blob_path_components(spec_blob_record) + ) + assert os.path.exists(spec_blob_path) + + tarball_blob_path = os.path.join( + tmpdir.strpath, *cache_class.get_blob_path_components(tarball_blob_record) + ) + assert os.path.exists(tarball_blob_path) + + cache_entry.destroy() @pytest.mark.parametrize( @@ -503,3 +522,230 @@ def test_push_without_build_deps(tmp_path, temporary_store, mock_packages, mutab "push", "--update-index", "--without-build-dependencies", "my-mirror", f"/{s.dag_hash()}" ) assert spack.binary_distribution.update_cache_and_get_specs() == [s] + + +@pytest.fixture(scope="function") +def v2_buildcache_layout(tmp_path): + def _layout(signedness: str = "signed"): + source_path = str(pathlib.Path(test_path) / "data" / "mirrors" / "v2_layout" / signedness) + test_mirror_path = tmp_path / "mirror" + copy_tree(source_path, test_mirror_path) + return test_mirror_path + + return _layout + + +def test_check_mirror_for_layout(v2_buildcache_layout, mutable_config, capsys): + """Check printed warning in the presence of v2 layout binary mirrors""" + test_mirror_path = v2_buildcache_layout("unsigned") + + check_mirror_for_layout(spack.mirrors.mirror.Mirror.from_local_path(str(test_mirror_path))) + err = str(capsys.readouterr()[1]) + assert all([word in err for word in ["Warning", "missing", "layout"]]) + + +def test_url_buildcache_entry_v2_exists( + capsys, v2_buildcache_layout, mock_packages, mutable_config +): + """Test existence check for v2 buildcache entries""" + test_mirror_path = 
v2_buildcache_layout("unsigned") + mirror_url = f"file://{test_mirror_path}" + mirror("add", "v2mirror", mirror_url) + + with capsys.disabled(): + output = buildcache("list", "-a", "-l") + + assert "Fetching an index from a v2 binary mirror layout" in output + assert "is deprecated" in output + + v2_cache_class = URLBuildcacheEntryV2 + + # If you don't give it a spec, it returns False + build_cache = v2_cache_class(mirror_url) + assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + + spec = spack.concretize.concretize_one("libdwarf") + + # In v2 we have to ask for both, because we need to have the spec to have the tarball + build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True) + assert not build_cache.exists([BuildcacheComponent.TARBALL]) + assert not build_cache.exists([BuildcacheComponent.SPEC]) + # But if we do ask for both, they should be there in this case + assert build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + + spec_path = build_cache._get_spec_url(spec, mirror_url, ext=".spec.json")[7:] + tarball_path = build_cache._get_tarball_url(spec, mirror_url)[7:] + + os.remove(tarball_path) + build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True) + assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + + os.remove(spec_path) + build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True) + assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL]) + + +@pytest.mark.parametrize("signing", ["unsigned", "signed"]) +def test_install_v2_layout( + signing, + capsys, + v2_buildcache_layout, + mock_packages, + mutable_config, + mutable_mock_env_path, + install_mockery, + mock_gnupghome, + monkeypatch, +): + """Ensure we can still install from signed and unsigned v2 buildcache""" + test_mirror_path = v2_buildcache_layout(signing) + mirror("add", "my-mirror", str(test_mirror_path)) + + # Trust original signing key 
(no-op if this is the unsigned pass) + buildcache("keys", "--install", "--trust") + + with capsys.disabled(): + output = install("--fake", "--no-check-signature", "libdwarf") + + assert "Extracting libelf" in output + assert "libelf: Successfully installed" in output + assert "Extracting libdwarf" in output + assert "libdwarf: Successfully installed" in output + assert "Installing a spec from a v2 binary mirror layout" in output + assert "is deprecated" in output + + +def test_basic_migrate_unsigned(capsys, v2_buildcache_layout, mutable_config): + """Make sure first unsigned migration results in usable buildcache, + leaving the previous layout in place. Also test that a subsequent one + doesn't need to migrate anything, and that using --delete-existing + removes the previous layout""" + + test_mirror_path = v2_buildcache_layout("unsigned") + mirror("add", "my-mirror", str(test_mirror_path)) + + with capsys.disabled(): + output = buildcache("migrate", "--unsigned", "my-mirror") + + # The output indicates both specs were migrated + assert output.count("Successfully migrated") == 6 + + build_cache_path = str(test_mirror_path / "build_cache") + + # Without "--delete-existing" and "--yes-to-all", migration leaves the + # previous layout in place + assert os.path.exists(build_cache_path) + assert os.path.isdir(build_cache_path) + + # Now list the specs available under the new layout + with capsys.disabled(): + output = buildcache("list", "--allarch") + + assert "libdwarf" in output and "libelf" in output + + with capsys.disabled(): + output = buildcache( + "migrate", "--unsigned", "--delete-existing", "--yes-to-all", "my-mirror" + ) + + # A second migration of the same mirror indicates neither spec + # needs to be migrated + assert output.count("No need to migrate") == 6 + + # When we provide "--delete-existing" and "--yes-to-all", migration + # removes the old layout + assert not os.path.exists(build_cache_path) + + +def test_basic_migrate_signed( + capsys, 
v2_buildcache_layout, monkeypatch, mock_gnupghome, mutable_config +): + """Test a signed migration requires a signing key, requires the public + key originally used to sign the pkgs, fails and prints reasonable messages + if those requirements are unmet, and eventually succeeds when they are met.""" + test_mirror_path = v2_buildcache_layout("signed") + mirror("add", "my-mirror", str(test_mirror_path)) + + with pytest.raises(migrate.MigrationException) as error: + buildcache("migrate", "my-mirror") + + # Without a signing key spack fails and explains why + assert error.value.message == "Signed migration requires exactly one secret key in keychain" + + # Create a signing key and trust the key used to sign the pkgs originally + gpg("create", "New Test Signing Key", "noone@nowhere.org") + + with capsys.disabled(): + output = buildcache("migrate", "my-mirror") + + # Without trusting the original signing key, spack fails with an explanation + assert "Failed to verify signature of libelf" in output + assert "Failed to verify signature of libdwarf" in output + assert "did you mean to perform an unsigned migration" in output + + # Trust original signing key (since it's in the original layout location, + # this is where the monkeypatched attribute is used) + with capsys.disabled(): + output = buildcache("keys", "--install", "--trust") + + with capsys.disabled(): + output = buildcache("migrate", "my-mirror") + + # Once we have the proper keys, migration should succeed + assert "Successfully migrated libelf" in output + assert "Successfully migrated libelf" in output + + # Now list the specs available under the new layout + with capsys.disabled(): + output = buildcache("list", "--allarch") + + assert "libdwarf" in output and "libelf" in output + + +def test_unsigned_migrate_of_signed_mirror(capsys, v2_buildcache_layout, mutable_config): + """Test spack can do an unsigned migration of a signed buildcache by + ignoring signatures and skipping re-signing.""" + + test_mirror_path 
= v2_buildcache_layout("signed") + mirror("add", "my-mirror", str(test_mirror_path)) + + with capsys.disabled(): + output = buildcache( + "migrate", "--unsigned", "--delete-existing", "--yes-to-all", "my-mirror" + ) + + # Now list the specs available under the new layout + with capsys.disabled(): + output = buildcache("list", "--allarch") + + assert "libdwarf" in output and "libelf" in output + + # We should find two spec manifest files, one for each spec + file_list = find(test_mirror_path, "*.spec.manifest.json") + assert len(file_list) == 6 + assert any(["libdwarf" in file for file in file_list]) + assert any(["libelf" in file for file in file_list]) + + # The two spec manifest files should be unsigned + for file_path in file_list: + with open(file_path, "r", encoding="utf-8") as fd: + assert json.load(fd) + + +def test_migrate_requires_index(capsys, v2_buildcache_layout, mutable_config): + """Test spack fails with a reasonable error message when mirror does + not have an index""" + + test_mirror_path = v2_buildcache_layout("unsigned") + v2_index_path = test_mirror_path / "build_cache" / "index.json" + v2_index_hash_path = test_mirror_path / "build_cache" / "index.json.hash" + os.remove(str(v2_index_path)) + os.remove(str(v2_index_hash_path)) + + mirror("add", "my-mirror", str(test_mirror_path)) + + with pytest.raises(migrate.MigrationException) as error: + buildcache("migrate", "--unsigned", "my-mirror") + + # If the buildcache has no index, spack fails and explains why + assert error.value.message == "Buildcache migration requires a buildcache index" diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 9f4cf680aca..07c3d7bdf04 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -31,11 +31,8 @@ from spack.ci.common import PipelineDag, PipelineOptions, SpackCIConfig from spack.ci.generator_registry import generator from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE -from spack.database import 
INDEX_JSON_FILE from spack.error import SpackError -from spack.schema.buildcache_spec import schema as specfile_schema from spack.schema.database_index import schema as db_idx_schema -from spack.spec import Spec from spack.test.conftest import MockHTTPResponse config_cmd = spack.main.SpackCommand("config") @@ -718,7 +715,7 @@ def test_ci_nothing_to_rebuild( ) install_cmd("archive-files") - buildcache_cmd("push", "-f", "-u", mirror_url, "archive-files") + buildcache_cmd("push", "-f", "-u", "--update-index", mirror_url, "archive-files") with working_dir(tmp_path): env_cmd("create", "test", "./spack.yaml") @@ -855,18 +852,18 @@ def test_push_to_build_cache( # Test generating buildcache index while we have bin mirror buildcache_cmd("update-index", mirror_url) - with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as idx_fd: - index_object = json.load(idx_fd) - jsonschema.validate(index_object, db_idx_schema) + + # Validate resulting buildcache (database) index + layout_version = spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION + url_and_version = spack.binary_distribution.MirrorURLAndVersion( + mirror_url, layout_version + ) + index_fetcher = spack.binary_distribution.DefaultIndexFetcher(url_and_version, None) + result = index_fetcher.conditional_fetch() + jsonschema.validate(json.loads(result.data), db_idx_schema) # Now that index is regenerated, validate "buildcache list" output assert "patchelf" in buildcache_cmd("list", output=str) - # Also test buildcache_spec schema - for file_name in os.listdir(mirror_dir / "build_cache"): - if file_name.endswith(".spec.json.sig"): - with open(mirror_dir / "build_cache" / file_name, encoding="utf-8") as f: - spec_dict = Spec.extract_json_from_clearsig(f.read()) - jsonschema.validate(spec_dict, specfile_schema) logs_dir = scratch / "logs_dir" logs_dir.mkdir() @@ -1032,7 +1029,7 @@ def test_ci_generate_override_runner_attrs( def test_ci_rebuild_index( - tmp_path: pathlib.Path, working_env, 
mutable_mock_env_path, install_mockery, mock_fetch + tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_fetch, capsys ): scratch = tmp_path / "working_dir" mirror_dir = scratch / "mirror" @@ -1069,8 +1066,9 @@ def test_ci_rebuild_index( buildcache_cmd("push", "-u", "-f", mirror_url, "callpath") ci_cmd("rebuild-index") - with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as f: - jsonschema.validate(json.load(f), db_idx_schema) + with capsys.disabled(): + output = buildcache_cmd("list", "--allarch") + assert "callpath" in output def test_ci_get_stack_changed(mock_git_repo, monkeypatch): diff --git a/lib/spack/spack/test/cmd/gpg.py b/lib/spack/spack/test/cmd/gpg.py index e83602d2753..dd86a56e8b6 100644 --- a/lib/spack/spack/test/cmd/gpg.py +++ b/lib/spack/spack/test/cmd/gpg.py @@ -8,6 +8,7 @@ import llnl.util.filesystem as fs +import spack.binary_distribution as bindist import spack.util.executable import spack.util.gpg from spack.main import SpackCommand @@ -172,23 +173,25 @@ def test_gpg(tmpdir, mutable_config, mock_gnupghome): # Verification should now succeed again. 
gpg("verify", str(test_path)) + relative_keys_path = bindist.buildcache_relative_keys_path() + # Publish the keys using a directory path test_path = tmpdir.join("dir_cache") - os.makedirs("%s" % test_path) + os.makedirs(f"{test_path}") gpg("publish", "--rebuild-index", "-d", str(test_path)) - assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path) + assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json") # Publish the keys using a mirror url test_path = tmpdir.join("url_cache") - os.makedirs("%s" % test_path) - test_url = "file://%s" % test_path + os.makedirs(f"{test_path}") + test_url = f"file://{test_path}" gpg("publish", "--rebuild-index", "--mirror-url", test_url) - assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path) + assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json") # Publish the keys using a mirror name test_path = tmpdir.join("named_cache") - os.makedirs("%s" % test_path) - mirror_url = "file://%s" % test_path + os.makedirs(f"{test_path}") + mirror_url = f"file://{test_path}" mirror("add", "gpg", mirror_url) gpg("publish", "--rebuild-index", "-m", "gpg") - assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path) + assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json") diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py index a9ede4df73d..cd1fe699986 100644 --- a/lib/spack/spack/test/cmd/mirror.py +++ b/lib/spack/spack/test/cmd/mirror.py @@ -6,6 +6,7 @@ import pytest +import spack.binary_distribution as bindist import spack.cmd.mirror import spack.concretize import spack.config @@ -365,8 +366,10 @@ def test_mirror_destroy( install("--fake", "--no-cache", spec_name) buildcache("push", "-u", "-f", mirror_dir.strpath, spec_name) + blobs_path = bindist.buildcache_relative_blobs_path() + contents = os.listdir(mirror_dir.strpath) - assert "build_cache" in contents + assert blobs_path in contents # Destroy mirror by name 
mirror("destroy", "-m", "atest") @@ -376,7 +379,7 @@ def test_mirror_destroy( buildcache("push", "-u", "-f", mirror_dir.strpath, spec_name) contents = os.listdir(mirror_dir.strpath) - assert "build_cache" in contents + assert blobs_path in contents # Destroy mirror by url mirror("destroy", "--mirror-url", mirror_url) diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index cb7d3f082b1..b5b8e4d9825 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -1068,9 +1068,7 @@ def install_mockery(temporary_store: spack.store.Store, mutable_config, mock_pac @pytest.fixture(scope="module") def temporary_mirror_dir(tmpdir_factory): dir = tmpdir_factory.mktemp("mirror") - dir.ensure("build_cache", dir=True) yield str(dir) - dir.join("build_cache").remove() @pytest.fixture(scope="function") @@ -1084,9 +1082,7 @@ def temporary_mirror(temporary_mirror_dir): @pytest.fixture(scope="function") def mutable_temporary_mirror_dir(tmpdir_factory): dir = tmpdir_factory.mktemp("mirror") - dir.ensure("build_cache", dir=True) yield str(dir) - dir.join("build_cache").remove() @pytest.fixture(scope="function") diff --git a/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json b/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json deleted file mode 100644 index 8aae45be93f..00000000000 --- a/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "spec": { - "_meta": { - "version": 3 - }, - "nodes": [ - { - "name": "archive-files", - "version": "2.0", - "arch": { - "platform": "test", - "platform_os": "debian6", - "target": { - "name": "core2", - "vendor": "GenuineIntel", - "features": [ - "mmx", - "sse", - "sse2", 
- "ssse3" - ], - "generation": 0, - "parents": [ - "nocona" - ] - } - }, - "compiler": { - "name": "gcc", - "version": "4.5.0" - }, - "namespace": "builtin.mock", - "parameters": { - "cflags": [], - "cppflags": [], - "cxxflags": [], - "fflags": [], - "ldflags": [], - "ldlibs": [] - }, - "package_hash": "ncv2pr4o2yemepsa4h7u4p4dsgieul5fxvh6s5am5fsb65ebugaa====", - "hash": "l3vdiqvbobmspwyb4q2b62fz6nitd4hk" - } - ] - }, - "binary_cache_checksum": { - "hash_algorithm": "sha256", - "hash": "c226b51d88876746efd6f9737cc6dfdd349870b6c0b9c045d9bad0f2764a40b9" - }, - "buildinfo": { - "relative_prefix": "test-debian6-core2/gcc-4.5.0/archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk", - "relative_rpaths": false - } -} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2/gcc-4.5.0/archive-files-2.0/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spack b/lib/spack/spack/test/data/mirrors/legacy_layout/build_cache/test-debian6-core2/gcc-4.5.0/archive-files-2.0/test-debian6-core2-gcc-4.5.0-archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spack deleted file mode 100644 index 047d9b2cb7fcdfc01451c8dd63213b9246c3e4c0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10240 zcmeHM2Ut_tw#LH9*auJu#n2U{r4T?+RFEb}6A?r(sGl)6!o@~;P^8r z5`l_CIyfq@i%G%$%(o$MN&|&La%vo=qRg)E-@iF56&0BBxAl);?+EPr|8Ljiw{HCL z)93o9V98YKAB!we{~4bX3444F-qv?_e6gsXwW3vn7tf0_v^LjDIXcvJ zbf}SN723}qy^xxwVIvM)8s7AXS5VVwp;AdB|G5k`bc^=Ps_Aj`CUoyJzw61gfk=&8 z1+U(|txxQa3eg@7wpSYs-rHkrWva2j)z!DjFSUa(yF;yk+AXCPp+CrA>Mc%rJ8Vq4 zF+J_FgYjyEG>H+0a~RcowivXK7AL-_FfsI|7Bgg!5{2@)Epr`(3E^r0=n!N$O z1O=>y!FA`s^WgDu&2qKLj6_?0P&&|_!{=P$BCA=FttjGEcW7ItmR8FZP(oM~)WDaq zG&ATE;Nb~ywuP8%Rn_wVKav$#Z9Z){G&lu0h|S%XR24qtAgMjnySm_A-};kaA6DZaJ#NIDn}X7LQr6RH9OLJ~%^TN)7IlJ-e3Lcf?Q`@A-c@uOI}Ej>HYK!e z<22|6z_Y@vEqBV2Yo)M}ks-9dqTrCkZ7toC&fe|}yMdYQec;jY*f0oW9Lk^-fO(4` 
zju}=^yr5VBZXc)Jhxjp71qnAC9gD-0!%IWs#Py407-jK$vRJXVrPEmg;xPQv*&cO)%k-ht#3mICt)D|4vlomD-u)w5T8@(<{ zyqw}#8g{<-De+x|k9tWV@4_NYv38fDSJGR^G=Jq&@=2Wna1}UqtCv>yyxdPjJ?z6vDMRW)TbnyWY@LhoTCSVn9#d<5pmC@j@(fj;6~W@(l50=;Ur+#Liz-*ZA@s zc=|XVi5ijB7fbUoo__YEMy6c)=NRE9hOl5+iEeSTUWmpyNSkD9A)QC+dR#NIx<-;p zH-TPFx~8iqbY0L29}Y_zPY*ce%cmpfy;e{*=>ATEt)Tbhy2Xo5s|LE>c%$pgCf!vL zZIJquw+|ZH11gT8O4jn1MY%8_lIBlSwdqz)>FPv87X*{HlRsY3)zej&^h)ZNpVBBZ=ya=0n9<%R znW1;7RDgTbk8DtXs_RNJvh_rEtKu{~51+NG!{%dJALFHr$K;KXwY*a5Ep}4jo|U{C zoGWxf`&wQI9JF3MGsA4p$BNe@^c{cV-{q=^x+4|Waq3WN^6uDIE*SMz2fzB5U+H>Y zS?zf{lu?`RBk4he(Ee)}NT@o`K+sdBEueqp(88e?c$7f|B1~?|1BkOUP-%92(8)KS z+~St0SntT(wYQjJ35hE?)HAu2pAQ#+u{qLzV4+4^LCQyGSpVHclT$eExL8Mx~0r*2~h-UM*EPpE+=}lJ5h6NE?4?rPmZj#g*B2<^P?kag7PAp-Cx>0$xFZA8f}nl zLlp2k$V$JJ+F4O;HjrfKc^I9wE9~5GXWwF;Q=(b$>9;j?<}>dSkA&Gh ze?I%Fr=~;mQHpzyK~i1tQ0Amjp8TuE1Bpk6{SPF!biqqryT!Jej$G1e$jOXt%xlkn z;5KQ$(TY(?kLn!Vm8-?KuuDuvW%d5q9oH=-K%L3ggPxRQA7@4DuBbESxBkRZM`$R!_j`*S0l2 zy**tbZ~K6>c?w8f))-*{Wi&1Uo&X3M;O{?Sgt6VS(Thhv>3adJ`<8|J0IybAI%g6N_`xg(%r zIXntnIk$;(bs2r2b*H}d@5-;L6nUq)d|mP>1WO)Xg1)3pqSvol7G#T#k89@aB%WsU zNkd-Kyj14sE-QyY)+X*ytTz>$9kYsA+ggy1V02EhVnaMV-N4fxV1C&6x)9}5-Q$sM z+5BaXy@G~=bV5QNq2NcfLtGSF%t?x=Czq0?5^SL#%A4o z6Qeiak@ikfSVi2~gvhPwqC$Q9`SEvc>I?=i9R?hyZR0&W>+fE>VS=qo)G4<>w#VJ{ z76;%?nZ_&i#(*`0EN8g^aU{ZVY(~PAeR3%AzcZ%LxR_DbVt};#B(l*Ou zRbH^c6bhRS3>$eJCE@QJYwoMwnEBf5!6Uo;H~ss?f!c>^Mjl>>A|1j~vxYj8umbhmMej~$k-aEtTjp=B- zK(X8rrSr_oB~a0Xo2&E#PfDJ~3kM^LJ@;NIIeWk}iYlcty=!`LqQdiTS?tZw5c$wB z59PqrsKDKwE7guXx|~Z1cYNZ93p4CE6*vM(w9| zQGI5UIpTSAMkBt6`PgQs*K@fdW&6SQGGKpfXzKc-cb&*du{5iv^z)99V^UPcKwVuT ze+<|)4`nEggSIn^<@Iq-rWsK27og%%t*wmwbF1F~FB5Mayp>>A3vm+}t;6oS5WD+4 z`}asb24rqPob2{tf`LZ4F=S90MFIhKI<+5C)dUr{)LuaB1kb;PWPyAYiwdjNS69B^ zKFu|(*+hE`Zf3uco^X$Eq=RoN78HFiOwl@SFzr6d|H|fZl3a^xF`uZ|#P+40<$r*| zfQH{LVC6#=$EM|*yuXNmA3c$|%`{mSgN{%FLHA)=&C_=)IKFL~fv#)*0+o^^(J+%?aYWKtenDG=^NFj25Y!moGi)P2%YrDDDR46HLiDy`DuR!<4w z?zx3vD(+8Fz}EieY^Ell-)bT)^I@-SBRqTaJ(uIx;i;8^4_`me(W14J5-#?YY( 
zfnb^Xvr)JY)HREZh!f&aGkIS=#H%-Qy8d&g^rNkX<{x$3^RT`%AJVclfR-a0h_lZ# zYFZPt86!O~$gffi8cib0YHiFK1Ddra#pP37g?o8CHD8^zk?FY28&c7(=>eadh|+< z?u_tY$Ayzy4~Zi6689&%c*YF(Z`Y4hihbF9>R4RmeCZu4x;@laUz-LH_%ar$8Y-lBt1);irj12+^O7l)MZ@B8buKKYe^Fr**kcc6|5j;GUtydE9%}{ze-VTX zb6yLe<^_$)vjJea9!1vA61+;u|rGVbU4(OS6^E@g>(q=#`c zdtO2fM1Q?;dNM;30qrS_*4R% z(3p0^4S3%UXz%{HJ{@&WL&HfALcTK;fBoki)W-qGG5IDls*)zX-DFIya;xF!FGhhq zJBp$;_~g*thDc;x$wVtlZ(L* zmP}3d{+TyNn~dnAvG_GXpT#w-PHT|-B7gX>7Pf3k^9kxAxMs!5)jXTU_MPWiQFQ1k zdylGb3+5XCx6Tz5ew_dQIRE`|{`+s$&i{u=|I?>_6lVadaEd}EeFYBZT=XBD|H6?- z_~&EO{s$^|2}i=$Q*AZK6Nv`m0@r4_5k%ez3jQ49bIaDd^2S!a1xH}%F(F{ zklY9)K%YbfoW7UB18Az7>o-VnoaPJ(BGKbJaW;`c0sfE(|05~|KuCPTcUk9&-%(sL zVGoc1S2T5bft0>uI-^|y67}mvT2{h|oQx%t(BJ9)%F?+n_n+2#$r_eSbY^#hIG`-2 z%d);NII^2Z-$e0M@<=Q-m3}w)wOY-uN?xX)-WFXjohe@m!KU%0{d$KEm>abLG z6bCe18TH8piO=S-a5xI148y6as;Zz=kSG9;L*dm_5Gq(K3Wvwx5J)vu6(tM`tAtU* zDj}6|Y8W(52@h96A<;-SjpN=YhuCY5Baraq-<@y;*j3MNKiWCF0(gQ42fF|Kwo?4k ztyKJGulX-^D?f+x#geNtn(Dw=N<7+$0&tvLd+smWdVeVXfxr(0ejxAzfgcF`Z3z4a D{oFJ8 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/A98A04B882E19D85FD36EE069565D80B055C92FF.pub b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/A98A04B882E19D85FD36EE069565D80B055C92FF.pub new file mode 100644 index 00000000000..fc85a4b3113 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/A98A04B882E19D85FD36EE069565D80B055C92FF.pub @@ -0,0 +1,29 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBGf23+EBEAC6UqaiE43cF9jFuVjA8xJ5j31BMhufpnk0cwoE5Iks/GgR/Hki +LMYbzy36V7TZGObel+5DtFKipX+WCwWj2XsjbeqHeuCkxZhzHFwfi1UJl9FO2T28 +iNn6OsBiGeU6ULNmehSia2hx0uhj1re/FUwJExOAvuYv8nc7M+nozqi7Pp/WjP8v +UTiqP2onzZJbidlSBvmZ2nheWk7G78e617gcV/ye+UyXZvciiF2UQBg9YV6D8JuD +YhBbNAVOzJOiyOdTBmZmOkmYsGx58sEbFVqGeOMB0xoxZrqKjMm9NhvjqjJF/sWs +hN/PD5ylW1UR05/fGxlG2GLKKfBInbdqnC101OFWXP5HenYHmKaBJoCKCAUfsoJ0 +r/t/GVh3z3w/99p0TRDONnTecKm5S9z3/5QjjE5RsWcd4ll7mRikUiVpe1WhKRwT 
+4T76pQLq3XwNJqiOmuMQuSHoBE9OMufvRFiTYC0QHyLoCV2H5PCWtS2xSsIDN4PB +0RNd0hnHKanVV7d2TkIrGOagoAo0wXqyW/Op6KUG1NdaFYYziDFEHeZxfGoPKytO +iS5PEwZG2FqambAZhJU5OXwzgnCRIoE5DCZad4YS6U5YD/2zg+RrQ/5GUxl5Cc+W +Zwesn9FV5jywx/oFePYbTSNQVPQ6jbUDvhmHvZ8c/OfGOVXQr0VpvfIwdwARAQAB +tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv +ZHlAbm93aGVyZS5jb20+iQJRBBMBCAA7FiEEqYoEuILhnYX9Nu4GlWXYCwVckv8F +Amf23+ECGwMFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQlWXYCwVckv9i +pg//eGjBR9ph9hUYRsekzKWM1xB5zFOFfNoqlpCut/W7LAfy0XXkFy/y6EvPdcgn +lLWRWPsOFfsKGwZd7LgSovhEMQ2MRsAUUB/KNZx7s6vO/P773PmJspF3odQ/lcrM +1fum2lShChWqimdBdNLrXxG+8duO9uWaMBIp28diBCyB25M/MqpHtKYu00FB/QJ6 +ZwQH4OsgXVQHRjyrtIGx/2FQoWt0ah3eJMJCEw46GgkgiojtoTfXQQc4fIJP324b +O1sxz5lx3xVBG/EZYzyV3xnSoG9aZNJ1cJq8EKO7ZoNKc/8jwkVu5gewGaXYI0LK +/WkOeiXcSHPMSdu7TpnitvLYFCjc9YAEKQnjooXdt7+BElwC3+5hZJNXEnoGPMzn +3UL60sQE/ViCsGcW+l9rtzXPNTmLMjEg4rGRqOhX+UmwyhvGD2QYbZtXlayu5xn+ +5m/PfmdqgL1xsdvNsLo/BOo+6kizMdBk48Xfp0YM8AC4BzUEENypGzC4T0WYF0k1 +Jfc6/eSwiytIcIkJ42GlaVfEFE8UxfYc1/2zqTBN9EdzWJqy0Bh+mVOgOaeb0Dzi +xWpUpChi1fBB3PXWJ5iAS/w0HSVn4G5/JAIEFAs7r6ju2YtKBfuk+u/K5Q28mo7W +6LrZQywN44nBMTvSQUhhXpSNYG+juyotXJUJ3F2u9Cf/jVU= +=TkbL +-----END PGP PUBLIC KEY BLOCK----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/E89D4971F0097B1E7A3EB57371B484802E78D7CD.pub b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/E89D4971F0097B1E7A3EB57371B484802E78D7CD.pub new file mode 100644 index 00000000000..46726ccbc8f --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/E89D4971F0097B1E7A3EB57371B484802E78D7CD.pub @@ -0,0 +1,29 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBGfHlp4BEAC5wkZSHqF9z6GcymuHpk1m9aNXCJdt4ZWvE8ck8GcuVu1nbzlZ +h959jqtwk7nFMki5YaNMz6jcQf0eeS75viL4CoPAqFiVyhyCCh5am75h9F7vTBq6 +190017lhu9IgkAkiklnjfDbyXH+BwqJ78nXp6e6R4ShFMHNGGvYLem1wmPKzqPlZ +zN0yjc0+d5pw4hu+IEFrM63yqGp2BVX1X132IKUEcROCQt1QOma5oORhYEtSCieX 
+PuhuHJOA7q6nJuFccPCs5OcDS4IbQgGAbWL4L1+LAGVLVGpK4IVtqEZ831Srclh8 +0ruyFFeV/hqOONThwwile0Jwh5Jz/2sYxT5c+nlumXWK+CXTm4OCfGt1UuGy6c6u +Rz84PHfanbKnATp6RUjz4DMREkmA6qBnUFqGLLGaBKBsm42b7kbo7m5aeItuOwLE +U7AcnBEqqHLfI7O1zrHKjQCxhEWP/iok0kgEdiJ4tlPhfDjQRG6thlmZnVdt/08V ++bvVkbYZyWPzjbG3QHyFew1+uzPHb2UopgpByVKYEWhCgNfcFtE56lEI9c40Ba5o +LaZl0VlgfSLP4c+LoFB6gZp1gcVQuPo1JKd1v5WP60f1iHhazL5LEeMYcW6kvujK +58Q683gSH5DsVAnxaj1uU4nvtKDh8IF1CNKKXk8RVsltdpv9bGhV8b4qVQARAQAB +tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv +ZHlAbm93aGVyZS5jb20+iQJOBBMBCgA4FiEE6J1JcfAJex56PrVzcbSEgC54180F +AmfHlp4CGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQcbSEgC54180aDg// +f7GqIW5LzYqIqkey+IjdkSSfeD47tlWc2ukKYStHu0gTlHhrUp4rHNJ/s8XQ1o6o +jwzWfNMYh68wt9sjuM2BEkkh3RUFEjVqqW+k562gS5ibfKTDtJb2Yj0n/CQKWvoi +vUUzO88xW0AnZFieP+vD5iI5Zw4H2dY8cH4X1XlWAJufFdH4WBaZjujNwNOcCsnd +w2nE050wKTR2wroWq0HKn1Ni3QNtKWPpLoHGAlhW6ACLa+EFqxHU6D3KhW6IV4Jc +sdt36nHNiRiy6nT99asqtN6Z0Yw+EnQSuIDosIbmSgZoieINh0gU6AKwgydxLUxL +Cu1w2fZHGuFR/ym0c/tTpM893DxHMc/EZ/SpU8fXkC9lYnQO3or/Y0mLHd0kSEv7 +XoonvcOu1tOQzmvrvUQUtTn4+6OKpGViyZG5C8Lbk8/yKWFv5b+Gpss/EiGTHSsk +bPTHf5jMsWElv0GgFq2TpybtIcY52yJoZ1fBMEA9Nk76Y/MNFlN0d7HyS6tWGr6E +8FWJB7RYG5XHMEDIKSheq+Q5cORwz92JPFI+sovZukp+20G7f7/gwos441KamJPc +y1+M4uO21aKX2fA07bcgFtm25gNLoHyvjQLcmyDis6xogvciCV3iQ/mtunewgYp/ +lUX1dv0R5o8TteaAIkbJicbdLtur/iuAWN404E/QShc= +=8P00 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/index.json b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/index.json new file mode 100644 index 00000000000..4e0cf4995e7 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/_pgp/index.json @@ -0,0 +1 @@ +{"keys":{"A98A04B882E19D85FD36EE069565D80B055C92FF":{},"E89D4971F0097B1E7A3EB57371B484802E78D7CD":{}}} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json 
b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json new file mode 100644 index 00000000000..3f64de63c54 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json @@ -0,0 +1 @@ +{"database":{"version":"8","installs":{"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez":{"spec":{"name":"libelf","version":"0.8.13","arch":{"platform":"test","platform_os":"debian6","target":{"name":"core2","vendor":"GenuineIntel","features":["mmx","sse","sse2","ssse3"],"generation":0,"parents":["nocona"],"cpupart":""}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====","annotations":{"original_specfile_version":4,"compiler":"gcc@=10.2.1"},"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez"},"ref_count":1,"in_buildcache":true},"sk2gqqz4n5njmvktycnd25wq25jxiqkr":{"spec":{"name":"libdwarf","version":"20130729","arch":{"platform":"test","platform_os":"debian6","target":{"name":"core2","vendor":"GenuineIntel","features":["mmx","sse","sse2","ssse3"],"generation":0,"parents":["nocona"],"cpupart":""}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====","dependencies":[{"name":"libelf","hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez","parameters":{"deptypes":["build","link"],"virtuals":[]}}],"annotations":{"original_specfile_version":4,"compiler":"gcc@=10.2.1"},"hash":"sk2gqqz4n5njmvktycnd25wq25jxiqkr"},"ref_count":0,"in_buildcache":true},"qeehcxyvluwnihsc2qxstmpomtxo3lrc":{"spec":{"name":"compiler-wrapper","version":"1.0","arch":{"platform":"test","platform_os":"debian6","target":{"name":"m1","vendor":"Apple","features":["aes","asimd","asimddp","asimdfhm","asimdhp","asimdrdm","atomics","cpuid","crc32","dcpodp","dcpop","dit","evtstrm","fcma","fl
agm","flagm2","fp","fphp","frint","ilrcpc","jscvt","lrcpc","paca","pacg","pmull","sb","sha1","sha2","sha3","sha512","ssbs","uscat"],"generation":0,"parents":["armv8.4a"],"cpupart":"0x022"}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====","annotations":{"original_specfile_version":5},"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc"},"ref_count":2,"in_buildcache":true},"vd7v4ssgnoqdplgxyig3orum67n4vmhq":{"spec":{"name":"gcc","version":"10.2.1","arch":{"platform":"test","platform_os":"debian6","target":"aarch64"},"namespace":"builtin.mock","parameters":{"build_system":"generic","languages":["c","c++","fortran"],"cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"external":{"path":"/path","module":null,"extra_attributes":{"compilers":{"c":"/path/bin/gcc-10","cxx":"/path/bin/g++-10","fortran":"/path/bin/gfortran-10"}}},"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====","annotations":{"original_specfile_version":5},"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq"},"ref_count":3,"in_buildcache":false},"izgzpzeljwairalfjm3k6fntbb64nt6n":{"spec":{"name":"gcc-runtime","version":"10.2.1","arch":{"platform":"test","platform_os":"debian6","target":{"name":"m1","vendor":"Apple","features":["aes","asimd","asimddp","asimdfhm","asimdhp","asimdrdm","atomics","cpuid","crc32","dcpodp","dcpop","dit","evtstrm","fcma","flagm","flagm2","fp","fphp","frint","ilrcpc","jscvt","lrcpc","paca","pacg","pmull","sb","sha1","sha2","sha3","sha512","ssbs","uscat"],"generation":0,"parents":["armv8.4a"],"cpupart":"0x022"}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====","dependencies":[{"name":"gcc","hash":"vd7v4ssgnoqdplgxyig3orum67n4
vmhq","parameters":{"deptypes":["build"],"virtuals":[]}}],"annotations":{"original_specfile_version":5},"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n"},"ref_count":2,"in_buildcache":true},"jr3yipyxyjulcdvckwwwjrrumis7glpa":{"spec":{"name":"libelf","version":"0.8.13","arch":{"platform":"test","platform_os":"debian6","target":{"name":"m1","vendor":"Apple","features":["aes","asimd","asimddp","asimdfhm","asimdhp","asimdrdm","atomics","cpuid","crc32","dcpodp","dcpop","dit","evtstrm","fcma","flagm","flagm2","fp","fphp","frint","ilrcpc","jscvt","lrcpc","paca","pacg","pmull","sb","sha1","sha2","sha3","sha512","ssbs","uscat"],"generation":0,"parents":["armv8.4a"],"cpupart":"0x022"}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"ldflags":[],"ldlibs":[]},"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====","dependencies":[{"name":"compiler-wrapper","hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc","parameters":{"deptypes":["build"],"virtuals":[]}},{"name":"gcc","hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq","parameters":{"deptypes":["build"],"virtuals":["c"]}},{"name":"gcc-runtime","hash":"izgzpzeljwairalfjm3k6fntbb64nt6n","parameters":{"deptypes":["link"],"virtuals":[]}}],"annotations":{"original_specfile_version":5},"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa"},"ref_count":1,"in_buildcache":true},"u5uz3dcch5if4eve4sef67o2rf2lbfgh":{"spec":{"name":"libdwarf","version":"20130729","arch":{"platform":"test","platform_os":"debian6","target":{"name":"m1","vendor":"Apple","features":["aes","asimd","asimddp","asimdfhm","asimdhp","asimdrdm","atomics","cpuid","crc32","dcpodp","dcpop","dit","evtstrm","fcma","flagm","flagm2","fp","fphp","frint","ilrcpc","jscvt","lrcpc","paca","pacg","pmull","sb","sha1","sha2","sha3","sha512","ssbs","uscat"],"generation":0,"parents":["armv8.4a"],"cpupart":"0x022"}},"namespace":"builtin.mock","parameters":{"build_system":"generic","cflags":[],"cppflags":[],"cxxflags":[],"fflags":[],"
ldflags":[],"ldlibs":[]},"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====","dependencies":[{"name":"compiler-wrapper","hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc","parameters":{"deptypes":["build"],"virtuals":[]}},{"name":"gcc","hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq","parameters":{"deptypes":["build"],"virtuals":["c","cxx"]}},{"name":"gcc-runtime","hash":"izgzpzeljwairalfjm3k6fntbb64nt6n","parameters":{"deptypes":["link"],"virtuals":[]}},{"name":"libelf","hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa","parameters":{"deptypes":["build","link"],"virtuals":[]}}],"annotations":{"original_specfile_version":5},"hash":"u5uz3dcch5if4eve4sef67o2rf2lbfgh"},"ref_count":0,"in_buildcache":true}}}} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json.hash b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json.hash new file mode 100644 index 00000000000..7738b6bddf1 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/index.json.hash @@ -0,0 +1 @@ +7f94d6038bb4e5e7fff817151da5b22d7dd6d1e2d9ad51bd55504676786c17bd \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig new file mode 100644 index 00000000000..8a63bf498a6 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spec.json.sig @@ -0,0 +1,124 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + +{ +"spec":{ +"_meta":{ +"version":4 +}, +"nodes":[ +{ +"name":"libdwarf", +"version":"20130729", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"core2", 
+"vendor":"GenuineIntel", +"features":[ +"mmx", +"sse", +"sse2", +"ssse3" +], +"generation":0, +"parents":[ +"nocona" +], +"cpupart":"" +} +}, +"compiler":{ +"name":"gcc", +"version":"10.2.1" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====", +"dependencies":[ +{ +"name":"libelf", +"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez", +"parameters":{ +"deptypes":[ +"build", +"link" +], +"virtuals":[] +} +} +], +"hash":"sk2gqqz4n5njmvktycnd25wq25jxiqkr" +}, +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"core2", +"vendor":"GenuineIntel", +"features":[ +"mmx", +"sse", +"sse2", +"ssse3" +], +"generation":0, +"parents":[ +"nocona" +], +"cpupart":"" +} +}, +"compiler":{ +"name":"gcc", +"version":"10.2.1" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"811f500a89ae7d2f61e2c0ef6f56e352dfbac245ae88275809088a1481489d5b" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCgAdFiEE6J1JcfAJex56PrVzcbSEgC54180FAmfHlp8ACgkQcbSEgC54 +180hlxAAisLofFhr/PQvLcQ79T3t3V0tqGgz9x6QnPKfbPCgvb66tTNlny+ML0fY +y1H9xXQO53QOxfN9cdXcf2EVbRQ2eT6ltmwekI3ZZuCaTguflNu/i11UV6UnDy3x +dXOYQhky5QjtPbhJ0NxG5XDKoRFoUPR/rgXsiNG5O0sk3M5H9ldpsj8af5W/6LCL +gCTNM8fF0TVbd4MF9TiIECFBng2CrxhHwpl2gPHHxab1zxLRCF6t1lZvL6To0hmC +e/Tqre+42PhRSCtXuwhK22r0rvreVUaiglYn8udjOJHwNVKdzLnTZ1OBAFeIq00U +9uuroyaF841pq9+8PitwUORurv0lsnHUbfbi/+ou0HzMiaXzz+MPdOXt8nUuyScs +oKOi8ExvpWJ7vn6klkvQtMK/Gakzd4YOxO/nk9K8BJgVN3qrODwHYSORk8RrdITS 
+tkjiEJiIoklddiwCf3NUzlxiIYWbiqKqNbY+Pxh4B+OpVDnvRmpkJHgoSuVoCS8b +coaOTIgqDpnIClHIj7ogxO+ureRjIIkGNNh6wVhlHDlgm1GzxNUOklMrzDkYMD01 +eTYxrbicw7ZVwqhFtR8olODKT9QAqXUJOkGHS9IA6FJctatkUkIOG1DSI52AZV1r +PYzgdKtTxS60EkN8Igl6VMTkaC05anLygCTyOvGaV7sqVKmzHY8= +=8OR5 +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig new file mode 100644 index 00000000000..81fd33bf7fd --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spec.json.sig @@ -0,0 +1,72 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + +{ +"spec":{ +"_meta":{ +"version":4 +}, +"nodes":[ +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"core2", +"vendor":"GenuineIntel", +"features":[ +"mmx", +"sse", +"sse2", +"ssse3" +], +"generation":0, +"parents":[ +"nocona" +], +"cpupart":"" +} +}, +"compiler":{ +"name":"gcc", +"version":"10.2.1" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"hash":"rqh2vuf6fqwkmipzgi2wjx352mq7y7ez" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"48c8aa769a62535f9d9f613722e3d3f5a48b91fde3c99a644b22f277a4502d75" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCgAdFiEE6J1JcfAJex56PrVzcbSEgC54180FAmfHlp8ACgkQcbSEgC54 +182ezg/7Bkil1mY6d4recJMkFhpBzzDs8aMD+WQOBPoy/bWHIGsPb1DyOOW7lTLa +QC9jh9Rq02oMeX0LWvNg7k6iMTayWcrPzJwk1rgh3pg/ySgCTZ576/aP/UOZwA8h 
+HT/3RzsDFlq7Wkh4yYaDgSEDVc5PgUevb1p2f126Z9HMFjG8siEWmuZQOcy4I9JG +osQFtwWTLmx96sBMzweZTu2i3iGTPNz4Ae1hu+v5clmSFg43eW7EWChEVoob+3hb +hLRxajZEPsIho4yR5yynoxduXeXrLLP7GH6XGnYt7Z2GJR0UamIrPfxYuWBK76V1 +03Ie2rRXwOKfsjDWw9Z8ziTVu25G0aZ274DX6eQyaWKfvzz69cBXO0fgw1lU8B9S +K0j9k/xtnDCrIkPSh4QGQpFRlbzxkj20E+EnwgDCGIlK1rBzo2V5na4YNj+SbC91 +0BmWrj6dRkQZUMJHeb95kBMfFpKG5B6u7HQxZtIwHFAfF0nypbiB7xmdy/gAmUao +ej3Cu34DvWtLVeSh7lRimeEc44WyBDk2YSPqYleAwYMZBn4WSozUS/KVLU2T/AhZ +VlLaEBaFrVngmsw5PCdck0XRSNSAN9HUgPItpOzYig20NeT1/69wIlUZVNpLEYGT +yvZsmqHFnkunAs6av3XmGl0i8rSA6DujunpNXML6hUciFEK5wg4= +=Aq8h +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libdwarf-20130729/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libdwarf-20130729/test-debian6-core2-gcc-10.2.1-libdwarf-20130729-sk2gqqz4n5njmvktycnd25wq25jxiqkr.spack new file mode 100644 index 0000000000000000000000000000000000000000..847e37f2fe1655c524608255f12a7caef85cf17d GIT binary patch literal 4099 zcmV+e5d7~SiwFP!00000|Lr~NZsJI~t^KviORUA{sJmytxO|1IkcJRQNVqkSPST^6 zW$ZFIF}7oy+kw!ZI&kLN#I9p{5m`iuk3GJAqz9`uB)%~mQR^@Wp zA3E7LKNuJUk{|?E1dG=d_oBcHZz$nMHw?qb^A#6W8An|S1RssRbK^tL&)9m#Q*Ga; zuMFRZR&V6?*TWj$J^p}aSeA@`9B|M0hf%_l7~%WJUl0IK#y<{t==kG{JaRO}hw?eR zkqt|O*YeO-&t=6`nQ!vPcaJ~IGl0l{EU>Qew;lhhJn)F|7m57G1M3|BAgKm>hGK~s z)pj8}W{K|}e~tkFknxWL=q#Q6wWVC|Lq&t`kE!D;<31g8Ao2_LpJ9RchPwVS&6$T+ zKK=lRl%x2A!VgvieO1i>hGuDy-Fogm6PQ6P!~XsHUt)> zo};MeWE!x3NUHuP{(n4>1pUu3%laQb|0DYU#{2zN7{=8d=**ZKuMaOQlnesrb6>4iOR$D#8S* z5T*LiN627sL#(9?Ax@_lI+9I<3}jWlgsx}U7K#BhgZ?yl$x!d|5-;VYoGR&p2$a0S zi%{ehkn4$(&M-anCy3}Oy+`GoL)U_4q@bS!?mt>j$oW#d0rHBtD$nk79kJ1OHk0;W} zr^D0YX{U4|eAqcXK%SIuU>6NYJOCWWOMvDCj?1G`J>wQdFo~yBsQ+E)U`O>O@--Q!wuN8GQiNu9$z}D@9~vpqyA|% z1#=M!ieOXd_qAoIr%`afQg4(Fs+HZHPu%gxdaESV_L}8VS16q}>czvxY8jRg1^EdH 
zSuVFJGz?UG7W1HQSTwHlMeF(H?q)Gw>BE<8-&XV+qv#uDXg*C`4(mdd|8y)Ki(Iq9 zH@L&S_HjepITZM6qgdsR?^oH}#@M@WaL)>-@^!FLlmO6t)!h$DonUC^=4pbpdsO^% zTG>0@X|ol+czj$eA@hB}wTd4b-4Egiaiw61fXjp2M6x7Fcp;=koV7N>7KJ8XY&So$ zy=WYjXd`g>7Z3w>147JHt^zvutI)goXiIh*0fc{q#eaq2T!ByW2VFl zlubNcAE)4!o`57{{eSWOe}-kq`+u;G}U zmy~ZKnL-ASW_U*8B_3V!IRT*ibjm>n70*b2`;rPD^CN2YA>JW?bVelTLDjZ2FDM2e zuY?yxKA-2<9C8GaT~_UpsraF7g783{Irj&*@3A)5#QyRn!4O>5A#WsiSePkxu-MgJjr$ux$S& zV*ld+kmokX{;yc)Cl0{VAOZGYTJ`)Nxdg=i#{lr!v0a}kl?vH`J%r;qc=mArLw1=L zT>A{FzL$*%BNpcH4hbY4Zv6lEt!c5v(tcUqt#*n#wQ>RceQRWS5aUbJ+e)pt*O}$0 z7Z1yF3onP9$gN^`zYy;%9GZHDp>s6gGVaBIy&UPHesOg^G@RSM!CswRa{@cOkZvS+ zJ8N^ZQK|09oocsS$ZGI1YmH2E+CSWhS)g{*k*keT?P#|w&&RYPw+l2mIa%N6{PN4R zo|BVkttTg8tzmd&Gj~sGyK<#itL+p^hjOjD(=N78ms3<5opQSy##hUoxybokVIM1! zw3a^Vk~AxGJxr~uljc`Sb0n&iP*-N5`M79QZFeqii3U?N;`>^&xEsN=j~erdR%MJ< zbySq+SPmni;d-k&6FwX`If;ZSn0&KPo{5I0*_t~Ci|i4Sd@Ql+;nnSRl%t{v44wi& z%QJlA60W1r`^yOZm7Z<;p6@D7*7K3BL8Vxu>x>F5-}a=;bxxE;KB|9$rO&Yj0us$; zY|*o?y<2{8r44Rxdn5i#5O1$k)w~$E%8(VZJ;QorhaQttHBNo5G5Y}W;mJyG?N*3KkW82;m|SgzdFeBG(ZJ~^R;n;k>`dD^cKR?;4C7z z83(7hKAOmR(R?Aov(TuFGfeMrYn+keK;Kk)9>z@=zIEE&Z#JT0)RxPQkJWawF+=P( z>*c8+bXr(JVzY|9>#T3j*~Lk$+{R_f#r9qyb8MzFE>4_YOx8rgS1Y&WK#9nQ<#Ma= z_U*S5wacwqu~aU+%^XA5wcX4=GRKyk3GNG2h>YOAT3n}9E)`JIe=Ur8bTJ(^P17D? 
z+p-w_pf2u~caHYtT5}J%3hl?~kFSM!G z>!gSkv0uqpFE)yM<=rs$;m>@qUzrJ(znE<5=YOpCVNvSm#b%Bvw7LtO{U&m@0!e*4 zVf9}plk5M$!aJ5-DwsLT-i_zOVWAH#=o%`8Dn+yZWxqI-m8HtM&35^o!5F9c2GPd} zB7I8qqc)snJMk?P`9&h1cDm*Il(AEYM}r?^*TTyrDDu;r0A*OJInrR{HbA3F!pkH$ z&KTSTC}d{C%Op6dExe7Q$C;;rX;h^4NN^e*wj#WZqGtupwP>u^<^uCk(Kb#I%Sj@| z&rl1=7TP|CTu2)2#{{vQWUM zqjvDVnLhYu+xhSPI-U9B&%@()|JqMCes8z_=g(j0>Ysmn_`kor`Cnsn_5OL=O<+Bc z1pCj5E6#r;_CFQ?xs7`M8@?S!O#Zqc0rsEeIG))5XM!&&EL3cF@gSG_oK6KRT#6qwk-?k7Txj`0F3VPJOTlSWXN2(h zOqrbPN-^k=7s2p=6?6wc2^s_*kRwxZ}9 z*T3vr#<{3oOZwSJyBetC;PU25b6x(TfB>~igASrSv{Ykh@bSAf)BSJvJtAMC1S;6S zamGUzPz9wh4eJ~a#iil;BgGu|25-W9@Fu(Tasy}MHGY~;I5R<<3C~=PH6{Pt5k~{f zHAmLPC9i1bqw9;S8%32adl%~2Wna_=5<4_aUUiHKXFini?mjQS>p)-`lHvcaSpOsY zKVt!qV>ZYCAHPFJoc}cfIsf-{%YPE|KiU8Pe6Ugb|5v<2OpL+Pfaw3%E&qwo|9~au zzdjSN%m%&x{jJl3h#`0sBw+oYVR&{~|C94yV*xBb+glVhT`}CI-bMxNEs?7i4aEMx zZu#GI`#(KHis-3F0h#~3Zuw7y{%2Qw|AQm?KMp{K_GRe*>A9E05PT01{r|e) zi)yhbycyD?Y@G_koa#YU8F`TM2avMSk4xDW^r)T-m2-M4NF!^8>rn;jx5%btDiz#l zqoKp5K5uPpsixw2R4q*Kt~GAVd%Pw}QTSX02e++2r3Sw5Y-h7qS68&-!b=4S03wYA z{cMnv9{58u733PZ=C?B}X+RqBApYb^kli$}s0C$A>fZ_?!kwP)BHOL0p(YekBH!a# zG0(z0FKGhEF%XC>LlcN(Kp;lVNQJcLhEcw2j#LWI|i>u@wN`1VL0yc?tG$vC`! 
zy_O;o{vW%%|A(yq#se&qe=+_a{)WKomE^|(ng73T`A>xYUwr=$>wkgh|2TkUH*Wv` zL%$(IOu+*n0rUTr`XBIQ{vQVKyj9|y4DW%>VP_4oTA0s0>RATIm=hUVWYr2^0i8)b_7)4iDF`vsR>f+*Okei+4hAhx?ti)+~UC>31=M*8IlX9BM zO1utLX=U!|!fRgZ@fs(pYz`G5ihf`*OLR-giJ#+#UFi?|R(@ zynxsT)fZSC1A5!kZ=WORU=SB@KD_qv%qYO$5xYcy009C72oOjr{2Qy^RB8ao001)W BL-YUu literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack new file mode 100644 index 0000000000000000000000000000000000000000..5a56736f2745f61d994d2b00be32de712b5f92be GIT binary patch literal 3633 zcmV-14$ko(iwFP!00000|LtAdZrjKi4tiDiB}!;fB-ynn4v&%-0wPPYEnBBZa%^S0 zASiNVF{VhGq#COL z(HFYsL31#1PF6EDK70HD&#){R|0M8<@eiYf2Tg>}AAdmrJQ@EaaM$t2U-HD(6c5U$ z@JiNA4PMG)OFfkpM`b=bKR$c>S)Ktz{u9Bf#^18NTj#(%#$P1zpA1$x{(e#o4s^v7 zv#RAlHkcBhJ^ma603hR^1n3-{d(~Dh_n@Ld=W)uoW!$4N2O>Xb{}~pDCsm?6E z^6>{iq-@0-6(3mbp+{RP4ncxPA4EyKne~BgbKM|xs|Fb-|sQ-DQ|C0a^ zxuo^Kp%0)j%rbPI23*!T8?on;p*TFdI34Tu`xBkLcz4MO?D$N&lHhw{E*=#`|9{=` zp9=jifJOb!5&fS8fLKVm{tuE9EATi-tNsVlGb$08{L1A&9r~Xo{{IsJNKDH0KkeFz zdP;5s9tUaF|HS`K25Hd$9J56KGerL<0Z`!AL;ve08Ysh%p?R)5mN|x|N569|lh}m^ zAu0VY^8D@cFNh2JpJR#tp9DT+Miv@b%gt!p9aqv>P11~rGk`KrJ*1KrJ(R46nO>Y z2ck4&m;w3|MD#7aM0`%6V?raw=o^9iE6dUhPlN2?U}Aa`s@rH&?M}7YX!qnwzgcS3 z%G43msC~txfIxx57RYb$B6Zj+Q*7q?se3zpNQ3=nC27I_vt<1*5r6_%FZ(}MOnr#> z6XU-Uq`>}50w*lkf1V-sKM8!u(@ow!xE|=ntkT1m5cy^ELip}QEYqrsSb1WUdJ?NME z-SV;Udb_`m{3lPxjv0`6063190L@AaS70UNq3Q2Y43p|k3e~;?ZS0$VX;UC%Z;v`EI|x`=--+E3)mkyx85TciUWh>11GeuE2rmWDq2N zJvt11W(O19(6Z1xr@c$>VO=LlJFSB{zu!Kn937P^y}j3Lx4eDWm$vt7mG;r=<#L3& zjuod}#}xorx4yF`9_~B( zhyL^Rcn(iKPd8~?-WRLX7n_>HbmatoxrS^>n=y&LQpU!^MC70$)cChY;*rR;cK9ZD zu-iFmirWVQUu%|X+|hCkBOncvj}-J;n6{5hn+1>Y%bz`SO`nEo`|4rSsU)=cQm6HX 
zt=8VMo$5|)ud!2ZcKYS=v9#YV_m)p5Y1Uw^kLUdHPBko5F=$)y)X~Fw^?r9FGrgMX zKE0pht;2G2I@W&|-~VFC^WQ`O^7-{z|HXR5C5ykSLJHP@nPuz093bnzN#H}qvyfaN z`%5$Ewh}30hT~X{6M1Z!ZDil^UCR$2GT~!h)LSPIKOsgsBNlX_YMGkrvjHLpBuoo|Wfxe^QD^w( zB~cO?KF><|Jd+n=Tz}l0at+;X6sM>oL>MZD3nQoJlTHJ%{{cR2|Aj^S4~YFw0wACE z_djl5`6Uj(!;u2}FCneIX#dIkABg~7+Lq%{<#I7Mvc@o26389wz0NJp1;=^^RnN`E z1QH8#c!h)#4>$h*ej{RAD(_Y0O08Sku2+lT`;Cd|LQF45mpk>+ZZ`@wN(WWBjTdH) z<#wsJcf+dH+^Ox#-CD0&%xUmEXHEHEmeGrk+?5ej)l^CUS*qlwY zEMvT=!;(BR-}9pjZS?;AV8Xu>#P=7fYMhN6Wy}h>fo|TjLHEfyG?Y`A$Gz(3#&-40 z%Lz0PXy2UT)cP<Hm1C_^4Bh(BpPx_J!yVRZ$)!rAm_-h}GB9A) zgcn)OQ%>C5Qg?fuS{tR;boZkf&@_=(r4>`F5tmGEWJ&Hc^gDMGf~mDZ^P{^s`|Gvl z0h+8+7E5Ap$=E10OS{!d7<>1pJoxS+>#r~|xy-jet@iOy=G*yd20gUA3EjOGacqu^&)_)mB;uh9_S&^*&CV@Zw{g1)L z|9<}Em+r5>)%X8d`?+KPW3NGHfBXI5=*6#lo6TQ3?SK9L9bNnVx7Yvq!?S|5?U!}}=e2I5;xiI)-{Vx}CnIAVZ{_>mR`4wdFhA;=3o}bIKRLfLw z8r7a4IzG=NbA63BfQ@b4z}vc`+Mww>)6zhLWv+uAVpuWZR5Nci`DLL<4`srf;){Zq zW3ViU_mm9{>X`_}ACCP=9N`9PUkn?;MUbB=6J~CnL=< zCg#~WuV|-}%d?9sMU~D6XX?B26HyyU?AS1P)z;672$|2=-S*>JY;Y&|DAM8oEw2BN z^PiIekP`p>mxsL0M4Z1BgPi~Qb<2M$^gr4E{bX31{ohMoXC=nq;UN0|>z4mC=zoq8 z7yW;7{(m9>g*AEq_hToF5JPZpq+tD*VR&{?|C9CKM1W;zYlEVq^~^2md0fbz6WRKr zLG1t6E&uCm|D)rKh>p59$o=2fE&r*||6p;LdLJ7NYtA4LCu-SVFb z{m(6V{!gC&Bmx$!)${+m9%o8S!5xu`^}l7m|ICy1za)?f{m(A`{uloJZ=(MnA1wHC z{QsLX!ioC7D^j5U0RW5p|5<_P|0M7YRkrLaM?V>PR2K^n)ea6Kp(^kk8kP+mm!`T9 zQsq{=->L2H^{An4Kx$|?lmiys zJS%ZpVJHkmjpr1hkeBkB%1ZnYs?yTj%rtjF8}J$@s%#!HkP3{JA1abI1VbLzNGqV+ zesE&Hm4UZYs`|tm&@j=z|G!)8+oVQ{ zqZzt+>dG3_CN|Vi0nP;C2zMlVR#fbVPw`su_^h&-*M2vmo=4^R3R{Qn7WKoAF|ywW zb2JZLdibWor(YAbPC!6FKtMo1KtMo1KtMo1KtMo1KtMo1KtMo1a1Z<+?<07%0LTCU DqZ2%| literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig new file mode 100644 index 
00000000000..d50cf662f95 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json.sig @@ -0,0 +1,429 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA256 + +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"libdwarf", +"version":"20130729", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c", +"cxx" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +}, +{ +"name":"libelf", +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa", +"parameters":{ +"deptypes":[ +"build", +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"u5uz3dcch5if4eve4sef67o2rf2lbfgh" +}, +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", 
+"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +}, +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ 
+"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +}, +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"0898457b4cc4b18d71059ea254667fb6690f5933c82e1627f9fed3606488dbca" +} +} +-----BEGIN PGP SIGNATURE----- + 
+iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc +kv9Xlg//d7uWhVbHjujSXRpoN3hzH5sUvvTSZ9xzvXGAXCoAu2oEGg4hxZPIFQJ3 +pZzKysZMfeFg+UKwDzex5TlKZ3JtKgCTKYl64zZfUl2EQgo/d/Fjz5mSFHW/6sa1 +1uTe3+sVt+HlijN72t2412Qbp+/uGvU+KBvXPA7kgkp88Kd/PL9xe3jlT9ytH5Nw +3LIghe++JiepjFAKXTfIA04EjLb8c50AAxsK5Xx37HOOVHHQ8L9anFnOVYM+DxAz +gn4dBYUQ9Uu5k5uEu5CwtxsED2/Yar7YWIepEnyp6z4zQVbwjO4/w0vZ3wSJ9c4P +UhZs8V2akuqIWyzlQuBOjywnEQc/nw9v0py+Dr/Qr3U4XWh/LARWABMxa4IqXMOK +aVmd6weVjV4U929gaOT/FCtZPfaFNRbk97YP8yAxuLhSdiGS0Mp16Ygz21fVWB7C +UjkGGsKK1cdiJQ0m1CffmydU/nbDjSuw4WZIoIgDzvN7SFm7YBtE+xY+RUPsHU22 +QMAXojF5abwn48HJeP47MYdfR7+nUJq6XJiJ7/80a7Ciy8SAVxinQWqvigf/hmTf +kAiQaqOVSlRBJ2yry5fYBKHSIRvghCqS4t4es8o13R7n2wz68VqKu0JkNlT3Ijjc +QjJYtI+844PCDNetPVV8iNWF6upnTJnPHcFmKAEO1663hOc3Dh8= +=3fA5 +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig new file mode 100644 index 00000000000..745b3b61492 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json.sig @@ -0,0 +1,317 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA256 + +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" 
+], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa" +}, +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], 
+"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +}, +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"c068bcd1a27a3081c07ba775d83e90228e340bb6a7f0d55deb18a462760c4bcf" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc +kv/zSg/+NrS4JjT9TFSFR/q2vaN9aL7fSTunxp+M8eAzTmg0sgHc/D6ov2PMpUF7 +1E2mnZ2gL5a5dHtsSCf30ILFzQoD+m+I9yOwcJopcbEjr8pcnXBFe6TT8lkxlXtI +EHNsYGMUHFbFvc+hFdWatQJicdDaIbdyEMGAC7Kobs/4KpdBF5VWV+sIrzD5+XzO +ACiKRjBmcaJpa950nuEaFzBITgq1aDtZ0EEZdXYvjRnzj9Bm6gbqmWzlllW1wf4r 
+5hSMTpAsRED4TxL433nuf0nKIvTD5Mywzs88kiLCtEABfDy1qccyBAnjyNypFF6B +fPqSDnr33s+JQ35t7RcHKfrgowk69UablE25YOUrQP6LtH4QzLBLj4/Z0zuz33hO +v+YYe51DgixsMQ2WCKWEO6sNcrcrLBJMFVwUP2FyTTdW3jCYRlFiTYLSfoDhTRJ/ +4o7f2eEp3sVoOe12jKI6dw/P+c70dl8K4+1ICcnZkwsb0pd0vt2z4J2kPs2+1/0g +vpywJO1HL5Zy7/ZRlmeeSMHYEDX2eKhm7QRFbxw1IEbg3stQCA7a425JWztyJ05K +sfhFQgPt7F/xanJVFYk/hdza+3+5pFr1K/ARcLFBdLBKGxAXTMMR+NkMp3J5NiOo +SMZJ3jG6xA2ntvSkyx/GFawD0FpnlgEByU3E+R/WiQA4VojLpvo= +=kfWI +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig new file mode 100644 index 00000000000..5e84d71f883 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json.sig @@ -0,0 +1,99 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA256 + +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ 
+"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"2c1c5576e30b7063aa02a22111eb24b3f2a93c35ac0f64b4e491c7078706c0ea" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc +kv/T8BAAhK/v7CP6lMIKILj35nEi+Gftjs7B7f6qvb4QNtqcGHum6z9t3JxkOOrd ++q+Wd329kLYAFs/y9eaGe5X7wY1U7/f863i3XrxHbtmrnMci61D8qMjA1xnBGC+5 +yd746aVeV/VRbJxTeB9kGcKPMcIQYcearlDMgj5fKfpCKM8a+VyJfw7qHNUyrTnu +d6LSGsEey6tGkJecgnJZTNSwryO3BZbg/4EviivMXm38AKGZrSib06qjkoHrPRvB +8ftGSGlK4YmFs5/YjKFL7QzuNJeqPNJt4mD64tsk21urOfbQJe5AmdMLPGY0PbW/ +w++06c8lsd/6FmzUwlnTBUa39lKJjhkhoK7KFGVqZROcXZfhwAyqPZt7ReA5FDMV +l5X7sytjQuSFaQPGi5g1xXQGEI394T2I55p5T5/RuQ2PXcFxxSOmIcEcD8o6Z7+x +XWLq44KUWQyQP/StjaVhIz9YPogeBBJllA9hN+GzVrr2i+Esu1QO5uDgVuJP7pTA +9wwCLV/t0hf2TZcpU2fwEu+DMniaHm6haVwqiu6QGkbkMBx49zkV9b5i9L441GoC +Q86R2Gs9O0+QzHuN6egbQ0xKm/lfU8dmJSzV0snXawAeQ/vgCpdinx40EMc7Nz03 +rgZ3j88c/ADvCb1DVKmu1Phf6U7WqG6/AvB9tYl4Zl30VX7ETaw= +=ifvQ +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json.sig b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json.sig new file mode 100644 index 00000000000..7ca58d17725 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json.sig @@ -0,0 +1,151 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA256 + +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", 
+"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"f33e7a6798a5fb2db6e538d3a530cc79b298e36d56a1df385d93889a9ba431d0" +} +} +-----BEGIN PGP SIGNATURE----- + +iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc +kv+MsRAAsaQjZbB9iW/Lq9b87H/E5Zmv6RrClvpjSnwvhLR4nhPL3p0G70k6tI/b +NEdXctDyvBOJOEoLaEBrCODl/3GjV8B9Gj7OhT/BIKQjlOfJqVdwIrnHgav5ri+Q +UUXLtejhJiUNoxeILI/xZx2CoKT9q/3EpQ5ysqdybJmYJCf/hv+lXEhnwUIv8vV/ 
+xdRYY//rfeMowCNIZtFPjSejMywXJfFKjl7h5dN5kwM63D6z/sh4zW7tqHq4kk+A +2m0WcorVg93wAm+YoJaQJVx8bYeMGfV/TjmY/cSouCt8PM4Vi93vwieZCkzEpXbM +BkVN4X3PTMZSOf0WTkEbnQD5v090/DoQPZyBrcDoJ/HmWDiz5Is2wUI0mLVkbg2L ++rKNC3ZajJhsWElMGNNtZRLmGeTIe8hT+LNAejo221vrOJbnUmpIjKxVjStDbXmW +nulgyEPSTfsJaXgbXmeJ8LOk0tWpBAGC16VzgXrPxoGD2XKxoiPCGLNrF/l1wyl+ +n+nw3TchNFrofpPrqJzT/vS71B6KDb0PVSTQZfM9+FahrQ+YbsIkzDAuxVZb5t3q +HUME95RgoIBbccUGxAPwkaNme2OLaLzsJZ/Xhl5I8T1fraLYapsKNjQ5+CSKO8+t +MlJYgSHuazWSetRbZ2H7g7QJWqeHUAWi9i1szpNDYxTFSs8wgDY= +=edPy +-----END PGP SIGNATURE----- diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack new file mode 100644 index 0000000000000000000000000000000000000000..dbed0b978af0378673c607d208d504c2d4566fd3 GIT binary patch literal 9063 zcmV-tBbeMDiwFP!00000|LuKgciT3z@blTff>-%axshmb5heLv-&@DooYjq!CTW_( zLy@3Fn<5!5T9T9Zx4$z0NpX|6q_Sdh+L$7i0WcU027>|U`PNhm$=-kdMd$$Zx?TDN zC_XEG_hg~>AFlgvRw{ug2sIyyTt`_O2wvR(Q_cU^c;MRoFLcCC-T!NWZ2z0CZ4f{B z>3neg{udOvTlfDupsN3a;m>7(>-WDXc7?kC*8wZ~KTys@^Q`ZPi-FLwC*!U(?G03? 
z+a1ZFJh&!%T)+Q&9Z9VFe;shM{(pNd4!A-8i&8EB*8(@~e{T_zAZ)v4JJ8)Q4d;i8g;qH0b+(r;HW- zp7y!eQA+DSLHZ9j|97gBhb`}aL6SL73&#ilrXawFYFk5Nrun97IY4sTRwu50u4=yC z+1t5oDr_&TquZ}9|9d@2s^$MWU?b!|D0?OO->K#QIzW*6w=e%)FRbT)+ksmCZ}<4$ z2>CBo$bX@h|LXujRyITa)1(ujo(I+owfx`i@xKZ3U+Q$r^1rtKtObNYZ?oiolB_Lu z1gX;#2kodEeU%JdA9pQNCQ~8@#8i5&=$oQFG)Lol%J>1O<^Oh%|BaCUf?Ui0TLQ7O z0rvkNl>dC-Y5I9R4cr83`M=%ce zf$P-s!UeD{`L8U;cxn90Mf-0o|JO1mw|xA6Ym8m!B{yj7Hw-egG)HN)8lJ6%rt422 z5IYehW^Z7Up=`=Z*Cd+03wX|n?+`nK=%MD%EQfne!@0j$I48=y#CP_& zx8sO=ra2rZa$;v+l=lUNJ2`y|FJkw>d|zCw(wD9YWS=_(uHkCFJ*Q|Y+-;N9c54-b#z_a7d= zd;Ql(>E(kH>G{*APaeH~UPWT1(~~-)oRC=T$#;Xobkyz*Bg+mghezyNJcP@{*u!vb zM7CM5RZsYEd9i{_fR)&0O`KK4p;&A3RFY?;>uo=B zn6#-WbOj-l#PHtY$0h;suh++aAHI2g^62>K>Bo1+$0yJJe0V6IK7H`+>FLSer{zhY z&v#{|*B>a!Bp{%}%H8h*Cg6S=;UD~U{Nm-a*C!uep8WOZ<7?^J^N)Z2-F^1_@rR>V z&z?PhUAfZebOwDXp}Qh(57li~*KFR?LVf&WWS5R*@u_sGyn6NcPx0mHUmtoO{(k=P z@Z;&xhwh&bK0aGPb|qcjb+&@$6zfmpb?LqQ=b`lB)!R2mFMA()FO;KqPo$Fv?;aig z-F+%5l}o8=Qn*u0)*<;S-`(LrY* z^zI5pA5>RyHGK?0re+5uRedhi4DRA2e*tX7 z{4ZAA|In@H|8)SY;_u4*pZd$x)Be@r_UC^=fL<=o|4KdouLHg{aM<8_Se=lNT58f0 zSGX7Bz$!U6|7RxH<UJbSIXMd37(cFt6_Gk_b;j_gsnsf6%9e zbtcm=2z|_I>JtqgwlYlM#Le$7E-XSf4MZb~JOWaiEJ|=0P~|+(|fhCFrvf zO`wQy;V~Yh^g$|5EkBGv$)pX+Xt*O?UluhrdQ&+FMvnW%@a)liVT~l$k0wgbk*AaK zmunZ*pj|PaH;QGZZU<@E*4NQj?<=v-W}4+|ws|&@&K1)MheJhnLdChTY}<0qFKT7% zB$F0)=1U99Tg(?{k*yn3{d_i?o%zsGE9i}EPrE`xTPq5M0L^h=xq^I8^SZt@vK-A; zQHYt)$!bn`U0LUEUBoQ8qpY=rt0mlzNw^^BErz4%7gIDf(Vm_ID-vyL+vePxhhuAE zi-tQ4&y6q2%;|kS*T04&k#INS{!4{iNn~XhzQc>W@IyS`&@iteH-1Xb*qSqnnEPxj z0q@^Owsif_*Br_bKeg}39IE7K)GakOQS8xY3-=Eq7`FQkusBWVYpNE8zBP*zhd zXN_l)BY}QzMDjVAe$gb+U5JZ7`w`Z`%+{~e9bVra?tzY_QbJviQ@S0DsVrb&|Ip}Rq1ETA=lyiydc#> zt{!qfHslEC`l8(n&pjm?8@?SmU#7BVoJaF7vxTPjro%7#*>t2BFB@or8M*-|q2$x99)# z8iHDKt$=#{x837^BjkUt;`DwaMF@1N!g$vJeMqC{H z!V1EjBn^5n({tat7wxdrw(ZzF;z=^#nUORD6%~AXGJ}WGea*K>kWY|!x(>B~FtoYw z1oiJbJ3G3q1p!czVSLZzOY?*!+(p|e$2A%aM%<)eE|x=Fp!=2=@;fwHf&`OIG{*?I zk;{c+-;G9Ncv?X@zvaZ;M*U8qD-h4yF{wxya2g_UfFKKa*8r_!KnUSr2sno38{CpE 
z6wh(U5l0)^B;W$z24AzebF$z(i|7#Knw%LqI{KDswiPb+StA4<^MQbOXF*)I(B*=e z<}J}fwIK`&<=iMlM6C84(E`g}u);-#$1s+K!#shGtcWjMHj%R}2ikyPTHJpW0YD=E zIbCyrZCP_IWZb3uF$bAw@f`Z&(n|S}Ltpk2Jndr)-vgF!Tg=NE$Ty1c^ASNj5@Li4 zF1byRGb|s*k&Dd5r-)u~MOcjpc(WSGd%M;4aqQd_i9~7Z`R>#*kOJI<=wr>Xf(g>r za;N~HUf^8J{6r5DPG;Rab|=Kcp>V)8#$o6M`+IvMD;!5d99?_Kl??cuDw;m>ZAfZ1 zPIc@=T9P^oWSpPW-`jv`?#HwcVt%Ta;*ngOxR zU9SDVT-xjKT;jb4A`9gc4t5x+^e`RAz9ie=f@laRTImR<5{rsWI#`3n1ijw_&TyUI zLT*ID3|cnaXWAl>8Bi%LvO{iaky)C~Ct>6}xwZ~chuuFNv?t&pf8bVQyhDPz*UqDlarQ?hkM=@VAXKCZ*P2l)$g0-vvFz7DJ> zcG56iSL3ceD0B~!gkCJVRsl{cBH3Ny4@jsY(?uc9#O0K^cK8GY7sti}a6HPz%y5DE z9=c*FCh4YcnjQNNLr|wEh$7qIZ9JG?3LUvs!Nm|WbV@-*A=EGj-EvS-0yfNEuNb6z zUMXlWCyuE}k}m@=-Hw?6xk7 zhAOU1V6MoIXW%$|tSTU%pt6WG$%;aBP>k`SHkkda(3%oVeRNcjPNz{&;gwOowzwo3 z`DaypbsVkqY7*z;s|b*$SkZp-j7C;fJdX0ia@2@*+^8yB5?585BrZ7=UP=J^G_f&> zRfRee>s=U)_{;H*MkxUgh_8sAjy+mEXIgua;Z7pW3!*?=Rz#|Pj8?7ayiD}{`xhrL zFNq#>@(ET8R3;#I<7t)5VMVXH=M^eRDbF>f@jdPlRV7m$5ykTnNR*|50WYz7kejs$ z)$vMFu9WZMHZ)W8drXp{h`hM&m02&(ma3tkE<~Q1tN=rm9SzQW?=V(SA*97ElcBzP z13k5`vcA@5OpQfm3a^>T=srGYDOEmigD0xdYVJhrH&e3+#O=1Qw`F&C{=*;8~A4_4k* zs8TCPK)IPN0O%t9b1TbxIOLOSV_F>E(4wLUX(dY&S|=7lmvrens%|SPoU~c>&PPyU zzOz3irF~A!g}J;jDS#ueF3l{eO!W#3!3uRk%ZbQNWmTXWu=KqUWq#4`OnK;|X1>j> z6yNSnvDQhmX!U_M*lNz?N;`pis@j!rPQp-a9ljc@*I#4WI z^lmks%y@8oD`NPrzBxI3`&2!A^Q6(r*OhXos`;ZrWg;|Nm-vRqP*`^Bcy$KPZraFY zi>)uJ6HSIV-c&%5HGuO;0;~imJ+M|SuikSNq&eeUyy1o+%qV~zFON`xE|xR8V67hd zi*namfrbr+n$bq`XtJGHU+E15Dzzx)*ppA{Pf6L_Z50|>=!>~dU>V!|OS2>L$oE|! 
zLXH6MoFMYhGEa<`mhQV*idKxMWn~kqW*TEAk>y-Pj(7__|L86D|Ec;d9c_F>_W^dl z15?DP3S4lHJ2VU?b7BK@W|pSb4sK?_pQ0@VZ7+U+L$H7@q+&W8^W1xawzO(X45k3# zH@1KUV^ejIn8gYR(*m2i4y=9uev5+GrKc(x&$cT-(Fmb(plIQ}G0e~xZwlb=0y@nnT&k2MroG>h?7!n~LayBbVKxx9 z$3+GEY)p!@n~~oDCoK{8nkeBhS3&|lpT3a$$aPGZkU}&wb48_*V><>wMcXwEzcTjD$5Z;8+53*G$D7%H^`-RFK=W9SmucqtKB(mu?iw2T9 z2yn|VvqKoTEjmtd>koN$L9VI^_yu(B@ynAJ$4AF+nc{GYiuZnl zKXWaogfBzq0=7r54j;UDq`rFeUVZrX@Cjm}$}-!qRUI-@4cxWbU4`J`@o6;%1E#A2 zikvc?jLekV_e-tze5%>d=u-3Fzd1ZPfhRT|Cr3IuSFT@U6*^wEvP%~)ShykzmFcu8 z!=|?~Hh*uWTI@b5esoI;oD)4H8lX7|lqlyuSFvhaGI7-F78b8bq(N+0#=(-87{pL9p-PR5p5_Mxf2OB;p6L1;CN;_jdg1^| zh$lx!OS4=oXSj5cdGVPEUFSzm#Jm28Et0F9=~o>*sLdE0U#>f};f+-HMk%mkR!W1?Af>kqIt8)HoaT9D)j zM0!Z#$;peh#CO`R-$u*7&%Hl>`xJ@SW;?r3ABY!{d@qh4yg7Vx{OA;`ml_ZnTA*d! ziQ^ANWJ5;iU7ZU^JzX+k&|lg2@vMm03MbiDyAb^)(u@iH7SJ**2y!@s(8xO(H^Ksv zXt)eWyA(JlsWEr>;Fwt;;=?+roviJL*ddv;@rjIO%1qoJQiJWM95nveS1wE`FH`m| zOxRr}?C&>H8$Md}0u@h*Kw`Ag;a3U{*e42-3>mU47v zSH>>Y1dY-0h+ADc#iwE&(omI(%VrjL_V5mZuUP@%VwpxgD4LCo(*&1(2_|E~jSoLi z@}lJ|Z`?&^ptJ`0!oqODgDMA|dE`d6L6b5A8mBo6Jk5mC31w$lWL|tM1Z6JsV@Mpe zR#YKj6qeWxck@M`%gj;q+B1CDQ!^t({R%H-%26X{RnJk=>k=oP z8pY;mK`}W_X2Xc>X5yUKevMo5>12~2(DfXnB3LA}pG&BmXW{~> zO_mI~mkHn%J6?>n>DZv@(4{b*jc_yAG4QnJZy`5i2U(b(VcVu@TNNmg5IC0y)EmVF zTI0SHI}LX3xHIaQI3sL&=U}p9hlr!HDP;RAwhL}e!k%9xFRg0|hH zb^C|^)M*d+&-Zu#32^IPZ#LR<>ZRJIe;B{FS+i-#y?giINpKh*;BI88h;j%}nN;s5N>uyf>0gGRxHI{Lu+xDrj@09a z>cfw~shsU4?l3n3NX6Eql`ozFN zz?4)zQ72> zoymRmLZ17!Y}fZ2GhDW$D>GiDbWA;|vnX2`;%6t4%!W`2QDHJjrE_5xxLA!L^OSlh z4sudE^+jaUSA43B`LD0oY_0U^%#@M17^e!(D(_}n^IMyjM;-lR%PcUMPATT zxj>xg9xV?^CfW0q%Cqw-<>;7o5mS!LhEBhr#V!v(ttDONA%(xibf7zh-!n=};xVAF z+IGdSsug=0*5Z+m-f%WOj2cjVh^^(AmgSsd>zH$5+it~fQML_h#wkf>KfYLt7gTRp z;PzfI)+$EA48%(uY^%0fnPyl}m z=+XPob?x}%0N1QoAn!qrua<3Vd%@V9sqn_@BkTVx{SV_nO%7C+(T+60EK~1bgG>8Z@WVt|1@)2W!&`q!t8fG zaDPu65OBI6G$F&ai<3pCNN@T|I~sZ^lloKVGvM}z_tAAU;LQCDKb!QBbZoCooD%psKrGm=+I*#l6v=3%orrz2oaJ;BgxEKCe4(CGmppX2## zKMb%HFymlmpaYPhMfo9D5sJA|r72XQ+IEADrAtA?lQ8G%^;u?JwzTIY%0uOPgtqn3 
zi^nB3yC`{*B6vOa96*Nf$%xFEG8xA=eVBrW5cf%Ie{HLu@Bag&`G^0qh$E*;iTrh877+TznF3spcahHmiRt5!41>Hg!5o*X$2T|12 zsh*C(c!7&Hv`Vt)ifSY<#3fTYwQzB0+y`DNX+r359YnfLf}qIQmuj5lN&g-&;0)=7 z9;)y!oVm%RGg-V9io!{N%1U2 z08=wy=$A%5LgmDLk9+IWn=jbXN2{)C0)ZQI*yS<=M>|W7qdn0g8&PPs`;AIX;89}W zTd^oKHOr2W;;h0AT89Q!8Mu>??7uT~sz8!(Hd_h;4p=|pNeZ^C1RWF`O%=FOW)s$U zOdA^&^j`=4w7-}Qjl-7fiGJAdWj_Wi#)LRTn% z|4r`H{$J~W+w%Xq@J?m42-u7GQUsm&=vahSiI848Ob0To?sYA>-jE&Qg>`A-kLKjZ z;l$2fb$daEl8nRKKKRks#L5iAhT%xcXKUK@_SGaCG-qOT4;qR*ltpDAl7ZYaxzo6m$>lM|_JyH`2!$&kow1yT9O+vJv`Ux8nQn_4hy40;04P-~V9JqF()% zpkDuP_xRrk`QH=E_TOGD|JMOxx3fj^A9urS+>O%5^;AG%1J?hQ@?WgK|Fjm^4EbMw z|Mj+DOTPaa%m3Q?TZ9de|AJ85|402_uI2waU?c6nOQS40pl?b*lTD)cDs!iX*MTvV4N1|(J|O4~ zI!52rdWI>OGS=x6Gsbu&L+XziNzt#omb;q;8=?OR75ZPj|GyRh zE$f%p|Av-RTXt^hFK71w`R) z$p2LL*Xo(zI?o&8Sw3U0A>lv_o~?1+)Ssl*%ZZmDp!9&GI$d zJex@8is^*Ip&~n>;%rd!GT&)$3msnMh4y8&Z*gb7w6MIzd~p`px-r$yXS3Ov??)3W z=#6Yo+o1O4Zq8_5R`Kt2M9_~DsmV9h_PI}=Gr^R>3t=Idhj{%64gX$G?={p=Lk%_5 ZP(uwh)KEhWHPmn=`2Q0`zyAPe0RYM{9RvUX literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack new file mode 100644 index 0000000000000000000000000000000000000000..a31d664cb9214dfcbfc938663cd6603234537a21 GIT binary patch literal 8625 zcma)>Wmwcv*Y2qy1*Ab*Qd&xoE)j=LDM3QI5s(@hh7^#HPHB*ChLY~?7?6}0q#5S_ z`aJJ-&iQ)why87>dwse0Uf1ttjKM|w?*UE%?7UaSl20F&VIhV7wyiAF@Pcun9J@g; zQVw-^Q})DyxFk8=yVw^=+XZamt*}RZbk<*L8%aZT<45sq80P4l*R3(m#B|Mc8KE@a;`I>YZd`f~ygPW+03n}OA>aPjgv3}6FvCL_BNxmDkN${j z<)1(_7{Du1b{@6!FUA0@Lz%TdH{={(;*Lj<%ASb`^$Aj)shjwhAkMw+)stTha=WR; zvGgYBsMTja>xsZ^hM+p77nLS?d13-ZS0!i;@e79v- zSyHDR5mFz%W|&FZIk){?MuKha?iD3+)l1AI`ROtPy=`0VMNE*adbhyO2f%0xrg;kJ zZ3AZXq8s5g1i}DV=Je;J=U65{ 
z|Ec>ARu6=OZS`(96Mq*!noZOIZ*E~p(s$htz|G1~u)aHT#Y>F?Q>Rg)@y&SST(@h- zODqc(d0aJ6(z7Hqj6r#Fs|w4I$$Xp)Ay)ymldmIK#4>PW(jFb~lL4HJ) z)2So5JHKw`j-tAwQ_Mxv)ONhaRN{ijrY-w&oJ`OGuC4kS=il^j|ax;~3( zeY2~ltUvKO&%cqedHs1saU(TQ?B#|$)7=5M-jatu2%{48R)vFfL~hLzogql7*Huj> z)WC0hq#0g7jnn#c4zX!x(_^Qom*-E}Fm(-5S`iz2GUA+mF9YoMx^<_yh#a!Er&#r> zt^C)%y$J)^Rf3r9k^0BMcU#X^8brAJkEAQ@6)qz6tz};#IV2l=mAx{RO|pEYhuR+a zrc1NiN@1*cC)^8+dzz$P*5i+?s@$-q8;;49vupJYY4I-?r-jHrzc#>b8pzwD-y74BRP80o*4Y}T z-hT8}qpx$%bm?$?l(8tfm2MIrWG`+jLOKzUjNRY{))T$b6{;l4;c-CLa zo~Q8Zky2*ob~FsNo7-1tz`sEU@%fCL9FBZ76O+b>LM;U%E)E|$7x_PQ`0I#%YcQ?~ z)QGm;AYqK?=T+9>-O$5yy7YLSMr4g>^<4S#cz?JFub72wcyB`d?>^r29d0~aZ)!+) zynlY7&Y$XY95^Zgg27bBiOo)78xX|q9r$iN@aD1IrUoUREOC}|dcxRsLKn5}b!ep- z=D({rUc6zN^MpvXU0@5G#ObM@*WqrK^co&8wPE}C(l?HAJ%Xn_#FO6&Z`e}&=AzzK@^upJRVO!iv_ZqSScPM|8ds8Q9myBfY7Z~G1>&bX0`M@60J_xW6KKm z@)7O*?829u`hKYW5pku}{pY6! zxB}lFxza)4F%e*3D(lH)1A=$x$Zd{(o7$FS?f*t2!>ir_Gd1Vhlsl9vb?0Hp^=S7^{Jc-fk^W= zq;NL;*E=H$t+G~VPSJ6}iEZ%7#~^c-89KGfu!zrt9kxWy_jaOSDVnmg)SNG9pn^?^ zrr}m~Btf>zTZ9T24c*<=y;n$1M%A@`5KgXWc_Upr271!2+e+~+(#hQ2wJ;rM! 
zWS1=ZLD|erpFTA#)6#j_$AGyM^& zE6w>EdI(}(N^g*V@z<-=E~b***ZcVcYRaqYs9Gdzd^~@*n|k4J<-$tx-uI-^-iSBX zxGphw*FKOo&;v$tJgO@qH80o|Tl_cvBTlHCa#Lu0z{l!TyIiVc9Q1OhK7aI-gsvT* zy{QI0upIeI*_n6PQq_*$ulVe1#|(S%KXIksiKinTPyGe5{6!QMD5{2Arw>U#TNndvs=Y(=5w{Gz{;t`q5b@NwfOqn@2kSA4~|d` z^wi0<>kR6Vn8R+ot(IllVxcssR*vxdC7sWmybtR&>IQUWY42TNi_!OZp?lh^M}zwk zb*62U#}N79Ggw2#6-@8zl2QFbE@WV`+6dV$s&@mE`h$W|-sYMCO2{SDFn##~AATgj_d3*s5Q}Q+*&m?U=tP|akeEjST)2M2 z*Vuz>F)^!ajEBybIQ?bSt>bD~U3ynVe#-Oz-6$UEfn<*+3VT!8dF0uD7Z^YtfwHN+ z06ySB-5}B8oUtRBZC1lMtL|@+GVS5{Z*JazYwme?RXmLpaI_m(8)VKdc z{B%tyvM@;ES9}k>-vm_wXifV2)6rlQ{O{Te=CzH?9b%5i$(&rb1Ij^~_`yR~O|{Ye zR}4;)aP@?n%~uJkaZA#63`hM$s(q{*q6V9v5dzLSyT)soWT*4n%X<;xUiAEVLzwua zGWUf70#&t4l4?L_|k%p<}B)=(iN9~a`z%=c^a*kDHHtRR8!XCUxIr< zxnkIVzqXh7vLov8EH(#%kH`i@$r$QU9+s90^}3rlF9hSys0Zc}413i~jDNQnRSksE z+Kef^@4w5iX+wv}CI3sP-^+3!B@Cep6_Phr zSX7x8480Sr2ZG-5t$a+<#$Z|AYOf$ zyT=^i5i+zkVqxf8Z#nf+M=>o=z&erFA@9T9{6ERsgqa7~A7msl$?`XdK?PZ_cBF{q&ssBRk8PRGWeVQioB z06-TR8BMGUq$lv_R6D+;vTSO^64oJ8zdF{J%j~k7Ru+E+myU)}XIHGMueoJ){ z!ec1wD#(B8EjAK1Z^AKEqh^eLQt@Cu>*PKT{7%x3oCIwuqZvkNEG_4k%+%A-J+QSt z+mFOFpML8rro@Av*-(G~_hpX$U-9)?0Jnu1PutPZJGcBVwNRd>y)9_1pLLcsTRCK^ zUdX!Q0i9dq%(q{_CNp!5QkfVw5vAXEmgvl(7dK{fuN``wj!o6sUTXV+*6UpZIv%O5 zmEy_Er(C?sJnxLY>iWBDI@5%fN+5PT{H0#I{`A-jxAHjT)|q?~MJlUcm6F88?!;}6 zHT9CsR9`VS&+*dD@sxgxKNiowDQ{WuXX?`1USTz8hA+pWBqyow1MZnbehCSvu*~uK zT$G`rJ9m^DOEJ4GX7ME#a2j3^vyy!flQdvB)64U&cZ@bj+53YA7j`*DMygF+>(*Ja ztXwexCLJ{9h0xVGE*jO~J*KTB+@id_+lN@KHb|xIFta1xDU#V%XE$$X=F8}e$Isw= zg%TbyMhh%2W=>*xk-|+r?B66mKY6y_F^zvFkb4YdE;V@4Tj)U0F?!v7UqGV8ndu)U z^lr;!4x6)|`sYZvVNDrPjzchVTCmDVlrK8sIiH$2&lg4wDYx5No>0e-8J;9ou;6sJ zz(A(2`K1u1hrGbm#s`(xac|^;)82eJLMGYH$2FN~<%j;LH2ToI{Y!dcrX6=SMLg>lxcuklk^!%sS=y}PPlW`5FAJYmQk-XC5Z(5N-Df#Q!VTlN)(Ual_X!q=n+N5j8+vuFjqF*iK2(=UVn zCJ(W;$N52@6gN{K`_lZ#k9qV@(Pqy1)SD~EY+5|nc$|OPj{P(|)Ny8+mAm{Mj~>2W zgG#kg@~v9i&y{F`Frf1k|$g86AJgW6tAK_eB0G{?ccSH9gJ z^0}jFu6B1WbhH*;(YC(ZTe0{-l0L8iXjOfs^U1-KAHv zueIc(Cj@;7AA%cyi4zM=l@aT^meK3t=!_#ilYj2wn;4lhOkDKvq 
z49(0q=YXY#IPm8rm_M$u0HV$KE`}`$7LI-NTt0 z>pk7USX9gVJOJv7fHYs+?JluaVUTVVj|4fCs28xEuWyekH?|}e$hF`;UmV=~m6xvc z&DKqDRmgQn)GZlwgnThlq4l9-viMQx8S(Q>9789{;Hj|kX%jx-OG^yJDbjoQ&5N?z^}_2Jdh5s*zx>WHeRM@ja-#|B zm%jV<=b-=&({EX|nc%K$?zN9iSPeJj8Im6WoN(F;)QplBI)^Z*)ag+L!;VZEwSYR|#J;IvNNrf~Jd3fhAw_JRpF zVy#6vF}$zn;fB4L27{5xQw@;uF(-UUU2|C-7DDa-GsO^PK|di9X)3a z@y3e>oA$}C1R|-+5mgq+Wd%Fc^Lv8W50NFm={%{c-FXBDJlBNCoHqI9R<25|6$%OF zA>MU<{!en>f1#tZJ2QC=iBmnWT^Kn?gEJI&9y(-Kmp$>yraq8NkVNbeAkr`aFbo0?q6Zo?*3=`@5?jPLyn09Bp z8rD~@?q<-UlJYtHgPum(Kx(HLTL+q}E4QYg-ac_MuoTHO?d5JgKY52Nn0sa9GNf_y zXbY!w@aEMvH-W#e9&_9F{<>Xbbo*YPyzR)XxQ=Pktd+8^NO3?fJo)E(2dQyep1WGt zH-y=eF_dbIjFTJf(1KPnR6L5tQ-Z8`r%!kIl`4EO+2O{^a~|`_Fai6_$V#t1R3T%W z^O)Ugy~uDel|oSFLuGX=&%P?jD^CBk_l&D-frkFOmRDF^vlxXoOB|e^t;mj8^RNZ@ zSa|>5!!6S)cHZ(mOoOj{e8(Et86GTi{JUa#3`Y{XVYFSv4%@)G9R(6r@c_|RC|GMd zLv0>z8g|lV|9gqEfiM4{CGR=R_~HSw{vW1k@gTPi`FzQt+zde}e3TpFU#z zgI=2>4Y?~#FKi7CauXjYS7}BCIdqC@>wN`56J%K@ zeCxfWf&Ytt*RyKZbnjy1gLU)GzmK)ra{;3O;%i?>m?&diz z&8;tQGc5hmZqGT?JfHO=)TU4U+h@zfz{u(xwrANZDYf`)J--f02YDy6FLZuBQ7M(* zuFKiWmhK4ME&HL-f;(G^qtuz|p0Sm*iF#YL^(K%yA6Ub3e`&*wM1_Xgw86eVC*Dg=V+^d2DGAzBZ6p;~wv$AKk|uw?xI>z1@VEM*a+@18ct19#16# z*KS-bdrKY>BYg1<9vXYXjoy2i(Pt5B!h3C;iW*i z#tE}%nx%efKzyU+1XQhpq*cj~a*T$9mFDr*|3!T1y1?Mc^nJ-BxVeI*VKj})qL(s? 
zN^(ixsr6T=6uRR;c+9v(5yoFDnIgVSt|w~>bE*Pp!1>|yL$_Py!7iEs|92*r9@3xSU>5{rh1B zDIJRx0^S&FUf0|u13tP>AA@VSH zJ`p>2B~G*Uo{^a{rVD6_#~qSKgx|PwzkfjC&_8V9YU{wRXKaE+BMbBoU}Kg(WuQr- z5n?aAZNpJGM}te3jk~D*{<{N_69z7TqXd9{j*<@ttm^SBw`mmoNk;7c_5N@$@-0Qd zH;+BSC#Er(Gi5YVoTr*mc17=}bf|1cWf#0`vD-Qi#?PT7(id{Cj># z+kerryvTmr$2a)&&k2wC_4QJ8Wq9!PpZRyZ!J$6EF2hArnrhsN(0YtTTN?ZKR#WQi zb%6SCS(r-YvLhF+q&X^zgSqh3m*f|b%|_@r$CK#e<+s^THnK>j!wm_LK9QA!1E zLZIg`_eb7Bk#30G0j|=6S@k)D!qXT9GicQTqP*6!_MJ8&z5E;q1cDj;oaDAX*Zr1w zgC?NR6eZQ53h7S>sZHq$LBg8_&qLD$5PdF*H_q;NA~BUBEwvjPWy2wJ)9sjs4#REJ zhlH;?)!SFK3HAP5`bp`zma?>gvr*^GJ zZ=N1w8m7I@Si;0!4+Njl*c-gUQWzjTeDsk@H|o4H8o~O|;{1jVY$50D@8QIb zj9>9HkhA=8G+^-=<;~W>aLZ4Qfc*<~x!yYBc|QXRX=5wIy9w<~5wJSIK*1pxu)sqZ zJV3aaYv*B9x04@sxEJFcX4>EoYoI1#0RyPG0*njWyHLc#+^YbG?f0uV9RRP1^zpZI&{RhyaHo)C@+mCDM|3bm)`v zv*tjN*t7tXo1c|>JKF=9)|Yy`Y|&Mq@0$gptZ3C_P;* WC5r$5YdWA9AV%9lYN6Spqx}zPR9c4s literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/compiler-wrapper-1.0/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/compiler-wrapper-1.0/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spack new file mode 100644 index 0000000000000000000000000000000000000000..c3afd0b19e32e1a822b38cb937af0a5cf12b8ea5 GIT binary patch literal 5589 zcmaKwXEfXav&Kb@5`+*$FF};7C{YqiBzlQ%^)5=H1VOO6AbM}B6A@(fDB0C}w`%m> zdn|VMe{|-Ky$x05?SiYmGhC>jFr!P-dqzD0rRL@3bvcQO;T~;N zG*4JWPDFh_hC3 zI{9lsHr#;NohQBwW&1#KE#{Ja^;QQ}tho9D43>y<0ut&ryPPsuusdw5k7L{f*~{)n+4ds&BVQ&A3e zTM%y~SiGmOIzXndcfpCU5R3iXh$t3&vjn8S0vBMDzZuxy{Z}MyY6T1B=RUYA1AP7- z8~=^?J@9eBXzU1*trlxwh2tGSB%Y4mcYsN>V&6bGVDyxK10Pebys58*xtCm*I!Wbw zEEq&q1(rMgUYecra}jb87Al%Mjt34K1pm1qt|Y{T)*2!K;$FZmf$tM58({Wy!v?ud z-3tsK0Rq8*4k<2%8Az8eoG`wJaZrimWFtvaV+NkL0M+=oLF;QAxEILN(kX%)qpMpK 
zMr+Cu0h`V+5<~5GE`Y|a9GjM+x96r&s9ZXZm1K47W8~^=>U)drq8^2)b-?}#Kp_F| zalqC6pOtpG7E1I}LRhKAnbpct*D1927EoP5=yXA&ssX`pZip!tMhl|rV0&F9TCBr!GQbX3)FA zvtOt?bb?E7#KMpT8H7^v32Olxp_><`>ivF&()bEBrX*|mKnIkVMO+!BH*cnL+(%YY z!=VzJCfx5=HZwl#JyZO$ThCY{*~#x7+Eiz`;4ekdb{5o5YD1iymu=0BlRrdI%)7sv zowHgjk~&H`)=XuN&Sc{Pga4s;3rf4})wu|#eIHZEEfF5a#|Q8p=di$-kMh{Q-#WM& zNBfQAuUGte;TF z(_C}zeC5Bhm%6w=FeZ+?f8T@!^4;){K;(@ZulNF4Q~I!X+r-skUno1XOS4WH*U}X9 zGOIVEu^{yrl0AQrR?s>(AI8FDp#zWlqJ18Z!L&y&-t+closL zEfy5sxwjAl((?wr(bWE82Elf?_ zv{kt;6EeFaVWbj#N!8`alv_W(S6I>sE!${RI&(jtyM6d@^olAaKF_=rdbL`M*_tcI z-0bXZuC~mSs2Viq8kA)|fj9{`7uimK{^XZv7n#Ooc_(`@HRfiv-K|pVIrF;egHZX7 z*N^6bDka?Y?t$I;di&h1gTtt7ptkvMX&)gLWs9X&S;=dR6M!lHp(H^Eaq*g@EU%k} z43}f+7(Rx!R+LUKv}a>vOj2cAWpFjnzeg7*=pD@Y?fUwpAyjtRa3mhq29^e6J!sK)AdRbE*j=q#H*#mEoTa5o&4n_L?u#%-KWFEl>Ir7Yz@(NnmK&C6}<=r&1S z7F6~Y^IfmeL+K#8-02bGw{h|>TaHGM58-A9RY{BD0MQn@I^V?R+4Cr~!obw3);l*S z);EpA1|I`FZ-2rAI~z1-LJVvG!;NFWhdZfwi|Gj9lq2y!zoNWA-y%RceVkz6QrXjt z)NJS3O5NRizbgB59;W+r$4%@Ye;x;Gg{f&n%?y*0&GRc|yJ@Y17)2ZJpv-!MQp_^A z>*t%y3&o6el0Q60>W?kwAw{EL@AwXZ|EqNZQU}L zX?0r3VCv z2deFu_5zsP_|M3O1Q2iE?j=OH>~hPv$L{yUISmKMtbPqp?023bgr?P&_cKaUj59URb#O*P8L+7m5OTxCm(CZKw*h zj>~ImF4KznxEqj-m-q0Hq5kxv9o$iQWg*@|~m7TR8Z40dr-gwW`p;{NRE z*f6j6Xs$u(Z_g@{X+CN7Z-?6Y@pS4WZd(W7EruT7v09g=^r`(u`xvor5->RKLihd; zVTJy-%k9YTm6=Pkd1F#Q%ZVO~czgw<$tUquCjt+TCp@-~kGk@Jr2`y-23?)f3TUmO z5fn+qURgkT&cteHd*%NwV5siaC?HFtawdHUEP1Igopg7F2&fS*Y zBYKXxtQFzwfCd0V>7M0qm~{U_VJUn%4vj|4VWEe;zybd~N^`reZD{r)V2BSabYFo_ zufW2Q#iex|XbT_Jc&fVycg*qy4ng+MP5_X43_J}lp6*!#F8B}cv68Yvu~hc?HKs^t zI2eFG@OwpR>bhovl~*2tD<$YIV-+tGUX*b;wTx-w2j}RjQZD5N%E|=#ddi=0vxu1G zF^EkG%1}4JTwZc)myLqo=NU3NCJJXD1VqX~`xuDLtlCkvtVF(@I*Q6}a0tA?7oc!g4d+_D(?a{X(1&d!ATm zr4&|>pp%rNJ47yMB4D`j9Vn0eWzy^4y6kx| zcZ5_JsqbY<<|mN;aYjyyu!qs?;KvnQ6KC#)pGr}Zdey-+2xx-D(ODY8yJ0@`D2_hy zC`Ku1Si_WyK+Zu=DmBp%3Er_eQIt!8T8O}Kb`1%fgMy6b!voGDBfnSm$QoYvRh^+; zP-%FlWU!JHVYF>?y)wLdc%5 z0Wz{tWVZYle*7XOYRGf{`l#>OTLN3<%it}ec4{wYDGIzMfJx1+*T=B(0jNqR 
zM%U;uer5v9pBEsyX|7?0e7Bu`lcd{^5)5+8Vkyld??_%9o;H8uDqdg3amlSvFy&HT zZ?1bRZp_mC?zwTB`HYKLRvT=p$gJCCKvBub%l3=nSEdnB)ybi>qdtd;L^*$WyEBtk6mqFg;FK+1gJnyqmo_c)&%?9jkhOA% zgsJuxR41Y}&gf`fzqHgI^^;T(B+mX(OPtJshqRNUn7Mw*9l$j5Gq&Y6RYFCCAZ&tD zhMOzK+-}#ON%6NRuZj3wn{uMjvQqIXR3BxzoFI#CTL^-7VdQ&OQ@x;W?q!Cw%9*A> zR%1jPjk8<`p)?B!N|7?Kp{(Cem>D(7+GW(mzN^`BLQ~5-N_t?1zF#RcwZ_7O(?EIAGJj0^e0P%_9EE+;7Q(<0gC5PGq$|gF;P9EoE!d`;ABVRJER+_ zhnzT5chbe(xu-AI%-5CJl;KguDoeD&=?=wWEJR29SoZ4%A6Z(amHiP_Zy;GI&X$)D0 z)?4G(X}0nl(vm&sVu`PT2Jd3|N@XSaRLW}ksC}$bsx^Mo#{RA4`~4!0w9Tmd{M3%D zPeU#4C*2p z5VI{EAyAIUK!HLtj$}46@lrkso9Y*1ys4l%(LKjyNl*Brh|N}P zclQNbt_ulso&_me70>N?Hj_1(^k-`HXv-;0hx;*~q$#7q=@#P4Wo9j&s*>ZOw0*PalM_^Ji`;7RF41JSij3QplEQX$?#laj(92GC_&>L#XmnUQo%5p^v+BN|v=**fJ| zpMlQLp0lo@(-%iQ34Jon{i#wVD_O{8xZ~7}oJQ^rN^c+sEDald)>+U z_c@)>kZ=&WBmM*Y9M+5qdWYkW_%%5y!tHLI)QS&Q;E9E-L zLT!H};S0}ERzhb*ub-G`=ZI=?q~?{^7*}jR$#jl^Yg#dxVDsnhzq|-|m;x#by3(Zf zZF;5^`#kNarItx(S6kO=Qh!*mN1ZF?*BQP09U-Vs@FBFy+@Ve5%dD*2A@NsEtJK;M zUfX4m^^gjEzJubwt4Y2}5S?tH)?WV*j1QG$Vjkf*Z zOlnudprZRBZY%Fr$z|8Vf7|NfLAPfeWqC4n(}WMP!A=0dC)%w&mNB1u+)L}$$UyOY z-$k?WekobmQ1vVG1M_=Gi|Ld!HP0c(d3apP|4RDQ@!`@sJT}uBEHAdl&+sCRl1YAgZB+hX|pqkWEkjdBziWf+GQytD{TAMAzH{Y z+H?_VKbAxk%9N;7@oUy&h9j4!_47wRe|(M4n5~Nq^a`5On|WmXbgA<0vgsiIN%r{1 zxP{dPtOIZ4-a=>m-gyi*y)Rs9@HC=wZ%hD%G^?2BBp!_)9h71KnQj^lNliMA4*dvd zeCs^w0J|3>tOu;m;dc2=;b)SPHA}zTa9K@b{XgiK#AA0uTSyAor@A~sv@AZa|MQ7ba&pAGMif8vZ8Kf?<9o=J7MSa5bRhn037226 zYX{dtd)A^ft)MVP(viUlar#UZIbVAGvW5dQ1*B2RC)sIFKLa7vlsn(8fCq3RIv?oK(}v@fj3)9nHs|sscN{2 zX%tNJ2XYa>Ke8pB;w1^hE=S&oOaFK?^ofp`bVuO3>sDGMw3k(*dBB40kwo;L}FL1s_1v$UJ zYVc3HjRQR1@tovU17N1g{u^*0IgATS@b3R;uvh;y*EpFqFh-yWU}Aglp-jIUdIUU; zG#~)fegm^x|M`)Qq}LH5?K6Wf8}16u$P z%71uGjb|T>xn)!1D2LPI!2e3qUbDMtt^MaQ`sXGO@D(#oJ_rBn$1knu8a>_09%w$} z!6M84M>QV6#z%bnVtpUr`HickXr}bbnjF31e$_5K=j#GO_WEX6UZPXgD>w|!LXhZ^ zBleNoAkw#%j8HkA3@q`hT=dlR$?z+uEUd*H^IQtCWy_~yI_rkx49M|#K1D(TanD+h zDn#%8Dc$Wc5{TfRK^5RXd4wm(bWqbMXWy=_{R`y(>*en*SordZRR}GwW{#x$T36Kn SpCFj9U^a^GUOa|7c>e`JZ&Ats literal 0 
HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/gcc-runtime-10.2.1/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack b/lib/spack/spack/test/data/mirrors/v2_layout/signed/build_cache/test-debian6-m1/none-none/gcc-runtime-10.2.1/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spack new file mode 100644 index 0000000000000000000000000000000000000000..71132aebce6d20007a4e164e84d07dd65621aadf GIT binary patch literal 5077 zcmV;`6DsT}eo>r~e+RIUU+Md?M zA}~p)1Spr7)aCoPzk3f5Nr?4wT1=DKU6n8lo0)z2?LET)LFf$3n6Cf!6G;MU={nm2 zO>bxXZfSDsH=_T{M&*=5u^GZ8tGL!2QjqC?V1_@<2JWqYSyCjf|4OK+|Bmn4G>m?@ z9o%34a#Pc}{;Qx_`j6T_)&%aae?`${uKz0N2K`6PFG}ajhNp}+Wy$ULbal|$G(27J zYO%I?PxH9H{#%l&aQ#<757YnYy)57X`d3tr|CP`~^&gC48pRF2(}*lTj>QWnj;Ys9 z!fx3#?yY}C(`D}emC#Sne|D%jHiY}@zo|&v|0^MJU0nb3tvNiPCg}d>T;m44k9Dp{ z&C>W!R(~VIpVOR4T5|o%szw4c?rr}}Ai#~`I&J&H3_FJ7fyiCg==;{WVTP8pzVg^o zSYE0^?f1w3R!dbm{#QYBf&Y-~mGEEU_+JId>c->azaPXr20RXN{9kVWp9}m~X5hcf z@xKa^wdQ=_Kdai8c^s&gIQ}oU|IY*dtFz{RG>-pOki0o3`G1c8UCU~Oi5EM4+K?qt z5#@&St@|zbM%^zLrW2ZO=SyEbZ+5)6-EL}L-1KygIHB+LsTYf1BH!b2;by7| z|C^e2qyKBu^S>GXSK^aP?*ErU&j&5}k+3D$wC$K)Q>Y7pYsMWv>_Z_c5lUDskW`n( zP}rBVBG2qo+)qNVoVJfOnJ*dxFeJzK;Fv5*@Xw|LiPSn;Yg4f{RE<*ARurjeDXO*6 z-mp5Y%?@pA9j&Q%sA;VMpK}^|)GaXDHNy+XBQIGw`8zEqiYBU}v_(#P33*|9geXLj zwiInkZj!err?8{w|0SXC$6G@$X~?o7>*9tiYX7IMAMYLQJw4eoo*utyYQ||3j8C|F zY>iY)^AZ1N-2dU@ze-3^=Is8@4~YML({nn&jtBqS>G9*gtV*(6j{h=`|5ebXu>LE1 zKDfHDbt&|H8`2QDxuGg`!N2fm2z!d&(jd9iTXmru`bhvq5NfndWn)9Ws{h*kP7!Y@ zyWVz|Q{|3lxe4}p3HuJLm$KggZ}?wc%Fbt5T}T5Qoi48cqJUZ~mAw{%s>=Eug#hDp zGwQ+c`FQW2SFiRD{(617yZh{L|JgtK>FYNyPLGaW>^(nyb8rAdZfqT#+tM^eRWx}+ zY|2et-GnPe&Nn=wtLlOX^`^PPe<$PD2kN2z?A`Oz-KRU+`w!3Gz4`m2`s&$R_2t3A z{@$CHGi9u9Y-&<7m$A~)7DU9%ao5|4vE1y^r3SYdV!*~!)tc|0@9ZAFYwjs6eWrOT za2+kTP7EBsy6|BzH`@SJRgs(b6sdZsDF^CF^Y!cJe<`m{{{GPV@XyPSPd}dQe9-@T z_VLARk5~29*x{vLU7pz>v3Rc7dkm?zHd@-kNEKM)|DsUs(ZravI>By8QfrX~Yd(%p 
zTgsvc^dc%4ja`|EjIh{a=MY|6K_win?(3fAQRi-~7E3J^uY)T>qt&@Bixj z{%;j@Da1ZF4WvDyq^TRNrreTcS&^EOuGfVCvMQXP`gAE|E4@HkU5YEj05b}^5w(2J zhMJqAt~OhmDmOK$spzzU*{bD(Lqoc8RUh+#!FJrp9QK;sROy&2G){prF(hk>sw>JS zBrQsT?O8o%TO3P51=H$5X~%RUI?fPp&7YPk6`G6qugrM@cyqKeWQ$r z{X5ef#D9qPdMWy;RvqSmxho*30GHXri6Hjf}XfJ%ecV7 z7<7utz=O6CjiQ*M64@0U?1Y7V$7``G1jW7Ku&~!D{C4e$9Y6*4`-;f|Z>ShqdX@xA zU`H2v+!^#ey?fDZtA5~WP1jVDAq0x^P)+Q9Xxg@}MXj%XOX->0K&UYqfXz_Pa!MvE zjMKk?-5O|7)b;$YcHnl0Bd4qSVbX85JZ;eLeJzOrxW=O(H9IB}6zsqW;{?)5)(6Z# zP0xco8uh_W@8WUc1=Tx*l)rlYKZ-R;EHZ|dgScPkP-6Xx8{YhYE z^5hA+gdfJC=`m~kp>{)u&?L`fcZsv%!#dm6ar!d>$k_70y;#N2G|V^-oputZx7#hh zAAnZUE?N1h4;$T*YHLkA8W^K0Y=EcBO`aR9J)g+C{{#$PUp9}nNmf!!W@bAA=LW;IH?|;7QBb+=ITocFt z<@W!%!2i~a=YM$qUkNGt^5OsWBm6uPJpTNjtd;UVz<-U;|5ZY(WXBIiq0{Zf1MHerOyy z7WE?9IJ7DFTF0T`7O6cA@Hw1@B1+`VF?n@(y0)_7^aDSPN!Rt;`ENhUe}!~qr4#yn z(uWg$upp8hcm?}!>a5uCvpUT8#muMkihmT3C9&g*)a~R4ZR*mP8Xl!KwGFIjz|lJ3 zU>H&Sxw5iixn>lReakvdL8JCIy}%kymJ%WeLRP>8vA?rJ@+E~NgBb$t4$TnPq6)$a zJFDmSX@KW!6QLK!LA14wz$A9PMD)Y1kXAAy!}4vc^c){tN>7c=ZUiNtgnaGwSz5kI z3dlyXm4Fd3Lkiu5PK4AZ_Q*5)js=)IV&_EXPLMAeG|a?yV#YaNF1qbHUNRKwY+i1K#yO~(Uc@PKPg=$R(Sf(mQtF*h}eny~-&sG*8d!w(z421ND#@bmzM z*Pug-2Dro#j?4LZM~BajpB^9XonZ4=2@|~;nb4HNK3mgWECG=i(L_PMWV2WX`YP`C zE~Xhfalg3hP96T5n0B8nIK3{5q6wZ+Y_nCc*Cp{3lcarJFjgtP5+Q3bdHU>-l?I_d zaBK?F%~zU+F)E}`7vwfsvvBNB6)Io!sJAWrxz+q`P3^j7>$?^8>xvs&xzf@_*pcCT ztTO0@;PUBh*zwNCCrsJejC0v~=j_yU+4Z;*IC7do)FS4$xDXb+H>4eBnE59XT0wB$ z1>?bLpBTrp@lmJ4&J**N08j9NvFUe0O52ewoL%!vO*!*v;M%1zf`u_ItK|@|on5+q zU4I+fAdW41ZOoOrrIu_1JY*hs^VJzY1KThzH5gphio7E_$sshIh>{G`_QKE)Yr;D( zN&*B%fQs=V@=VxB{d3z1YpHlexLDy3G~M&#>vm0EERIJaWPG5^9Y1kxRuw`91{c#C zk*LSe39+*bnQN2Hrg0K_*VjeN#jLf??pvYiZfHVoC~0Ls67h%l#do|yhf*$Ull>|g zN3CoeYm*l!nlO~yJPQ_r;xJ6l9w+5Yo&dLJ6SB^wJZZ#JqVA;C(0Wq|mW3b+FCYL_=qn6nkt^B>L2l&%~@&RBkkMu*gTW`ZhX2 znxu+_w#?QQWX>oy;&(da&yYP(lEpF*Zb%>t_rNI#NX>VXm@@s8?G=fsB=_K4-fc6Y zSX9HViD9auw(xlv<*Dhhl*jiJM_ z^bigvP#Yh4GIHpy2`T8{Q%xAc@|1BZWQU`&a0=Ieh9cM6IKdfTW72}se4aebAG%_x 
zMwxQ}gRGg#%P;vIO-&SHpxYZ#D~ZjvOKbV*lZ~1*=bMtF!&F6Sa?}wl_V^Wyc|DtD zE#qMxO87F9gNWI}RUv7~^ln*c@BV}Y8b;FbXsmL4kZoGiFcwcje2&u{A&f;VA4p$cf_Gr?a)2nv|j4bgMEZ=>+jL zj}>OPvojv_1jre;2s$Zg{7)fLL3*;AP!jvl+;-AM%Y+2*cj#pj`?2r45twW8d~;maT#tJGg#mk_)ph>Yv9~u`@`kcj&xV`rZxHhPl)VRtpu0lk$0Wjp&e(}( zn_f)PCm6~2nb8pdaB<)zl3g}U011Pb=TsBfzw5U}{8O9ADzmOn4$S@#BmWN}16d3g zlvPZ5tzoc`rsd6cnW8~Uh1#E_GvFPzp9pK%qs~^ro;%R2@f=32Q_lzfMg%E0WLMj# zVN&qZ+|?kZ!#Rg^I%LWtuG=z}>^3A@!?;!(G7lJ{29bpw=MyTR^az;`MVwlTBkTo4 z96TB10Ai>!dzfel<2*B^nQFr9xf|>(8ww(y+&FI@pB2Vs*Yi1f@-TBe!uq;*^t_a2 zPa{u$3DL6Fa+@(i+NHxZnaqyYLI}ZaIQGeGd}|nIPd$v$927gv61dXI z9T?-$^O{u7g3~J-Y`nx^f9%d8W&}93&=Dc6uxF*v8PZSkSR?=0Ch|RpZDZ9;@`7Mc zlKqq|yJ-rJ-kRYElj0=6U7Oq867sY;wd@d?3*rgM00Wf zU!L*(N51~65>h1P*T4UdFW}%#{UGA&zn0to=K}wg^7>y{=lEX*De?k5|1tFy5grQe zPVwZF+?zQ5 zFSq~C1^)Bz|2-Nl()a(^v-u;#cB1sHBaZL!fN%>v{`+4gS(D1|e^qrp|5F8BMs3sU zj(w87bBC-yHW=UoJ*;7M!ruKjAI++#?~_em&IQeR7qJGESf($~g(D1=*xxz{scXj0 zfExMf{{rypq-TaUzE2`D@U!s&wSixlP&^RzjMyJvjPEmG2Qu$jmBXchJcc_*s`^yB&Y2x zBm{~KO0+3bCMj>(o(s%b=BrV6=mH9u|GB^%V>*JQBx-SbQ__?jFOe4G#s1=L@e~ZX z!%_O(4+aK-BnaLog2m4j&!WH!? zU+S(4&EDAVuZJ}rz5XoE06^A13b<$egDmk`jPUsN7X+5beVOKX1`3!^kE`L0F{Q)OQ3|apuU|s8PjodqV;PLCv3lh=)k-$3F-z%!Zo-UhW zO0jGRe3p3h`g6zwl*sx=0d$&9|JszV_Mogn`^WTg$GT6~oE7;6`_BMYd`DgVnBmOB z8()8x6{(Tz4st(O5%d)$#WFOY+4R;+&zZmsB3bsw>wk$M_CFGcgZ>A+w50z5L-cZNRJDZsEu(m^)moPqxN`3-Lh=IQX_%)M4j6$qE-Ksde9&(CaPGJY5k z{r|S*KNk95SfT$}qW_}+RuVQq|4*tAJFsquss0xw;T;u;OuljXkB9yjiU0pXz)JiE z=zrQ7$;uho2CN@qs{e`q9}&bs|08q19RKHu{*MCKY-V%xf6z%6nisllnL}v0^r>T+ z#4bD;qSF5&&)+Toi~b+t3YH=Me-!X7F|g3eT25j+5iDIInHU+et6BCCA@QOjj8_U# zst;X+^cFY3n(`3haGc(-S3)VS4DFAfiv{BWmXi+9otY4gDl={|Nxi@HCq`?2S!#OtouGs@W=)YRyi! 
zcv8>TDh28Ys?@%0QmjC+*=;tn&5P7wr$B+k?MqWTeh?4)&n?@3690tEG@*@BAcOn` zmXi>wZ(HNh1jPs}cbkm)+$L3Kv*S3xiVvF|a=v6|fV?8E!h=1oE!OMhPoUQRd@LP* zIXKCmvE41@vsdH4#?}&TVl{~>^5Kjmb zreFcjZw}Sa-;~#R3Yr(Rd*wcleqWAuKjrt@o#S0D|G7{q9+!&6<~~ zQ&0o~mz|JMWH*I?cg3ZiN5Z{wtzOu#l#4rGxTDXtWLxw3UaQf+bGm7am;3N_8@Lto#whw)9vaURmxG#6;lCV-Mw=o9qJKVFvt9%=56ea*P-t_dnRwo#Gbp1TRDjw#) zoRoJ@c3Pm!=Z}u^1!TYXxn}-zz4J-@B(4^$Y)G&qNq8lcMU1sJy5@xjpKmojgHq)S zXqC#9y;`|YZz0e9SlVwEIxFeQA{U95CWIBT8{=d?fUt(8$Ob)^kGXNPH zAR>#CMb;ItY%-x(W79=;=r%cRCt#deOEW~cEryX0FYsA`5k%w%a6G@VHRM{jsih_t zuygH7B9BHWX0yBqGFe6pHTyL=X?D7mLb;gh6Wf|>IB@D`+-sxbf8Zb<_Mct0{}QqP zQ2?9eHpl+2i1QN%;CT=O`!B8h{$C{aKMH_XBg=NFLLrwPSVQRV!P5tOpVG^`U|Xk9 zah-HX7_l%1Pe>r~a^wGh-+{3x|f5V(1Lbaw+?K04~OwsGVP)4fWAYUk8_`SDXNb=hC$VZ)Rg| z)XSCKZoASc<b?0-Mp~nT993QW5b$(S%3GyqY_FU$CtzeF2Nm|Ps zHA$MaxgMt0l*#bRg*g(H3ur2{)O=nzt6H4PTcW`fjrhLW$QMJH)?t0V(5i~zt`76v zIhMnSaJts4%!Cgoj*mmBGA7?Fm1V-IX|=}A{vvw>B=;qDJ-oWTj&>BZfx&wKXga#9 zU%+)VdVd>%ztXcT*Kuumly+RCYfvlZ_$sA9)3qEab(ImjA|KX2Ug*=;K&(WAlqEXW zm2<-nF4e)!O>fMf3gXSBq8R4`TOI---P6q{PUtB$HA6mw8BA5LHrA)-Zd#yOf%eoW zp3O8z8KT1e-xNo4EX}0+=?BxE795Ur{9hTQd74EH1^U`AFm~)!Y2V6RsB^922dIUt#)$M>86aM$k3no`Z1{hHsv9_8RrD7`3{k`sYfkQJ*398nx0? z5ZXT^f;; zMeZFHYx#P9w^R&rAO4*W{st@Mg}<0|;^%*?_sgQh&x^zKDYSYD?Y#zawmeCFKVkJ> zC!6d4z{2axF6H&wb*>#34s(5ILR(iT*;df*f7ve%WM!$dPNP-2XEFRT-yynRAT*~~ zf7OPoY{!0tBELxFlXj<6n=*C+@u>F)*|p$fk`(#rgNM>h#TctF^yr~cC&9-g=~wg~ zJQOms!N(*y=`DB+qy5U$#560^dnh^04n`3?hS9SI=SJi!wz! zv4yeEAs32<=P^Mn7g_8ynAxI0b&%pKb+GzFYn>W;GVSpdl^VKZ6Z#2MYjOV{kNEG3 z`(FWx|3(39HuC#l*L_f#gn*v`F^K=-%R!d!{~+<-NZ@b(^;iGl|Nr^>@9ls6QQiO8 zNbdi$HTv(p8lC#%&x4~6|JqB|e{VJa=g(j0%AbFH`oF)t`(J&0`SE4PO<+9`2m24i z756_9`yUCgnT`7XH+VaanEZ7?4D3JPIG))57lLmIEL8X~ad!w^89z)fT*0ryn3g(! 
z04#JSYIZZrM3NB_Z#{7<%U8%uFpHPXOm3B#b}_kK<^r0kWh2BcG{?FLE2azaK`srk z-q`krl^71M@X;;@`hS7<;Xp3&HJR`tT(av8k-?|JY-qY(Ez?pgQ^sYK(HPTv zTYLy?IMNMhhem@~cy6w~7E9dvr^K*g!lmY3T=V)unJ%h?TMEbBaEpHA4WBC047ra- zB;^S9qrU&5e`-*&t5~yMKdgL#i zM-^1U(9JWv6c@Vfj%CB24L*hU;8S+Vw##iT;*RmpA^v;#ji@vB1Brr4#UK!~V&U`52-F{tu*MYz?#KZqz5&t9SKO+G) z!)%WK@4rJvoc}cfx&QZV%YPj7KRN&Zad z{g06V0IyB|&#vbsmf(JfLHv*9ndSQ*$@$-C;3ukJjjnCIKX9ow79FbTzw1vG;RQ6T z5wsneYC}jB8qJedWp}SbX}SR^&9W&Ky0UIK^wv*XKT+*w@iY5Ji zZR$gQgjps9XohOEs9L4-acfJnts&(}H5~{nr4PLqT7TX`jWtZ;+J9Z0QlO1`F)4f) z(xEJk^2D6#K}8-rka7o*vd|x!vP|euJ;ax1^p;nqtLnBx<*466hnA^?_n?l4BaryI zwY8-fvg1(IpumTwKbVg=CQ4EGUW5@ITaHQ$Tz9mcPG4SL(jyyQ$fk-(V?sCW)uadR z&`5Z-#c7|=t$K3HYWXVc^Sb;&kv#F*3?iHatV>|@j%Q1nB^r^ zU^5J4MF4qG5i?m;>uHIPE9wJAn$%}l!1018WwLTlQK7b&UQwTAk(55YKEBqI;T}=n zYrsF2>~wJ(!Aaj~jz$H#=!1jsNeUlpPg02Rntm0mCI;6UD}xU{nx3q~$G2-KV&VUR zPdCi`mqKl~d4Z ^24w&Lw&gz-`hW5LKdk=+qW_}+05`fn{kGrX5Rjk#>P;X!Z@=8GJV`yIgDTCjh2l+4=D0ck#HoZ$?*t~KrY+kI`Vd5zXT)%z}Jih2T%_+77Cj~5X8piVrC uqeEw#`t557?JeR0&V^Sl-WhrLTVj_85FkK+009DVg?|H>j+nOq$N&I2m@gIp literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-core2/gcc-10.2.1/libelf-0.8.13/test-debian6-core2-gcc-10.2.1-libelf-0.8.13-rqh2vuf6fqwkmipzgi2wjx352mq7y7ez.spack new file mode 100644 index 0000000000000000000000000000000000000000..101ecf4f351db27027bfb5ba96177020119ae4ee GIT binary patch literal 3622 zcmV+>4%zV^iwFP!00000|LtAra^lDxpUS6{Um_}|QZus)Lbt$MR1M~^8FLx14YQe| zgwz7f2nkEV=Q)i}dZ z^3@ZAf)IJ${f1clUU4n(9RG^opSocPW80A|SY;S>MGy|0|Ivj5Y$su=30pQDhdk39 z2N=Dv)n5;5+ZfUax- z$pKx>fWxQj)UEt+@A+p~QKazvhkb50A~g8io$T6jgAKTUJy@xtey zrUhaoIfKj-D}uf(CuoXfNIJRo+oA&;lE_ z{@1l0(A5M*rbwDiSSJJKbgT;M$=R2oHoEL<%-QF2mS=`1;)MtK}7YxAo8z>P-OCj%YQWVKZE`MX96u!5!e5uJ(A=v 
z_%`5a5K;Y){r`{<1^v%bEA&5w^?w+k)7)n0f4_n*FithgG={)%$j`QEV7u@rgr)xl zj=Nd@d0|QaBl*Yv|I_0tHZbALns#hE=1*NL9vkVBqng$b649hWikk|KR3A8y=uWOr zHKZXx=_E~soB)Zgymn84WoxDZV`!3se+oFIh_`8x6VqZ!7FAxLrL@EeK;R@g)e}UO zqI&R;7vQ(}4wmx;SO(Ao1-<6kzcLI-aU`8M?2Qd)Of)NXqTVPJD)m-3->GIRr5te# z6yk$q5HwHF>1{f-%?ZR|D@QP~>*pTr=phRBpAp3+`_JI@zfeG@>CLkLL&?xoSUxuX zYe59;zsR%Pvi-;Ff1%(imW;UM$F{FxLsNl11}&smCLS}-3}C?!lY+hi&5DrJx6JWq zLZWz@y^cq?UtaQAzgU)`g}e0&x?GYyfW8o0;+Q?QDO9V)1E$hEYK!fU`<-m3nQQY0 zJDm^Ee{wYBn9(9f(=01eJjqijhUZeyL(?u{3>E1?^3}ctM#wk))>y#rojn4wYsiAk zK=XI(;glJU_ydt`bv`yaT(eW(J!;fG3QYYYCp3%YW}U6CVg`z&ItHbLVkGcfjv-8bwwtwnhFZ%Qj1`Qm5Ih^TmVe-ch4cSSd#|2jxhZ3>}-M z=}j9ubL^n8Bm0++8@@Wfcd)<9G%8$Yw^V6=XdLBoJDvS{t&wjW@=SL1@Ug++}nFxr9Sv}?bwPwBNBNb4fV%K$bN+{aUYL`V}Y#|xhlKA+c>TY zJNrCWs%A^<@m=)uMp(96*k;LL{PwwLq3e?{Il6cpJ7xJAmu=LJm_q3z(%YtIe=+#^Zz!Nssm)sdMT*46i@)na1lE5kp5>R;e<=~K|Av99m}9~jga(+TI9^DJ zf(Wg$K#3d!joJtrcy!zH<5kRm%nL`W56}~0s51gV8_1@i*lsZl)aNNqM7wy|v;Z&T zG1(j&4m6I}@o7Om=gf+l5rT!Gvw+gDL{_93noltlyRtOs>o|#}A{VH0<%%@VG9n`| zDKW*-sX(cp;*(N+zZ=FOo{w#-l5T^k*Yj1cVG|sHXxM*#+5Xem{)YiN74`Q&ZeIDt z4#49P0sAlVl!%}IJ`=$C$g~_Hm&+sv<`8&m0?GZogXA(VSmtLSJ9aW4kVu&QD=3s` zy3zj+Thn5*xxGR+UutG|%7qO5!`9fa0ix%om&I~+w>b?}vipT@9j(l?yY*~qZ&p;T zS}g5$o26DElT^TI(irRdG@F-n5-1-wyQONbe3&nE=VR)DOTXet`R(@lhT!|}r}ebk z!CKqx`R=Z4X1-I-cZ=C_c_*9O@0LqDjclW{oT5~178)%-zEo(=Mb78)`&bmkwe(RH z#aWr_;T%<-G`~2{5yc$r$}BV=7mOl2j&=&x zQRvsp@EwDmX*#xJNu#9gK$`~pHHw2I4fR;=@n|tXgnGLN0)jwqD>gS-f8XCszEVobPLs^QlnO#5%y}8!c-8N zb)+DX?LsbW(lO_Z&m^|nKxKBbjonP*SdS+xlsLYatOkXzTxfJ%CDPq56zZ8bZ*EW2 zDAdc@Tp{x&aSSZWv=Tohjtw*6US|lO7@*x)T(e%tWnk0aXFMKVOovR#HEJoj{ zi}}LN;cmBF+lBVH@i6^yV~M4F#&_#|e||A#3k_JS*3IVgnFMs;rJfEF1~^M7j?}kr zO5JQVN_CiC(d=6@KsM>FODjfJBd%a>Xi4rg^~Jjh!Nl4?{%9_9f4NlMhs-)DVnyr? 
z87tXpcDIoCWAFZ$2R*b(xGPLlGWOk1>wP{H`);wB-UzMkLUXSMT`gBm-*}Av-n)PO zGZI@*Y6-W-u6LmIA?srW1TI}Nti=DC1ig&zg-e2l z)&H6VC$;&PL9&;58kj}}Y7Ydb(S9rZ%OH7H;9QG5Wi}U>j|#SNN?1-3D1Js-NVd@S zIpRXnU_T~=D)xZDtm;b(b-d1C*3sJED zjIiSTS8V@70WEIK^I!i}DQxoBi3r$#niBE(AJ2uW7!oYB!MQsGj)X3z2X3JECJa-V z-vBn<6E!=R!;@r4Lifk0gyrKyCX~g4MkXhUOpBkKEOH7A#k3${7Z_vB042i#Xb+bP z*=}sPO^OYN=V(`#4ZKgr{jvcY`xKA4%Wsn7HjqLa!Yp7oZZ5-=O+!Lyua6b@%pn{k>`6v&AyH=zJLotMlaw#x z^OLg+Nfu9gC-UdhzMu?5W~l3&Jkm}w5V3pgZu_(uJKQmDLp1!q<@FzY{&P5>#n6BM zS^jpA8$c|9i#ztk@Vl9$5dsZ26CZ{%0v++5gAq|AzuP zy&>=a-gd+YHUtkw1lE5kier}bKVJV01xSXrw+Ldop1DoD2@2U8EL-0+u>F78^1sRU ze|nw~)=>`zzW;mK@*fHPPcN_kVf`Nt7>eI~{qLV=ht0tKf%X5(mj6iTe|E+5fBgI> z6fpEgJ^#P!d8XJD+!2vj|6BF@&kSDw3j>kR|IG65f6*e=|6zckza9U7c1So@|93?M z^gm70%lrQ+7VG~o@S4b(qYF#x4;-S21c<162aynYa0+yD1T32*ng9^FTD{XK?e4V* zRnq~Xniin|N78hg+AcmN{o$&C z>6DmKWJcsvAd4$=QQz}3ue4&18!;y#+=>3p5F|vTFo%?fu;Fx1rq(>&bQG18g?#3Ys{vY1| z`Fz-z{hvL}kgSWYqS=n6^~UJUF05`Ik0|(m472?Euh{>^dLO|11m zE_CRVTS?qIAZ`6s#63sS^F?6M4>@WtPFO&f&{2KeNjdA(VsT<1S&FV1U+k^|l<^2C zumWogP=r0`I_9+6tFM){=J{D?H?Q4cgd-0taTT@(>}}%5PXV$!1`9L?oIB{I!lhqh swT^*-fq{X6fq{X6fq{X6fq{X6fq{X6fq{X6f$;$R9|R}?&;ZB)0OHj)r~m)} literal 0 HcmV?d00001 diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json new file mode 100644 index 00000000000..3b9fa2ec229 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spec.json @@ -0,0 +1,410 @@ +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"libdwarf", +"version":"20130729", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", 
+"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"n7axrpelzl5kjuctt4yoaaf33gvgnik6cx7fjudwhc6hvywdrr4q====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c", +"cxx" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +}, +{ +"name":"libelf", +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa", +"parameters":{ +"deptypes":[ +"build", +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"u5uz3dcch5if4eve4sef67o2rf2lbfgh" +}, +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] 
+}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +}, +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +}, +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", 
+"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"2a6f045996e4998ec37679e1aa8a245795fbbffaf9844692ba2de6eeffcbc722" +} +} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json new file mode 100644 index 00000000000..7584a164998 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spec.json @@ -0,0 +1,298 
@@ +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"libelf", +"version":"0.8.13", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ejr32l7tkp6uhdrlunqv4adkuxqwyac7vbqcjvg6dh72mll4cpiq====", +"dependencies":[ +{ +"name":"compiler-wrapper", +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +}, +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[ +"c" +] +} +}, +{ +"name":"gcc-runtime", +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n", +"parameters":{ +"deptypes":[ +"link" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"jr3yipyxyjulcdvckwwwjrrumis7glpa" +}, +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ 
+"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ +"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +}, +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, 
+"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"59141e49bd05abe40639360cd9422020513781270a3461083fee0eba2af62ca0" +} +} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json new file mode 100644 index 00000000000..36535520cef --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-compiler-wrapper-1.0-qeehcxyvluwnihsc2qxstmpomtxo3lrc.spec.json @@ -0,0 +1,80 @@ +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"compiler-wrapper", +"version":"1.0", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"ss7ybgvqf2fa2lvkf67eavllfxpxthiml2dobtkdq6wn7zkczteq====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"qeehcxyvluwnihsc2qxstmpomtxo3lrc" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"f27ddff0ef4268acbe816c51f6f1fc907dc1010d31f2d6556b699c80f026c47d" +} +} \ No newline at end of file diff --git 
a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json new file mode 100644 index 00000000000..fb3a00f77c3 --- /dev/null +++ b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1-none-none-gcc-runtime-10.2.1-izgzpzeljwairalfjm3k6fntbb64nt6n.spec.json @@ -0,0 +1,132 @@ +{ +"spec":{ +"_meta":{ +"version":5 +}, +"nodes":[ +{ +"name":"gcc-runtime", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":{ +"name":"m1", +"vendor":"Apple", +"features":[ +"aes", +"asimd", +"asimddp", +"asimdfhm", +"asimdhp", +"asimdrdm", +"atomics", +"cpuid", +"crc32", +"dcpodp", +"dcpop", +"dit", +"evtstrm", +"fcma", +"flagm", +"flagm2", +"fp", +"fphp", +"frint", +"ilrcpc", +"jscvt", +"lrcpc", +"paca", +"pacg", +"pmull", +"sb", +"sha1", +"sha2", +"sha3", +"sha512", +"ssbs", +"uscat" +], +"generation":0, +"parents":[ +"armv8.4a" +], +"cpupart":"0x022" +} +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"package_hash":"up2pdsw5tfvmn5gwgb3opl46la3uxoptkr3udmradd54s7qo72ha====", +"dependencies":[ +{ +"name":"gcc", +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq", +"parameters":{ +"deptypes":[ +"build" +], +"virtuals":[] +} +} +], +"annotations":{ +"original_specfile_version":5 +}, +"hash":"izgzpzeljwairalfjm3k6fntbb64nt6n" +}, +{ +"name":"gcc", +"version":"10.2.1", +"arch":{ +"platform":"test", +"platform_os":"debian6", +"target":"aarch64" +}, +"namespace":"builtin.mock", +"parameters":{ +"build_system":"generic", +"languages":[ +"c", +"c++", +"fortran" +], +"cflags":[], +"cppflags":[], +"cxxflags":[], +"fflags":[], +"ldflags":[], +"ldlibs":[] +}, +"external":{ 
+"path":"/path", +"module":null, +"extra_attributes":{ +"compilers":{ +"c":"/path/bin/gcc-10", +"cxx":"/path/bin/g++-10", +"fortran":"/path/bin/gfortran-10" +} +} +}, +"package_hash":"a7d6wvl2mh4od3uue3yxqonc7r7ihw3n3ldedu4kevqa32oy2ysa====", +"annotations":{ +"original_specfile_version":5 +}, +"hash":"vd7v4ssgnoqdplgxyig3orum67n4vmhq" +} +] +}, +"buildcache_layout_version":2, +"binary_cache_checksum":{ +"hash_algorithm":"sha256", +"hash":"348f23717c5641fa6bbb90862e62bf632367511c53e9c6450584f0d000841320" +} +} \ No newline at end of file diff --git a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libdwarf-20130729/test-debian6-m1-gcc-10.2.1-libdwarf-20130729-u5uz3dcch5if4eve4sef67o2rf2lbfgh.spack new file mode 100644 index 0000000000000000000000000000000000000000..eb99e6b274838d045a6e1561139a55d3a21d879a GIT binary patch literal 9058 zcmYkCWl)_x7p`&F;>C-*7Fyi3xVuAfcc(ZMcXyZK&W09>yL*A+4jX6h=kz=8oSBm! 
z`I9x1m1I`t&UJ?}4h81_HsB)g%zvS?<@(AUKr6g9TcC5@_g{DSo6b-#QY}(63$^mS z{nVb<>FKbq$bNkdy|{|E6im1PBjrZ^Zc$7<=1!5}DV_Uj`5G#`Ow)W<7>p|blT$LQ z60&DA?5#xD3b>4e)~fOMIOpNNx)C_mQ$LNpCEour*$*Y10nOqH-oG^>+}=6+Cdh|f zSKbXy9hsZay0`2l02(5GL4b+E=ylD=Vu3iRMLN}&CqH%J>ZNjzW?~UzXMc1O0!r#o+WsW~jd)gRPK(EN5&>#flh2+_5U&N_rYS35z$9~Kmq0#W#-u;r zk-03}1%PlOx(VJnnf~DL^oE`?`RvEL3GfrZ27$?XoRz@?r)%su0% z-S*55>~-er&JDoF`gTAE-o6+ZTxB@{lN>aQa5Q%;lV3-R%4F4wTdJ}MC%fhDhbxbrf;9~UUVa><0GTsyP-`p^p6kah(!28_P!Z7Y1 zHlAP57eQHayJPsrcW!&}`$iZQ8rlwl&odw|=$xROfHj@qhwN-z<91b#Ho5W?w8GP@ zubX&pQuP?@jZjjx*>N9S*3*u-dO8fecX>Pm6^+Ge>jUY25EIk@`}4P-DcB|3tFOz% zzVy1(Ve9Eb+P`u;X+um(O2$kLnDuM9P z4?3gCT3UUSyhevKZ(Q#{e)QxliQB@yuTOtegMi(!o|WLhG-DBgbYmg^2k`NI9P_)l ze0Ehmd3jd5TSyUS`g(qlgJAN-ZFJx_>CP|lE- zudsJ|{lSNGub_j<8Elj2sJ$Hy6S9JUfFKtyb-L&fBHAXw70<_vQKqvC;_cB6@^`Ha zp_i|6+c`umqX@b&7E42VGGwuB^eS||NRe|`=9#v+IT94ih8+XR?Q(Jm`-p>rl9 z3><|Z5y;amxRIaIL~+I>;O)-&;5m+g63aw57`o?qpj=9$n3I-0Skw90(pyN<+s7m^ zem8N>NFZa_Wn9|4hZg z`=syfC=|TAw80<6(T2SqD@Nh}5o7+`>T{#DR z8s67QEEM&7kO%^FU$3Dj6QJ$!(^rP5#KfIy*^&y)6Q7}6TOX~{OXyzN)rM090lj=l zQG-}0qrU7`AO8!D>kk{P=UpRJ&C9RrfyIoP*%}%kJiFMQ+rF}j&jz3H_2{IBUnS-? 
z9LFzBTvqjmg5?Qjfcev>My(LW*he-fI)RQZa+(AqTS(IIm+{|K^;Sh|+#qVp zuU~8|7z#QcIC+OS7C7HOZ!_ljy_?jFp`VrFy(7U^f1yer@xmE4wVj7OaM%nro6LQm zyRM7%EA%XoE6|2Be+_;NeJ{DD>(+<&&27Kpy$5r}f=tsC>p7ME22|kbG?;GsS*inm zt7%$xKEkd`4b(X9RMwr@ezAHvY*qRt%(D6SG`y|#Ys<;GkQau#gW(tY)H@g%yQuYn zn|phV*0wS=xmhEkc9zk{)&`HA}3T7^J`mTrg)S)xNC&pkU6j`&7g`H!3Roqz(Jj=J9G z$gArQ9B()?{2q0+3p0 zTOulvxuP!$&{>tyk^EE=O9n1SaM7=nJW*ehwabs%t12UuO9qL!DMTzuPLc?@Q>9pM zApY5H9#fhO7P4fRRx@dPS#m8WL^B^-#ag29#?lA+`7NHq;z7qL$5TUkRRD^;QCAh4 zQFah>+>%|JcJ237qClQJ#^jF_UE`+Q$BG44qcx?9H;Z?rabmGkYDKvfAEMTYRPdr+ z2qO1E_5+X*arbg@9jn4A>4sn4zT!2~4TN2kc%R|g`;WiQMKu1FpBrB&d)am@ap+v`_6PSz72}Yip1DFbZlK6bB7h!Z zw8T+=9`J+s@5}X(jH)Q$`JcWiOaG^FK!771RqW&88)y$pp^w-Q@&755Mvw7_V947{ zdI;lI?$gx-1&cZdg5saceGm+OTODxr)R+#VFuF}RV%2aceA&^90zvZacD{182)$ZV z_qZxD2KWu2SC21+!rzLP4Hb*F0mt^Y#t7&N^ERLg0&MB1#(Q#$07Y!lS|S9b|7Jyo zaCn!!ya5SfntU3-45d{28q46;DkTbJk7>f(9};hmTMV9 z-aAL{0lF~;FJi}XE6%X&?N`1xpSW}{vXxz26@A!$dYquD=1)s5iBK!Waj4SIVL00| zdlPI_$)2Qw5@?{k{e%mc6M>mK{wh1jwxBs|Jt~-NpBqz@LNDD0=bo|FxlWBNw4pmMIc9~(yaYKzs!@!fvykPF)DbRJ za1GVS^!%0R!|r6RLrbq%s9iXn)U)K5VUwSB=#glB32lZOLYvCFEo57S`A|Yf{EGhc z$!CGtZaZB{Ufsd7j&k2*K651C;GPM~f@i&~>s*s}xu$&ge<+($vFybiO8zC=h~P5K zDVU^`hDkaNo}M)_HQ+NOCGn;RniQv77mB*+Y}F7ypQrR^DtO?kxPqDJa3r+C{7X9Y z^b*aP9FDLXuUJSon(YPS_LtiIhNJ|q0c8Rg)>)gz_o{Urc6{0~SuU+T=+AdYJf?x~ z(Oip0f@IW=j+x9b?-8fzCRO4&4=nknB%P%K@FM~nxZ*#EZf>lRtj=*>mU>*2C0Y(V zYR=ud{t;8F4IPJ%y$n#aEEN3??}oOM+ed*Zcj;noO>C=vTJ_%OBmy>haPqyslX8Vi#qxK9p zF9^>tx`i3>N%*gul``+!A=&riW-kqEj3=GH`B=;hsz#r{5{Qo`VvR8u3g$HX?Cddv zPS&Vt^WgRqiFd}IR2LzU#mp}aOb^l;^k30W0*8;i7aJVx$lG>y+62hUcZgmsb=vXA z&SUL#!dRtGav=?6B#5n+r|{laqQxo`V{Yvy{BX7h3G@AqHe-^)=Ol7Nfe9bXX!ub_ z+b$kLUUR5j>l_;(AVa>h>(3*vFq(v?4ESD2nts^{7S1V-E@qp^`1N0YR{t)sOGUH) zL}~{1AI&a!H5kH_OOu=!3M`pX&xg&Qw_w=&&6jO?0QYVVT+p02NHu5YOBEi(Io|6Iof9uA*#G-=&JOmjUkEv=QN>+QT| zh#Mj1(@iOQU4fzKS}^I5rN3>uys2y%(V!G7H=WfN0i8vZB724Y#;U-QnxzrTQlM*eT&6L9y6CJ?{8S%p>7O7sNMskVNU}}pBHjf}EQc`HA z@q8up@Uhh{nrRr7f$C+LYaxs(u#$j0m3e9f3M>)TKHCq_JE>~1P1T+% 
zQusZ0yO9&H?@zz$p&_BIR^Dm3`mY6zX5^JSk4 z%*8bI^KB3I?bk{s_xT+?r$d>oNL*o9VaJMIp20btrtWvDnZ!I%^E2OCKhrX_yKeLx z5p~^o`0qh8rATjYh^O>}lTFirTf{j2U$5r66sGfI|840AdG#81NyoPk!IBC~Ynl>2 z5rpsD{G5Me%_&&x*2C9*=-6~>yq}44NM6V*##E(^w;#uAbaW;rRXK^wL7A5@r4&8y z^Za+r%!?!x;&O>3{qG6A?Bw! zMwZ$0HDMU2t){>S&nRPKnShhzy1GE(xeF(r`Vs1@?605RuBlhJ-#U#Jvd4RxpId3! z+3p%uX2}$KLPgsTm*+=M)6S(>r^p^wolZ%z4t=B&v)+%yt%?du~ zvYbsH7`852&dh!fSoPgdOY{?$7@zWh7vn!EH1OS)Oje;JUD&%uWfRt|?bBOJhRg z1`amD*NQ$?uqe6Ha2fpYt&+3O!k9w%z5(v9$L-vOZyhuLp56s~`f-8fn`;Z-#EW^j zy6Y7KepxU(^k4>fAk7rCHmS1R^n*8j7G-_UU}JmX^Hk7H83v9+`|Nxl`)9@Vd8&tV zjqwL#;Z|>UafOTGF0k0wYGnuOgxb8S_CtzoJd%XE9b_MLUJ$}4?ityCFcdx%rZjhL zF=I4y5@~!J;A?-*yd3(=h$2Mw6ApH}%j~?=rXY*Mk>>gqzdotT*G8k!I>Vwo`#@hh z!3Uiu7)*Es)IGAL@`+R&!EEe=GQHD8WcL{$&!S)W90M=Vp`hW6%-^)DGM5?FpGhT! z%UTpym6H%D^jJ{DQNND&jZW+t)6sNrNQ=C}sR{09-C%_ojJ;^U>Ygcp8SEv+Mm7ve^G7SFx?22#ZEtaLWZyp6@n2Q5? zG#n~J=yM5*u3@c^!1;b8nFHR5jmAs!@(P5z&F{LpoC-K%op5QhgnW2^|FUSKPr?sRe`m(K-1M9< z_KHBjyLT0yxv$%7sC2b>gjL|{Dd!N)uT505>Xk;J(d$B;hUQxw}L1I!1_~O@owMBpBdo|w%v)ju8@o4rNPqF((T&P$G#C~?1B%3cC0P% z8wHB=!gwn6?U~d*0Rh_I2g46)Ld*xVl<9c(ANuC2AR+pEygj^ckIq=iD!X{diE%XP z)8(%#L*dF^_24Vw328T(AJ*frdh$aVN-+Dj*&1i~#5KwC<3fR!O$UX+oZ}7p7OgI5P zYvg$$wBEils&l_cG30+MK0&&rQ~kG>%Zc3Ge0fg^`5J92J%h@{hwIg=UT4ppkmVd`cT|hxGz2z2Du-mM3c;HQs@r|;R>k1LRUA_UU!o&-9 z7dBJF@no0Dy(t#~2^S}o$EHD~8FRMFVIxue{fYaIzn{Z8uf9!xy0-&=g*f~0RG^6M zPs0#fQJHR~L@11X3)iA&5u$vfm}MqyK|w()UH`&E7@r>${t|MtVI$YeC%6^?73)u0 z7W^CpKKwIvGe_dZ;ZDU1R-K8$W8jD#t;FE$_M@hb^B5~13^)kJW{@88;vEM((AG@M zx5(zaVW$>;y1?2HXUB>E{$3SAdpU#G%!D{C?>|W{Bw$Su{kK{9kii}qS6QyX!Hvek zm^Giwl!=j?%_+83+Ibyo+;H5Fm-EOHY$N4TBE@Qm0K!D<)h`+SBe(``k zfv4UmOYQh}b&?=$=duy{U%11;5%WUE-w{c?rxW8s_dd(3qq;k2C?`?Yr7rzhW4!1c z;#PAL^Ub4?kg}8I-Zzo^0xPn7!E{#zVR0Le53Umil}~=FQLJsUOIAFIg2BGX)Uy{! 
zE-}ibxcE~t>|etswh>bA{?KPqGWyQBw;yi~n-ZqBpgXXvST}u7-y{eL3GP25=PNBg*@~FKW~}t@M}hPDRG+( zQo+uIyxbMb8>2>oz~@@)gl^-bPdCeqSSr|Y!>(LsBj>z$D?<WLMu|TnD!<49jzc@4o?`MWL7NMY5wa@76!bWKT zDjTEM`w{{i5n5syO{}n@__Zen2SKIvj^HPi{0Da6dG|q-nqrG|O2%KIRC5E2z5mEZ z%BSRe{RcMS9saLCq3^68)s$hGTN+^Jj9~|B;BlK_ddy|G`|uG(|G{u)vsw6H_d>hVN&kEJ+QifZLp;~l-J%vorJEg9E7{vuM&s@~hx;!<_K_1E9*)0j7ZdboJt?eM zWZ&K-QUr0BshL@kvc1^r;4*W$EOXSbmj=U@r_FKKZqqlJEpE;fu?$!&1l%3VsDEdN zwy<_*N!wAB9aStYGFm*CrD}clwN1^OT$K30?jd`pm|}ZBtjyMY$Gv}?rG(3qP!}n_ zQ#4FL5u&ZwJ|&43tavP@f(AN@&)_v5(Dgcxb=G2;<~<+Yhbi49^CS6K_JFf7!NoGs zH!#=qXQ+Ka;b#dM@7ZuG<GZHFZ9qTf0juHbx^V)P-T*}C+T0_-P3(|LdZZ<7W z>0!QwsyNmlp6%Sc2tjSxb<@XYiTJHScg2tX??XZ+5Q`B>8XOH-1*H8J3O!29GTh2V zJw@OzivS!+S;^`3iJKN_4tWNV_=+V3erR9PY^4W22RlOv5aRv`A-NQF0-QEk9fk)CcPpbPt4Fwb1FtQbQPsoN zAJY?UoSU-Au7Kc-jm#;?=jt2PF3MsHVF6!t?v}V52FEb|lu=mA=n#z2G?G;LAf6Dq zPHf*o@WYDN)!nb)5MJOK$ifCr_z$U(6~DOW6n0Foc!_qt_z|r|yoD2_r0@`8Vw4Ok z9WTcl<3Co#+8(IRs&lfX;TA`cVd6+D=C7Y*#bdk7y?V(%8ZN{mM0T3XS5mNJ<-ZOn z(kY_AH~d=Vlp`i2c~yvO={**OLCT}*zKq${AkhN8*AC)1~yKpLj&JV22zBEw0LOFhrCQ~Y&yfF0hOI|bsMpiCgA zK5cCvJ^=eF+bciVo^JYWuc#VUr^aEBBGHgn(26CiZL{>1uI7Qq^>FTk*h<*&H$5}$ z?Ae$KlcA>X)WttBI~>;*S@*(*i=*Q1UK_BDmQj}X-c0I!pe^c!|D0ASz18qL0NNzYHuY!$X&1Wm;?s-O19kRj&3X-tMz%V}rNXqi+|?wFAr zE-uc$(Bt)P3oPI0#Zq6Te2rTE8#OmNCVKjif<{LkrSkd**+hebCmQota{J6f>)L|O zf*ogGa`idCW*0}?X@i`X)s$wqQxr;#oaK^gPfx{C$Ggl)^T4Lk@_WHB#vYDoJ1#71!7M!j%PNBBC>vvvz2d@-q;1tk7w?3*90>;R=umD;7V zfhkj?dyuXuU{CksUxe!Ti%G-(_UoJx)C*PFHrKp@b%H&s0qkFcKmIR(bp%+tp{#{f z>lA$vtsh?*Bwva2Tmsr4p!d4y5R+Hn3ghw{Kv}cc4zQUS-3L1YjY!K9K-|IWjsygn z_>Y@!VnYhopniStHW8M!fRzWp&Qus2d;xT^bYepb5g{stzYH5;SN?r6`0orGA)?+$ zkAncu{MpmY#?S6kVEvz3*2RXH1OfJtx0R)kRkf=KXjHI7AIc_~9R&2>T5EiuI{zj| zkWza&xDR#%3j1@cmU%9c`u-98O+WEYwBaxJV$+yf5ggO zq@U^DS@_Qa`N>rV`XY4mFp}LLM80}7I}JZQA2lJcQ#*U-jo%ufI(vC49)h#vX92WE zRT-rBuQlf~H9a!FCi*J-*gSlaiPm-Q`-k(9=O-U*7S?>vi4Rvhv%b(DiQ^=mtCD`| z@0Zy(@NtuQtw{*3d?#+Ayr{TS-98fdlBZaKVJsIn%KLL9aTa`ae`_p>`EgP8|L<19 P0PI5)AR6W?EX@A_w@yge literal 0 HcmV?d00001 diff --git 
a/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack b/lib/spack/spack/test/data/mirrors/v2_layout/unsigned/build_cache/test-debian6-m1/gcc-10.2.1/libelf-0.8.13/test-debian6-m1-gcc-10.2.1-libelf-0.8.13-jr3yipyxyjulcdvckwwwjrrumis7glpa.spack new file mode 100644 index 0000000000000000000000000000000000000000..37023e4cf3cdc9ce0739adf10e474f74e3afb248 GIT binary patch literal 8627 zcma)>RX~+dx3%f+?rxColtvJc?r!Oj#!X6>bZ)x41qqSv4v_}w*pi#r@9{h5|IhVV z7xQk8F|XDfYdy5_XmJ01fQyi4lZ~#{hX)532KY7_dt+!QOEk<|)Pa32GqZzh?!xu? z(bGlD)6-=&E2T|yyZ^Ny)n&BP!8q2yhbIhRd0aU(_KM}?bv&$ z{ie7L+pus3zq5M_6uov3?_)cM$Ll@maKndAfZtK}8Fhkm{$;^$XuV>td9<`a@Wi!7 zqf;VXKHw(z)bsWJ>JwA58(=p#{R_A?rtpZkCoQJ{vgAZuLGC19>0Em3CFVAH4`%Cm znP%wzM%yiS`98;c8B{$3+Q%YoAgX0`;@zi<#u(~v|3vWmf4IMVDpLm&V*i3Y95feg zE(()Ms5-$|Re;@!{z}n+z?};o75*cqUB_oX1o+QY9YVY_Lfg7;P!-%ZPS=)ab*EP) zD4jdsL`ADjI^Lh{gBlGsfXU*F0TA!K>f?4b1VqlH^13k!TE#r$+wrX$0irhn0|a0g zc}kO=puy0-z2E))J9QQA60RdOpiy2FXzaSvXl~u zCrYwMr%Xv0oD@8l9~RH<6s^lzq>_2|Hh;ixM<(LExanegP+rMamaQCw_9mCJZ3j^2sp9!ggd?qV8aeokM`tmFkN2@+$wlR${b~$Vf`>EtEpW ztw%9ajgt?iw)yw5acLjmS0Rde7K9}Wjq-=-NIhDlx`AFu(=|E`C;-TdffNk*{Is>! 
zRVqc>9cXhFWOGho8HT26L12RY@S9#*GIp~N%h#@R^&?h@$X_}%`<%ZOYgd>u#(o_W zSm!jx^{6Wm&W@Aw7ihK5O*Po#w8k0m!`I;Jxl`&5S80gHDU|Mv#$Tb*_w+mLE0a&7 zAt)Q<`~$6G7zFaa|Fb^63mH9nb~o=6C6T%O1b$Zu>JRaK@qCz`hnQrmMw84eL=|$y ze?=o9ZS?7B`JH9e;uiAL#m?&Ha?*RAVrMq?;m-S{==tDcak00+47&MT>AwAPBO7>k zpb`jufeNJfm-|0kE1$5(`VSWdKa{HlosQ78oX>grAH2NnaW%rDBx}GPmq%{Fh_%F% zSsc07wBC5$ci8mn8FdF!$pk!TeZIGTDDUF@9DL{XaQ!pBAnm8u5n%wleR|81rZ~9n zw;DVvM`v-5&1_5Tw)BJ0to;0nxFH_?z9D7>Q+9Df=j)i2Syko?vlM6SZtwbRaqi=6 zUPQ9QsC&{qZXr0>Kgh%5cJHc}8hQXt>zvh0k}lNEAqxHYs?OfNhICuiwY}#+1o|c z6q1~lg44eF(8bS|6CU23Rf6IhHDCeTK>*}1x&W88=c)%J1HHBmo053_EFSE4**PSD z_A)8>x^uX7)=R2c*q^?AakteUbbkugF$#F_@w?ioZ{fM`*1HZ5*749PgP9fL;BZw6 z9;kg~V^f>knyF}E-^4sj%%a<0eVqSmhi>!xk16|A3m1I=gR)Jg$$@Dh7reo65R2M{ z{sEiMr99A=(+dpb+t9$(e*rXyp;u3^%JLMpy0KT>p#ca2ZNp+`~JJEWfABd5Ea9*F^pBORPuEae^3w6+zQUM>k!) z9QBlblDq?Kg1+Iuh3c6r9S*0{iulzyIEz9Ncec+c7Gl8#1{$wGMmVtr)6cxH%08mI zCXkeI7{ua4ReK#Us2Tltdq1G~Qk!Cb7-cG(L^z41{C(kJ%IFSP#(4f>iE~zqhg6W$ zK$w|~%S=BUp)?*_k}f4>BuJmM`*hv5A0V7d$}&GVR*CH7cnxoayt1Yujm-nKPZ}9t z)uK426y)_8%}lyFHMiaE>26eG2&U*h(k6L~`fO}y&yOB4H)^0BQnwpjk3Blh+UFQ} zi#t7j`v!Hp#l_;m=kYJ|94lFhB%+4XxlIi1eoT%i;)}on(mj0(ZfuAT@)H$S(~?$^ zvlZg|0(#Gw#wgu;YTIK%R@)NDXqq&Sb2bJCI-kE`t-wW$_5LWXJrlioNs)JzS`&mPlSy$5g;GEWSRMhNW*w`!ymw&cE$@oT|>*Gv~Y|*`2ZxS2>K(E;Rg17MA#q zGo8;px?6hoqxCI|7akV%L+$SR&yy(%Rqg~U)!9_m{rOdAhvhlS;ol1(FMtiH`$clE`GjvaAq*5PH# zRDq`Jmu_ZD*56tqg{GExZ@hU4k$TrHMWG+#Dp-D{3Fw^QyIODjS<3su=g8d;@n4K6nw2)_swQ$e}^_lv@*3?2cWM0<~r_9HjYb zaDu`oR}pH75H+DJ`FV=rE5{+j{@eGEePyHew!-3kO&3qeuV(Ub3=Gp17G}ybQYA!DfyqW*bSI3^K~zvPptEaZhKpiQ{QqMB z_zHUl(%%O-vopm#3<6grjAM6iJ>csHV4%uHy@^MQZJ?cK(7dI6#PeitkK{qQgYp&L zGuOe@lQ8URN$O-vOA01)4g-Nkf!hSvL}StqQo#QhTd;2fTr03`HF~YfEx#9@&3$cL zH18S+VD$iG3;^bh3BVm}R$JvbJ6<|4zrqKKJokSSC6m=K4wy1J14Kuc(N)7 zU2Q~XYUGEu;bQGYW;)@6;+-%3{mDAt{H4B}=6g8Z@d=rx2x_3!K7dDlM-!n%WbFGS zh`I89o7#JKsx_aU6qVLkwkxwt7OWR|ulZ0Sl=-WJI!{@89P_v(zwhmCxF&T(z}V($ za6A$eDb{`eEhd^x9;T)t@U5exV#63Ol!$!P56dQ3nQK?Pq29~1bl!4Mo&S$D*B-)V 
z=l2wg(s2ToY?nGQr2%X+ZYrpoAk~jb;`Hs#{I2z?XhuQgzhcy>ap~}t^n|EL{1Fb= zYvL%8csSjmHjaHirVr(yRFPDkZDHyN+;FbfLU|I50yC{KII<1nIjNbgldE`RT)M`StPQ*|pX%0W}*X*&p{FA%Y|;WFG! zrW#3xL=vv!PUtSt`F@gT{mvgI&%}KlQ#{<$QHnNAB@sZ_th4_+E+JyaptT;@ zC((?R4kyC$q~cv?nb6Wsg#ov8l@*@>K9(&Rugg)s%=TX+?)WyshSh87Y-8gXRq7bp z>&z&Y8Q*UpyQtiU{caj19F=MFk>();#`bp{KiIv7UQV??kMge=L%D`n&(G&{PK6}Ek-N+$$F-d z_JrIb?DYBCy&lpyZZ|*cDYabNcbEb^=mPF=G`&Uw^<-5Tj>f6uPx+v?a>Na5n&Txw$G4VN<2>Vik!&EFM74A>s9n@cn)U`qxHg;S$*alF(;B40HSjUiy zmlrXh2~ADAiB-jfVz;hxqMQ^OVEp>cg?u+D>+j>5pd1eHOPpxcy;z=Zd%`rC+Jp>gUuBv`$l#)KNWYXWCUP~!L zznf0mnHp*Y-ffTu^HH!Kk= z$Pjx^I>7{>=;WUMogg+3k$b{3Cv+ zruz7jl7jyhvO*3e*)?7s>Nl;t50s6OUgZzm4e|nZ{XbH$=>P7u81w}~ovX<){>U9v zk7M!bu3ODbxibu-a74Cs1@N~SxLa0|TnK$Cf!1KR42&-varFf2OKOnswPv3NcVl#I zuvMu~q$w3+rc9mgwQpBhs$8XiPow3Cu91C;l@j?YrGSdw_*2GASPQ zU9DA9dvA^(cp{oEE+czv>iaCOL@p~mjrkYfOjFgM&!dm3#Lj64%T!H0L|hhghAUaU zrkci{Io!Q`_?uxoN$xp&Pcy23JE!gmhsxXb91XZ*rdMyy+Mu<=G!sl?Ds#LdhRC+B z)#z^sF=Ij9lAo0pU1h`4#A~RHU*gjxoKbep5%bfVJNVaJSSRCn3xQEmmo|YN6J<{d52%p5&%I5xah^U117H z<94yzeXJpe(1qG-Psl;jPW1)FRk-r*0^9Say1avVydH~bXfN`m4g8Y8LdMV*?F(HY zkRjXq$jKGv-d#STicN9z%z@S(Vz0*GTAmuhMAWryUCUH6xT7>ALp0t%%Le1Z-Xjc^ z?S$<;Y!NIg1^7#Uvrr7L=KNh`@4Gp0M&1gBU>$FIg8tW0=8fjNM4BdplQ?S)DQMY? zZu<0{e(~3+j_gyUisosMpajEn-&HuC7lQv8Qff5k&5*y{Nnc)uqhfL{sj$)Vdz&ey zmKS|3Mvdab2hHYJ&7~_R_kG50+$I=eI)lC5T)6V@BrD#jSkfYaQbDteotZa}I;Ltr z+DT;wxD72ywX<=QB_EP@jE(P9u`1xMDUFK4A=;l(xZ;V)D#(RDQR_ZK_DN80+#g%l z!!myb^rBwRp7VEqmn@|=Y0WR$l2lcjAE{`0ucoQaU+8KOT8uBZJ1{qxC$}bN@tr+% z8}nQc>`|?i$#wA>YYhpqU_A1(_19%_^6%^?3h9y)F~mGbbdL?8}G=ovq~g z-g_p_f~7*wE#R$Knrb6~LZ`SP&bVY>jG5)5ouJ3gODW5g?fctuBtVPI+OtT2anCvo z#yT<#Y_k2)u}tg0Q1OrEKx`=JuFf3r%W9-q{;Cv*y2~G9N}G2SVNIj8H#{CZJ3zlN zs7_LKQz_c0{|O>(a6juW8RJT0+is2`hgqZK{cUbe6Ae$c)1T8aQ$=ibBL9=~4_==i zj$Z!PPlqtiWsmTM$VNSt772z>hD@V1W+X<&_6$*b&OpHL7zGpp0O-pKiFR-2|u2}3=Bv3UjxwU_gYER$s%SXi? 
z-QAp;no4ew=DWDKf57!iahq~Klil9^IO1u)nBDzDgLNOLtK$Rz>7}Y&bIMDxpR6J&ZBmqr6e^Yif~DUL8FnSo!|0(+dOnK34eZ z2c`pVS9+0w3Zs2GPf?-4o1P&wclREO+LK!X^OGodgFVv9zWG3T&u-cw!cK)A%4URp zf>CQb4UGOoEYR}cXi3SOUNUC}asX_iJq4MWzmX$9{o*NOZ2>vM`EPjwFu0}q;*lw5 zgsyV5u_HDmJ5{ui!&U%Sp&I6tT9jkn)9LCcZKmaXFJZHg!E(c`i&Vn;9jot7ihQw{=MtHj{2TK4j`L5H=hY+ z?GV_}{|o)YSHRtWJ~d-EQKNvnc`(8z3`#4@T>y;S$lSfks1=)t18Mmky$tI-55gK2?I`2D`+gc_E0RrZJS`1ry< zs7U=4m4SsPPki!8f5QqzLoxQ&FE4|QxJM|s3vA7U?d4XX#e|Rs8QZjjj}A|YsCYD< zH@$%rbewg19&R6PQRho5@3ybcaHpg0MIH41 zj9?v=7)5<%HT>=aUtElAyiynmVWkYk_?WMikG+Vo{^4u)DNkVgN`(v5&V^;}jmA@(y!9EA|)?okiXKl8C;v#3_ zEB-8ldybx7nbvn6<*g)D$S%mVX(1(4-rP<;Z#d1WM|($>f8;NsaKnr=i*eJcb>gA z)11D=yQ7p$XI3T^t2IvI?wtwPtJbP3Bq2)Mw-k<9b7M)0y%6bQ2IB~$Bbv>!tCkd! zV_V^K1M&0^bxS0+_jDe%X$H2qK5$S%X;Ds6V5qM{`F(93%67B!yK90chXg zCB_C{ur`F%H)n1buIbXS|^L z!1VDIa?4}$;df53(%Q+uPqRBq&CO@H+==xDd#0mmH4PTB>XFnp8m?h(Unn@lr@zw9 zE2i$~m^@CgDz|WcADg10HK(OqL(fi~aG22E)VCBXT@zgu{MLW$HmD?-8Rij!Q|Ey? zs}R0qDiPNiKKT@?Wz0{P&)>~u-0D2m^Bd~?Gmf(ZwH=H)_y=ck zm~ya-2}%boQ%bz8h`QDi=ofVZ(NM*fG(zy#uZh7NHn1~HVH3^}ae)9P?vd)S*0G-K zfRC3<&lO8cY~J^lU-j1g-(`}v>2l+tcE3XD#}Dp;i~pW}9}^X~R3A!UY%N}-jj_%` zEn5Dnar0qt+eU1zV33x)lo>)cN9x+McjZWILh)xWHF?WJSSo?yLr~20VRpu0zSTvx zxAlhGu$xu~w)Y77J7t+Z!#MemB2ykgZP1gO!HZL}i@5T*oF?QwG29M46IY0(CS0E9 zrL_QU^?>AheAQ$H3gQEv5Fyrp3_{?ow2zsDwAg?UC(9V+^@zx1M|+~>L1FFn(3$)h zz-803P55LTe2?Y@pvc3rEw8ZV5>ZgDL#g#c;YGc$rqOLGr_S}z8*&Q#A-`$NSl}YU zG+k=)@7#M3X#hcQeLK^;m!We_X%FB%6`n^Iogn>wds2jAC7b(`3MGXxd)kxEk-+sm zYV!Oi!JVo41nIon6rb;3<^@g0hb*wsddo}wJ;W|QS}yG7BAVb-OJ=7^YLB}`Gz($p zO|xIX>zty0F}Ja5DMCo3p}S!(oW1`E=M2w?(}(2+$JbUReSje+W{@iw#h}}`X=!(& ziw9+z_~!pPGbd|$)tg|y*h^aOtQ+*$W@}HH$~qptYBLM3iw4FTPbW|F3|@?;_lNTY zx7~MkJV7D-bh#iXC~*|Pq43U%K&}u=oc(o3+)7Iove|n3_H!ME=LGo;%9n=Wg2ifb zDYi@P7$-Z!Io&xfUHsxms!w=|Y6ZVe$P!4q91&|Hr3?faW$C%e7fvupqcLBK)R@)K zs2d_wR^QbHL@w>>*(K1z5B4-RH!B~mG5^LXD zO?Mp%Wh^YSTvTn6ZC+Rk041wArUE37yw4{j2e4b$z%a1%O4>CMa`iQ{UHt5@%K)8N@U>|hxWTMC! 
z3WOJ-**#m-{A>3yf%O}F(*aU^e}^Br&PV$?+aU6W(LS5W?qIjzciE%y$_vK`wAEV< zB@Z?i^oGCeb$VS#Wbs0Nx;H<#1ww`5UD~>u=QrKP!pq2d!|MEOf9gIVmN)tYQBN-^ z*tKr2-ug>P0v*}k)fIl6$4iZ`3HDT2k9}R)Jb!rl!hLU>cHBGBDKWm0H}Y1PNs;t< zpr~`5@2O%%;Qf#EW`z;^7Ml0V(o!B)?JP9?vPSK65GHOpKAbp!NT1Ebr(>C&0ZWYN`+x87fzz-4|7-jto#DE>9k zBz42{rAM_|btFAaq12Aid*87g;Z?cTkumV(i(_G~p3=?jbGdjz0k8IjJo(KYn(O+RT+JcwBr+n5 zL85rC?!z*F|Lh8h`1di)i%C@=q6V|~n}Iqv)%Kor$PAJWHd|zrEydm@mEeoA6l_^n XM_uUuUKT^j0o*Ga2*N~Mte{dlozygbdkxBeg&0T0J7CWn8zF5ZZ&hf%-Ik7#tveG z4yKn8N47PdPg*X3Ewvlb?0^#c0U!}&h6nU5ivH{wK8D}p6JCuPf};|zSdJvW9>W`N z0lgnR;81{W2yjR$X)(tKET)UT4u$^u8yG(TszZR7=Uo8m7}!n~i>ZDOM9QDJM-eU0 zhaTK|r4xAHdg>7m21@&^yUue&L}D%#;v&1{;|BBB35Yml5A^Q?@<`w_5jNl(7M{BS zNRtpP`S-%P&-^!HsPO{?5`f%Qz#I>lmBx<3?%oLViB7Zrgb(I)@G2dxaQpNA)c@TM z1HaT7{Q|U9J=gmZ+?R%^{13VVjHSi&__|LfRapqJ z7D@}Lj;=a8e~9>!sd)1$qQXNa1jR@R-W`*KRuRv)=vyPaP9Bg7B1DA3+CUG8rGko( z5yM(cm#_VeTv8ij_1luIfTwt<*o#4rPH|ntce;+0LTmxTH`ovA=jC0(p4a12z2v|a z_xWPK$m(@_6YECLw{1AJ7NV2q7EtxWaPHQ*<7%p0!4o-xF_{dyE}27s94C;eZsV%d z13^XASufvOvOxtDcubJh!{a#t`_niFn`R)wOT?`6+)XI+ElouDc-_jr9y+H7I!-^f zJo)W%@2NXvSvHTwwJW)qc#F-_7e^5z=QPc&Rn^|7buZft+?1l#rX&d|fGFp%8DL1{wOw-}22 z?8yuVuX%>7&1FE%zy!J@V<-ysjL3-SdL1w*((~5$WakHFd~s^zcwneiQ1Fce76vV}zONee7Uq4i^Xc15 z{%5-fA})?KbO&9)O)bu87aN7iv)7gHK&v8xRj?XxlTAo zY3TrB`Or;O{$We01y9LzJg$~S2mSQQ_-!3|6YbiVUL2rzeQkur06rvTAXgdewv|0> zp@+Y=_e!F&Hf9_CTm>r;-aS{3`r&88HvOSCZ&R?&dvW1>EQq-9Rp3XZKa`h~<%=hF z*9<{SCd!`q>|!W=JK{0WciAqS11cC=U84UJl!4eFKZlB0NTeys5h8oHmnqBQs7f4U zYm4X|W?LfKlEo|xc52%SNrRvEU$T&6`s(Dq8GxR#5N6FvM=Dz1?@}GB%bGe_s$^_}rKyQw-qHxX(t*8W+BE4rIMfo`2&1nMD?;T<|m!pgvg z&-+w7FM@VJ`!)a4OH%wl2@VZ(m__7q;MPRW^?sJGdRjBJWuG!?kgZx3XF_%E) z{lBDw=rlY9Vl)+Dl5NQfSWgIGLyGYf#$2O`c`Pi-@6fpkl&7JEF=n=R<_uPhDB$D{ zT}lH<>=M^Fk?nhYa_a_Mv#1Pw;Fa4^+~c#!5%&sh=&Cr$9iRnN{Hobkw}_694825iVi_TvYD9nB>+WjbZd4d<8| zV`&5+0tXIFe$zfW!|2*x64SQ#a;Q(>9g@rPoTMdGY^Z}s34PRsAHjStjxn3p$NYV% zeqT@UUQO$?el%WyHl2R}=RLlwf9JyXopR?QT6cB0#f^0@`6e_GW#cv)rn8opYA%== 
zFV3duNh7t1i$CJguYP~gFj$_nwWgOX#c6%nqoAa(9Kz!6;CxgetaRpnb!?Whe@gHX+ z%G6Xc@hy|3`4pwS)w${0=0j^;HAwg;3Bjh+2GX4f4 zE^0|{Xhk8E%I=j)5Sc}N5pL2fYMeDBmFXf+V#Iuhssr^aO%?{dP6Z9^=(C93qJ4vu zFTWxZoejyOyzvqtJUvd9Pu3S6e8=rSYr12>FQmXk7a?t1bp73Bzy%$)pNM3XLk(&I z!;C1Ui^3KhgVZM!aAzHd}0d~<}X zbKd(?MSR3tAEe~#)otS%%I<%_+1>1}x@jpgx19yzKWp$o zj>J)W);;F;^@u`)7HtmYNzHLJ{sa0!zRGspBAewpqB(QwN_*Mi3%JhOuGhztcLYiA zuOdFOLq*s@p@u2)Bl9{emmyCc9{!|gU~xlCuo_0<;FxnuJ-)pBaON!2FSm0rf5X>L zL8F)W@k5*&^kX@>69e%#PXtTic6`4sKL)5o)v58uJqYucMf6jbk079 zZ(OO?X^Qniv<8=yj>WGRBV{a-%4^C`|5`u=PGe6S-lRm@FFKa5B3KW<_qC}(a@5}z z^gOnZdn-QWo`9?97D2s~gPECDL50iNG5Cbzho zw;AzGarH%|oei#|WTq#7uakh*eZI9r2^d0R`R;JVIJ~{fkiCJ_Po>`Lw(R7-8EWaL zITI!`-pmTy$Rb?mo$NMe1XDEdv@$#=D9UMG79nP&t#|ZS9N_A|)K^C$OA9?7{XX8v zE*ykW*BG(UrccwD=nr|R)Ll^YoC@m(Rkb8xvB@lUPk|L zc!qmQDiS-KDkk(5XNZM!CUOduutz)aJt~TEhk}U^Pa=-&Y$V{*R4~(k`H)l*b9JCf zw;pa#4B>fecm1$P-%JIU2$N*3ptaN(KID)I>*#UgMs8?}WO zE?iz&H43gGjC|;r!=c1j^{u!n@J;d@RbTjpf3@?ns`7xw(#PIuMQm%TrB#OG%2&RXly@=R+DJF9OFP_B z{O)_9M4G~_wI=q~VeatF=&G?7^DCQC$B^U<<^D&fW*gtt-zN!V3uwa6If7lLY-Y5f zg^vo!^TgieJILFlg_si22*>z?at+OK16&CkM;?*=*6DC$pW)-xliut_$2NxdXR?Yo zm+c=KZw10~O}H5|a=}!puq@$w9tY-rR6%HQ75!+<(X;J`eo@j=M`crOg2nfqsN|Y& z^0wrseTMynJ<>o;f4$e1kGydUW3pZhmacS!C{)vqrN{aCpkK8K0}Xyzw>%=R;0(!&yHufKv+X$X zx#2~cVl@*zr{6;>gYHjN4weqa138;5vLil-_q7%Z-R|mmLWTLd7D=kr9#4wH2OVQQ zp8pxqQyQA*iSk^!UhMnT0P(_h&gEsMTa=d(--Ss^J+R8$)9y9Sg0NjB;ByD@e*w4n zoj>v2u%o`sB@%OF?tL$YONMJmJ@#U>@$wO_#e__zE%X@yFPYa+IYxFecK^aNs~Xf- zveE>puz#;EZKjKx=@xaNx|a=$uN9ieE>9XkJ-6_bAx{@;8dxZljIZsP`IyB)7)q>{ zW!h^cANLNNUA`r{Owwr)Z+2EuS-)82$YJl0{0safwk;w${8Qii;FTmhCBaGlx~1j09z_|> z5!-@o*p7*RmUt{9%NF4rCBDJ!@rFR?wLE5#v8a^FT9xq12qWFnscgq?)w=p=Rry=# z!eALfe0lgEY3{{391t(w&)%so%rHLkcROV`*a!79_sb;C*iv6Q?PaplcWq%fIH>;0?{(Dgo6RT}Uib2~A^rR0>f?`<}fXr;T_ zC!H;}oYUFi0uv=8?N9xT^FV2S-ag%Lek8eX2#|lj{@hgK*c@0OdizF^qeO)o;gS|Q z8Clg};^BAp5Kl|et&fV{s08fdFcR5i%hF*d^XPHV-O2Ax7X@|qFDMbdNB51@XqQFy z=)zKyZ}s?&)3@wA))MJ{D`+${YXq|Ep$iyGf9x@(95O~IWoL$*Tdzv4+wIl6GqEQr 
zQh1MgIYvpI<oV2-?DwoX${Kk#&SZ<8VCVsmzJXXNLI!@2^vaHGqd zzB{nRYF)l~8?Uu^p4iARv1$JASuM{bMkSN`W~5SDjhD{nL?g}OpHSrIgP;3K+O>() zXZe;j=4V^oT35~m_bRh7{Zoa#Jq>Ss*!Jp|x7X_~-=7#+|DqWSQ5mPbNj8DHVGVuQ z(u%FOLCafjtv_%XUkL~`yJ`zR;XLQ=Dm(z`>C)~A_9|Q5_j^M?ROBW~_LMb*`v}uJ z$au$4FA6%NL#Khd!)1}aegT~i?8b9pqUBs(YUj?>)(UTXO9_fK*o2FNaN~n!eDvaM z<%0FiHjV=V9U6ZwAM2ZW)+{L?#?a zckWdS9_5n83HD?0DoqO}OK5#Vyze*4*I;0wSXHPKz6&;Z7^ zN#GD}jbV3H;aSnCjsaEx8>(6Y?CCi`Tr6t=3>o$Lmh=$+A50H`{fj}vL6|5w|E0}# z_?Pjfsw>nT@N3fL2Yld?Q|Z4y5_00o)9~orhq`u?@qhKI>l;+~R89>*;UA1Wwf;A5 z9-g5o8VdLf+)xA9p}&jHe*ykonvRp;KBH~ugUIAahCn6XG4?K z`JMf15Suivo>(e%MMNW7%_${BQL)=XXA}X5_PVBoa%AlhT3Vinu+l3KNez7PDtuyX zJu!cH3=JH>DjflXX@Bk+}eo>r~e+RIUU+Md?M zA}}OW0+dTk>hk^D-@ONjB*c0-ZA_EdU6n8lo0)z2?LET)VdVDBgs%Vg6G;MU={nm2 zO>U?BZfSDsH=_T{MirFCi5bBqXK}4Lq%hZi-;92m4cuG*vZP2{|1+Vo{++q+8@@8!lqIj%)zyA$)9`h@ zqb1trJfg(oJMQ~Ng`gjNkaX0 z8g;6sac})Anl5wyp9%d0{pW}Dks;h)|4l{Gxc+BB;<~v0=Ua1dKuys7&$-48dLQXr zk(!n9pRE2y20y1cx0NW&*%l#xnV|@w7&A# zQdnM^h1&0r|E(6!|7SyUf&Y-~Rq$Wp_&*Dh)s4r;{~%0w40s&k_`lr#KNt9~Ou>Jd z;srkZwhrG^vuKwq8=2Y5}}0E0!ejf z0);&}FY?VE#r-q_%V`H#llh|22SaiLACAeQ1pjQ>mq^XgTAPZsp=y+>wxUQ)OHr+j z_J-xOHXYj59IdH4)U?)s&pC~J>XjJnn$d;plb5WV{GF8(MH5v~+9Idjl)Nx~LKLD% zTZ*G9*gtb)H+;=j`3@qZR{DXjm> zo)4}rY+VYyz=kwLZf>YbT?j6G8o{2Tw=_sD^;TWzL_r!t5ri6TQ`y*%uj;>czca*J z%C5KF!s{Bz#IOTm$LJDRu|C_N2kjxfHcn14F%Z7?l(N5tLlOX^`^PPe<$PD2kN2z?A`Oz-KRU+`w!3Gz4`m2`s&$R_2t3A z{@$CHQ)RrVsk%IpvC`5OM8wQ-*W0PJ_Pl@R zi+_DM-q+vn9KU;Z{OtWd+Ww23m&%Uv&#RX^M;~82Jvo}@i+W4aWvN;CqSjnoDHq09 zX3GunRuL72VmSPF=6U#X_x0|JX9q8}_q(kR@AqH*eX2FzKi}CseAnDlTKaVJ)FiDi z&ZfGt`K!ww#&Xl`PgNDUc~6O|hnjMro-|*-e*Ty8>g4Yatq=da{P^_a$<7D;uV)`$ zOqFO;Z(Zj%(yuPhe2iE=SL{B9R5x4AjfIgau*Uyoq1wZ-A?XYSyD2S>xlR$R`8Yyt zEsRhf7Q>h9t|zC0=HmXZ+N#|DRrvGYGa*G$7w-Nqo*VO+2LWcxwi=Ov%t(#D8VV^Pf!~|7SspJO}Up`q4Mac-X%) z%|ZMJG_S^gjj#Wj30(^4H8^Qt^k~9nUq&mg^(zRgfju7o#{+C>nK$!QC<2dWQ;yX& ztuS@*Tr09v1-7;o28;#%U|Sd4O#4ZkM0l!W^-SD&W~Wzd@kA$Nf7q3r z$o26o*Nd#sf*)UEtB(c6KH!v@Z92H=rCFNO#y?$C&NfQ6QL~LMv;DY@txjXhOaw@G 
zI@G6;nK0Q)$S|UQl8Gkj^*2P##9GivID#wor2&kog{!M9Q9`^#L66;>WnAE30y;%y z;6dAnhjBttiR_9lcEZBG+ zIQ^clcP=_@H3&Ve>6vOefIx8`sj1zIOxxDAxb-z?DP40L2sK7Suo>!GZpCDUQTErj zTYW8#JAUxh4!zD`=yuc~N_)+gul0M~uN5%>*JK!GX2(Q=g6+Fel0sI=`he-D>HCmJ zqdwS4LF9H^-}DUhF30t#G3Hfw&P3}Io%o^tCYX3`QxkrORd}Y~NweG8p9E$mPoAJl z1W^*1KC{LjYBzKMP4Z24mpC6itg~$$=RZ?`j4dDBi&czF!%ULMZKp|gyWI+UA!sG* zl2x2Kx*sJ=0&CRxfhSM014Wm{4(5ee z-&;%01Hp8fgZY0+(wkNM=lB0-Lg*k{D};>Z|F+2QWe@rtj+vpmMf$QRaVJ_h;_-jE z{eQvZ|L74O4qelUkN=k2|K|e#o7MM!D*XE|Ga*G=ocBN9^$|`U3$BUd|8o2PT;P9e z%JV-w|DOpd`tsrb^&|W|5pDQaX zmS@H>*|)6Y3^ZzQvkR=@WGNwnAmjy15c@kjq*zi&a+o2|?#PUAEvg`_u(P^BkA`^8 zHW9i>62@EW2uxzfPsJeW2w5dFHmtzLO3(4RrR>zi?8H#=NhsD{pJnB%q=alFUkMl! zGosK<=tM+qVh??@=URZdLv~Jl?uNypLBmXKH({KM<)Yi3>!$;uPOeLA_Ui4iBG|Ws zi@0b<&vbn-1|PWAgsy3VEU2)S9rH4ys0sUTj~c2dHG-%CY(P}+4^IzJcnvzRXoyQ3 z;kaC!cXasd`04TC-U&93l`zqpu?bBn?XxxAI}!30lc&!PS!oyreb=TS-D0I_ zl%PUNbwO_9HA~0-RH5=km-^enpIgoE*3_oI_-~F(o;s?L|=#)r5C`oQ4RD02QM} zI3@)ZWBypFa6Jlo- zGS|kNO_Mb8udj=ki&<-(-M2#3-Oz->P_oKFEaDIGOW^vY4rN@{#`{$KM%0X~N`yF|qN6v|WD`YzhXJHD)M< zd6p~$#bKDfJxa=%JOOUc#$;VcdEAI6MD1kN(0Vff9zTw>O5XS+Gi~M@tR8XWB5;8@ zKx2qy`oAZnjrQQ5V_8fZ9;;iQ!V(36u^gfviBkt;O)en%1<2#v4=w;80xV(Soj|Z- zsX@}s*yA56R@eftTU`)DM8%lmV-mGW6=s;i=eXh3Sld2}pQOg7c=lxNGXx(Heobf$ z5X6KA`;*ywBkMLRSzTR)Ek!rnj+1Dua*1jQNI7d72<wBZ^mG}+A7(n3DfJ*j&Zna?0y7GC0=0Kn49&$ zu*RyRUE@I`KUgP59c(i((a_l?#V(r^i9a>uGcm6fR~ro-Eb|eqo{dhBC8=VmEz@-c znKO!w1dc=gjMxJuc`O6rMg+2OADn`K)B-O}DAP~XUXhqe3J)&i-8N&2MK$c27-cGI z3!ev)B&>;$BSNZKTdNuxQahsN`Ph+aM7ddQtK=xKAk8D~Vb(NeedD;E;lA-*o4dU3 z*VTg!u6(6C8{SlO6wMH`l-d}}dP(@@K#~Qn>5L|V!3dovqQLjl7&;h958+@8wegWB zqk!(3kbw?9)r29eNEs(Wb~LOCr*sWyC~~ch5}eUB#w{qz=gHIJp(~bZR5^z*$eOCW z;*#Id)L0<~y1fCl(!^|gv{sxx-l%bNz9~66%v6*mM~+~z$FE?_>)R}A84dGL!k4)m zM9dbhN=ZwucgxCp_a`LKFp^G&BbDQWY?GRXv3MHcbDZ7~VJv3(P&Ss!vMm1`dq*x@ z&_%D&`C+-e@c-G(v#hkk|coUw$lz;CM1BrLod@LNP@tN!OW91B(*66K`j{Co^y}i+rH^(pP-4!A~CJ`od#!h|P^b?Xj!AM5W zj1B>S%L6Zw?6PSBNEpN-ry9%ty`U}PpW0YfnRk7BVD^U?2Y(1T$YQvltYXG%4TFU= 
zDQ~vR6b)J`)&3-%0q>~&L|DTfIa?)rcA#0KIgFZ9F9!Zb3@JBcSKFsiTJqDv)gYzA zIfraIWWpn^+cJ{uHY8hvq*fa+4;Y{Zk%gTU6Dpwe7?}@5oLWm_>;*&|JQ?HwVyH8F z7;6aQJT;}6Xu|Zl8|*9_3L>AvIBy=GmBwY)^EpNGFm*h_`nq@Yypm>5B2RG%(X!SG zn=vBVp@S@$%#YV12*GVQ_Q`B~YZzxwK7(jJi=XW$pWDTl`{vY~9WUqQQ${}+eS>pW z3wxEDLS7T`#Y`}?kP9|6OJnAf2r=~%3swu%!VD2wNbsRspM7@?^K>kRa16PLTfv?? zrjcL}w}f;y!$ojzA8e_T2@z;YoLUxzKr&(2w`QExpuY+Rc!BZ)$VJ#qE&}jB_ULU{ zva-lz%H}~`)+RI!+$~j7CMsovnTW&|bP(q_9k_~bxqgS$GcA0eywJ~x&ou;OX$H7{ z#q*J-2JTgIimDFsp^rJesRhMBZeh1$2zq-~9lgECB84<@8XLmYOkl5=;JaZ^=$NjT zqQtWX3#<(SHW{ol6WM=Da+<=VuTH6sb-uP1cQp~Cv4fvQgHOv z97mWG#{urz-0qf;XU(}a8NY8LUiHWOB;Z-2isbvdiT7jQbeSWXi~Ilblx&41G@PG3CuZaIT$NyQ7A}_%69}}Mu;i2H}G_Uxt@bzCa zp}D|+dCL2LIsVUt6lnpz|6Y9dhy(wzxxjyQ%KIPr^B*%IMc!N-{2%-5 z9S;U~p?SrBp8wB=<^uou{QslTLd^f;cO*I7KZxcP|9SpD8=4FJSEt}VfB*YTNRc-d z1OF$#Bh2H$ooHV1pXdLxq4~i7Df9m=j{mbDMOl#ie**v6<2XD9+)5n(m)rm60{{8^ z&!f>o%>VREzW(!pG_Uy2=YM8H^MU`>_y6(lf6s;#skIpR5C3Hz1nx~7|CihU=K}xv z_x~P^7U}zc?AiRGVY_km))6=Gc|f>@9{>HXlB`M9_rIzdpZ}Q!UB+$G?~Htsy>o}G zKQHh-o z>ZEH%Hoi|HHt@6YA+>>Dlu|qpca0<%UFGsNUXL%tW?az kG*On&L4xlyVC7dlNO0oBi4!MIoR*3HAA5!WmjLhp03TRJcK`qY literal 0 HcmV?d00001 diff --git a/lib/spack/spack/url_buildcache.py b/lib/spack/spack/url_buildcache.py new file mode 100644 index 00000000000..29caa91806d --- /dev/null +++ b/lib/spack/spack/url_buildcache.py @@ -0,0 +1,1239 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import codecs +import enum +import gzip +import io +import json +import os +import re +import shutil +from contextlib import closing, contextmanager +from tempfile import TemporaryDirectory +from typing import Any, Dict, List, Optional, Tuple, Type + +import jsonschema + +import llnl.util.filesystem as fsys +import llnl.util.tty as tty + +import spack.config as config +import spack.database +import spack.error +import spack.hash_types as ht +import spack.mirrors.mirror +import spack.spec +import spack.stage +import spack.util.crypto +import spack.util.gpg +import spack.util.url as url_util +import spack.util.web as web_util +from spack.schema.url_buildcache_manifest import schema as buildcache_manifest_schema +from spack.util.archive import ChecksumWriter +from spack.util.crypto import hash_fun_for_algo + +#: The build cache layout version that this version of Spack creates. +#: Version 3: Introduces content-addressable tarballs +CURRENT_BUILD_CACHE_LAYOUT_VERSION = 3 + +#: The layout version spack can current install +SUPPORTED_LAYOUT_VERSIONS = (3, 2) + +#: The name of the default buildcache index manifest file +INDEX_MANIFEST_FILE = "index.manifest.json" + + +class BuildcacheComponent(enum.Enum): + """Enumeration of the kinds of things that live in a URL buildcache + + These enums serve two purposes: They allow different buildcache layout + versions to specify different relative location of these entities, and + they're used to map buildcache objects to their respective media types. 
+ """ + + # metadata file for a binary package + SPEC = enum.auto() + # things that live in the blobs directory + BLOB = enum.auto() + # binary mirror index + INDEX = enum.auto() + # public key used for verifying signed binary packages + KEY = enum.auto() + # index of all public keys found in the mirror + KEY_INDEX = enum.auto() + # compressed archive of spec installation directory + TARBALL = enum.auto() + # binary mirror descriptor file + LAYOUT_JSON = enum.auto() + + +class BlobRecord: + """Class to describe a single data element (blob) from a manifest""" + + def __init__( + self, + content_length: int, + media_type: str, + compression_alg: str, + checksum_alg: str, + checksum: str, + ) -> None: + self.content_length = content_length + self.media_type = media_type + self.compression_alg = compression_alg + self.checksum_alg = checksum_alg + self.checksum = checksum + + @classmethod + def from_dict(cls, record_dict): + return BlobRecord( + record_dict["contentLength"], + record_dict["mediaType"], + record_dict["compression"], + record_dict["checksumAlgorithm"], + record_dict["checksum"], + ) + + def to_dict(self): + return { + "contentLength": self.content_length, + "mediaType": self.media_type, + "compression": self.compression_alg, + "checksumAlgorithm": self.checksum_alg, + "checksum": self.checksum, + } + + +class BuildcacheManifest: + """A class to represent a buildcache manifest, which consists of a version + number and an array of data blobs, each of which is represented by a + BlobRecord.""" + + def __init__(self, layout_version: int, data: Optional[List[BlobRecord]] = None): + self.version: int = layout_version + if data: + self.data: List[BlobRecord] = [ + BlobRecord( + rec.content_length, + rec.media_type, + rec.compression_alg, + rec.checksum_alg, + rec.checksum, + ) + for rec in data + ] + else: + self.data = [] + + def to_dict(self): + return {"version": self.version, "data": [rec.to_dict() for rec in self.data]} + + @classmethod + def 
from_dict(cls, manifest_json: Dict[str, Any]) -> "BuildcacheManifest": + jsonschema.validate(manifest_json, buildcache_manifest_schema) + return BuildcacheManifest( + layout_version=manifest_json["version"], + data=[BlobRecord.from_dict(blob_json) for blob_json in manifest_json["data"]], + ) + + def get_blob_records(self, media_type: str) -> List[BlobRecord]: + """Return any blob records from the manifest matching the given media type""" + matches: List[BlobRecord] = [] + + for record in self.data: + if record.media_type == media_type: + matches.append(record) + + if matches: + return matches + + raise NoSuchBlobException(f"Manifest has no blobs of type {media_type}") + + +class URLBuildcacheEntry: + """A class for managing URL-style buildcache entries + + This class manages access to a versioned buildcache entry by providing + a means to download both the metadata (spec file) and compressed archive. + It also provides methods for accessing the paths/urls associated with + buildcache entries. + + Starting with buildcache layout version 3, it is not possible to know + the full path to a compressed archive without either building it locally, + or else fetching and reading the metadata first. This class provides api + for fetching the metadata, as well as fetching the archive, and it enforces + the need to fetch the metadata first. + + To help with downloading, this class manages two spack.stage.Stage objects + internally, which must be destroyed when finished. Specifically, if you + call either of the following methods on an instance, you must eventually also + call destroy(): + + fetch_metadata() + fetch_archive() + + This class also provides generic manifest and blob management api, and it + can be used to fetch and push other kinds of buildcache entries aside from + just binary packages. It can be used to work with public keys, buildcache + indices, and any other type of data represented as a manifest which refers + to blobs of data.
+ + """ + + SPEC_URL_REGEX = re.compile(r"(.+)/v([\d]+)/manifests/.+") + LAYOUT_VERSION = 3 + BUILDCACHE_INDEX_MEDIATYPE = f"application/vnd.spack.db.v{spack.database._DB_VERSION}+json" + SPEC_MEDIATYPE = f"application/vnd.spack.spec.v{spack.spec.SPECFILE_FORMAT_VERSION}+json" + TARBALL_MEDIATYPE = "application/vnd.spack.install.v2.tar+gzip" + PUBLIC_KEY_MEDIATYPE = "application/pgp-keys" + PUBLIC_KEY_INDEX_MEDIATYPE = "application/vnd.spack.keyindex.v1+json" + BUILDCACHE_INDEX_FILE = "index.manifest.json" + COMPONENT_PATHS = { + BuildcacheComponent.BLOB: ["blobs"], + BuildcacheComponent.INDEX: [f"v{LAYOUT_VERSION}", "manifests", "index"], + BuildcacheComponent.KEY: [f"v{LAYOUT_VERSION}", "manifests", "key"], + BuildcacheComponent.SPEC: [f"v{LAYOUT_VERSION}", "manifests", "spec"], + BuildcacheComponent.KEY_INDEX: [f"v{LAYOUT_VERSION}", "manifests", "key"], + BuildcacheComponent.TARBALL: ["blobs"], + BuildcacheComponent.LAYOUT_JSON: [f"v{LAYOUT_VERSION}", "layout.json"], + } + + def __init__( + self, mirror_url: str, spec: Optional[spack.spec.Spec] = None, allow_unsigned: bool = False + ): + """Lazily initialize the object""" + self.mirror_url: str = mirror_url + self.spec: Optional[spack.spec.Spec] = spec + self.allow_unsigned: bool = allow_unsigned + self.manifest: Optional[BuildcacheManifest] = None + self.remote_manifest_url: str = "" + self.stages: Dict[BlobRecord, spack.stage.Stage] = {} + + @classmethod + def get_layout_version(cls) -> int: + """Returns the layout version of this class""" + return cls.LAYOUT_VERSION + + @classmethod + def check_layout_json_exists(cls, mirror_url: str) -> bool: + """Return True if layout.json exists in the expected location, False otherwise""" + layout_json_url = url_util.join( + mirror_url, *cls.get_relative_path_components(BuildcacheComponent.LAYOUT_JSON) + ) + return web_util.url_exists(layout_json_url) + + @classmethod + def maybe_push_layout_json(cls, mirror_url: str) -> None: + """This function does nothing if 
layout.json already exists, otherwise it + pushes layout.json to the expected location in the mirror""" + if cls.check_layout_json_exists(mirror_url): + return + + layout_contents = {"signing": "gpg"} + + with TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: + local_layout_path = os.path.join(tmpdir, "layout.json") + with open(local_layout_path, "w", encoding="utf-8") as fd: + json.dump(layout_contents, fd) + remote_layout_url = url_util.join( + mirror_url, *cls.get_relative_path_components(BuildcacheComponent.LAYOUT_JSON) + ) + web_util.push_to_url(local_layout_path, remote_layout_url, keep_original=False) + + @classmethod + def get_base_url(cls, manifest_url: str) -> str: + """Given any manifest url (i.e. one containing 'v3/manifests/') return the + base part of the url""" + rematch = cls.SPEC_URL_REGEX.match(manifest_url) + if not rematch: + raise BuildcacheEntryError(f"Unable to parse spec url: {manifest_url}") + return rematch.group(1) + + @classmethod + def get_index_url(cls, mirror_url: str): + return url_util.join( + mirror_url, + *cls.get_relative_path_components(BuildcacheComponent.INDEX), + cls.BUILDCACHE_INDEX_FILE, + ) + + @classmethod + def get_relative_path_components(cls, component: BuildcacheComponent) -> List[str]: + """Given any type of buildcache component, return its relative location within + a mirror as a list path elements""" + return cls.COMPONENT_PATHS[component] + + @classmethod + def get_manifest_filename(cls, spec: spack.spec.Spec) -> str: + """Given a concrete spec, compute and return the name (i.e. 
basename) of + the manifest file representing it""" + spec_formatted = spec.format_path("{name}-{version}-{hash}") + return f"{spec_formatted}.spec.manifest.json" + + @classmethod + def get_manifest_url(cls, spec: spack.spec.Spec, mirror_url: str) -> str: + """Given a concrete spec and a base url, return the full url where the + spec manifest should be found""" + path_components = cls.get_relative_path_components(BuildcacheComponent.SPEC) + return url_util.join( + mirror_url, *path_components, spec.name, cls.get_manifest_filename(spec) + ) + + @classmethod + def component_to_media_type(cls, component: BuildcacheComponent) -> str: + """Mapping from buildcache component to media type""" + if component == BuildcacheComponent.SPEC: + return cls.SPEC_MEDIATYPE + elif component == BuildcacheComponent.TARBALL: + return cls.TARBALL_MEDIATYPE + elif component == BuildcacheComponent.INDEX: + return cls.BUILDCACHE_INDEX_MEDIATYPE + elif component == BuildcacheComponent.KEY: + return cls.PUBLIC_KEY_MEDIATYPE + elif component == BuildcacheComponent.KEY_INDEX: + return cls.PUBLIC_KEY_INDEX_MEDIATYPE + + raise BuildcacheEntryError(f"Not a blob component: {component}") + + def get_local_spec_path(self) -> str: + """Convenience method to return the local path of a fetched spec file""" + return self.get_staged_blob_path(self.get_blob_record(BuildcacheComponent.SPEC)) + + def get_local_archive_path(self) -> str: + """Convenience method to return the local path of a fetched tarball""" + return self.get_staged_blob_path(self.get_blob_record(BuildcacheComponent.TARBALL)) + + def get_blob_record(self, blob_type: BuildcacheComponent) -> BlobRecord: + """Return the first blob record of the given type. 
Assumes the manifest has + already been fetched.""" + if not self.manifest: + raise BuildcacheEntryError("Read manifest before accessing blob records") + + records = self.manifest.get_blob_records(self.component_to_media_type(blob_type)) + + if len(records) == 0: + raise BuildcacheEntryError(f"Manifest has no blob record of type {blob_type}") + + return records[0] + + def check_blob_exists(self, record: BlobRecord) -> bool: + """Return True if the blob given by record exists on the mirror, False otherwise""" + blob_url = self.get_blob_url(self.mirror_url, record) + return web_util.url_exists(blob_url) + + @classmethod + def get_blob_path_components(cls, record: BlobRecord) -> List[str]: + """Given a BlobRecord, return the relative path of the blob within a mirror + as a list of path components""" + return [ + *cls.get_relative_path_components(BuildcacheComponent.BLOB), + record.checksum_alg, + record.checksum[:2], + record.checksum, + ] + + @classmethod + def get_blob_url(cls, mirror_url: str, record: BlobRecord) -> str: + """Return the full url of the blob given by record""" + return url_util.join(mirror_url, *cls.get_blob_path_components(record)) + + def fetch_blob(self, record: BlobRecord) -> str: + """Given a blob record, find associated blob in the manifest and stage it + + Returns the local path to the staged blob + """ + if record not in self.stages: + blob_url = self.get_blob_url(self.mirror_url, record) + blob_stage = spack.stage.Stage(blob_url) + + # Fetch the blob, or else cleanup and exit early + try: + blob_stage.create() + blob_stage.fetch() + except spack.error.FetchError as e: + self.destroy() + raise BuildcacheEntryError(f"Unable to fetch blob from {blob_url}") from e + + # Raises if checksum does not match expectation + validate_checksum(blob_stage.save_filename, record.checksum_alg, record.checksum) + + self.stages[record] = blob_stage + + return self.get_staged_blob_path(record) + + def get_staged_blob_path(self, record: BlobRecord) -> str: + 
"""Convenience method to return the local path of a staged blob""" + if record not in self.stages: + raise BuildcacheEntryError(f"Blob not staged: {record}") + + return self.stages[record].save_filename + + def exists(self, components: List[BuildcacheComponent]) -> bool: + """Check whether blobs exist for all specified components + + Returns True if there is a blob present in the mirror for every + given component type. + """ + try: + self.read_manifest() + except BuildcacheEntryError: + return False + + if not self.manifest: + return False + + for component in components: + component_blobs = self.manifest.get_blob_records( + self.component_to_media_type(component) + ) + + if len(component_blobs) == 0: + return False + + if not self.check_blob_exists(component_blobs[0]): + return False + + return True + + @classmethod + def verify_and_extract_manifest(cls, manifest_contents: str, verify: bool = False) -> dict: + """Possibly verify clearsig, then extract contents and return as json""" + magic_string = "-----BEGIN PGP SIGNED MESSAGE-----" + if manifest_contents.startswith(magic_string): + if verify: + # Rry to verify and raise if we fail + with TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: + manifest_path = os.path.join(tmpdir, "manifest.json.sig") + with open(manifest_path, "w", encoding="utf-8") as fd: + fd.write(manifest_contents) + if not try_verify(manifest_path): + raise NoVerifyException("Signature could not be verified") + + return spack.spec.Spec.extract_json_from_clearsig(manifest_contents) + else: + if verify: + raise NoVerifyException("Required signature was not found on manifest") + return json.loads(manifest_contents) + + def read_manifest(self, manifest_url: Optional[str] = None) -> BuildcacheManifest: + """Read and process the the buildcache entry manifest. 
+ + If no manifest url is provided, build the url from the internal spec and + base push url.""" + + if self.manifest: + if not manifest_url or manifest_url == self.remote_manifest_url: + # We already have a manifest, so now calling this method without a specific + # manifest url, or with the same one we have internally, then skip reading + # again, and just return the manifest we already read. + return self.manifest + + self.manifest = None + + if not manifest_url: + if not self.spec or not self.mirror_url: + raise BuildcacheEntryError( + "Either manifest url or spec and mirror are required to read manifest" + ) + manifest_url = self.get_manifest_url(self.spec, self.mirror_url) + + self.remote_manifest_url = manifest_url + manifest_contents = "" + + try: + _, _, manifest_file = web_util.read_from_url(manifest_url) + manifest_contents = codecs.getreader("utf-8")(manifest_file).read() + except (web_util.SpackWebError, OSError) as e: + raise BuildcacheEntryError(f"Error reading manifest at {manifest_url}") from e + + if not manifest_contents: + raise BuildcacheEntryError("Unable to read manifest or manifest empty") + + manifest_contents = self.verify_and_extract_manifest( + manifest_contents, verify=not self.allow_unsigned + ) + + self.manifest = BuildcacheManifest.from_dict(manifest_contents) + + if self.manifest.version != 3: + raise BuildcacheEntryError("Layout version mismatch in fetched manifest") + + return self.manifest + + def fetch_metadata(self) -> dict: + """Retrieve metadata for the spec, returns the validated spec dict""" + if not self.manifest: + # Reading the manifest will either successfully compute the remote + # spec url, or else raise an exception + self.read_manifest() + + local_specfile_path = self.fetch_blob(self.get_blob_record(BuildcacheComponent.SPEC)) + + # Check spec file for validity and read it, or else cleanup and exit early + try: + spec_dict, _ = get_valid_spec_file(local_specfile_path, self.get_layout_version()) + except 
InvalidMetadataFile as e: + self.destroy() + raise BuildcacheEntryError("Buildcache entry does not have valid metadata file") from e + + return spec_dict + + def fetch_archive(self) -> str: + """Retrieve the archive file and return the local archive file path""" + if not self.manifest: + # Raises if problems encountered, including not being able to verify signature + self.read_manifest() + + return self.fetch_blob(self.get_blob_record(BuildcacheComponent.TARBALL)) + + def get_archive_stage(self) -> Optional[spack.stage.Stage]: + return self.stages[self.get_blob_record(BuildcacheComponent.TARBALL)] + + def remove(self): + """Remove a binary package (spec file and tarball) and the associated + manifest from the mirror.""" + if self.manifest: + try: + web_util.remove_url(self.remote_manifest_url) + except Exception as e: + tty.debug(f"Failed to remove previous manfifest: {e}") + + try: + web_util.remove_url( + self.get_blob_url( + self.mirror_url, self.get_blob_record(BuildcacheComponent.TARBALL) + ) + ) + except Exception as e: + tty.debug(f"Failed to remove previous archive: {e}") + + try: + web_util.remove_url( + self.get_blob_url( + self.mirror_url, self.get_blob_record(BuildcacheComponent.SPEC) + ) + ) + except Exception as e: + tty.debug(f"Failed to remove previous metadata: {e}") + + self.manifest = None + + @classmethod + def push_blob(cls, mirror_url: str, blob_path: str, record: BlobRecord) -> None: + """Push the blob_path file to mirror as a blob represented by the given + record""" + blob_destination_url = cls.get_blob_url(mirror_url, record) + web_util.push_to_url(blob_path, blob_destination_url, keep_original=False) + + @classmethod + def push_manifest( + cls, + mirror_url: str, + manifest_name: str, + manifest: BuildcacheManifest, + tmpdir: str, + component_type: BuildcacheComponent = BuildcacheComponent.SPEC, + signing_key: Optional[str] = None, + ) -> None: + """Given a BuildcacheManifest, push it to the mirror using the given manifest + name. 
The component_type is used to indicate what type of thing the manifest + represents, so it can be placed in the correct relative path within the mirror. + If a signing_key is provided, it will be used to clearsign the manifest before + pushing it.""" + # write the manifest to a temporary location + manifest_file_name = f"{manifest_name}.manifest.json" + manifest_path = os.path.join(tmpdir, manifest_file_name) + with open(manifest_path, "w", encoding="utf-8") as f: + json.dump(manifest.to_dict(), f, indent=0, separators=(",", ":")) + # Note: when using gpg clear sign, we need to avoid long lines (19995 + # chars). If lines are longer, they are truncated without error. So, + # here we still add newlines, but no indent, so save on file size and + # line length. + + if signing_key: + manifest_path = sign_file(signing_key, manifest_path) + + manifest_destination_url = url_util.join( + mirror_url, *cls.get_relative_path_components(component_type), manifest_file_name + ) + + web_util.push_to_url(manifest_path, manifest_destination_url, keep_original=False) + + @classmethod + def push_local_file_as_blob( + cls, + local_file_path: str, + mirror_url: str, + manifest_name: str, + component_type: BuildcacheComponent, + compression: str = "none", + ) -> None: + """Convenience method to push a local file to a mirror as a blob. Both manifest + and blob are pushed as a component of the given component_type. 
If compression + is 'gzip' the blob will be compressed before pushing, otherwise it will be pushed + uncompressed.""" + cache_class = get_url_buildcache_class() + checksum_algo = "sha256" + blob_to_push = local_file_path + + with TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir: + blob_to_push = os.path.join(tmpdir, os.path.basename(local_file_path)) + + with compression_writer(blob_to_push, compression, checksum_algo) as ( + fout, + checker, + ), open(local_file_path, "rb") as fin: + shutil.copyfileobj(fin, fout) + + record = BlobRecord( + checker.length, + cache_class.component_to_media_type(component_type), + compression, + checksum_algo, + checker.hexdigest(), + ) + manifest = BuildcacheManifest( + layout_version=CURRENT_BUILD_CACHE_LAYOUT_VERSION, data=[record] + ) + cls.push_blob(mirror_url, blob_to_push, record) + cls.push_manifest( + mirror_url, manifest_name, manifest, tmpdir, component_type=component_type + ) + + def push_binary_package( + self, + spec: spack.spec.Spec, + tarball_path: str, + checksum_algorithm: str, + tarball_checksum: str, + tmpdir: str, + signing_key: Optional[str], + ) -> None: + """Convenience method to push tarball, specfile, and manifest to the remote mirror + + Pushing should only be done after checking for the pre-existence of a + buildcache entry for this spec, and represents a force push if one is + found. Thus, any pre-existing files are first removed. 
+ """ + + spec_dict = spec.to_dict(hash=ht.dag_hash) + # TODO: Remove this key once oci buildcache no longer uses it + spec_dict["buildcache_layout_version"] = 2 + tarball_content_length = os.stat(tarball_path).st_size + compression = "gzip" + + # Delete the previously existing version + self.remove() + + if not self.remote_manifest_url: + self.remote_manifest_url = self.get_manifest_url(spec, self.mirror_url) + + # Any previous archive/tarball is gone, compute the path to the new one + remote_archive_url = url_util.join( + self.mirror_url, + *self.get_relative_path_components(BuildcacheComponent.BLOB), + checksum_algorithm, + tarball_checksum[:2], + tarball_checksum, + ) + + # push the archive/tarball blob to the remote + web_util.push_to_url(tarball_path, remote_archive_url, keep_original=False) + + # Clear out the previous data, then add a record for the new blob + blobs: List[BlobRecord] = [] + blobs.append( + BlobRecord( + tarball_content_length, + self.TARBALL_MEDIATYPE, + compression, + checksum_algorithm, + tarball_checksum, + ) + ) + + # compress the spec dict and compute its checksum + specfile = os.path.join(tmpdir, f"{spec.dag_hash()}.spec.json") + metadata_checksum, metadata_size = compressed_json_from_dict( + specfile, spec_dict, checksum_algorithm + ) + + # Any previous metadata blob is gone, compute the path to the new one + remote_spec_url = url_util.join( + self.mirror_url, + *self.get_relative_path_components(BuildcacheComponent.BLOB), + checksum_algorithm, + metadata_checksum[:2], + metadata_checksum, + ) + + # push the metadata/spec blob to the remote + web_util.push_to_url(specfile, remote_spec_url, keep_original=False) + + blobs.append( + BlobRecord( + metadata_size, + self.SPEC_MEDIATYPE, + compression, + checksum_algorithm, + metadata_checksum, + ) + ) + + # generate the manifest + manifest = { + "version": self.get_layout_version(), + "data": [record.to_dict() for record in blobs], + } + + # write the manifest to a temporary location + 
manifest_path = os.path.join(tmpdir, f"{spec.dag_hash()}.manifest.json") + with open(manifest_path, "w", encoding="utf-8") as f: + json.dump(manifest, f, indent=0, separators=(",", ":")) + # Note: when using gpg clear sign, we need to avoid long lines (19995 + # chars). If lines are longer, they are truncated without error. So, + # here we still add newlines, but no indent, so save on file size and + # line length. + + # possibly sign the manifest + if signing_key: + manifest_path = sign_file(signing_key, manifest_path) + + # Push the manifest file to the remote. The remote manifest url for + # a given concrete spec is fixed, so we don't have to recompute it, + # even if we deleted the pre-existing one. + web_util.push_to_url(manifest_path, self.remote_manifest_url, keep_original=False) + + def destroy(self): + """Destroy any existing stages""" + for blob_stage in self.stages.values(): + blob_stage.destroy() + + self.stages = {} + + +class URLBuildcacheEntryV2(URLBuildcacheEntry): + """This class exists to provide read-only support for reading older buildcache + layouts in a way that is transparent to binary_distribution code responsible for + downloading and extracting binary packages. 
Since support for layout v2 is + read-only, and since v2 did not have support for manifests and blobs, many class + and instance methods are overridden simply to raise, hopefully making the intended + use and limitations of the class clear to developers.""" + + SPEC_URL_REGEX = re.compile(r"(.+)/build_cache/.+") + LAYOUT_VERSION = 2 + BUILDCACHE_INDEX_FILE = "index.json" + COMPONENT_PATHS = { + BuildcacheComponent.BLOB: ["build_cache"], + BuildcacheComponent.INDEX: ["build_cache"], + BuildcacheComponent.KEY: ["build_cache", "_pgp"], + BuildcacheComponent.SPEC: ["build_cache"], + BuildcacheComponent.KEY_INDEX: ["build_cache", "_pgp"], + BuildcacheComponent.TARBALL: ["build_cache"], + BuildcacheComponent.LAYOUT_JSON: ["build_cache", "layout.json"], + } + + def __init__( + self, + push_url_base: str, + spec: Optional[spack.spec.Spec] = None, + allow_unsigned: bool = False, + ): + """Lazily initialize the object""" + self.mirror_url: str = push_url_base + self.spec: Optional[spack.spec.Spec] = spec + self.allow_unsigned: bool = allow_unsigned + + self.has_metadata: bool = False + self.has_tarball: bool = False + self.has_signed: bool = False + self.has_unsigned: bool = False + self.spec_stage: Optional[spack.stage.Stage] = None + self.local_specfile_path: str = "" + self.archive_stage: Optional[spack.stage.Stage] = None + self.local_archive_path: str = "" + + self.remote_spec_url: str = "" + self.remote_archive_url: str = "" + self.remote_archive_checksum_algorithm: str = "" + self.remote_archive_checksum_hash: str = "" + self.spec_dict: Dict[Any, Any] = {} + + self._checked_signed = False + self._checked_unsigned = False + self._checked_exists = False + + @classmethod + def get_layout_version(cls) -> int: + return cls.LAYOUT_VERSION + + @classmethod + def maybe_push_layout_json(cls, mirror_url: str) -> None: + raise BuildcacheEntryError("spack can no longer write to v2 buildcaches") + + def _get_spec_url( + self, spec: spack.spec.Spec, mirror_url: str, ext: str = 
".spec.json.sig" + ) -> str: + spec_formatted = spec.format_path( + "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}" + ) + path_components = self.get_relative_path_components(BuildcacheComponent.SPEC) + return url_util.join(mirror_url, *path_components, f"{spec_formatted}{ext}") + + def _get_tarball_url(self, spec: spack.spec.Spec, mirror_url: str) -> str: + directory_name = spec.format_path( + "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}" + ) + spec_formatted = spec.format_path( + "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}" + ) + filename = f"{spec_formatted}.spack" + return url_util.join( + mirror_url, + *self.get_relative_path_components(BuildcacheComponent.BLOB), + directory_name, + filename, + ) + + def _check_metadata_exists(self): + if not self.spec: + return + + if not self._checked_signed: + signed_url = self._get_spec_url(self.spec, self.mirror_url, ext=".spec.json.sig") + if web_util.url_exists(signed_url): + self.remote_spec_url = signed_url + self.has_signed = True + self._checked_signed = True + + if not self.has_signed and not self._checked_unsigned: + unsigned_url = self._get_spec_url(self.spec, self.mirror_url, ext=".spec.json") + if web_util.url_exists(unsigned_url): + self.remote_spec_url = unsigned_url + self.has_unsigned = True + self._checked_unsigned = True + + def exists(self, components: List[BuildcacheComponent]) -> bool: + if not self.spec: + return False + + if ( + len(components) != 2 + or BuildcacheComponent.SPEC not in components + or BuildcacheComponent.TARBALL not in components + ): + return False + + self._check_metadata_exists() + if not self.has_signed and not self.has_unsigned: + return False + + if not web_util.url_exists(self._get_tarball_url(self.spec, self.mirror_url)): + return False + + return True + + def fetch_metadata(self) -> dict: + """Retrieve the v2 specfile for the spec, yields the validated spec+ dict""" + if self.spec_dict: + # Only 
fetch the metadata once + return self.spec_dict + + self._check_metadata_exists() + + if not self.remote_spec_url: + raise BuildcacheEntryError(f"Mirror {self.mirror_url} does not have metadata for spec") + + if not self.allow_unsigned and self.has_unsigned: + raise BuildcacheEntryError( + f"Mirror {self.mirror_url} does not have signed metadata for spec" + ) + + self.spec_stage = spack.stage.Stage(self.remote_spec_url) + + # Fetch the spec file, or else cleanup and exit early + try: + self.spec_stage.create() + self.spec_stage.fetch() + except spack.error.FetchError as e: + self.destroy() + raise BuildcacheEntryError( + f"Unable to fetch metadata from {self.remote_spec_url}" + ) from e + + self.local_specfile_path = self.spec_stage.save_filename + + if not self.allow_unsigned and not try_verify(self.local_specfile_path): + raise NoVerifyException(f"Signature on {self.remote_spec_url} could not be verified") + + # Check spec file for validity and read it, or else cleanup and exit early + try: + spec_dict, _ = get_valid_spec_file(self.local_specfile_path, self.get_layout_version()) + except InvalidMetadataFile as e: + self.destroy() + raise BuildcacheEntryError("Buildcache entry does not have valid metadata file") from e + + try: + self.spec = spack.spec.Spec.from_dict(spec_dict) + except Exception as err: + raise BuildcacheEntryError("Fetched spec dict does not contain valid spec") from err + + self.spec_dict = spec_dict + + # Retrieve the alg and hash from the spec dict, use them to build the path to + # the tarball. 
+ if "binary_cache_checksum" not in self.spec_dict: + raise BuildcacheEntryError("Provided spec dict must contain 'binary_cache_checksum'") + + bchecksum = self.spec_dict["binary_cache_checksum"] + + if "hash_algorithm" not in bchecksum or "hash" not in bchecksum: + raise BuildcacheEntryError( + "Provided spec dict contains invalid 'binary_cache_checksum'" + ) + + self.remote_archive_checksum_algorithm = bchecksum["hash_algorithm"] + self.remote_archive_checksum_hash = bchecksum["hash"] + self.remote_archive_url = self._get_tarball_url(self.spec, self.mirror_url) + + return self.spec_dict + + def fetch_archive(self) -> str: + self.fetch_metadata() + + # Adding this, we can avoid passing a dictionary of stages around the + # install logic, and in fact completely avoid fetching the metadata in + # the new (v3) approach. + if self.spec_stage: + self.spec_stage.destroy() + self.spec_stage = None + + self.archive_stage = spack.stage.Stage(self.remote_archive_url) + + # Fetch the archive file, or else cleanup and exit early + try: + self.archive_stage.create() + self.archive_stage.fetch() + except spack.error.FetchError as e: + self.destroy() + raise BuildcacheEntryError( + f"Unable to fetch archive from {self.remote_archive_url}" + ) from e + + self.local_archive_path = self.archive_stage.save_filename + + # Raises if checksum does not match expected + validate_checksum( + self.local_archive_path, + self.remote_archive_checksum_algorithm, + self.remote_archive_checksum_hash, + ) + + return self.local_archive_path + + def get_archive_stage(self) -> Optional[spack.stage.Stage]: + return self.archive_stage + + @classmethod + def get_manifest_filename(cls, spec: spack.spec.Spec) -> str: + raise BuildcacheEntryError("v2 buildcache entries do not have a manifest file") + + @classmethod + def get_manifest_url(cls, spec: spack.spec.Spec, mirror_url: str) -> str: + raise BuildcacheEntryError("v2 buildcache entries do not have a manifest url") + + def read_manifest(self, 
manifest_url: Optional[str] = None) -> BuildcacheManifest: + raise BuildcacheEntryError("v2 buildcache entries do not have a manifest file") + + def remove(self): + raise BuildcacheEntryError("Spack cannot delete v2 buildcache entries") + + def get_blob_record(self, blob_type: BuildcacheComponent) -> BlobRecord: + raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs") + + def check_blob_exists(self, record: BlobRecord) -> bool: + raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs") + + @classmethod + def get_blob_path_components(cls, record: BlobRecord) -> List[str]: + raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs") + + @classmethod + def get_blob_url(cls, mirror_url: str, record: BlobRecord) -> str: + raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs") + + def fetch_blob(self, record: BlobRecord) -> str: + raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs") + + def get_staged_blob_path(self, record: BlobRecord) -> str: + raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs") + + @classmethod + def verify_and_extract_manifest(cls, manifest_contents: str, verify: bool = False) -> dict: + raise BuildcacheEntryError("v2 buildcache entries do not have a manifest file") + + @classmethod + def push_blob(cls, mirror_url: str, blob_path: str, record: BlobRecord) -> None: + raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs") + + @classmethod + def push_manifest( + cls, + mirror_url: str, + manifest_name: str, + manifest: BuildcacheManifest, + tmpdir: str, + component_type: BuildcacheComponent = BuildcacheComponent.SPEC, + signing_key: Optional[str] = None, + ) -> None: + raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs") + + @classmethod + def push_local_file_as_blob( + cls, + local_file_path: str, + mirror_url: str, 
+ manifest_name: str, + component_type: BuildcacheComponent, + compression: str = "none", + ) -> None: + raise BuildcacheEntryError("v2 buildcache layout is unaware of manifests and blobs") + + def push_binary_package( + self, + spec: spack.spec.Spec, + tarball_path: str, + checksum_algorithm: str, + tarball_checksum: str, + tmpdir: str, + signing_key: Optional[str], + ) -> None: + raise BuildcacheEntryError("Spack can no longer push v2 buildcache entries") + + def destroy(self): + if self.archive_stage: + self.archive_stage.destroy() + self.archive_stage = None + if self.spec_stage: + self.spec_stage.destroy() + self.spec_stage = None + + +def get_url_buildcache_class( + layout_version: int = CURRENT_BUILD_CACHE_LAYOUT_VERSION, +) -> Type[URLBuildcacheEntry]: + """Given a layout version, return the class responsible for managing access + to buildcache entries of that version""" + if layout_version == 2: + return URLBuildcacheEntryV2 + elif layout_version == 3: + return URLBuildcacheEntry + else: + raise UnknownBuildcacheLayoutError( + f"Cannot create buildcache class for unknown layout version {layout_version}" + ) + + +def check_mirror_for_layout(mirror: spack.mirrors.mirror.Mirror): + """Check specified mirror, and warn if missing layout.json""" + cache_class = get_url_buildcache_class() + if not cache_class.check_layout_json_exists(mirror.fetch_url): + msg = ( + f"Configured mirror {mirror.name} is missing layout.json and has either \n" + " never been pushed or is of an old layout version. If it's the latter, \n" + " consider running 'spack buildcache migrate' or rebuilding the specs in \n" + " in this mirror." 
+ ) + tty.warn(msg) + + +def validate_checksum(file_path, checksum_algorithm, expected_checksum) -> None: + """Compute the checksum of the given file and raise if invalid""" + local_checksum = spack.util.crypto.checksum(hash_fun_for_algo(checksum_algorithm), file_path) + + if local_checksum != expected_checksum: + size, contents = fsys.filesummary(file_path) + raise spack.error.NoChecksumException( + file_path, size, contents, checksum_algorithm, expected_checksum, local_checksum + ) + + +def _get_compressor(compression: str, writable: io.BufferedIOBase) -> io.BufferedIOBase: + if compression == "gzip": + return gzip.GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=writable) + elif compression == "none": + return writable + else: + raise BuildcacheEntryError(f"Unknown compression type: {compression}") + + +@contextmanager +def compression_writer(output_path: str, compression: str, checksum_algo: str): + """Create and return a writer capable of writing compressed data. Available + options for compression are "gzip" or "none", checksum_algo is used to pick + the checksum algorithm used by the ChecksumWriter. 
+ + Yields a tuple containing: + io.IOBase: writer that can compress (or not) as it writes + ChecksumWriter: provides checksum and length of written data + """ + with open(output_path, "wb") as writer, ChecksumWriter( + fileobj=writer, algorithm=hash_fun_for_algo(checksum_algo) + ) as checksum_writer, closing( + _get_compressor(compression, checksum_writer) + ) as compress_writer: + yield compress_writer, checksum_writer + + +def compressed_json_from_dict( + output_path: str, spec_dict: dict, checksum_algo: str +) -> Tuple[str, int]: + """Compress the spec dict and write it to the given path + + Return the checksum (using the given algorithm) and size on disk of the file + """ + with compression_writer(output_path, "gzip", checksum_algo) as ( + f_bin, + checker, + ), io.TextIOWrapper(f_bin, encoding="utf-8") as f_txt: + json.dump(spec_dict, f_txt, separators=(",", ":")) + + return checker.hexdigest(), checker.length + + +def get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, int]: + """Read and validate a spec file, returning the spec dict with its layout version, or raising + InvalidMetadataFile if invalid.""" + try: + with open(path, "rb") as f: + binary_content = f.read() + except OSError as e: + raise InvalidMetadataFile(f"No such file: {path}") from e + + # Decompress spec file if necessary + if binary_content[:2] == b"\x1f\x8b": + binary_content = gzip.decompress(binary_content) + + try: + as_string = binary_content.decode("utf-8") + if path.endswith(".json.sig"): + spec_dict = spack.spec.Spec.extract_json_from_clearsig(as_string) + else: + spec_dict = json.loads(as_string) + except Exception as e: + raise InvalidMetadataFile(f"Could not parse {path} due to: {e}") from e + + # Ensure this version is not too new. 
+ try: + layout_version = int(spec_dict.get("buildcache_layout_version", 0)) + except ValueError as e: + raise InvalidMetadataFile("Could not parse layout version") from e + + if layout_version > max_supported_layout: + raise InvalidMetadataFile( + f"Layout version {layout_version} is too new for this version of Spack" + ) + + return spec_dict, layout_version + + +def sign_file(key: str, file_path: str) -> str: + """sign and return the path to the signed file""" + signed_file_path = f"{file_path}.sig" + spack.util.gpg.sign(key, file_path, signed_file_path, clearsign=True) + return signed_file_path + + +def try_verify(specfile_path): + """Utility function to attempt to verify a local file. Assumes the + file is a clearsigned signature file. + + Args: + specfile_path (str): Path to file to be verified. + + Returns: + ``True`` if the signature could be verified, ``False`` otherwise. + """ + suppress = config.get("config:suppress_gpg_warnings", False) + + try: + spack.util.gpg.verify(specfile_path, suppress_warnings=suppress) + except Exception: + return False + + return True + + +class MirrorURLAndVersion: + """Simple class to hold a mirror url and a buildcache layout version + + This class is used by BinaryCacheIndex to produce a key used to keep + track of downloaded/processed buildcache index files from remote mirrors + in some layout version.""" + + url: str + version: int + + def __init__(self, url: str, version: int): + self.url = url + self.version = version + + def __str__(self): + return f"{self.url}__v{self.version}" + + def __eq__(self, other): + if isinstance(other, MirrorURLAndVersion): + return self.url == other.url and self.version == other.version + return False + + def __hash__(self): + return hash((self.url, self.version)) + + @classmethod + def from_string(cls, s: str): + parts = s.split("__v") + return cls(parts[0], int(parts[1])) + + +class MirrorForSpec: + """Simple holder for a mirror (represented by a url and a layout version) and + an 
associated concrete spec""" + + url_and_version: MirrorURLAndVersion + spec: spack.spec.Spec + + def __init__(self, url_and_version: MirrorURLAndVersion, spec: spack.spec.Spec): + self.url_and_version = url_and_version + self.spec = spec + + +class InvalidMetadataFile(spack.error.SpackError): + """Raised when spack encounters a spec file it cannot understand or process""" + + pass + + +class BuildcacheEntryError(spack.error.SpackError): + """Raised for problems finding or accessing binary cache entry on mirror""" + + pass + + +class NoSuchBlobException(spack.error.SpackError): + """Raised when manifest does have some requested type of requested type""" + + pass + + +class NoVerifyException(BuildcacheEntryError): + """Raised if file fails signature verification""" + + pass + + +class UnknownBuildcacheLayoutError(BuildcacheEntryError): + """Raised when unrecognized buildcache layout version is encountered""" + + pass diff --git a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml index e3679f87493..6959764bcf5 100644 --- a/share/spack/gitlab/cloud_pipelines/configs/ci.yaml +++ b/share/spack/gitlab/cloud_pipelines/configs/ci.yaml @@ -42,13 +42,22 @@ ci: aud: "${OIDC_TOKEN_AUDIENCE}" - signing-job: - image: { "name": "ghcr.io/spack/notary:0.0.1", "entrypoint": [""] } + image: + name: ghcr.io/spack/notary@sha256:d5a183b090602dea5dc89d5023fe777d1e64d9a7ddcb6cc9ec58a79bb410c168 + entrypoint: [""] tags: ["aws"] script: - - - aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_BUILDCACHE_DESTINATION}/build_cache /tmp + - - export BUILD_CACHE="${SPACK_BUILDCACHE_DESTINATION}/${SPACK_BUILDCACHE_RELATIVE_SPECS_URL}" + - mkdir -p /tmp/input /tmp/output + - aws s3 sync --exclude "*" --include "*spec.manifest.json" ${BUILD_CACHE} /tmp/input - /sign.sh - - aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_BUILDCACHE_DESTINATION}/build_cache - - aws s3 cp /tmp/public_keys 
${SPACK_BUILDCACHE_DESTINATION}/build_cache/_pgp --recursive --exclude "*" --include "*.pub" + - aws s3 sync --exclude "*" --include "*spec.manifest.json" /tmp/output ${BUILD_CACHE} + - |+ + for keyfile in $( find /tmp/public_keys -type f ); + do + spack gpg trust $keyfile + done + - spack gpg publish --update-index --mirror-url ${SPACK_BUILDCACHE_DESTINATION} id_tokens: GITLAB_OIDC_TOKEN: aud: "${OIDC_TOKEN_AUDIENCE}" @@ -62,10 +71,14 @@ ci: - export SPACK_COPY_ONLY_SOURCE=${SPACK_BUILDCACHE_SOURCE//SPACK_REPLACE_VERSION/${SPACK_REPLACE_VERSION}} script: - - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR} + # TODO: remove this when we stop getting windows config includes added to the environment + - spack config add 'config:build_stage:["$tempdir/$user/spack-stage", "$user_cache_path/stage"]' + - spack config blame config - echo Copying environment specs from ${SPACK_COPY_ONLY_SOURCE} to ${SPACK_COPY_ONLY_DESTINATION} - spack buildcache sync "${SPACK_COPY_ONLY_SOURCE}" "${SPACK_COPY_ONLY_DESTINATION}" - curl -fLsS https://spack.github.io/keys/spack-public-binary-key.pub -o /tmp/spack-public-binary-key.pub - - aws s3 cp /tmp/spack-public-binary-key.pub "${SPACK_COPY_ONLY_DESTINATION}/build_cache/_pgp/spack-public-binary-key.pub" + - spack gpg trust /tmp/spack-public-binary-key.pub + - spack gpg publish --mirror-url "${SPACK_COPY_ONLY_DESTINATION}" - spack buildcache update-index --keys "${SPACK_COPY_ONLY_DESTINATION}" when: "always" retry: diff --git a/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in b/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in index 0ad46d5fc90..e224ed0eb6d 100644 --- a/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in +++ b/share/spack/gitlab/cloud_pipelines/configs/multi-src-mirrors.yaml.in @@ -9,8 +9,3 @@ mirrors: push: ${PR_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME} source: False binary: True - buildcache-shared: - fetch: 
${PR_MIRROR_FETCH_DOMAIN}/shared_pr_mirror/${SPACK_CI_STACK_NAME} - push: ${PR_MIRROR_PUSH_DOMAIN}/shared_pr_mirror/${SPACK_CI_STACK_NAME} - source: False - binary: True diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml index 4bd9a184c18..83632dc95a4 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-neoverse_v1/spack.yaml @@ -29,8 +29,9 @@ spack: - signing-job: before_script: # Do not distribute Intel & ARM binaries - - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done + - - export SPECS_PATH=${SPACK_BUILDCACHE_RELATIVE_SPECS_PATH} + - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/ | grep intel-oneapi | awk '{print $4}' | sed -e "s?^.*${SPECS_PATH}/??g"); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/$i; done + - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/ | grep armpl | awk '{print $4}' | sed -e "s?^.*${SPECS_PATH}/??g"); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/$i; done cdash: build-group: AWS Packages diff --git a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-x86_64_v4/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-x86_64_v4/spack.yaml index 4597d187b26..75ef28076c0 100644 --- a/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-x86_64_v4/spack.yaml +++ b/share/spack/gitlab/cloud_pipelines/stacks/aws-pcluster-x86_64_v4/spack.yaml @@ 
-33,8 +33,9 @@ spack: - signing-job: before_script: # Do not distribute Intel & ARM binaries - - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done + - - export SPECS_PATH=${SPACK_BUILDCACHE_RELATIVE_SPECS_PATH} + - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/ | grep intel-oneapi | awk '{print $4}' | sed -e "s?^.*${SPECS_PATH}/??g"); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/$i; done + - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/ | grep armpl | awk '{print $4}' | sed -e "s?^.*${SPECS_PATH}/??g"); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/${SPECS_PATH}/$i; done cdash: build-group: AWS Packages diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 94d4457d393..ef2328868b2 100644 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -563,7 +563,7 @@ _spack_buildcache() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="push create install list keys check download save-specfile sync update-index rebuild-index" + SPACK_COMPREPLY="push create install list keys check download save-specfile sync update-index rebuild-index migrate" fi } @@ -651,6 +651,15 @@ _spack_buildcache_rebuild_index() { fi } +_spack_buildcache_migrate() { + if $list_options + then + SPACK_COMPREPLY="-h --help -u --unsigned -d --delete-existing -y --yes-to-all" + else + _mirrors + fi +} + _spack_cd() { if $list_options then diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 560f47193f5..a0640f70c4e 100644 --- 
a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -697,6 +697,7 @@ complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a save-sp complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a sync -d 'sync binaries (and associated metadata) from one mirror to another' complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a update-index -d 'update a buildcache index' complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a rebuild-index -d 'update a buildcache index' +complete -c spack -n '__fish_spack_using_command_pos 0 buildcache' -f -a migrate -d 'perform in-place binary mirror migration (2 to 3)' complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command buildcache' -s h -l help -d 'show this help message and exit' @@ -861,6 +862,18 @@ complete -c spack -n '__fish_spack_using_command buildcache rebuild-index' -s h complete -c spack -n '__fish_spack_using_command buildcache rebuild-index' -s k -l keys -f -a keys complete -c spack -n '__fish_spack_using_command buildcache rebuild-index' -s k -l keys -d 'if provided, key index will be updated as well as package index' +# spack buildcache migrate +set -g __fish_spack_optspecs_spack_buildcache_migrate h/help u/unsigned d/delete-existing y/yes-to-all + +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s u -l unsigned -f -a unsigned +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s u -l unsigned -d 'Ignore signatures and do not resign, default is False' +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s d -l delete-existing -f -a delete_existing +complete -c spack -n 
'__fish_spack_using_command buildcache migrate' -s d -l delete-existing -d 'Delete the previous layout, the default is to keep it.' +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s y -l yes-to-all -f -a yes_to_all +complete -c spack -n '__fish_spack_using_command buildcache migrate' -s y -l yes-to-all -d 'assume "yes" is the answer to every confirmation request' + # spack cd set -g __fish_spack_optspecs_spack_cd h/help m/module-dir r/spack-root i/install-dir p/package-dir P/packages s/stage-dir S/stages c/source-dir b/build-dir e/env= first complete -c spack -n '__fish_spack_using_command_pos_remainder 0 cd' -f -k -a '(__fish_spack_specs)'