Compare commits: alby/libvt ... bugfix/env

125 Commits
1c6bb8cfc3
9244ecacf0
1df4afb53f
4991f0e484
09fd7d68eb
2ace8a55c1
861acb9467
eea743de46
e2b6e5a7ec
2f2dc3695c
6eb5e57199
9a047eb95f
ef42fd7a2f
e642c2ea28
f27d012e0c
c638311796
2a02bea405
219b42d991
c290ec1f62
e7ede86733
e3e7609af4
49d7ebec36
7c3d82d819
1c0fbec9ce
ca4d60ae25
dc571e20d6
1485275d0c
1afbf72037
407fd80f95
62525d9076
c2371263d1
5a870182ec
e33ad83256
0352a1df5d
ade44bce62
ddb29ebc34
19a62630e5
5626802aa0
f68063afbc
8103d019d6
ce89cdd9d7
20d9b356f0
3401438a3a
dcf1999d22
9e3c3ae298
40d6b84b4d
2db09f27af
6979d6a96f
deffd2acc9
988f71f434
4fe76f973a
8e4e6ad529
3586a2dbe3
4648939043
746eaaf01a
bd2f78ae9a
a4ebe01dec
94e9e18558
d2e0ac4d1f
36321fef1c
e879877878
f0bce3eb25
316bfd8b7d
92593fecd5
8db5fecdf5
eee696f320
8689cf392f
15d4cce2eb
45fbb82d1a
2861c89b89
135bfeeb27
8fa9c66a7d
5e6174cbe2
b4ad883b0d
a681111a23
d2436afb66
e43444cbb6
8c0d947114
5ba4a2b83a
da45073ef9
61e17fb36d
9f13a90dd2
ef4b35ea63
66187c8a6e
c8d95512fc
c74fa648b9
4cc5e9cac6
41345d18f9
0dd1316b68
d8cc185e22
061051270c
61445159db
7fa3c7f0fa
9c0fe30f42
d00010819f
248b05b32a
8232e934e9
9d005839af
a7e5c73608
7896625919
fb43cb8166
28f68e5d11
1199eeed0b
8ffeb4900b
456550da3f
b2676fe2dd
8561ec6249
5b775d82ac
b43088cc16
237eab136a
ffffa2794b
433b44403f
fa2e1c0653
00257f6824
3b8366f3d3
a73f511404
c823e01baf
4188080899
ef6ea2c93f
3c672905d0
ee106c747f
295726e6b8
2654d64a3c
d91ec8500f
c354cc51d0
.github/workflows/audit.yaml (vendored, 2 changes)

```diff
@@ -19,7 +19,7 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
       - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
         with:
           python-version: ${{inputs.python_version}}
```
.github/workflows/bootstrap.yml (vendored, 22 changes)

```diff
@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
         run: |
           brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
         run: |
           brew install tree
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       - name: Bootstrap clingo
         run: |
           set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
```
.github/workflows/build-containers.yml (vendored, 12 changes)

```diff
@@ -56,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -92,13 +92,13 @@ jobs:
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
+        uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1
+        uses: docker/setup-buildx-action@ecf95283f03858871ff00b787d79c419715afc34 # @v1

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
+        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
@@ -106,13 +106,13 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
+        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
+        uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
```
.github/workflows/ci.yaml (vendored, 2 changes)

```diff
@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
```
.github/workflows/nightly-win-builds.yml (vendored, 2 changes)

```diff
@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
```
.github/workflows/unit_tests.yaml (vendored, 10 changes)

```diff
@@ -47,7 +47,7 @@ jobs:
           on_develop: false

     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -94,7 +94,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -133,7 +133,7 @@ jobs:
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch tcl unzip which xz
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -152,7 +152,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -186,7 +186,7 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
```
.github/workflows/valid-style.yml (vendored, 7 changes)

```diff
@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
        with:
          python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
@@ -68,7 +68,7 @@ jobs:
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch tcl unzip which xz
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -81,6 +81,7 @@ jobs:
         shell: runuser -u spack-test -- bash {0}
         run: |
           source share/spack/setup-env.sh
+          spack debug report
           spack -d bootstrap now --dev
           spack style -t black
           spack unit-test -V
```
.github/workflows/windows_python.yml (vendored, 6 changes)

```diff
@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
@@ -39,7 +39,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
@@ -63,7 +63,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
        with:
          fetch-depth: 0
      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
```
```diff
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated
 example/push/%: example/install/%
 	@mkdir -p $(dir $@)
 	$(info About to push $(SPEC) to a buildcache)
-	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
+	$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
 	@touch $@

 push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
 	$(info Updating the buildcache index)
-	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
+	$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
 	$(info Done!)
 	@touch $@
```
```diff
@@ -3071,7 +3071,7 @@ follows:
     # The library provided by the bar virtual package
     @property
     def bar_libs(self):
-        return find_libraries("libFooBar", root=sef.home, recursive=True)
+        return find_libraries("libFooBar", root=self.home, recursive=True)

     # The baz virtual package home
     @property
```
```diff
@@ -1,8 +1,8 @@
 sphinx==6.2.1
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.4.1
-sphinx-rtd-theme==1.2.1
-python-levenshtein==0.21.0
+sphinx-rtd-theme==1.2.2
+python-levenshtein==0.21.1
 docutils==0.18.1
 pygments==2.15.1
-urllib3==2.0.2
+urllib3==2.0.3
```
```diff
@@ -760,13 +760,12 @@ def hashes_to_prefixes(spec):
 }


-def get_buildinfo_dict(spec, rel=False):
+def get_buildinfo_dict(spec):
     """Create metadata for a tarball"""
     manifest = get_buildfile_manifest(spec)

     return {
         "sbang_install_path": spack.hooks.sbang.sbang_install_path(),
-        "relative_rpaths": rel,
         "buildpath": spack.store.layout.root,
         "spackprefix": spack.paths.prefix,
         "relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
@@ -1209,9 +1208,6 @@ class PushOptions(NamedTuple):
     #: Overwrite existing tarball/metadata files in buildcache
     force: bool = False

-    #: Whether to use relative RPATHs
-    relative: bool = False
-
     #: Allow absolute paths to package prefixes when creating a tarball
     allow_root: bool = False

@@ -1281,41 +1277,17 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
         raise NoOverwriteException(url_util.format(remote_specfile_path))

     pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
-    workdir = os.path.join(stage_dir, pkg_dir)
-
-    # TODO: We generally don't want to mutate any files, but when using relative
-    # mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
-    # For now, we only make a full copy of the spec prefix when in relative mode.
-
-    if options.relative:
-        # tarfile is used because it preserves hardlink etc best.
-        binaries_dir = workdir
-        temp_tarfile_name = tarball_name(spec, ".tar")
-        temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
-        with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
-            tar.add(name="%s" % spec.prefix, arcname=".")
-        with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
-            tar.extractall(workdir)
-        os.remove(temp_tarfile_path)
-    else:
-        binaries_dir = spec.prefix
+    binaries_dir = spec.prefix

     # create info for later relocation and create tar
-    buildinfo = get_buildinfo_dict(spec, options.relative)
+    buildinfo = get_buildinfo_dict(spec)

-    # optionally make the paths in the binaries relative to each other
-    # in the spack install tree before creating tarball
-    if options.relative:
-        make_package_relative(workdir, spec, buildinfo, options.allow_root)
-    elif not options.allow_root:
+    if not options.allow_root:
         ensure_package_relocatable(buildinfo, binaries_dir)

     _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)

-    # remove copy of install directory
-    if options.relative:
-        shutil.rmtree(workdir)
-
     # get the sha256 checksum of the tarball
     checksum = checksum_tarball(tarfile_path)
@@ -1336,7 +1308,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     # This will be used to determine is the directory layout has changed.
     buildinfo = {}
     buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
-    buildinfo["relative_rpaths"] = options.relative
     spec_dict["buildinfo"] = buildinfo

     with open(specfile_path, "w") as outfile:
@@ -1596,35 +1567,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     return None


-def make_package_relative(workdir, spec, buildinfo, allow_root):
-    """
-    Change paths in binaries to relative paths. Change absolute symlinks
-    to relative symlinks.
-    """
-    prefix = spec.prefix
-    old_layout_root = buildinfo["buildpath"]
-    orig_path_names = list()
-    cur_path_names = list()
-    for filename in buildinfo["relocate_binaries"]:
-        orig_path_names.append(os.path.join(prefix, filename))
-        cur_path_names.append(os.path.join(workdir, filename))
-
-    platform = spack.platforms.by_name(spec.platform)
-    if "macho" in platform.binary_formats:
-        relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
-
-    if "elf" in platform.binary_formats:
-        relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
-
-    allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
-    orig_path_names = list()
-    cur_path_names = list()
-    for linkname in buildinfo.get("relocate_links", []):
-        orig_path_names.append(os.path.join(prefix, linkname))
-        cur_path_names.append(os.path.join(workdir, linkname))
-    relocate.make_link_relative(cur_path_names, orig_path_names)
-
-
 def ensure_package_relocatable(buildinfo, binaries_dir):
     """Check if package binaries are relocatable."""
     binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
```
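The hunks above drop the "relative rpaths" mode from the buildcache machinery: tarballs are now always created straight from the package prefix, and nothing rewrites rpaths ahead of packaging. A minimal sketch of what remains of the options surface, assuming only the `PushOptions` fields visible in this diff (`force`, `unsigned`, `allow_root`, `key`):

```python
# Sketch only: PushOptions no longer has a `relative` field, so the one
# remaining knob is allow_root, which merely skips the relocatability check.
import spack.binary_distribution as bindist

options = bindist.PushOptions(force=True, unsigned=True, allow_root=False)
# With allow_root=False, ensure_package_relocatable() inspects the buildinfo
# before _do_create_tarball() runs; there is no rpath mutation either way.
```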
```diff
@@ -175,12 +175,12 @@ def black_root_spec() -> str:


 def flake8_root_spec() -> str:
     """Return the root spec used to bootstrap flake8"""
-    return _root_spec("py-flake8")
+    return _root_spec("py-flake8@3.8.2:")


 def pytest_root_spec() -> str:
     """Return the root spec used to bootstrap flake8"""
-    return _root_spec("py-pytest")
+    return _root_spec("py-pytest@6.2.4:")


 def ensure_environment_dependencies() -> None:
```
```diff
@@ -180,6 +180,51 @@ def test(self):
             work_dir="spack-test",
         )

+    def update_external_dependencies(self, extendee_spec=None):
+        """
+        Ensure all external python packages have a python dependency
+
+        If another package in the DAG depends on python, we use that
+        python for the dependency of the external. If not, we assume
+        that the external PythonPackage is installed into the same
+        directory as the python it depends on.
+        """
+        # TODO: Include this in the solve, rather than instantiating post-concretization
+        if "python" not in self.spec:
+            if extendee_spec:
+                python = extendee_spec
+            elif "python" in self.spec.root:
+                python = self.spec.root["python"]
+            else:
+                python = self.get_external_python_for_prefix()
+                if not python.concrete:
+                    repo = spack.repo.path.repo_for_pkg(python)
+                    python.namespace = repo.namespace
+
+                    # Ensure architecture information is present
+                    if not python.architecture:
+                        host_platform = spack.platforms.host()
+                        host_os = host_platform.operating_system("default_os")
+                        host_target = host_platform.target("default_target")
+                        python.architecture = spack.spec.ArchSpec(
+                            (str(host_platform), str(host_os), str(host_target))
+                        )
+                    else:
+                        if not python.architecture.platform:
+                            python.architecture.platform = spack.platforms.host()
+                        if not python.architecture.os:
+                            python.architecture.os = "default_os"
+                        if not python.architecture.target:
+                            python.architecture.target = archspec.cpu.host().family.name
+
+                    # Ensure compiler information is present
+                    if not python.compiler:
+                        python.compiler = self.spec.compiler
+
+                    python.external_path = self.spec.external_path
+                    python._mark_concrete()
+            self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
+

 class PythonPackage(PythonExtension):
     """Specialized class for packages that are built using pip."""
@@ -225,51 +270,6 @@ def list_url(cls):
         name = cls.pypi.split("/")[0]
         return "https://pypi.org/simple/" + name + "/"

-    def update_external_dependencies(self, extendee_spec=None):
-        """
-        Ensure all external python packages have a python dependency
-
-        If another package in the DAG depends on python, we use that
-        python for the dependency of the external. If not, we assume
-        that the external PythonPackage is installed into the same
-        directory as the python it depends on.
-        """
-        # TODO: Include this in the solve, rather than instantiating post-concretization
-        if "python" not in self.spec:
-            if extendee_spec:
-                python = extendee_spec
-            elif "python" in self.spec.root:
-                python = self.spec.root["python"]
-            else:
-                python = self.get_external_python_for_prefix()
-                if not python.concrete:
-                    repo = spack.repo.path.repo_for_pkg(python)
-                    python.namespace = repo.namespace
-
-                    # Ensure architecture information is present
-                    if not python.architecture:
-                        host_platform = spack.platforms.host()
-                        host_os = host_platform.operating_system("default_os")
-                        host_target = host_platform.target("default_target")
-                        python.architecture = spack.spec.ArchSpec(
-                            (str(host_platform), str(host_os), str(host_target))
-                        )
-                    else:
-                        if not python.architecture.platform:
-                            python.architecture.platform = spack.platforms.host()
-                        if not python.architecture.os:
-                            python.architecture.os = "default_os"
-                        if not python.architecture.target:
-                            python.architecture.target = archspec.cpu.host().family.name
-
-                    # Ensure compiler information is present
-                    if not python.compiler:
-                        python.compiler = self.spec.compiler
-
-                    python.external_path = self.spec.external_path
-                    python._mark_concrete()
-            self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
-
     def get_external_python_for_prefix(self):
         """
         For an external package that extends python, find the most likely spec for the python
```
```diff
@@ -43,13 +43,6 @@ def setup_parser(subparser):
     subparsers = subparser.add_subparsers(help="buildcache sub-commands")

     push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
-    # TODO: remove from Spack 0.21
-    push.add_argument(
-        "-r",
-        "--rel",
-        action="store_true",
-        help="make all rpaths relative before creating tarballs. (deprecated)",
-    )
     push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
     push.add_argument(
         "-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
@@ -63,37 +56,7 @@ def setup_parser(subparser):
     push.add_argument(
         "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
     )
-    output = push.add_mutually_exclusive_group(required=False)
-    # TODO: remove from Spack 0.21
-    output.add_argument(
-        "-d",
-        "--directory",
-        metavar="directory",
-        dest="mirror_flag",
-        type=arguments.mirror_directory,
-        help="local directory where buildcaches will be written. (deprecated)",
-    )
-    # TODO: remove from Spack 0.21
-    output.add_argument(
-        "-m",
-        "--mirror-name",
-        metavar="mirror-name",
-        dest="mirror_flag",
-        type=arguments.mirror_name,
-        help="name of the mirror where buildcaches will be written. (deprecated)",
-    )
-    # TODO: remove from Spack 0.21
-    output.add_argument(
-        "--mirror-url",
-        metavar="mirror-url",
-        dest="mirror_flag",
-        type=arguments.mirror_url,
-        help="URL of the mirror where buildcaches will be written. (deprecated)",
-    )
-    # Unfortunately we cannot add this to the mutually exclusive group above,
-    # because we have further positional arguments.
-    # TODO: require from Spack 0.21
-    push.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
+    push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
     push.add_argument(
         "--update-index",
         "--rebuild-index",
@@ -127,13 +90,6 @@ def setup_parser(subparser):
     install.add_argument(
         "-m", "--multiple", action="store_true", help="allow all matching packages "
     )
-    # TODO: remove from Spack 0.21
-    install.add_argument(
-        "-a",
-        "--allow-root",
-        action="store_true",
-        help="allow install root string in binary files after RPATH substitution. (deprecated)",
-    )
     install.add_argument(
         "-u",
         "--unsigned",
@@ -272,71 +228,17 @@ def setup_parser(subparser):
         default=None,
         help="A quoted glob pattern identifying copy manifest files",
     )
-    source = sync.add_mutually_exclusive_group(required=False)
-    # TODO: remove in Spack 0.21
-    source.add_argument(
-        "--src-directory",
-        metavar="DIRECTORY",
-        dest="src_mirror_flag",
-        type=arguments.mirror_directory,
-        help="Source mirror as a local file path (deprecated)",
-    )
-    # TODO: remove in Spack 0.21
-    source.add_argument(
-        "--src-mirror-name",
-        metavar="MIRROR_NAME",
-        dest="src_mirror_flag",
-        type=arguments.mirror_name,
-        help="Name of the source mirror (deprecated)",
-    )
-    # TODO: remove in Spack 0.21
-    source.add_argument(
-        "--src-mirror-url",
-        metavar="MIRROR_URL",
-        dest="src_mirror_flag",
-        type=arguments.mirror_url,
-        help="URL of the source mirror (deprecated)",
-    )
-    # TODO: only support this in 0.21
-    source.add_argument(
+    sync.add_argument(
         "src_mirror",
         metavar="source mirror",
         type=arguments.mirror_name_or_url,
         help="Source mirror name, path, or URL",
-        nargs="?",
     )
-    dest = sync.add_mutually_exclusive_group(required=False)
-    # TODO: remove in Spack 0.21
-    dest.add_argument(
-        "--dest-directory",
-        metavar="DIRECTORY",
-        dest="dest_mirror_flag",
-        type=arguments.mirror_directory,
-        help="Destination mirror as a local file path (deprecated)",
-    )
-    # TODO: remove in Spack 0.21
-    dest.add_argument(
-        "--dest-mirror-name",
-        metavar="MIRROR_NAME",
-        type=arguments.mirror_name,
-        dest="dest_mirror_flag",
-        help="Name of the destination mirror (deprecated)",
-    )
-    # TODO: remove in Spack 0.21
-    dest.add_argument(
-        "--dest-mirror-url",
-        metavar="MIRROR_URL",
-        dest="dest_mirror_flag",
-        type=arguments.mirror_url,
-        help="URL of the destination mirror (deprecated)",
-    )
-    # TODO: only support this in 0.21
-    dest.add_argument(
+    sync.add_argument(
         "dest_mirror",
         metavar="destination mirror",
         type=arguments.mirror_name_or_url,
         help="Destination mirror name, path, or URL",
-        nargs="?",
     )
     sync.set_defaults(func=sync_fn)

@@ -344,39 +246,8 @@ def setup_parser(subparser):
     update_index = subparsers.add_parser(
         "update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
     )
-    update_index_out = update_index.add_mutually_exclusive_group(required=True)
-    # TODO: remove in Spack 0.21
-    update_index_out.add_argument(
-        "-d",
-        "--directory",
-        metavar="directory",
-        dest="mirror_flag",
-        type=arguments.mirror_directory,
-        help="local directory where buildcaches will be written (deprecated)",
-    )
-    # TODO: remove in Spack 0.21
-    update_index_out.add_argument(
-        "-m",
-        "--mirror-name",
-        metavar="mirror-name",
-        dest="mirror_flag",
-        type=arguments.mirror_name,
-        help="name of the mirror where buildcaches will be written (deprecated)",
-    )
-    # TODO: remove in Spack 0.21
-    update_index_out.add_argument(
-        "--mirror-url",
-        metavar="mirror-url",
-        dest="mirror_flag",
-        type=arguments.mirror_url,
-        help="URL of the mirror where buildcaches will be written (deprecated)",
-    )
-    # TODO: require from Spack 0.21
-    update_index_out.add_argument(
-        "mirror",
-        type=arguments.mirror_name_or_url,
-        help="Destination mirror name, path, or URL",
-        nargs="?",
+    update_index.add_argument(
+        "mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
     )
     update_index.add_argument(
         "-k",
@@ -436,32 +307,12 @@ def _concrete_spec_from_args(args):

 def push_fn(args):
     """create a binary package and push it to a mirror"""
-    if args.mirror_flag:
-        mirror = args.mirror_flag
-    elif not args.mirror:
-        raise ValueError("No mirror provided")
-    else:
-        mirror = arguments.mirror_name_or_url(args.mirror)
-
-    if args.mirror_flag:
-        tty.warn(
-            "Using flags to specify mirrors is deprecated and will be removed in "
-            "Spack 0.21, use positional arguments instead."
-        )
-
-    if args.rel:
-        tty.warn("The --rel flag is deprecated and will be removed in Spack 0.21")
-
-    # TODO: remove this in 0.21. If we have mirror_flag, the first
-    # spec is in the positional mirror arg due to argparse limitations.
-    input_specs = args.specs
-    if args.mirror_flag and args.mirror:
-        input_specs.insert(0, args.mirror)
+    mirror = arguments.mirror_name_or_url(args.mirror)

     url = mirror.push_url

     specs = bindist.specs_to_be_packaged(
-        _matching_specs(input_specs, args.spec_file),
+        _matching_specs(args.specs, args.spec_file),
         root="package" in args.things_to_install,
         dependencies="dependencies" in args.things_to_install,
     )
@@ -486,7 +337,6 @@ def push_fn(args):
         url,
         bindist.PushOptions(
             force=args.force,
-            relative=args.rel,
             unsigned=args.unsigned,
             allow_root=args.allow_root,
             key=args.key,
@@ -524,9 +374,6 @@ def install_fn(args):
     if not args.specs:
         tty.die("a spec argument is required to install from a buildcache")

-    if args.allow_root:
-        tty.warn("The --allow-root flag is deprecated and will be removed in Spack 0.21")
-
     query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
     matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
     for match in matches:
@@ -710,21 +557,8 @@ def sync_fn(args):
         manifest_copy(glob.glob(args.manifest_glob))
         return 0

-    # If no manifest_glob, require a source and dest mirror.
-    # TODO: Simplify in Spack 0.21
-    if not (args.src_mirror_flag or args.src_mirror) or not (
-        args.dest_mirror_flag or args.dest_mirror
-    ):
-        raise ValueError("Source and destination mirror are required.")
-
-    if args.src_mirror_flag or args.dest_mirror_flag:
-        tty.warn(
-            "Using flags to specify mirrors is deprecated and will be removed in "
-            "Spack 0.21, use positional arguments instead."
-        )
-
-    src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
-    dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror
+    src_mirror = args.src_mirror
+    dest_mirror = args.dest_mirror

     src_mirror_url = src_mirror.fetch_url
     dest_mirror_url = dest_mirror.push_url
@@ -803,13 +637,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):

 def update_index_fn(args):
     """Update a buildcache index."""
-    if args.mirror_flag:
-        tty.warn(
-            "Using flags to specify mirrors is deprecated and will be removed in "
-            "Spack 0.21, use positional arguments instead."
-        )
-    mirror = args.mirror_flag if args.mirror_flag else args.mirror
-    update_index(mirror, update_keys=args.keys)
+    update_index(args.mirror, update_keys=args.keys)


 def buildcache(parser, args):
```
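Taken together, these hunks make the mirror a required positional argument and delete the deprecated mirror flags (`-d/--directory`, `-m/--mirror-name`, `--mirror-url`), `push --rel`, and `install --allow-root`. A hypothetical before/after, driven through `SpackCommand` purely for illustration:

```python
# Illustrative only; the invocations assume the parser changes above.
from spack.main import SpackCommand

buildcache = SpackCommand("buildcache")

buildcache("push", "--unsigned", "my-mirror", "zlib")  # ok: mirror is positional
buildcache("update-index", "my-mirror")                # ok: flags no longer accepted
# buildcache("push", "-d", "/tmp/cache", "zlib")       # now an argparse error
```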
```diff
@@ -349,7 +349,7 @@ def install_status():
         "-I",
         "--install-status",
         action="store_true",
-        default=False,
+        default=True,
         help="show install status of packages. packages can be: "
         "installed [+], missing and needed by an installed package [-], "
         "installed in and upstream instance [^], "
@@ -357,6 +357,17 @@ def install_status():
     )


+@arg
+def no_install_status():
+    return Args(
+        "--no-install-status",
+        dest="install_status",
+        action="store_false",
+        default=True,
+        help="do not show install status annotations",
+    )
+
+
 @arg
 def no_checksum():
     return Args(
```
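`--install-status` now defaults to true, so the interesting addition is the opt-out. Because both arguments share `dest="install_status"`, a command can expose them as a mutually exclusive pair; the `spack spec` and `spack solve` hunks below wire it up exactly like this sketch:

```python
# Sketch, assuming `subparser` is an argparse parser and `arguments` is
# spack.cmd.common.arguments: both flags write to dest="install_status",
# storing True (--install-status) or False (--no-install-status).
install_status_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
```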
```diff
@@ -715,7 +715,7 @@ def __call__(self, stage, url):
         output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
     except ProcessError:
         output = ""
-    lines = output.split("\n")
+    lines = output.splitlines()

     # Determine the build system based on the files contained
     # in the archive.
```
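The `splitlines()` swap matters most for the empty-output fallback in the `except` branch, since splitting an empty string on `"\n"` still produces one element:

```python
# Behavior the fix relies on:
print("".split("\n"))            # ['']  -> one bogus "line" to classify
print("".splitlines())           # []    -> nothing to classify
print("a\nb\r\nc".splitlines())  # ['a', 'b', 'c']; also tolerates \r\n from tar
```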
```diff
@@ -44,7 +44,11 @@ def setup_parser(subparser):
     )

     # Below are arguments w.r.t. spec display (like spack spec)
-    arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
+    arguments.add_common_arguments(subparser, ["long", "very_long"])
+
+    install_status_group = subparser.add_mutually_exclusive_group()
+    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

     subparser.add_argument(
         "-y",
         "--yaml",
```
```diff
@@ -31,7 +31,11 @@ def setup_parser(subparser):
     for further documentation regarding the spec syntax, see:
         spack help --spec
     """
-    arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
+    arguments.add_common_arguments(subparser, ["long", "very_long"])
+
+    install_status_group = subparser.add_mutually_exclusive_group()
+    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

     format_group = subparser.add_mutually_exclusive_group()
     format_group.add_argument(
         "-y",
```
```diff
@@ -164,7 +164,10 @@ def entries_to_specs(entries):
                 continue
             parent_spec = spec_dict[entry["hash"]]
             dep_spec = spec_dict[dep_hash]
-            parent_spec._add_dependency(dep_spec, deptypes=deptypes)
+            parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
+
+    for spec in spec_dict.values():
+        spack.spec.reconstruct_virtuals_on_edges(spec)

     return spec_dict
```
```diff
@@ -60,7 +60,7 @@
 # DB version. This is stuck in the DB file to track changes in format.
 # Increment by one when the database format changes.
 # Versions before 5 were not integers.
-_db_version = vn.Version("6")
+_db_version = vn.Version("7")

 # For any version combinations here, skip reindex when upgrading.
 # Reindexing can take considerable time and is not always necessary.
@@ -72,6 +72,7 @@
 # version is saved to disk the first time the DB is written.
     (vn.Version("0.9.3"), vn.Version("5")),
     (vn.Version("5"), vn.Version("6")),
+    (vn.Version("6"), vn.Version("7")),
 ]

 # Default timeout for spack database locks in seconds or None (no timeout).
@@ -105,7 +106,11 @@


 def reader(version):
-    reader_cls = {vn.Version("5"): spack.spec.SpecfileV1, vn.Version("6"): spack.spec.SpecfileV3}
+    reader_cls = {
+        vn.Version("5"): spack.spec.SpecfileV1,
+        vn.Version("6"): spack.spec.SpecfileV3,
+        vn.Version("7"): spack.spec.SpecfileV4,
+    }
     return reader_cls[version]


@@ -743,7 +748,9 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
             spec_node_dict = spec_node_dict[spec.name]
         if "dependencies" in spec_node_dict:
             yaml_deps = spec_node_dict["dependencies"]
-            for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
+            for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
+                yaml_deps
+            ):
                 # It is important that we always check upstream installations
                 # in the same order, and that we always check the local
                 # installation first: if a downstream Spack installs a package
@@ -766,7 +773,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
                     tty.warn(msg)
                     continue

-                spec._add_dependency(child, deptypes=dtypes)
+                spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)

     def _read_from_file(self, filename):
         """Fill database from file, do not maintain old data.
@@ -1172,7 +1179,7 @@ def _add(
         for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
             dkey = dep.spec.dag_hash()
             upstream, record = self.query_by_spec_hash(dkey)
-            new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
+            new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
             if not upstream:
                 record.ref_count += 1
```
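With the bump to database version 7, each dependency entry that `_assign_dependencies` reads back is expected to carry its edge attributes in one place. An illustrative (not real) node fragment in the v7 shape, matching the `SpecfileV4` writer later in this compare:

```python
# Hypothetical v7 dependency entry: deptypes and virtuals travel together
# under "parameters" instead of a bare "type" list.
dep_entry = {
    "name": "openblas",
    "hash": "abcdef",  # placeholder, not a real hash
    "parameters": {"deptypes": ["build", "link"], "virtuals": ["blas"]},
}
```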
```diff
@@ -125,7 +125,7 @@ def default_manifest_yaml():
 valid_environment_name_re = r"^\w[\w-]*$"

 #: version of the lockfile format. Must increase monotonically.
-lockfile_format_version = 4
+lockfile_format_version = 5


 READER_CLS = {
@@ -133,6 +133,7 @@ def default_manifest_yaml():
     2: spack.spec.SpecfileV1,
     3: spack.spec.SpecfileV2,
     4: spack.spec.SpecfileV3,
+    5: spack.spec.SpecfileV4,
 }


@@ -1548,12 +1549,13 @@ def _concretize_separately(self, tests=False):
         for h in self.specs_by_hash:
             current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
             for node in computed_spec.traverse():
-                test_deps = node.dependencies(deptype="test")
-                for test_dependency in test_deps:
+                test_edges = node.edges_to_dependencies(deptype="test")
+                for current_edge in test_edges:
+                    test_dependency = current_edge.spec
                     if test_dependency in current_spec[node.name]:
                         continue
                     current_spec[node.name].add_dependency_edge(
-                        test_dependency.copy(), deptypes="test"
+                        test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
                     )

         results = [
@@ -2184,9 +2186,9 @@ def _read_lockfile_dict(self, d):
         # and add them to the spec
         for lockfile_key, node_dict in json_specs_by_hash.items():
             name, data = reader.name_and_data(node_dict)
-            for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
+            for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
                 specs_by_hash[lockfile_key]._add_dependency(
-                    specs_by_hash[dep_hash], deptypes=deptypes
+                    specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
                 )

         # Traverse the root specs one at a time in the order they appear.
@@ -2378,17 +2380,28 @@ def _concretize_from_constraints(spec_constraints, tests=False):
     # Accept only valid constraints from list and concretize spec
     # Get the named spec even if out of order
     root_spec = [s for s in spec_constraints if s.name]
-    if len(root_spec) != 1:
-        m = "The constraints %s are not a valid spec " % spec_constraints
-        m += "concretization target. all specs must have a single name "
-        m += "constraint for concretization."
-        raise InvalidSpecConstraintError(m)
-    spec_constraints.remove(root_spec[0])
+    hash_spec = [s for s in spec_constraints if s.abstract_hash]
+
+    error_message = "The constraints %s are not a valid spec " % spec_constraints
+    error_message += "concretization target. all specs must have a single name "
+    error_message += "constraint for concretization."
+
+    if len(root_spec) > 1:
+        raise InvalidSpecConstraintError(error_message)
+
+    if len(root_spec) < 1:
+        if len(hash_spec) < 1:
+            raise InvalidSpecConstraintError(error_message)
+
+    if root_spec:
+        spec_constraints.remove(root_spec[0])
+
+    root_spec = root_spec[0] if root_spec else Spec()

     invalid_constraints = []
     while True:
         # Attach all anonymous constraints to one named spec
-        s = root_spec[0].copy()
+        s = root_spec.copy()
         for c in spec_constraints:
             if c not in invalid_constraints:
                 s.constrain(c)
```
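The reworked `_concretize_from_constraints` relaxes the old "exactly one named spec" rule: a constraint list with no named spec is now valid as long as an abstract hash can anchor it. Roughly, assuming `Spec("/...")` parses as an abstract-hash constraint:

```python
# Hypothetical inputs after this change:
_concretize_from_constraints([Spec("zlib"), Spec("+shared")])  # ok: one named root
_concretize_from_constraints([Spec("/abc123")])                # ok now: hash-only root
_concretize_from_constraints([Spec("+shared")])                # still raises InvalidSpecConstraintError
_concretize_from_constraints([Spec("zlib"), Spec("bzip2")])    # raises: two named roots
```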
```diff
@@ -544,6 +544,7 @@ def _static_edges(specs, deptype):
             spack.spec.Spec(parent_name),
             spack.spec.Spec(dependency_name),
             deptypes=deptype,
+            virtuals=(),
         )

```
```diff
@@ -231,7 +231,9 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
         dep.concretize()
     # mark compiler as depended-on by the packages that use it
     for pkg in pkgs:
-        dep._dependents.add(spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",)))
+        dep._dependents.add(
+            spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
+        )
     packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]

     packages.append((dep.package, True))
```
```diff
@@ -40,7 +40,7 @@

 import llnl.util.filesystem
 import llnl.util.tty as tty
-from llnl.util.lang import dedupe
+from llnl.util.lang import dedupe, memoized

 import spack.build_environment
 import spack.config
@@ -672,6 +672,7 @@ def configure_options(self):
         return None

     @tengine.context_property
+    @memoized
     def environment_modifications(self):
         """List of environment modifications to be processed."""
         # Modifications guessed by inspecting the spec prefix
@@ -742,6 +743,19 @@ def environment_modifications(self):

         return [(type(x).__name__, x) for x in env if x.name not in exclude]

+    @tengine.context_property
+    def has_manpath_modifications(self):
+        """True if MANPATH environment variable is modified."""
+        for modification_type, cmd in self.environment_modifications:
+            if not isinstance(
+                cmd, (spack.util.environment.PrependPath, spack.util.environment.AppendPath)
+            ):
+                continue
+            if cmd.name == "MANPATH":
+                return True
+        else:
+            return False
+
     @tengine.context_property
     def autoload(self):
         """List of modules that needs to be loaded automatically."""
```
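One nit in the added property: the `else` on the loop is redundant, because a loop's `else` is skipped only on `break`, and the early exit here is a `return`. An equivalent, flatter formulation (a sketch; `PrependPath` and `AppendPath` are the `spack.util.environment` classes used above):

```python
def has_manpath_modifications(env_mods):
    # env_mods is the (type name, modification) list produced by
    # environment_modifications()
    return any(
        isinstance(cmd, (PrependPath, AppendPath)) and cmd.name == "MANPATH"
        for _, cmd in env_mods
    )
```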
```diff
@@ -1231,6 +1231,7 @@ def dependencies_of_type(cls, *deptypes):
         if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
     )

+    # TODO: allow more than one active extendee.
     @property
     def extendee_spec(self):
         """
@@ -1246,7 +1247,6 @@ def extendee_spec(self):
             if dep.name in self.extendees:
                 deps.append(dep)

-        # TODO: allow more than one active extendee.
         if deps:
             assert len(deps) == 1
             return deps[0]
@@ -1256,7 +1256,6 @@ def extendee_spec(self):
         if self.spec._concrete:
             return None
         else:
-            # TODO: do something sane here with more than one extendee
             # If it's not concrete, then return the spec from the
             # extends() directive since that is all we know so far.
             spec_str, kwargs = next(iter(self.extendees.items()))
```
```diff
@@ -291,7 +291,7 @@ def next_spec(
                 if root_spec.concrete:
                     raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))

-                root_spec._add_dependency(dependency, deptypes=())
+                root_spec._add_dependency(dependency, deptypes=(), virtuals=())

             else:
                 break
```
```diff
@@ -292,8 +292,8 @@ def from_json(stream, repository):
     index.providers = _transform(
         providers,
         lambda vpkg, plist: (
-            spack.spec.SpecfileV3.from_node_dict(vpkg),
-            set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
+            spack.spec.SpecfileV4.from_node_dict(vpkg),
+            set(spack.spec.SpecfileV4.from_node_dict(p) for p in plist),
         ),
     )
     return index
```
```diff
@@ -2500,10 +2500,15 @@ def depends_on(self, pkg, dep, type):
         assert len(dependencies) < 2, msg

         if not dependencies:
-            self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
+            self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,), virtuals=())
         else:
             # TODO: This assumes that each solve unifies dependencies
-            dependencies[0].add_type(type)
+            dependencies[0].update_deptypes(deptypes=(type,))
+
+    def virtual_on_edge(self, pkg, provider, virtual):
+        dependencies = self._specs[pkg].edges_to_dependencies(name=provider)
+        assert len(dependencies) == 1
+        dependencies[0].update_virtuals((virtual,))

     def reorder_flags(self):
         """Order compiler flags on specs in predefined order.
@@ -2581,6 +2586,8 @@ def sort_fn(function_tuple):
             return (-2, 0)
         elif name == "external_spec_selected":
             return (0, 0)  # note out of order so this goes last
+        elif name == "virtual_on_edge":
+            return (1, 0)
         else:
             return (-1, 0)
```
```diff
@@ -300,6 +300,11 @@ attr("depends_on", Package, Provider, Type)
     provider(Provider, Virtual),
     not external(Package).

+attr("virtual_on_edge", Package, Provider, Virtual)
+  :- dependency_holds(Package, Virtual, Type),
+     provider(Provider, Virtual),
+     not external(Package).
+
 % dependencies on virtuals also imply that the virtual is a virtual node
 attr("virtual_node", Virtual)
   :- dependency_holds(Package, Virtual, Type),
```
```diff
@@ -170,7 +170,7 @@
 )

 #: specfile format version. Must increase monotonically
-SPECFILE_FORMAT_VERSION = 3
+SPECFILE_FORMAT_VERSION = 4


 def colorize_spec(spec):
@@ -714,47 +714,81 @@ class DependencySpec:
     parent: starting node of the edge
     spec: ending node of the edge.
     deptypes: list of strings, representing dependency relationships.
+    virtuals: virtual packages provided from child to parent node.
     """

-    __slots__ = "parent", "spec", "deptypes"
+    __slots__ = "parent", "spec", "parameters"

-    def __init__(self, parent: "Spec", spec: "Spec", *, deptypes: dp.DependencyArgument):
+    def __init__(
+        self,
+        parent: "Spec",
+        spec: "Spec",
+        *,
+        deptypes: dp.DependencyArgument,
+        virtuals: Tuple[str, ...],
+    ):
         self.parent = parent
         self.spec = spec
-        self.deptypes = dp.canonical_deptype(deptypes)
+        self.parameters = {
+            "deptypes": dp.canonical_deptype(deptypes),
+            "virtuals": tuple(sorted(set(virtuals))),
+        }

-    def update_deptypes(self, deptypes: dp.DependencyArgument) -> bool:
-        deptypes = set(deptypes)
-        deptypes.update(self.deptypes)
-        deptypes = tuple(sorted(deptypes))
-        changed = self.deptypes != deptypes
+    @property
+    def deptypes(self) -> Tuple[str, ...]:
+        return self.parameters["deptypes"]

-        self.deptypes = deptypes
-        return changed
+    @property
+    def virtuals(self) -> Tuple[str, ...]:
+        return self.parameters["virtuals"]

+    def _update_edge_multivalued_property(
+        self, property_name: str, value: Tuple[str, ...]
+    ) -> bool:
+        current = self.parameters[property_name]
+        update = set(current) | set(value)
+        update = tuple(sorted(update))
+        changed = current != update
+
+        if not changed:
+            return False
+
+        self.parameters[property_name] = update
+        return True
+
+    def update_deptypes(self, deptypes: Tuple[str, ...]) -> bool:
+        """Update the current dependency types"""
+        return self._update_edge_multivalued_property("deptypes", deptypes)
+
+    def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
+        """Update the list of provided virtuals"""
+        return self._update_edge_multivalued_property("virtuals", virtuals)

     def copy(self) -> "DependencySpec":
-        return DependencySpec(self.parent, self.spec, deptypes=self.deptypes)
-
-    def add_type(self, type: dp.DependencyArgument):
-        self.deptypes = dp.canonical_deptype(self.deptypes + dp.canonical_deptype(type))
+        """Return a copy of this edge"""
+        return DependencySpec(
+            self.parent, self.spec, deptypes=self.deptypes, virtuals=self.virtuals
+        )

     def _cmp_iter(self):
         yield self.parent.name if self.parent else None
         yield self.spec.name if self.spec else None
         yield self.deptypes
+        yield self.virtuals

     def __str__(self) -> str:
-        return "%s %s--> %s" % (
-            self.parent.name if self.parent else None,
-            self.deptypes,
-            self.spec.name if self.spec else None,
-        )
+        parent = self.parent.name if self.parent else None
+        child = self.spec.name if self.spec else None
+        return f"{parent} {self.deptypes}[virtuals={','.join(self.virtuals)}] --> {child}"

-    def canonical(self) -> Tuple[str, str, Tuple[str, ...]]:
-        return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
+    def canonical(self) -> Tuple[str, str, Tuple[str, ...], Tuple[str, ...]]:
+        return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes, self.virtuals

     def flip(self) -> "DependencySpec":
-        return DependencySpec(parent=self.spec, spec=self.parent, deptypes=self.deptypes)
+        """Flip the dependency, and drop virtual information"""
+        return DependencySpec(
+            parent=self.spec, spec=self.parent, deptypes=self.deptypes, virtuals=()
+        )


 class CompilerFlag(str):
```
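A minimal sketch of the reworked edge API, derived directly from the class above; the endpoints are plain `Spec` objects and both keyword-only attributes land in the edge's `parameters` dict:

```python
from spack.spec import DependencySpec, Spec

edge = DependencySpec(
    Spec("mpileaks"), Spec("mpich"), deptypes=("build", "link"), virtuals=("mpi",)
)
edge.deptypes                   # ('build', 'link'): read-only property view
edge.update_virtuals(("mpi",))  # False: the set of virtuals did not grow
edge.update_deptypes(("run",))  # True: deptypes became ('build', 'link', 'run')
print(edge)  # mpileaks ('build', 'link', 'run')[virtuals=mpi] --> mpich
```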
@@ -1575,10 +1609,12 @@ def _set_compiler(self, compiler):
            )
        self.compiler = compiler

    def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
    def _add_dependency(
        self, spec: "Spec", *, deptypes: dp.DependencyArgument, virtuals: Tuple[str, ...]
    ):
        """Called by the parser to add another spec as a dependency."""
        if spec.name not in self._dependencies or not spec.name:
            self.add_dependency_edge(spec, deptypes=deptypes)
            self.add_dependency_edge(spec, deptypes=deptypes, virtuals=virtuals)
            return

        # Keep the intersection of constraints when a dependency is added
@@ -1596,34 +1632,58 @@ def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
                "Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
            )

    def add_dependency_edge(self, dependency_spec: "Spec", *, deptypes: dp.DependencyArgument):
    def add_dependency_edge(
        self,
        dependency_spec: "Spec",
        *,
        deptypes: dp.DependencyArgument,
        virtuals: Tuple[str, ...],
    ):
        """Add a dependency edge to this spec.

        Args:
            dependency_spec: spec of the dependency
            deptypes: dependency types for this edge
            virtuals: virtuals provided by this edge
        """
        deptypes = dp.canonical_deptype(deptypes)

        # Check if we need to update edges that are already present
        selected = self._dependencies.select(child=dependency_spec.name)
        for edge in selected:
            has_errors, details = False, []
            msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
            if any(d in edge.deptypes for d in deptypes):
                msg = (
                    'cannot add a dependency on "{0.spec}" of {1} type '
                    'when the "{0.parent}" has the edge {0!s} already'
                has_errors = True
                details.append(
                    (
                        f"{edge.parent.name} has already an edge matching any"
                        f" of these types {str(deptypes)}"
                    )
                )
                raise spack.error.SpecError(msg.format(edge, deptypes))

            if any(v in edge.virtuals for v in virtuals):
                has_errors = True
                details.append(
                    (
                        f"{edge.parent.name} has already an edge matching any"
                        f" of these virtuals {str(virtuals)}"
                    )
                )

            if has_errors:
                raise spack.error.SpecError(msg, "\n".join(details))

        for edge in selected:
            if id(dependency_spec) == id(edge.spec):
                # If we are here, it means the edge object was previously added to
                # both the parent and the child. When we update this object they'll
                # both see the deptype modification.
                edge.add_type(deptypes)
                edge.update_deptypes(deptypes=deptypes)
                edge.update_virtuals(virtuals=virtuals)
                return

        edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
        edge = DependencySpec(self, dependency_spec, deptypes=deptypes, virtuals=virtuals)
        self._dependencies.add(edge)
        dependency_spec._dependents.add(edge)
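The tests touched later in this diff call this API directly; a condensed sketch of the new calling convention and the aggregated error reporting (mock package names; assuming a Spack checkout on this branch):

```python
import pytest

import spack.error
import spack.spec

root = spack.spec.Spec("mpileaks")
provider = spack.spec.Spec("mpich")

# Every edge now records the virtuals it satisfies; pass () when none apply.
root.add_dependency_edge(provider, deptypes=("build", "link"), virtuals=("mpi",))

# An edge whose deptypes (or virtuals) overlap an existing edge to the same
# child is rejected with a SpecError that lists all conflicting details at once.
with pytest.raises(spack.error.SpecError):
    root.add_dependency_edge(spack.spec.Spec("mpich"), deptypes=("link",), virtuals=())
```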
@@ -1896,12 +1956,12 @@ def lookup_hash(self):
        for node in self.traverse(root=False):
            if node.abstract_hash:
                new = node._lookup_hash()
                spec._add_dependency(new, deptypes=())
                spec._add_dependency(new, deptypes=(), virtuals=())

        # reattach nodes that were not otherwise satisfied by new dependencies
        for node in self.traverse(root=False):
            if not any(n._satisfies(node) for n in spec.traverse()):
                spec._add_dependency(node.copy(), deptypes=())
                spec._add_dependency(node.copy(), deptypes=(), virtuals=())

        return spec

@@ -2036,8 +2096,14 @@ def to_node_dict(self, hash=ht.dag_hash):
                name_tuple = ("name", name)
                for dspec in edges_for_name:
                    hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
                    type_tuple = ("type", sorted(str(s) for s in dspec.deptypes))
                    deps_list.append(syaml.syaml_dict([name_tuple, hash_tuple, type_tuple]))
                    parameters_tuple = (
                        "parameters",
                        syaml.syaml_dict(
                            (key, dspec.parameters[key]) for key in sorted(dspec.parameters)
                        ),
                    )
                    ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
                    deps_list.append(syaml.syaml_dict(ordered_entries))
            d["dependencies"] = deps_list

        # Name is included in case this is replacing a virtual.
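The visible effect on serialized specs: each dependency entry now stores a `parameters` dictionary rather than a bare `type` list. An illustrative, hand-written v4 entry (placeholder hash; the hash key name follows the hash type in use):

```python
# Hypothetical shape of one entry in a node's "dependencies" list after this
# change, mirroring ordered_entries = [name_tuple, hash_tuple, parameters_tuple].
dep_entry = {
    "name": "mpich",
    "hash": "abcdef1234567890abcdef1234567890abcd",  # placeholder
    "parameters": {"deptypes": ["build", "link"], "virtuals": ["mpi"]},
}
```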
@@ -2361,7 +2427,7 @@ def spec_and_dependency_types(s):
            dag_node, dependency_types = spec_and_dependency_types(s)

            dependency_spec = spec_builder({dag_node: s_dependencies})
            spec._add_dependency(dependency_spec, deptypes=dependency_types)
            spec._add_dependency(dependency_spec, deptypes=dependency_types, virtuals=())

        return spec

@@ -2379,8 +2445,10 @@ def from_dict(data):
            spec = SpecfileV1.load(data)
        elif int(data["spec"]["_meta"]["version"]) == 2:
            spec = SpecfileV2.load(data)
        else:
        elif int(data["spec"]["_meta"]["version"]) == 3:
            spec = SpecfileV3.load(data)
        else:
            spec = SpecfileV4.load(data)

        # Any git version should
        for s in spec.traverse():
@@ -2529,6 +2597,7 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
    def _replace_with(self, concrete):
        """Replace this virtual spec with a concrete spec."""
        assert self.virtual
        virtuals = (self.name,)
        for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
            dependent = dep_spec.parent
            deptypes = dep_spec.deptypes
@@ -2539,7 +2608,11 @@ def _replace_with(self, concrete):

            # add the replacement, unless it is already a dep of dependent.
            if concrete.name not in dependent._dependencies:
                dependent._add_dependency(concrete, deptypes=deptypes)
                dependent._add_dependency(concrete, deptypes=deptypes, virtuals=virtuals)
            else:
                dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
                    virtuals=virtuals
                )

    def _expand_virtual_packages(self, concretizer):
        """Find virtual packages in this spec, replace them with providers,
@@ -3180,7 +3253,9 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test

        # If it's a virtual dependency, try to find an existing
        # provider in the spec, and merge that.
        virtuals = ()
        if spack.repo.path.is_virtual_safe(dep.name):
            virtuals = (dep.name,)
            visited.add(dep.name)
            provider = self._find_provider(dep, provider_index)
            if provider:
@@ -3236,7 +3311,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
        # Add merged spec to my deps and recurse
        spec_dependency = spec_deps[dep.name]
        if dep.name not in self._dependencies:
            self._add_dependency(spec_dependency, deptypes=dependency.type)
            self._add_dependency(spec_dependency, deptypes=dependency.type, virtuals=virtuals)

        changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
        return changed
@@ -3573,15 +3648,20 @@ def _constrain_dependencies(self, other):
                changed |= edges_from_name[0].update_deptypes(
                    other._dependencies[name][0].deptypes
                )
                changed |= edges_from_name[0].update_virtuals(
                    other._dependencies[name][0].virtuals
                )

        # Update with additional constraints from other spec
        # operate on direct dependencies only, because a concrete dep
        # represented by hash may have structure that needs to be preserved
        for name in other.direct_dep_difference(self):
            dep_spec_copy = other._get_dependency(name)
            dep_copy = dep_spec_copy.spec
            deptypes = dep_spec_copy.deptypes
            self._add_dependency(dep_copy.copy(), deptypes=deptypes)
            self._add_dependency(
                dep_spec_copy.spec.copy(),
                deptypes=dep_spec_copy.deptypes,
                virtuals=dep_spec_copy.virtuals,
            )
            changed = True

        return changed
@@ -3965,7 +4045,7 @@ def spid(spec):
                new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)

            new_specs[spid(edge.parent)].add_dependency_edge(
                new_specs[spid(edge.spec)], deptypes=edge.deptypes
                new_specs[spid(edge.spec)], deptypes=edge.deptypes, virtuals=edge.virtuals
            )

    def copy(self, deps=True, **kwargs):
@@ -4635,12 +4715,16 @@ def from_self(name, transitive):
            if name in self_nodes:
                for edge in self[name].edges_to_dependencies():
                    dep_name = deps_to_replace.get(edge.spec, edge.spec).name
                    nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
                    nodes[name].add_dependency_edge(
                        nodes[dep_name], deptypes=edge.deptypes, virtuals=edge.virtuals
                    )
                if any(dep not in self_nodes for dep in self[name]._dependencies):
                    nodes[name].build_spec = self[name].build_spec
            else:
                for edge in other[name].edges_to_dependencies():
                    nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
                    nodes[name].add_dependency_edge(
                        nodes[edge.spec.name], deptypes=edge.deptypes, virtuals=edge.virtuals
                    )
                if any(dep not in other_nodes for dep in other[name]._dependencies):
                    nodes[name].build_spec = other[name].build_spec

@@ -4730,11 +4814,40 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
    # Update with additional constraints from other spec
    for name in current_spec_constraint.direct_dep_difference(merged_spec):
        edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
        merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
        merged_spec._add_dependency(
            edge.spec.copy(), deptypes=edge.deptypes, virtuals=edge.virtuals
        )

    return merged_spec


def reconstruct_virtuals_on_edges(spec):
    """Reconstruct virtuals on edges. Used to read from old DB and reindex.

    Args:
        spec: spec on which we want to reconstruct virtuals
    """
    # Collect all possible virtuals
    possible_virtuals = set()
    for node in spec.traverse():
        try:
            possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
        except Exception as e:
            warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
            continue

    # Assume all incoming edges to provider are marked with virtuals=
    for vspec in possible_virtuals:
        try:
            provider = spec[vspec]
        except KeyError:
            # Virtual not in the DAG
            continue

        for edge in provider.edges_from_dependents():
            edge.update_virtuals([vspec])

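A hedged sketch of how the readers below rely on this helper when loading pre-v4 data (`old_data` is a placeholder for a parsed v2/v3 specfile dictionary):

```python
# SpecfileV1/V2/V3 loaders invoke reconstruct_virtuals_on_edges() themselves,
# so callers only ever see the back-filled edges; spelled out here for clarity.
spec = SpecfileV2.load(old_data)                  # _load(), then reconstruction
for edge in spec["mpi"].edges_from_dependents():  # provider of the 'mpi' virtual
    print(edge.virtuals)                          # ('mpi',) after back-filling
```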
class SpecfileReaderBase:
    @classmethod
    def from_node_dict(cls, node):
@@ -4818,7 +4931,7 @@ def _load(cls, data):

        # Pass 0: Determine hash type
        for node in nodes:
            for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
            for _, _, _, dhash_type, _ in cls.dependencies_from_node_dict(node):
                any_deps = True
                if dhash_type:
                    hash_type = dhash_type
@@ -4849,8 +4962,10 @@ def _load(cls, data):
        # Pass 2: Finish construction of all DAG edges (including build specs)
        for node_hash, node in hash_dict.items():
            node_spec = node["node_spec"]
            for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
                node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
            for _, dhash, dtypes, _, virtuals in cls.dependencies_from_node_dict(node):
                node_spec._add_dependency(
                    hash_dict[dhash]["node_spec"], deptypes=dtypes, virtuals=virtuals
                )
            if "build_spec" in node.keys():
                _, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
                node_spec._build_spec = hash_dict[bhash]["node_spec"]
@@ -4884,9 +4999,10 @@ def load(cls, data):
        for node in nodes:
            # get dependency dict from the node.
            name, data = cls.name_and_data(node)
            for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
                deps[name]._add_dependency(deps[dname], deptypes=dtypes)
            for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
                deps[name]._add_dependency(deps[dname], deptypes=dtypes, virtuals=virtuals)

        reconstruct_virtuals_on_edges(result)
        return result

    @classmethod
@@ -4915,18 +5031,20 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
                if h.name in elt:
                    dep_hash, deptypes = elt[h.name], elt["type"]
                    hash_type = h.name
                    virtuals = []
                    break
            else:  # We never determined a hash type...
                raise spack.error.SpecError("Couldn't parse dependency spec.")
        else:
            raise spack.error.SpecError("Couldn't parse dependency types in spec.")
        yield dep_name, dep_hash, list(deptypes), hash_type
        yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)


class SpecfileV2(SpecfileReaderBase):
    @classmethod
    def load(cls, data):
        result = cls._load(data)
        reconstruct_virtuals_on_edges(result)
        return result

    @classmethod
@@ -4960,7 +5078,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
                raise spack.error.SpecError("Couldn't parse dependency spec.")
        else:
            raise spack.error.SpecError("Couldn't parse dependency types in spec.")
        result.append((dep_name, dep_hash, list(deptypes), hash_type))
        result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)))
        return result

    @classmethod
@@ -4980,6 +5098,20 @@ class SpecfileV3(SpecfileV2):
    pass


class SpecfileV4(SpecfileV2):
    @classmethod
    def extract_info_from_dep(cls, elt, hash):
        dep_hash = elt[hash.name]
        deptypes = elt["parameters"]["deptypes"]
        hash_type = hash.name
        virtuals = elt["parameters"]["virtuals"]
        return dep_hash, deptypes, hash_type, virtuals

    @classmethod
    def load(cls, data):
        return cls._load(data)

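Putting the reader classes together: format selection remains keyed on the `_meta.version` field, with anything newer than 3 handled by `SpecfileV4`. A minimal sketch of the dispatch as extended above (`reader_for` is a hypothetical helper, not part of the diff):

```python
def reader_for(data):
    # Mirrors the from_dict() dispatch: v1-v3 readers reconstruct virtuals
    # after loading, while v4 files already store them on each edge.
    version = int(data["spec"]["_meta"]["version"])
    return {1: SpecfileV1, 2: SpecfileV2, 3: SpecfileV3}.get(version, SpecfileV4)

assert reader_for({"spec": {"_meta": {"version": 4}}}) is SpecfileV4
```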
class LazySpecCache(collections.defaultdict):
    """Cache for Specs that uses a spec_like as key, and computes lazily
    the corresponding value ``Spec(spec_like)``.

@@ -201,12 +201,12 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
    install_cmd("--no-cache", sy_spec.name)

    # Create a buildcache
    buildcache_cmd("push", "-au", "-d", mirror_dir, cspec.name, sy_spec.name)
    buildcache_cmd("push", "-au", mirror_dir, cspec.name, sy_spec.name)
    # Test force overwrite create buildcache (-f option)
    buildcache_cmd("push", "-auf", "-d", mirror_dir, cspec.name)
    buildcache_cmd("push", "-auf", mirror_dir, cspec.name)

    # Create mirror index
    buildcache_cmd("update-index", "-d", mirror_dir)
    buildcache_cmd("update-index", mirror_dir)
    # List the buildcaches in the mirror
    buildcache_cmd("list", "-alv")
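The test churn from here on is mechanical: `spack buildcache` subcommands now take the mirror as a positional argument instead of `-d/--directory`, `-m/--mirror-name`, or `--mirror-url`. A condensed sketch of the new invocation style, driven through `SpackCommand` exactly as these tests do (paths are placeholders):

```python
from spack.main import SpackCommand

buildcache = SpackCommand("buildcache")
mirror = "/tmp/test-mirror"  # placeholder path

buildcache("push", "--unsigned", mirror, "zlib")  # was: push -d /tmp/test-mirror ...
buildcache("update-index", mirror)                # was: update-index -d ...
buildcache("sync", mirror, "/tmp/dest")           # was: sync --src-... --dest-...
```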
@@ -214,13 +214,13 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
    uninstall_cmd("-y", "--dependents", gspec.name)

    # Test installing from build caches
    buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
    buildcache_cmd("install", "-u", cspec.name, sy_spec.name)

    # This gives warning that spec is already installed
    buildcache_cmd("install", "-au", cspec.name)
    buildcache_cmd("install", "-u", cspec.name)

    # Test overwrite install
    buildcache_cmd("install", "-afu", cspec.name)
    buildcache_cmd("install", "-fu", cspec.name)

    buildcache_cmd("keys", "-f")
    buildcache_cmd("list")
@@ -246,35 +246,10 @@ def test_default_rpaths_install_nondefault_layout(mirror_dir):

    # Install some packages with dependent packages
    # test install in non-default install path scheme
    buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
    buildcache_cmd("install", "-u", cspec.name, sy_spec.name)

    # Test force install in non-default install path scheme
    buildcache_cmd("install", "-auf", cspec.name)


@pytest.mark.requires_executables(*args)
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures("default_config", "cache_directory", "install_dir_default_layout")
def test_relative_rpaths_create_default_layout(mirror_dir):
    """
    Test the creation and installation of buildcaches with relative
    rpaths into the default directory layout scheme.
    """

    gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()

    # Install 'corge' without using a cache
    install_cmd("--no-cache", cspec.name)

    # Create build cache with relative rpaths
    buildcache_cmd("push", "-aur", "-d", mirror_dir, cspec.name)

    # Create mirror index
    buildcache_cmd("update-index", "-d", mirror_dir)

    # Uninstall the package and deps
    uninstall_cmd("-y", "--dependents", gspec.name)
    buildcache_cmd("install", "-uf", cspec.name)

@pytest.mark.requires_executables(*args)
@@ -291,19 +266,19 @@ def test_relative_rpaths_install_default_layout(mirror_dir):
    gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()

    # Install buildcache created with relativized rpaths
    buildcache_cmd("install", "-auf", cspec.name)
    buildcache_cmd("install", "-uf", cspec.name)

    # This gives warning that spec is already installed
    buildcache_cmd("install", "-auf", cspec.name)
    buildcache_cmd("install", "-uf", cspec.name)

    # Uninstall the package and deps
    uninstall_cmd("-y", "--dependents", gspec.name)

    # Install build cache
    buildcache_cmd("install", "-auf", cspec.name)
    buildcache_cmd("install", "-uf", cspec.name)

    # Test overwrite install
    buildcache_cmd("install", "-auf", cspec.name)
    buildcache_cmd("install", "-uf", cspec.name)


@pytest.mark.requires_executables(*args)
@@ -320,7 +295,7 @@ def test_relative_rpaths_install_nondefault(mirror_dir):
    cspec = Spec("corge").concretized()

    # Test install in non-default install path scheme and relative path
    buildcache_cmd("install", "-auf", cspec.name)
    buildcache_cmd("install", "-uf", cspec.name)


def test_push_and_fetch_keys(mock_gnupghome):
@@ -401,7 +376,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
    install_cmd(s.name)

    # Put installed package in the buildcache
    buildcache_cmd("push", "-u", "-a", "-d", mirror_dir.strpath, s.name)
    buildcache_cmd("push", "-u", "-a", mirror_dir.strpath, s.name)

    rebuild = bindist.needs_rebuild(s, mirror_url)

@@ -430,8 +405,8 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
    install_cmd("--no-cache", s.name)

    # Create a buildcache and update index
    buildcache_cmd("push", "-uad", mirror_dir.strpath, s.name)
    buildcache_cmd("update-index", "-d", mirror_dir.strpath)
    buildcache_cmd("push", "-ua", mirror_dir.strpath, s.name)
    buildcache_cmd("update-index", mirror_dir.strpath)

    # Check package and dependency in buildcache
    cache_list = buildcache_cmd("list", "--allarch")
@@ -443,7 +418,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
    os.remove(*libelf_files)

    # Update index
    buildcache_cmd("update-index", "-d", mirror_dir.strpath)
    buildcache_cmd("update-index", mirror_dir.strpath)

    with spack.config.override("config:binary_index_ttl", 0):
        # Check dependency not in buildcache
@@ -519,10 +494,10 @@ def test_update_sbang(tmpdir, test_mirror):
    install_cmd("--no-cache", old_spec.name)

    # Create a buildcache with the installed spec.
    buildcache_cmd("push", "-u", "-a", "-d", mirror_dir, old_spec_hash_str)
    buildcache_cmd("push", "-u", "-a", mirror_dir, old_spec_hash_str)

    # Need to force an update of the buildcache index
    buildcache_cmd("update-index", "-d", mirror_dir)
    buildcache_cmd("update-index", mirror_dir)

    # Uninstall the original package.
    uninstall_cmd("-y", old_spec_hash_str)
@@ -538,7 +513,7 @@ def test_update_sbang(tmpdir, test_mirror):
    assert new_spec.dag_hash() == old_spec.dag_hash()

    # Install package from buildcache
    buildcache_cmd("install", "-a", "-u", "-f", new_spec.name)
    buildcache_cmd("install", "-u", "-f", new_spec.name)

    # Continue blowing away caches
    bindist.clear_spec_cache()
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import pytest

import spack.cmd.create
@@ -12,8 +10,6 @@
import spack.util.executable
import spack.util.url as url_util

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")


@pytest.fixture(
    scope="function",
@@ -291,7 +291,7 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dep
def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
    result = {
        "stage": "stage-rebuild-index",
        "script": "spack buildcache update-index --mirror-url s3://mirror",
        "script": "spack buildcache update-index s3://mirror",
        "tags": ["tag-0", "tag-1"],
        "image": {"name": "spack/centos7", "entrypoint": [""]},
        "after_script": ['rm -rf "./spack"'],
@@ -85,7 +85,7 @@ def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir):
    pkg = "trivial-install-test-package"
    install(pkg)

    buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
    buildcache("push", "--unsigned", str(tmpdir), pkg)

    spec = Spec(pkg).concretized()
    tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -105,7 +105,7 @@ def tests_buildcache_create_env(
        add(pkg)
        install()

        buildcache("push", "-d", str(tmpdir), "--unsigned")
        buildcache("push", "--unsigned", str(tmpdir))

    spec = Spec(pkg).concretized()
    tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -118,7 +118,7 @@ def test_buildcache_create_fails_on_noargs(tmpdir):
    """Ensure that buildcache create fails when given no args or
    environment."""
    with pytest.raises(spack.main.SpackCommandError):
        buildcache("push", "-d", str(tmpdir), "--unsigned")
        buildcache("push", "--unsigned", str(tmpdir))


def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monkeypatch, tmpdir):
@@ -127,7 +127,7 @@ def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monk

    tmpdir.chmod(0)
    with pytest.raises(OSError) as error:
        buildcache("push", "-d", str(tmpdir), "--unsigned", "trivial-install-test-package")
        buildcache("push", "--unsigned", str(tmpdir), "trivial-install-test-package")
    assert error.value.errno == errno.EACCES
    tmpdir.chmod(0o700)

@@ -159,11 +159,11 @@ def test_update_key_index(
    # Put installed package in the buildcache, which, because we're signing
    # it, should result in the public key getting pushed to the buildcache
    # as well.
    buildcache("push", "-a", "-d", mirror_dir.strpath, s.name)
    buildcache("push", "-a", mirror_dir.strpath, s.name)

    # Now make sure that when we pass the "--keys" argument to update-index
    # it causes the index to get updated.
    buildcache("update-index", "--keys", "-d", mirror_dir.strpath)
    buildcache("update-index", "--keys", mirror_dir.strpath)

    key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp"))

@@ -213,27 +213,25 @@ def verify_mirror_contents():
    # Install a package and put it in the buildcache
    s = Spec(out_env_pkg).concretized()
    install(s.name)
    buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, s.name)
    buildcache("push", "-u", "-f", "-a", src_mirror_url, s.name)

    env("create", "test")
    with ev.read("test"):
        add(in_env_pkg)
        install()
        buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, in_env_pkg)
        buildcache("push", "-u", "-f", "-a", src_mirror_url, in_env_pkg)

    # Now run the spack buildcache sync command with all the various options
    # for specifying mirrors

    # Use urls to specify mirrors
    buildcache(
        "sync", "--src-mirror-url", src_mirror_url, "--dest-mirror-url", dest_mirror_url
    )
    buildcache("sync", src_mirror_url, dest_mirror_url)

    verify_mirror_contents()
    shutil.rmtree(dest_mirror_dir)

    # Use local directory paths to specify fs locations
    buildcache("sync", "--src-directory", src_mirror_dir, "--dest-directory", dest_mirror_dir)
    buildcache("sync", src_mirror_dir, dest_mirror_dir)

    verify_mirror_contents()
    shutil.rmtree(dest_mirror_dir)
@@ -242,7 +240,7 @@ def verify_mirror_contents():
    mirror("add", "src", src_mirror_url)
    mirror("add", "dest", dest_mirror_url)

    buildcache("sync", "--src-mirror-name", "src", "--dest-mirror-name", "dest")
    buildcache("sync", "src", "dest")

    verify_mirror_contents()

@@ -260,7 +258,7 @@ def test_buildcache_create_install(
    pkg = "trivial-install-test-package"
    install(pkg)

    buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
    buildcache("push", "--unsigned", str(tmpdir), pkg)

    spec = Spec(pkg).concretized()
    tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -324,12 +322,12 @@ def fake_push(node, push_url, options):

    monkeypatch.setattr(spack.binary_distribution, "push_or_raise", fake_push)

    buildcache_create_args = ["create", "-d", str(tmpdir), "--unsigned"]
    buildcache_create_args = ["create", "--unsigned"]

    if things_to_install != "":
        buildcache_create_args.extend(["--only", things_to_install])

    buildcache_create_args.extend([slash_hash])
    buildcache_create_args.extend([str(tmpdir), slash_hash])

    buildcache(*buildcache_create_args)

@@ -1055,7 +1055,7 @@ def test_ci_nothing_to_rebuild(
    )

    install_cmd("archive-files")
    buildcache_cmd("push", "-a", "-f", "-u", "--mirror-url", mirror_url, "archive-files")
    buildcache_cmd("push", "-a", "-f", "-u", mirror_url, "archive-files")

    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:
@@ -1155,8 +1155,8 @@ def test_ci_generate_mirror_override(
    second_ci_yaml = str(tmpdir.join(".gitlab-ci-2.yml"))
    with ev.read("test"):
        install_cmd()
        buildcache_cmd("push", "-u", "--mirror-url", mirror_url, "patchelf")
        buildcache_cmd("update-index", "--mirror-url", mirror_url, output=str)
        buildcache_cmd("push", "-u", mirror_url, "patchelf")
        buildcache_cmd("update-index", mirror_url, output=str)

        # This generate should not trigger a rebuild of patchelf, since it's in
        # the main mirror referenced in the environment.
@@ -1297,7 +1297,7 @@ def test_push_mirror_contents(
        mirror_cmd("rm", "test-ci")

        # Test generating buildcache index while we have bin mirror
        buildcache_cmd("update-index", "--mirror-url", mirror_url)
        buildcache_cmd("update-index", mirror_url)
        index_path = os.path.join(buildcache_path, "index.json")
        with open(index_path) as idx_fd:
            index_object = json.load(idx_fd)
@@ -1613,7 +1613,7 @@ def test_ci_rebuild_index(
            ypfd.write(spec_json)

        install_cmd("--add", "--keep-stage", "-f", json_path)
        buildcache_cmd("push", "-u", "-a", "-f", "--mirror-url", mirror_url, "callpath")
        buildcache_cmd("push", "-u", "-a", "-f", mirror_url, "callpath")
        ci_cmd("rebuild-index")

        buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
@@ -1647,7 +1647,7 @@ def test_ci_generate_bootstrap_prune_dag(
    install_cmd("gcc@=12.2.0%gcc@10.2.1")

    # Put installed compiler in the buildcache
    buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")
    buildcache_cmd("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")

    # Now uninstall the compiler
    uninstall_cmd("-y", "gcc@12.2.0%gcc@10.2.1")
@@ -1662,7 +1662,7 @@ def test_ci_generate_bootstrap_prune_dag(
    install_cmd("--no-check-signature", "b%gcc@=12.2.0")

    # Put spec built with installed compiler in the buildcache
    buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "b%gcc@12.2.0")
    buildcache_cmd("push", "-u", "-a", "-f", mirror_dir.strpath, "b%gcc@12.2.0")

    # Now uninstall the spec
    uninstall_cmd("-y", "b%gcc@12.2.0")
@@ -2402,6 +2402,21 @@ def test_env_activate_default_view_root_unconditional(mutable_mock_env_path):
    )


@pytest.mark.regression("38510")
def test_concretize_separately_abstract_hash(install_mockery, mock_fetch):
    """Check that a root can have no name if it has a hash."""
    s = Spec("trivial-install-test-package").concretized()
    install(str(s))

    e = ev.create("test")
    e.unify = False

    e.add(f"/{s.dag_hash()}")
    e.concretize()

    assert list(e.concretized_specs()) == [(Spec(f"/{s.dag_hash()}"), s)]


def test_concretize_user_specs_together():
    e = ev.create("coconcretization")
    e.unify = True
@@ -3,16 +3,12 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import pytest

from spack.main import SpackCommand, SpackCommandError

graph = SpackCommand("graph")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")


@pytest.mark.db
@pytest.mark.usefixtures("mock_packages", "database")

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
import sys

import pytest

@@ -13,8 +12,6 @@

info = SpackCommand("info")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Not yet implemented on Windows")


@pytest.fixture(scope="module")
def parser():
@@ -966,7 +966,7 @@ def test_compiler_bootstrap_from_binary_mirror(
    install("gcc@=10.2.0")

    # Put installed compiler in the buildcache
    buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@10.2.0")
    buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@10.2.0")

    # Now uninstall the compiler
    uninstall("-y", "gcc@10.2.0")
@@ -1138,7 +1138,7 @@ def install_use_buildcache(opt):

    # Populate the buildcache
    install(package_name)
    buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, package_name, dependency_name)
    buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, package_name, dependency_name)

    # Uninstall all of the packages for a clean slate
    uninstall("-y", "-a")
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import pytest

import spack.store
@@ -15,8 +13,6 @@
install = SpackCommand("install")
uninstall = SpackCommand("uninstall")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")


@pytest.mark.db
def test_mark_mode_required(mutable_database):
@@ -235,7 +235,7 @@ def test_mirror_destroy(

    # Put a binary package in a buildcache
    install("--no-cache", spec_name)
    buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
    buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)

    contents = os.listdir(mirror_dir.strpath)
    assert "build_cache" in contents
@@ -245,7 +245,7 @@ def test_mirror_destroy(

    assert not os.path.exists(mirror_dir.strpath)

    buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
    buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)

    contents = os.listdir(mirror_dir.strpath)
    assert "build_cache" in contents
@@ -157,7 +157,7 @@ def _parse_types(string):


def test_spec_deptypes_nodes():
    output = spec("--types", "--cover", "nodes", "dt-diamond")
    output = spec("--types", "--cover", "nodes", "--no-install-status", "dt-diamond")
    types = _parse_types(output)

    assert types["dt-diamond"] == ["    "]
@@ -167,7 +167,7 @@ def test_spec_deptypes_nodes():


def test_spec_deptypes_edges():
    output = spec("--types", "--cover", "edges", "dt-diamond")
    output = spec("--types", "--cover", "edges", "--no-install-status", "dt-diamond")
    types = _parse_types(output)

    assert types["dt-diamond"] == ["    "]
@@ -2170,3 +2170,14 @@ def test_concretization_with_compilers_supporting_target_any(self):
        with spack.config.override("compilers", compiler_configuration):
            s = spack.spec.Spec("a").concretized()
        assert s.satisfies("%gcc@12.1.0")

    @pytest.mark.parametrize("spec_str", ["mpileaks", "mpileaks ^mpich"])
    def test_virtuals_are_annotated_on_edges(self, spec_str, default_mock_concretization):
        """Tests that information on virtuals is annotated on DAG edges"""
        spec = default_mock_concretization(spec_str)
        mpi_provider = spec["mpi"].name

        edges = spec.edges_to_dependencies(name=mpi_provider)
        assert len(edges) == 1 and edges[0].virtuals == ("mpi",)
        edges = spec.edges_to_dependencies(name="callpath")
        assert len(edges) == 1 and edges[0].virtuals == ()
BIN lib/spack/spack/test/data/specfiles/hdf5.v020.json.gz (new file, binary file not shown)
@@ -167,6 +167,37 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
        assert len([x for x in content if 'append_path("SPACE", "qux", " ")' in x]) == 1
        assert len([x for x in content if 'remove_path("SPACE", "qux", " ")' in x]) == 1

    @pytest.mark.regression("11355")
    def test_manpath_setup(self, modulefile_content, module_configuration):
        """Tests specific setup of MANPATH environment variable."""

        module_configuration("autoload_direct")

        # no manpath set by module
        content = modulefile_content("mpileaks")
        assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0

        # manpath set by module with prepend_path
        content = modulefile_content("module-manpath-prepend")
        assert (
            len([x for x in content if 'prepend_path("MANPATH", "/path/to/man", ":")' in x]) == 1
        )
        assert (
            len([x for x in content if 'prepend_path("MANPATH", "/path/to/share/man", ":")' in x])
            == 1
        )
        assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1

        # manpath set by module with append_path
        content = modulefile_content("module-manpath-append")
        assert len([x for x in content if 'append_path("MANPATH", "/path/to/man", ":")' in x]) == 1
        assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1

        # manpath set by module with setenv
        content = modulefile_content("module-manpath-setenv")
        assert len([x for x in content if 'setenv("MANPATH", "/path/to/man")' in x]) == 1
        assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0

    def test_help_message(self, modulefile_content, module_configuration):
        """Tests the generation of module help message."""

@@ -37,6 +37,11 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
        module_configuration("autoload_direct")
        content = modulefile_content(mpileaks_spec_string)

        assert (
            len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
            == 1
        )
        assert len([x for x in content if "depends-on " in x]) == 2
        assert len([x for x in content if "module load " in x]) == 2

        # dtbuild1 has
@@ -46,6 +51,11 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
        # Just make sure the 'build' dependency is not there
        content = modulefile_content("dtbuild1")

        assert (
            len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
            == 1
        )
        assert len([x for x in content if "depends-on " in x]) == 2
        assert len([x for x in content if "module load " in x]) == 2

        # The configuration file sets the verbose keyword to False
@@ -58,6 +68,11 @@ def test_autoload_all(self, modulefile_content, module_configuration):
        module_configuration("autoload_all")
        content = modulefile_content(mpileaks_spec_string)

        assert (
            len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
            == 1
        )
        assert len([x for x in content if "depends-on " in x]) == 5
        assert len([x for x in content if "module load " in x]) == 5

        # dtbuild1 has
@@ -67,6 +82,11 @@ def test_autoload_all(self, modulefile_content, module_configuration):
        # Just make sure the 'build' dependency is not there
        content = modulefile_content("dtbuild1")

        assert (
            len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
            == 1
        )
        assert len([x for x in content if "depends-on " in x]) == 2
        assert len([x for x in content if "module load " in x]) == 2

    def test_prerequisites_direct(self, modulefile_content, module_configuration):
@@ -103,6 +123,7 @@ def test_alter_environment(self, modulefile_content, module_configuration):
        assert len([x for x in content if x.startswith("prepend-path CMAKE_PREFIX_PATH")]) == 0
        assert len([x for x in content if 'setenv FOO "foo"' in x]) == 0
        assert len([x for x in content if "unsetenv BAR" in x]) == 0
        assert len([x for x in content if "depends-on foo/bar" in x]) == 1
        assert len([x for x in content if "module load foo/bar" in x]) == 1
        assert len([x for x in content if "setenv LIBDWARF_ROOT" in x]) == 1

@@ -121,6 +142,46 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
        assert len([x for x in content if 'append-path --delim " " SPACE "qux"' in x]) == 1
        assert len([x for x in content if 'remove-path --delim " " SPACE "qux"' in x]) == 1

    @pytest.mark.regression("11355")
    def test_manpath_setup(self, modulefile_content, module_configuration):
        """Tests specific setup of MANPATH environment variable."""

        module_configuration("autoload_direct")

        # no manpath set by module
        content = modulefile_content("mpileaks")
        assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 0

        # manpath set by module with prepend-path
        content = modulefile_content("module-manpath-prepend")
        assert (
            len([x for x in content if 'prepend-path --delim ":" MANPATH "/path/to/man"' in x])
            == 1
        )
        assert (
            len(
                [
                    x
                    for x in content
                    if 'prepend-path --delim ":" MANPATH "/path/to/share/man"' in x
                ]
            )
            == 1
        )
        assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 1

        # manpath set by module with append-path
        content = modulefile_content("module-manpath-append")
        assert (
            len([x for x in content if 'append-path --delim ":" MANPATH "/path/to/man"' in x]) == 1
        )
        assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 1

        # manpath set by module with setenv
        content = modulefile_content("module-manpath-setenv")
        assert len([x for x in content if 'setenv MANPATH "/path/to/man"' in x]) == 1
        assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 0

    def test_help_message(self, modulefile_content, module_configuration):
        """Tests the generation of module help message."""

@@ -394,10 +455,16 @@ def test_autoload_with_constraints(self, modulefile_content, module_configuratio

        # Test the mpileaks that should have the autoloaded dependencies
        content = modulefile_content("mpileaks ^mpich2")
        assert len([x for x in content if "depends-on " in x]) == 2
        assert len([x for x in content if "module load " in x]) == 2

        # Test the mpileaks that should NOT have the autoloaded dependencies
        content = modulefile_content("mpileaks ^mpich")
        assert (
            len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
            == 0
        )
        assert len([x for x in content if "depends-on " in x]) == 0
        assert len([x for x in content if "module load " in x]) == 0

    def test_modules_no_arch(self, factory, module_configuration):
@@ -100,7 +100,7 @@ def test_buildcache(mock_archive, tmpdir):
    parser = argparse.ArgumentParser()
    buildcache.setup_parser(parser)

    create_args = ["create", "-a", "-f", "-d", mirror_path, pkghash]
    create_args = ["create", "-a", "-f", mirror_path, pkghash]
    # Create a private key to sign package with if gpg2 available
    spack.util.gpg.create(
        name="test key 1", expires="0", email="spack@googlegroups.com", comment="Spack test key"
@@ -116,7 +116,7 @@ def test_buildcache(mock_archive, tmpdir):
    # Uninstall the package
    pkg.do_uninstall(force=True)

    install_args = ["install", "-a", "-f", pkghash]
    install_args = ["install", "-f", pkghash]
    args = parser.parse_args(install_args)
    # Test install
    buildcache.buildcache(parser, args)
@@ -131,30 +131,6 @@ def test_buildcache(mock_archive, tmpdir):
    assert buildinfo["relocate_textfiles"] == ["dummy.txt"]
    assert buildinfo["relocate_links"] == ["link_to_dummy.txt"]

    # create build cache with relative path
    create_args.insert(create_args.index("-a"), "-f")
    create_args.insert(create_args.index("-a"), "-r")
    args = parser.parse_args(create_args)
    buildcache.buildcache(parser, args)

    # Uninstall the package
    pkg.do_uninstall(force=True)

    args = parser.parse_args(install_args)
    buildcache.buildcache(parser, args)

    # test overwrite install
    install_args.insert(install_args.index("-a"), "-f")
    args = parser.parse_args(install_args)
    buildcache.buildcache(parser, args)

    files = os.listdir(spec.prefix)
    assert "link_to_dummy.txt" in files
    assert "dummy.txt" in files
    # assert os.path.realpath(
    #     os.path.join(spec.prefix, 'link_to_dummy.txt')
    # ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))

    args = parser.parse_args(["keys"])
    buildcache.buildcache(parser, args)

@@ -125,7 +125,7 @@ def _mock_installed(self):
    # use the installed C. It should *not* force A to use the installed D
    # *if* we're doing a fresh installation.
    a_spec = Spec(a)
    a_spec._add_dependency(c_spec, deptypes=("build", "link"))
    a_spec._add_dependency(c_spec, deptypes=("build", "link"), virtuals=())
    a_spec.concretize()
    assert spack.version.Version("2") == a_spec[c][d].version
    assert spack.version.Version("2") == a_spec[e].version
@@ -148,7 +148,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
    monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))

    a_spec = Spec("a")
    a_spec._add_dependency(b_spec, deptypes=("build", "link"))
    a_spec._add_dependency(b_spec, deptypes=("build", "link"), virtuals=())
    a_spec.concretize()

    assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@@ -989,9 +989,9 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
    link_run_spec = Spec("c@=1.0").concretized()
    build_spec = Spec("c@=2.0").concretized()

    root.add_dependency_edge(link_run_spec, deptypes="link")
    root.add_dependency_edge(link_run_spec, deptypes="run")
    root.add_dependency_edge(build_spec, deptypes="build")
    root.add_dependency_edge(link_run_spec, deptypes="link", virtuals=())
    root.add_dependency_edge(link_run_spec, deptypes="run", virtuals=())
    root.add_dependency_edge(build_spec, deptypes="build", virtuals=())

    # Check dependencies from the perspective of root
    assert len(root.dependencies()) == 2
@@ -1017,7 +1017,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
    root = Spec("b@=2.0").concretized()
    bootstrap = Spec("b@=1.0").concretized()

    root.add_dependency_edge(bootstrap, deptypes="build")
    root.add_dependency_edge(bootstrap, deptypes="build", virtuals=())

    assert len(root.dependencies()) == 1
    assert root.dependencies()[0].name == "b"
@@ -1036,7 +1036,7 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
    bootstrap = Spec("b@=1.0").concretized()

    for current_deptype in ("build", "link", "run"):
        root.add_dependency_edge(bootstrap, deptypes=current_deptype)
        root.add_dependency_edge(bootstrap, deptypes=current_deptype, virtuals=())

    # Check edges in dependencies
    assert len(root.edges_to_dependencies()) == 1
@@ -1063,9 +1063,9 @@ def test_adding_same_deptype_with_the_same_name_raises(
    c1 = Spec("b@=1.0").concretized()
    c2 = Spec("b@=2.0").concretized()

    p.add_dependency_edge(c1, deptypes=c1_deptypes)
    p.add_dependency_edge(c1, deptypes=c1_deptypes, virtuals=())
    with pytest.raises(spack.error.SpackError):
        p.add_dependency_edge(c2, deptypes=c2_deptypes)
        p.add_dependency_edge(c2, deptypes=c2_deptypes, virtuals=())


@pytest.mark.regression("33499")
@@ -1084,16 +1084,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
    z3_flavor_1 = Spec("z3 +through_a1")
    z3_flavor_2 = Spec("z3 +through_z1")

    root.add_dependency_edge(a1, deptypes=("build", "run", "test"))
    root.add_dependency_edge(a1, deptypes=("build", "run", "test"), virtuals=())

    # unique package as a dep of a build/run/test type dep.
    a1.add_dependency_edge(a2, deptypes="all")
    a1.add_dependency_edge(z3_flavor_1, deptypes="all")
    a1.add_dependency_edge(a2, deptypes="all", virtuals=())
    a1.add_dependency_edge(z3_flavor_1, deptypes="all", virtuals=())

    # chain of link type deps root -> z1 -> z2 -> z3
    root.add_dependency_edge(z1, deptypes="link")
    z1.add_dependency_edge(z2, deptypes="link")
    z2.add_dependency_edge(z3_flavor_2, deptypes="link")
    root.add_dependency_edge(z1, deptypes="link", virtuals=())
    z1.add_dependency_edge(z2, deptypes="link", virtuals=())
    z2.add_dependency_edge(z3_flavor_2, deptypes="link", virtuals=())

    # Indexing should prefer the link-type dep.
    assert "through_z1" in root["z3"].variants
@@ -971,7 +971,7 @@ def test_error_message_unknown_variant(self):
    def test_satisfies_dependencies_ordered(self):
        d = Spec("zmpi ^fake")
        s = Spec("mpileaks")
        s._add_dependency(d, deptypes=())
        s._add_dependency(d, deptypes=(), virtuals=())
        assert s.satisfies("mpileaks ^zmpi ^fake")

    @pytest.mark.parametrize("transitive", [True, False])
@@ -1018,6 +1018,7 @@ def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):


def test_malformed_spec_dict():
    # FIXME: This test was really testing the specific implementation with an ad-hoc test
    with pytest.raises(SpecError, match="malformed"):
        Spec.from_dict(
            {"spec": {"_meta": {"version": 2}, "nodes": [{"dependencies": {"name": "foo"}}]}}
@@ -1025,6 +1026,7 @@ def test_malformed_spec_dict():


def test_spec_dict_hashless_dep():
    # FIXME: This test was really testing the specific implementation with an ad-hoc test
    with pytest.raises(SpecError, match="Couldn't parse"):
        Spec.from_dict(
            {
@@ -1118,7 +1120,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):

    # add it to an abstract spec as a dependency
    top = Spec("dt-diamond")
    top.add_dependency_edge(bottom, deptypes=())
    top.add_dependency_edge(bottom, deptypes=(), virtuals=())

    # concretize with the already-concrete dependency
    top.concretize()
@@ -43,12 +43,6 @@ def check_json_round_trip(spec):
    assert spec.eq_dag(spec_from_json)


def test_simple_spec():
    spec = Spec("mpileaks")
    check_yaml_round_trip(spec)
    check_json_round_trip(spec)


def test_read_spec_from_signed_json():
    spec_dir = os.path.join(spack.paths.test_path, "data", "mirrors", "signed_json")
    file_name = (
@@ -70,13 +64,6 @@ def check_spec(spec_to_check):
    check_spec(s)


def test_normal_spec(mock_packages):
    spec = Spec("mpileaks+debug~opt")
    spec.normalize()
    check_yaml_round_trip(spec)
    check_json_round_trip(spec)


@pytest.mark.parametrize(
    "invalid_yaml", ["playing_playlist: {{ action }} playlist {{ playlist_name }}"]
)
@@ -95,37 +82,28 @@ def test_invalid_json_spec(invalid_json, error_message):
    assert error_message in exc_msg


def test_external_spec(config, mock_packages):
    spec = Spec("externaltool")
    spec.concretize()
    check_yaml_round_trip(spec)
    check_json_round_trip(spec)

    spec = Spec("externaltest")
    spec.concretize()
    check_yaml_round_trip(spec)
    check_json_round_trip(spec)


def test_ambiguous_version_spec(mock_packages):
    spec = Spec("mpileaks@1.0:5.0,6.1,7.3+debug~opt")
    spec.normalize()
    check_yaml_round_trip(spec)
    check_json_round_trip(spec)


def test_concrete_spec(config, mock_packages):
    spec = Spec("mpileaks+debug~opt")
    spec.concretize()
    check_yaml_round_trip(spec)
    check_json_round_trip(spec)


def test_yaml_multivalue(config, mock_packages):
    spec = Spec('multivalue-variant foo="bar,baz"')
    spec.concretize()
    check_yaml_round_trip(spec)
    check_json_round_trip(spec)
@pytest.mark.parametrize(
    "abstract_spec",
    [
        # Externals
        "externaltool",
        "externaltest",
        # Ambiguous version spec
        "mpileaks@1.0:5.0,6.1,7.3+debug~opt",
        # Variants
        "mpileaks+debug~opt",
        'multivalue-variant foo="bar,baz"',
        # Virtuals on edges
        "callpath",
        "mpileaks",
    ],
)
def test_roundtrip_concrete_specs(abstract_spec, default_mock_concretization):
    check_yaml_round_trip(Spec(abstract_spec))
    check_json_round_trip(Spec(abstract_spec))
    concrete_spec = default_mock_concretization(abstract_spec)
    check_yaml_round_trip(concrete_spec)
    check_json_round_trip(concrete_spec)


def test_yaml_subdag(config, mock_packages):
@@ -506,6 +484,8 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
        ("specfiles/hdf5.v017.json.gz", "xqh5iyjjtrp2jw632cchacn3l7vqzf3m", spack.spec.SpecfileV2),
        # Use "full hash" everywhere, see https://github.com/spack/spack/pull/28504
        ("specfiles/hdf5.v019.json.gz", "iulacrbz7o5v5sbj7njbkyank3juh6d3", spack.spec.SpecfileV3),
        # Add properties on edges, see https://github.com/spack/spack/pull/34821
        ("specfiles/hdf5.v020.json.gz", "vlirlcgazhvsvtundz4kug75xkkqqgou", spack.spec.SpecfileV4),
    ],
)
def test_load_json_specfiles(specfile, expected_hash, reader_cls):
@@ -19,7 +19,7 @@ def create_dag(nodes, edges):
    """
    specs = {name: Spec(name) for name in nodes}
    for parent, child, deptypes in edges:
        specs[parent].add_dependency_edge(specs[child], deptypes=deptypes)
        specs[parent].add_dependency_edge(specs[child], deptypes=deptypes, virtuals=())
    return specs

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import os
import sys

import pytest

@@ -34,7 +33,6 @@ def _create_url(relative_url):
root_with_fragment = _create_url("index_with_fragment.html")


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.parametrize(
    "depth,expected_found,expected_not_found,expected_text",
    [
@@ -99,20 +97,17 @@ def test_spider_no_response(monkeypatch):
    assert not pages and not links


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_0():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=0)
    assert Version("0.0.0") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_1():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=1)
    assert Version("0.0.0") in versions
    assert Version("1.0.0") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_2():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
    assert Version("0.0.0") in versions
@@ -120,14 +115,12 @@ def test_find_versions_of_archive_2():
    assert Version("2.0.0") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_2():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
    # up for grabs to make this better.
    assert Version("2.0.0b2") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_3():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
    assert Version("0.0.0") in versions
@@ -137,7 +130,6 @@ def test_find_versions_of_archive_3():
    assert Version("4.5") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_3():
    versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
    assert Version("2.0.0b2") in versions
@@ -145,7 +137,6 @@ def test_find_exotic_versions_of_archive_3():
    assert Version("4.5-rc5") in versions


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_with_fragment():
    versions = spack.util.web.find_versions_of_archive(
        root_tarball, root_with_fragment, list_depth=0
@@ -206,7 +197,6 @@ def test_etag_parser():
    assert spack.util.web.parse_etag("abc def") is None


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_list_url(tmpdir):
    testpath = str(tmpdir)
    testpath_url = url_util.path_to_file_url(testpath)
@@ -211,7 +211,9 @@ def get_visitor_from_args(cover, direction, deptype, key=id, visited=None, visit
def with_artificial_edges(specs):
    """Initialize a list of edges from an imaginary root node to the root specs."""
    return [
        EdgeAndDepth(edge=spack.spec.DependencySpec(parent=None, spec=s, deptypes=()), depth=0)
        EdgeAndDepth(
            edge=spack.spec.DependencySpec(parent=None, spec=s, deptypes=(), virtuals=()), depth=0
        )
        for s in specs
    ]

@@ -17,6 +17,7 @@
import traceback
import urllib.parse
from html.parser import HTMLParser
from pathlib import Path, PurePosixPath
from urllib.error import URLError
from urllib.request import HTTPSHandler, Request, build_opener

@@ -498,7 +499,8 @@ def list_url(url, recursive=False):

    if local_path:
        if recursive:
            return list(_iter_local_prefix(local_path))
            # convert backslash to forward slash as required for URLs
            return [str(PurePosixPath(Path(p))) for p in list(_iter_local_prefix(local_path))]
        return [
            subpath
            for subpath in os.listdir(local_path)
@@ -738,7 +740,8 @@ def find_versions_of_archive(

    # We'll be a bit more liberal and just look for the archive
    # part, not the full path.
    url_regex = os.path.basename(url_regex)
    # this is a URL so it is a posixpath even on Windows
    url_regex = PurePosixPath(url_regex).name

    # We need to add a / to the beginning of the regex to prevent
    # Spack from picking up similarly named packages like:
@@ -498,7 +498,7 @@ _spack_buildcache() {
|
||||
_spack_buildcache_push() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --update-index --rebuild-index --spec-file --only"
|
||||
SPACK_COMPREPLY="-h --help -f --force -u --unsigned -a --allow-root -k --key --update-index --rebuild-index --spec-file --only"
|
||||
else
|
||||
_mirrors
|
||||
fi
|
||||
@@ -507,7 +507,7 @@ _spack_buildcache_push() {
|
||||
_spack_buildcache_create() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help -r --rel -f --force -u --unsigned -a --allow-root -k --key -d --directory -m --mirror-name --mirror-url --update-index --rebuild-index --spec-file --only"
|
||||
SPACK_COMPREPLY="-h --help -f --force -u --unsigned -a --allow-root -k --key --update-index --rebuild-index --spec-file --only"
|
||||
else
|
||||
_mirrors
|
||||
fi
|
||||
@@ -516,7 +516,7 @@ _spack_buildcache_create() {
|
||||
_spack_buildcache_install() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help -f --force -m --multiple -a --allow-root -u --unsigned -o --otherarch"
|
||||
SPACK_COMPREPLY="-h --help -f --force -m --multiple -u --unsigned -o --otherarch"
|
||||
else
|
||||
_all_packages
|
||||
fi
|
||||
@@ -563,7 +563,7 @@ _spack_buildcache_save_specfile() {
|
||||
_spack_buildcache_sync() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help --manifest-glob --src-directory --src-mirror-name --src-mirror-url --dest-directory --dest-mirror-name --dest-mirror-url"
|
||||
SPACK_COMPREPLY="-h --help --manifest-glob"
|
||||
else
|
||||
SPACK_COMPREPLY=""
|
||||
fi
|
||||
@@ -572,7 +572,7 @@ _spack_buildcache_sync() {
|
||||
_spack_buildcache_update_index() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url -k --keys"
|
||||
SPACK_COMPREPLY="-h --help -k --keys"
|
||||
else
|
||||
_mirrors
|
||||
fi
|
||||
@@ -581,7 +581,7 @@ _spack_buildcache_update_index() {
|
||||
_spack_buildcache_rebuild_index() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help -d --directory -m --mirror-name --mirror-url -k --keys"
|
||||
SPACK_COMPREPLY="-h --help -k --keys"
|
||||
else
|
||||
_mirrors
|
||||
fi
|
||||
@@ -1695,7 +1695,7 @@ _spack_restage() {
|
||||
_spack_solve() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help --show -l --long -L --very-long -I --install-status -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats -U --fresh --reuse --reuse-deps"
|
||||
SPACK_COMPREPLY="-h --help --show -l --long -L --very-long -I --install-status --no-install-status -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats -U --fresh --reuse --reuse-deps"
|
||||
else
|
||||
_all_packages
|
||||
fi
|
||||
@@ -1704,7 +1704,7 @@ _spack_solve() {
|
||||
_spack_spec() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help -l --long -L --very-long -I --install-status -y --yaml -j --json --format -c --cover -N --namespaces -t --types -U --fresh --reuse --reuse-deps"
|
||||
SPACK_COMPREPLY="-h --help -l --long -L --very-long -I --install-status --no-install-status -y --yaml -j --json --format -c --cover -N --namespaces -t --types -U --fresh --reuse --reuse-deps"
|
||||
else
|
||||
_all_packages
|
||||
fi

@@ -37,7 +37,7 @@ RUN find -L {{ paths.view }}/* -type f -exec readlink -f '{}' \; | \

# Modifications to the environment that are necessary to run
RUN cd {{ paths.environment }} && \
    spack env activate --sh -d . >> /etc/profile.d/z10_spack_environment.sh
    spack env activate --sh -d . > activate.sh

{% if extra_instructions.build %}
{{ extra_instructions.build }}

@@ -53,7 +53,13 @@ COPY --from=builder {{ paths.environment }} {{ paths.environment }}
COPY --from=builder {{ paths.store }} {{ paths.store }}
COPY --from=builder {{ paths.hidden_view }} {{ paths.hidden_view }}
COPY --from=builder {{ paths.view }} {{ paths.view }}
COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh

RUN { \
    echo '#!/bin/sh' \
    && echo '.' {{ paths.environment }}/activate.sh \
    && echo 'exec "$@"'; \
    } > /entrypoint.sh \
    && chmod a+x /entrypoint.sh

{% block final_stage %}

@@ -70,6 +76,6 @@ RUN {% if os_package_update %}{{ os_packages_final.update }} \
{% for label, value in labels.items() %}
LABEL "{{ label }}"="{{ value }}"
{% endfor %}
ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l", "-c", "$*", "--" ]
ENTRYPOINT [ "/entrypoint.sh" ]
CMD [ "/bin/bash" ]
{% endif %}
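Rendered, the RUN block above produces a three-line wrapper. Assuming {{ paths.environment }} expands to /opt/spack-environment (an illustrative value, not fixed by this diff), /entrypoint.sh would contain the script sketched below as a Python string; because it ends in exec "$@", a command passed to docker run executes inside the activated environment while keeping PID 1 semantics:

ENTRYPOINT_SH = """\
#!/bin/sh
. /opt/spack-environment/activate.sh
exec "$@"
"""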

@@ -84,6 +84,10 @@ setenv("{{ cmd.name }}", "{{ cmd.value }}")
unsetenv("{{ cmd.name }}")
{% endif %}
{% endfor %}
{# Make sure system man pages are enabled by appending trailing delimiter to MANPATH #}
{% if has_manpath_modifications %}
append_path("MANPATH", "", ":")
{% endif %}
{% endblock %}

{% block footer %}
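Why the empty value: a MANPATH that ends in ':' tells man(1) to also search its built-in default path, so Spack's man pages extend rather than replace the system ones. A quick illustration of the string the template change produces:

manpath = "/spack/view/share/man"            # value modules set earlier (illustrative path)
manpath = manpath + ":" + ""                 # effect of append_path("MANPATH", "", ":")
assert manpath == "/spack/view/share/man:"   # trailing ':' pulls in system defaults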

@@ -26,9 +26,17 @@ proc ModulesHelp { } {
{% endblock %}

{% block autoloads %}
{% if autoload|length > 0 %}
if {![info exists ::env(LMOD_VERSION_MAJOR)]} {
{% for module in autoload %}
module load {{ module }}
    module load {{ module }}
{% endfor %}
} else {
{% for module in autoload %}
    depends-on {{ module }}
{% endfor %}
}
{% endif %}
{% endblock %}
{# #}
{% block prerequisite %}

@@ -58,6 +66,10 @@ unsetenv {{ cmd.name }}
{% endif %}
{# #}
{% endfor %}
{# Make sure system man pages are enabled by appending trailing delimiter to MANPATH #}
{% if has_manpath_modifications %}
append-path --delim ":" MANPATH ""
{% endif %}
{% endblock %}

{% block footer %}

@@ -0,0 +1,16 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class ModuleManpathAppend(Package):
    homepage = "http://www.llnl.gov"
    url = "http://www.llnl.gov/module-manpath-append-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    def setup_run_environment(self, env):
        env.append_path("MANPATH", "/path/to/man")

@@ -0,0 +1,17 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class ModuleManpathPrepend(Package):
    homepage = "http://www.llnl.gov"
    url = "http://www.llnl.gov/module-manpath-prepend-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    def setup_run_environment(self, env):
        env.prepend_path("MANPATH", "/path/to/man")
        env.prepend_path("MANPATH", "/path/to/share/man")

@@ -0,0 +1,16 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class ModuleManpathSetenv(Package):
    homepage = "http://www.llnl.gov"
    url = "http://www.llnl.gov/module-manpath-setenv-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    def setup_run_environment(self, env):
        env.set("MANPATH", "/path/to/man")
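The three mock packages above cover the append, prepend, and set cases, each of which should trip the has_manpath_modifications flag consumed by the templates. A rough sketch of that predicate (names assumed; Spack's actual check lives in the module writer):

def has_manpath_modifications(env_modifications):
    # True when any recorded modification (append, prepend, or set)
    # targets MANPATH -- the guard for the trailing-delimiter append.
    return any(mod.name == "MANPATH" for mod in env_modifications)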

@@ -27,7 +27,8 @@ class Abinit(AutotoolsPackage):
    homepage = "https://www.abinit.org/"
    url = "https://www.abinit.org/sites/default/files/packages/abinit-8.6.3.tar.gz"

    version("9.8.3", sha256="65fb93217336a72d1554cc6991127203958cc7df59921782251a86569e33a357")
    version("9.8.4", sha256="a086d5045f0093b432e6a044d5f71f7edf5a41a62d67b3677cb0751d330c564a")
    version("9.8.3", sha256="de823878aea2c20098f177524fbb4b60de9b1b5971b2e835ec244dfa3724589b")
    version("9.6.1", sha256="b6a12760fd728eb4aacca431ae12150609565bedbaa89763f219fcd869f79ac6")
    version("9.4.2", sha256="d40886f5c8b138bb4aa1ca05da23388eb70a682790cfe5020ecce4db1b1a76bc")
    version("8.10.3", sha256="ed626424b4472b93256622fbb9c7645fa3ffb693d4b444b07d488771ea7eaa75")

@@ -16,6 +16,7 @@ class Alglib(MakefilePackage):
    homepage = "https://www.alglib.net/"
    url = "https://www.alglib.net/translator/re/alglib-3.11.0.cpp.gpl.tgz"

    version("4.00.0", sha256="827b5f559713a3e8c7c1452ed1ffd5227adb9622d1a165ceb70c117c8ed3ccb4")
    version("3.20.0", sha256="e7357f0f894313ff1b640ec9cb5e8b63f06d2d3411c2143a374aa0e9740da8a9")
    version("3.11.0", sha256="34e391594aac89fb354bdaf58c42849489cd1199197398ba98bb69961f42bdb0")

var/spack/repos/builtin/packages/binder/package.py (new file, 54 lines)
@@ -0,0 +1,54 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class Binder(CMakePackage):
    """
    Binder is a tool for automatic generation of Python bindings
    for C++11 projects using Pybind11 and Clang LibTooling libraries.
    That is, Binder takes a C++ project and compiles it into objects
    and functions that are all usable within Python.
    Binder is different from prior tools in that it handles special
    features new in C++11.
    """

    homepage = "https://github.com/RosettaCommons/binder"
    git = "https://github.com/RosettaCommons/binder.git"

    maintainers("lyskov", "kliegeois")

    version("master", branch="master")
    version("1.3.0", tag="v1.3.0")
    version("1.2.0", tag="v1.2.0")
version("1.1.0", tag="v1.0.0")
    version("1.0.0", tag="v1.0.0")

    # Add dependencies
    depends_on("llvm+clang+llvm_dylib@7.0:9")

    def cmake_args(self):
        spec = self.spec
        llvm_dir = spec["llvm"].prefix
        clang_dir = spec["llvm"].prefix
        options = []

        options.extend(
            [
                "-DLLVM_DIR:FILEPATH={0}".format(llvm_dir),
                "-DClang_DIR:FILEPATH={0}".format(clang_dir),
                "-DCMAKE_CXX_FLAGS=-Wl,--verbose",
                "-DBINDER_ENABLE_TEST=OFF",
            ]
        )
        return options

    def setup_dependent_package(self, module, dependent_spec):
        llvm_dir = self.spec["llvm"].prefix
        self.spec.clang_include_dirs = llvm_dir.include
        self.spec.LibClang_include_dir = llvm_dir.lib.clang.join(
            format(self.spec["llvm"].version)
        ).include
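setup_dependent_package plants two attributes on binder's spec for consumers to read; a hedged sketch of a dependent using them (hypothetical package, not part of this diff):

class PyMyBindings(Package):
    depends_on("binder", type="build")

    def install(self, spec, prefix):
        # Attributes attached by binder's setup_dependent_package above.
        clang_includes = spec["binder"].clang_include_dirs
        libclang_includes = spec["binder"].LibClang_include_dir
        # ... pass both include directories to the binder invocation ...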

@@ -0,0 +1,29 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class BioconductorEbseq(RPackage):
    """An R package for gene and isoform differential expression analysis of RNA-seq data.

    R/EBSeq is an R package for identifying genes and isoforms differentially
    expressed (DE) across two or more biological conditions in an RNA-seq
    experiment. Details can be found in Leng et al., 2013. It provides the syntax
    required for identifying DE genes and isoforms in a two-group RNA-seq
    experiment as well for identifying DE genes across more than two conditions
    (the commands for identifying DE isoforms across more than two conditions
    are the same as those required for gene-level analysis)."""

    homepage = "https://www.biostat.wisc.edu/~kendzior/EBSEQ/"
    url = "https://bioconductor.org/packages/release/bioc/src/contrib/EBSeq_1.40.0.tar.gz"

    bioc = "ebseq"

    version("1.40.0", sha256="a5d3a88743d61062c6d68a426b19c53a4afd2fa216abc884d42c187780994378")

    depends_on("r-blockmodeling")
    depends_on("r-gplots")
    depends_on("r-testthat")

@@ -12,5 +12,6 @@ class Blogbench(AutotoolsPackage):
    homepage = "https://openbenchmarking.org/test/pts/blogbench"
    url = "https://download.pureftpd.org/pub/blogbench/blogbench-1.1.tar.gz"

    version("1.2", sha256="1eabdb1ac0ad8ff6f5b9de36b2ef9b684a35b6e40aea0424e3dd4d6cd923c1af")
    version("1.1", sha256="8cded059bfdbccb7be35bb6a2272ecfdbe3fbea43d53c92ba5572ac24f26c4df")
    version("1.0", sha256="dc29261a19064a8fb64d39b27607f19d3b33ce3795908e717404167687ef33be")

@@ -21,6 +21,7 @@ class Ccache(CMakePackage):

    executables = ["^ccache$"]

    version("4.8.1", sha256="869903c1891beb8bee87f1ec94d8a0dad18c2add4072c456acbc85cdfc23ca63")
    version("4.8", sha256="ac4b01748fd59cfe07e070c34432b91bdd0fd8640e1e653a80b01d6a523186b0")
    version("4.7.4", sha256="dc283906b73bd7c461178ca472a459e9d86b5523405035921bd8204e77620264")
    version("4.7.3", sha256="577841df9e9d9659d58a2f4e0f6eaceb7e29816988ffb2b12390e17b109b4ac4")

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from glob import glob

from spack.package import *


@@ -16,12 +18,19 @@ class Cdhit(MakefilePackage):
    version("4.8.1", sha256="f8bc3cdd7aebb432fcd35eed0093e7a6413f1e36bbd2a837ebc06e57cdb20b70")
    version("4.6.8", sha256="37d685e4aa849314401805fe4d4db707e1d06070368475e313d6f3cb8fb65949")

    maintainers("snehring")

    variant("openmp", default=True, description="Compile with multi-threading support")
    variant("zlib", default=True, description="Compile with zlib")

    depends_on("perl", type=("build", "run"))
    depends_on("perl-text-nsp", type="run")
    depends_on("zlib", when="+zlib", type="link")

    def patch(self):
        for f in glob("*.pl"):
            filter_file("^#!/usr/bin/perl.*$", "#!/usr/bin/env perl", f)

    def build(self, spec, prefix):
        mkdirp(prefix.bin)
        make_args = []

@@ -17,6 +17,7 @@ class Cgal(CMakePackage):
    homepage = "https://www.cgal.org/"
    url = "https://github.com/CGAL/cgal/releases/download/v5.4.1/CGAL-5.4.1.tar.xz"

    version("5.5.2", sha256="b2b05d5616ecc69facdc24417cce0b04fb4321491d107db45103add520e3d8c3")
    version("5.4.1", sha256="4c3dd7ee4d36d237111a4d72b6e14170093271595d5b695148532daa95323d76")
    version("5.1.5", sha256="b1bb8a6053aa12baa5981aef20a542cd3e617a86826963fb8fb6852b1a0da97c")
    version("5.0.3", sha256="e5a3672e35e5e92e3c1b4452cd3c1d554f3177dc512bd98b29edf21866a4288c")

@@ -14,6 +14,7 @@ class Cloc(Package):
    homepage = "https://github.com/AlDanial/cloc/"
    url = "https://github.com/AlDanial/cloc/archive/v1.90.tar.gz"

    version("1.96.1", sha256="f0551d98dcce9ca2e78b984adf8e8cc7c6002037a1155e5294338c435e4a1af1")
    version("1.90", sha256="60b429dd2aa5cd65707b359dcbcbeb710c8e4db880886528ced0962c67e52548")
    version("1.84", sha256="c3f0a6bd2319110418ccb3e55a7a1b6d0edfd7528bfd2ae5d530938abe90f254")
    version("1.80", sha256="082f53530eee3f9ee84ec449eca59a77ff114250cd7daf9519679537b5b21d67")

@@ -159,6 +159,10 @@ class Comgr(CMakePackage):

    root_cmakelists_dir = join_path("lib", "comgr")

    def cmake_args(self):
        args = [self.define("BUILD_TESTING", self.run_tests)]
        return args

    @classmethod
    def determine_version(cls, lib):
        match = re.search(r"lib\S*\.so\.\d+\.\d+\.(\d)(\d\d)(\d\d)", lib)
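The capture groups split a five-digit ROCm-style patch suffix into major/minor/patch digits; a worked example against an illustrative filename (not taken from this diff):

import re

match = re.search(r"lib\S*\.so\.\d+\.\d+\.(\d)(\d\d)(\d\d)", "libamd_comgr.so.2.4.50500")
assert match.groups() == ("5", "05", "00")
assert ".".join(str(int(g)) for g in match.groups()) == "5.5.0"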

@@ -14,6 +14,7 @@ class Consul(MakefilePackage):
    homepage = "https://www.consul.io"
    url = "https://github.com/hashicorp/consul/archive/v1.8.1.tar.gz"

    version("1.15.3", sha256="5137e9831b4bc094c8ce5be64c661dc0d9a96ee9509d756030d60d4bcca42503")
    version("1.8.1", sha256="c173e9866e6181b3679a942233adade118976414f6ca2da8deaea0fa2bba9b06")
    version("1.8.0", sha256="a87925bde6aecddf532dfd050e907b6a0a6447cdd5dc4f49b46d97c9f73b58f9")
    version("1.7.6", sha256="893abad7563c1f085303705f72d8789b338236972123f0ab6d2be24dbb58c2ac")

@@ -12,6 +12,7 @@ class CppHttplib(CMakePackage):
    homepage = "https://github.com/yhirose/cpp-httplib/"
    url = "https://github.com/yhirose/cpp-httplib/archive/v0.5.10.tar.gz"

    version("0.12.5", sha256="b488f3fa9c6bf35608c3d9a5b69be52e016bbf2fbfe67e5ee684eadb2655493e")
    version("0.12.3", sha256="175ced3c9cdaf221e9edf210297568d8f7d402a41d6db01254ac9e0b25487c54")
    version("0.5.9", sha256="c9e7aef3b0d4e80ee533d10413508d8a6e09a67d0d59646c43111f3993de006e")
    version("0.5.8", sha256="184d4fe79fc836ee26aa8635b3240879af4c6f17257fc7063d0b77a0cf856dfc")

@@ -0,0 +1,34 @@
--- a/src/programs/ctffind/ctffind.cpp	2023-06-02 16:47:21.975662000 +0200
+++ b/src/programs/ctffind/ctffind.cpp	2023-06-02 16:48:06.565418037 +0200
@@ -267,11 +267,11 @@
 float FindRotationalAlignmentBetweenTwoStacksOfImages(Image *self, Image *other_image, int number_of_images, float search_half_range, float search_step_size, float minimum_radius, float maximum_radius);
 void ComputeImagesWithNumberOfExtremaAndCTFValues(CTF *ctf, Image *number_of_extrema, Image *ctf_values);
 int ReturnSpectrumBinNumber(int number_of_bins, float number_of_extrema_profile[], Image *number_of_extrema, long address, Image *ctf_values, float ctf_values_profile[]);
-bool ComputeRotationalAverageOfPowerSpectrum( Image *spectrum, CTF *ctf, Image *number_of_extrema, Image *ctf_values, int number_of_bins, double spatial_frequency[], double average[], double average_fit[], double average_renormalized[], float number_of_extrema_profile[], float ctf_values_profile[]);
+void ComputeRotationalAverageOfPowerSpectrum( Image *spectrum, CTF *ctf, Image *number_of_extrema, Image *ctf_values, int number_of_bins, double spatial_frequency[], double average[], double average_fit[], double average_renormalized[], float number_of_extrema_profile[], float ctf_values_profile[]);
 void ComputeEquiPhaseAverageOfPowerSpectrum( Image *spectrum, CTF *ctf, Curve *epa_pre_max, Curve *epa_post_max);
 void OverlayCTF( Image *spectrum, CTF *ctf, Image *number_of_extrema, Image *ctf_values, int number_of_bins_in_1d_spectra, double spatial_frequency[], double rotational_average_astig[], float number_of_extrema_profile[], float ctf_values_profile[], Curve *equiphase_average_pre_max, Curve *equiphase_average_post_max);
 void ComputeFRCBetween1DSpectrumAndFit( int number_of_bins, double average[], double fit[], float number_of_extrema_profile[], double frc[], double frc_sigma[], int first_fit_bin);
-bool RescaleSpectrumAndRotationalAverage( Image *spectrum, Image *number_of_extrema, Image *ctf_values, int number_of_bins, double spatial_frequency[], double average[], double average_fit[], float number_of_extrema_profile[], float ctf_values_profile[], int last_bin_without_aliasing, int last_bin_with_good_fit );
+void RescaleSpectrumAndRotationalAverage( Image *spectrum, Image *number_of_extrema, Image *ctf_values, int number_of_bins, double spatial_frequency[], double average[], double average_fit[], float number_of_extrema_profile[], float ctf_values_profile[], int last_bin_without_aliasing, int last_bin_with_good_fit );
 void Renormalize1DSpectrumForFRC( int number_of_bins, double average[], double fit[], float number_of_extrema_profile[]);
 float ReturnAzimuthToUseFor1DPlots(CTF *ctf);

@@ -2419,7 +2419,7 @@

 // Rescale the spectrum and its 1D rotational avereage so that the peaks and troughs are at 0.0 and 1.0. The location of peaks and troughs are worked out
 // by parsing the suppilied 1D average_fit array
-bool RescaleSpectrumAndRotationalAverage( Image *spectrum, Image *number_of_extrema, Image *ctf_values, int number_of_bins, double spatial_frequency[], double average[], double average_fit[], float number_of_extrema_profile[], float ctf_values_profile[], int last_bin_without_aliasing, int last_bin_with_good_fit )
+void RescaleSpectrumAndRotationalAverage( Image *spectrum, Image *number_of_extrema, Image *ctf_values, int number_of_bins, double spatial_frequency[], double average[], double average_fit[], float number_of_extrema_profile[], float ctf_values_profile[], int last_bin_without_aliasing, int last_bin_with_good_fit )
 {
 	MyDebugAssertTrue(spectrum->is_in_memory, "Spectrum memory not allocated");
 	MyDebugAssertTrue(number_of_bins > 1,"Bad number of bins: %i\n",number_of_bins);

@@ -2762,7 +2762,7 @@
 }

 //
-bool ComputeRotationalAverageOfPowerSpectrum( Image *spectrum, CTF *ctf, Image *number_of_extrema, Image *ctf_values, int number_of_bins, double spatial_frequency[], double average[], double average_fit[], double average_rank[], float number_of_extrema_profile[], float ctf_values_profile[])
+void ComputeRotationalAverageOfPowerSpectrum( Image *spectrum, CTF *ctf, Image *number_of_extrema, Image *ctf_values, int number_of_bins, double spatial_frequency[], double average[], double average_fit[], double average_rank[], float number_of_extrema_profile[], float ctf_values_profile[])
 {
 	MyDebugAssertTrue(spectrum->is_in_memory, "Spectrum memory not allocated");
 	MyDebugAssertTrue(number_of_extrema->is_in_memory,"Number of extrema image not allocated");

@@ -34,6 +34,7 @@ def url_for_version(self, version):

    patch("configure.patch", when="@4.1.8")
    patch("power9.patch", when="@4.1.14 target=power9le")
    patch("fix_return_types.patch", when="@4.1.13:4.1.14")

    def configure_args(self):
        config_args = []

@@ -26,6 +26,7 @@ class Curl(NMakePackage, AutotoolsPackage):

    maintainers("alecbcs")

    version("8.1.2", sha256="b54974d32fd610acace92e3df1f643144015ac65847f0a041fdc17db6f43f243")
    version("8.0.1", sha256="9b6b1e96b748d04b968786b6bdf407aa5c75ab53a3d37c1c8c81cdb736555ccf")
    version("7.88.1", sha256="8224b45cce12abde039c12dc0711b7ea85b104b9ad534d6e4c5b4e188a61c907")

@@ -14,6 +14,7 @@ class Direnv(Package):

    maintainers("acastanedam", "alecbcs")

    version("2.32.3", sha256="c66f6d1000f28f919c6106b5dcdd0a0e54fb553602c63c60bf59d9bbdf8bd33c")
    version("2.32.2", sha256="352b3a65e8945d13caba92e13e5666e1854d41749aca2e230938ac6c64fa8ef9")
    version("2.32.1", sha256="dc7df9a9e253e1124748aa74da94bf2b96f5a61d581c60d52d3f8e8dc86ecfde")
    version("2.31.0", sha256="f82694202f584d281a166bd5b7e877565f96a94807af96325c8f43643d76cb44")

var/spack/repos/builtin/packages/dla-future/package.py (new file, 135 lines)
@@ -0,0 +1,135 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
    """DLA-Future library: Distributed Linear Algebra with Future"""

    homepage = "https://github.com/eth-cscs/DLA-Future"
    url = "https://github.com/eth-cscs/DLA-Future/archive/v0.0.0.tar.gz"
    git = "https://github.com/eth-cscs/DLA-Future.git"
    maintainers = ["rasolca", "albestro", "msimberg", "aurianer"]

    version("0.1.0", sha256="f7ffcde22edabb3dc24a624e2888f98829ee526da384cd752b2b271c731ca9b1")
    version("master", branch="master")

    variant("shared", default=True, description="Build shared libraries.")

    variant("doc", default=False, description="Build documentation.")

    variant("miniapps", default=False, description="Build miniapps.")

    depends_on("cmake@3.22:", type="build")
    depends_on("doxygen", type="build", when="+doc")
    depends_on("mpi")
    depends_on("blaspp@2022.05.00:")
    depends_on("lapackpp@2022.05.00:")

    depends_on("umpire~examples")
    depends_on("umpire+cuda~shared", when="+cuda")
    depends_on("umpire+rocm~shared", when="+rocm")
    depends_on("umpire@4.1.0:")

    depends_on("pika@0.15.1:")
    depends_on("pika-algorithms@0.1:")
    depends_on("pika +mpi")
    depends_on("pika +cuda", when="+cuda")
    depends_on("pika +rocm", when="+rocm")

    conflicts("^pika cxxstd=20", when="+cuda")

    depends_on("whip +cuda", when="+cuda")
    depends_on("whip +rocm", when="+rocm")

    depends_on("rocblas", when="+rocm")
    depends_on("rocprim", when="+rocm")
    depends_on("rocsolver", when="+rocm")
    depends_on("rocthrust", when="+rocm")

    conflicts("+cuda", when="+rocm")

    with when("+rocm"):
        for val in ROCmPackage.amdgpu_targets:
            depends_on("pika amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val))
            depends_on(
                "rocsolver amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val)
            )
            depends_on(
                "rocblas amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val)
            )
            depends_on(
                "rocprim amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val)
            )
            depends_on(
                "rocthrust amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val)
            )
            depends_on("whip amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val))
            depends_on(
                "umpire amdgpu_target={0}".format(val), when="amdgpu_target={0}".format(val)
            )

    with when("+cuda"):
        for val in CudaPackage.cuda_arch_values:
            depends_on("pika cuda_arch={0}".format(val), when="cuda_arch={0}".format(val))
            depends_on("umpire cuda_arch={0}".format(val), when="cuda_arch={0}".format(val))

    def cmake_args(self):
        spec = self.spec
        args = []

        args.append(self.define_from_variant("BUILD_SHARED_LIBS", "shared"))

        # BLAS/LAPACK
        if "^mkl" in spec:
            vmap = {
                "none": "seq",
                "openmp": "omp",
                "tbb": "tbb",
            }  # Map MKL variants to LAPACK target name
            # TODO: Generalise for intel-oneapi-mkl
            args += [
                self.define("DLAF_WITH_MKL", True),
                self.define(
                    "MKL_LAPACK_TARGET",
                    "mkl::mkl_intel_32bit_{0}_dyn".format(
                        vmap[spec["intel-mkl"].variants["threads"].value]
                    ),
                ),
            ]
        else:
            args.append(self.define("DLAF_WITH_MKL", False))
            args.append(
                self.define(
                    "LAPACK_LIBRARY",
                    " ".join([spec[dep].libs.ld_flags for dep in ["blas", "lapack"]]),
                )
            )

        # CUDA/HIP
        args.append(self.define_from_variant("DLAF_WITH_CUDA", "cuda"))
        args.append(self.define_from_variant("DLAF_WITH_HIP", "rocm"))
        if "+rocm" in spec:
            archs = self.spec.variants["amdgpu_target"].value
            if "none" not in archs:
                arch_str = ";".join(archs)
                args.append(self.define("CMAKE_HIP_ARCHITECTURES", arch_str))
        if "+cuda" in spec:
            archs = self.spec.variants["cuda_arch"].value
            if "none" not in archs:
                arch_str = ";".join(archs)
                args.append(self.define("CMAKE_CUDA_ARCHITECTURES", arch_str))

        # DOC
        args.append(self.define_from_variant("DLAF_BUILD_DOC", "doc"))

        # TEST
        args.append(self.define("DLAF_BUILD_TESTING", self.run_tests))

        # MINIAPPS
        args.append(self.define_from_variant("DLAF_BUILD_MINIAPPS", "miniapps"))

        return args
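To make the MKL branch concrete: for a spec such as dla-future ^intel-mkl threads=openmp, the vmap lookup above resolves to the omp target. A worked evaluation of that code, nothing more:

vmap = {"none": "seq", "openmp": "omp", "tbb": "tbb"}
threads = "openmp"  # the intel-mkl "threads" variant value in this example
assert "mkl::mkl_intel_32bit_{0}_dyn".format(vmap[threads]) == "mkl::mkl_intel_32bit_omp_dyn"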

@@ -50,13 +50,17 @@ def cmake_args(self):

        return args

    def test(self):
    def test_dla(self):
        """prepare, run, and confirm dla results"""
        test01 = find(self.prefix.share, "01_spindimer")[0]
        copy(join_path(test01, "std.toml"), ".")

        # prepare
        pythonexe = self.spec["python"].command.path
        python = self.spec["python"].command
        opts = [self.spec.prefix.bin.dla_pre, "std.toml"]
        self.run_test(pythonexe, options=opts)
        with test_part(self, "test_dla_pre", purpose="prepare dla"):
            python(*opts)

        # (mpi) run
        opts = []
        if self.spec.satisfies("+mpi"):

@@ -66,6 +70,11 @@ def test(self):
        else:
            exe_name = "dla"
        opts.append("param.in")
        expected = ["R ene = -3.74300000e-01 2.96344394e-03"]
        self.run_test(exe_name, options=opts)
        self.run_test("cat", options=["sample.log"], expected=expected)
        with test_part(self, "test_dla_run", purpose="run dla"):
            exe = which(exe_name)
            exe(*opts)

        with test_part(self, "test_dla_results", purpose="confirming dla results"):
            cat = which("cat")
            out = cat("sample.log", output=str.split, error=str.split)
            assert "R ene = -3.74300000e-01 2.96344394e-03" in out

@@ -51,13 +51,9 @@ def cache_test_sources(self):
        """Save off the pdb sources for stand-alone testing."""
        self.cache_extra_test_sources("pdb")

    def test(self):
        """Perform stand-alone/smoke test on installed package."""
        pdb_path = join_path(self.test_suite.current_test_cache_dir, "pdb")
        self.run_test(
            "mkdssp",
            options=["1ALK.pdb", "1alk.dssp"],
            purpose="test: calculating structure for example",
            installed=True,
            work_dir=pdb_path,
        )
    def test_mkdssp(self):
        """calculate structure for example"""
        pdb_path = self.test_suite.current_test_cache_dir.pdb
        mkdssp = which(self.prefix.bin.mkdssp)
        with working_dir(pdb_path):
            mkdssp("1ALK.pdb", "1alk.dssp")

@@ -33,12 +33,8 @@ def check_install(self):
        print("stdout received fromm dust is '{}".format(output))
        assert "Dust " in output

    def test(self):
        """Run this smoke test when requested explicitly"""

        dustpath = join_path(self.spec["dust"].prefix.bin, "dust")
        options = ["--version"]
        purpose = "Check dust can execute (with option '--version')"
        expected = ["Dust "]

        self.run_test(dustpath, options=options, expected=expected, status=[0], purpose=purpose)
    def test_run(self):
        """check dust can execute (with option '--version')"""
        dust = which(self.prefix.bin.dust)
        out = dust("--version", output=str.split, error=str.split)
        assert "Dust " in out

@@ -107,7 +107,7 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage):
        propagate=["cuda", "hdf5", "sz", "zfp", "fortran"] + cuda_arch_variants,
    )

    dav_sdk_depends_on("darshan-runtime+mpi", when="+darshan", propagate=["hdf5"])
    dav_sdk_depends_on("darshan-runtime+mpi", when="+darshan")
    dav_sdk_depends_on("darshan-util", when="+darshan")

    dav_sdk_depends_on("faodel+shared+mpi network=libfabric", when="+faodel", propagate=["hdf5"])

@@ -171,7 +171,8 @@ class EcpDataVisSdk(BundlePackage, CudaPackage, ROCmPackage):
    # ParaView needs @5.11: in order to use CUDA/ROCM, therefore it is the minimum
    # required version since GPU capability is desired for ECP
    dav_sdk_depends_on(
        "paraview@5.11:+mpi+openpmd+python+kits+shared+catalyst+libcatalyst+raytracing",
        "paraview@5.11:+mpi+openpmd+python+kits+shared+catalyst+libcatalyst+raytracing"
        " use_vtkm=on",
        when="+paraview",
        propagate=["adios2", "cuda", "hdf5", "rocm"] + amdgpu_target_variants + cuda_arch_variants,
    )

@@ -19,6 +19,7 @@ class Edm4hep(CMakePackage):
    tags = ["hep", "key4hep"]

    version("master", branch="master")
    version("0.9", sha256="170ef84822761c4b02da9047f2b4d0dd0f48ed1c027b10171d4207b1542fbd5c")
    version("0.8", sha256="102d57167885eba3bea79f6b6647e5303ad8732c5784590abdcdd816b2411c79")
    version("0.7.2", sha256="e289280d5de2c0a3b542bf9dfe04b9f6471b0a0fcf33f5c8101ea7252e2a7643")
    version("0.7.1", sha256="82e215a532f548a73a6f6094eaa8b436c553994e135f6d63a674543dc89a9f1b")

@@ -15,6 +15,7 @@ class Etcd(Package):

    maintainers("alecbcs")

    version("3.5.9", sha256="ab24d74b66ba1ed7d2bc391839d961e7215f0f3d674c3a9592dad6dc67a7b223")
    version("3.4.23", sha256="055c608c4898d25f23aefbc845ff074bf5e8a07e61ed41dbd5cc4d4f59c93093")

    depends_on("go@1.19:")

@@ -14,6 +14,13 @@ class Fairmq(CMakePackage):
    maintainers("dennisklein", "ChristianTackeGSI")

    version("dev", branch="dev", submodules=True, get_full_repo=True)
    version(
        "1.6.0",
        tag="v1.6.0",
        commit="42d27af20fb5cbbbc0b0fdfef1c981d51a8baf87",
        submodules=True,
        no_cache=True,
    )
    version(
        "1.5.0",
        tag="v1.5.0",

@@ -13,6 +13,7 @@ class Fastp(MakefilePackage):
    homepage = "https://github.com/OpenGene/fastp"
    url = "https://github.com/OpenGene/fastp/archive/v0.20.0.tar.gz"

    version("0.23.4", sha256="4fad6db156e769d46071add8a778a13a5cb5186bc1e1a5f9b1ffd499d84d72b5")
    version("0.23.3", sha256="a37ee4b5dcf836a5a19baec645657b71d9dcd69ee843998f41f921e9b67350e3")
    version("0.20.0", sha256="8d751d2746db11ff233032fc49e3bcc8b53758dd4596fdcf4b4099a4d702ac22")

@@ -12,16 +12,31 @@ class FastqScreen(Package):
    the library matches with what you expect."""

    homepage = "https://www.bioinformatics.babraham.ac.uk/projects/fastq_screen/"
    url = "https://www.bioinformatics.babraham.ac.uk/projects/fastq_screen/fastq_screen_v0.11.2.tar.gz"
    url = "https://github.com/StevenWingett/FastQ-Screen/archive/refs/tags/v0.15.3.tar.gz"

    version("0.11.2", sha256="a179df1f5803b42bbbb2b50af05ea18ae6fefcbf7020ca2feeb0d3c598a65207")
    version("0.15.3", sha256="002750d78ca50fe0f789e24445e10988e16244f81b4f0189bf2fc4ee8b680be5")
    version(
        "0.11.2",
        sha256="a179df1f5803b42bbbb2b50af05ea18ae6fefcbf7020ca2feeb0d3c598a65207",
        url="https://www.bioinformatics.babraham.ac.uk/projects/fastq_screen/fastq_screen_v0.11.2.tar.gz",
    )

    variant("bismark", default=False, description="Enable bisulfite mapping with bismark")
    variant("bowtie", default=False, description="Enable mapping with bowtie")
    variant("bwa", default=False, description="Enable mapping with bwa")

    # general dependencies
    depends_on("perl", type="run")
    depends_on("perl-gdgraph", type="run")
    depends_on("bowtie")
    depends_on("bowtie2")
    depends_on("bwa")
    depends_on("samtools")
    depends_on("bowtie2", type="run")
    depends_on("samtools", type="run")
    # variant dependencies
    depends_on("bismark", type="run", when="+bismark")
    depends_on("bowtie", type="run", when="+bowtie")
    depends_on("bwa", type="run", when="+bwa")

    def patch(self):
        filter_file("/usr/bin/perl", self.spec["perl"].command.path, "fastq_screen", backup=False)

    def install(self, spec, prefix):
        install_tree(".", prefix.bin)

@@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage):
    maintainers("grondo")

    version("master", branch="master")
    version("0.51.0", sha256="e57b71b708482f20d2a2195a000c0c3b9176faa6aaadfad4d2117f8671ca67ce")
    version("0.50.0", sha256="77414299a7ca081199aa0f57bcaea3e05860e2095df73c0f6b7672b88fadf683")
    version("0.49.0", sha256="9b8d7af1d8aaa7ee110bcb9815b6b8647af686de949097c9bb2a0269d5551051")
    version("0.48.0", sha256="32c1bfdde44123e90606422807d381406874bb6dbec170ddb493f905208cc275")

@@ -23,6 +23,7 @@ class Freeipmi(AutotoolsPackage):
    homepage = "https://www.gnu.org/software/freeipmi/"
    url = "https://ftp.gnu.org/gnu/freeipmi/freeipmi-1.6.4.tar.gz"

    version("1.6.9", sha256="f25e1c35f3d0f1b5a99cc31ecc2353ca83ed46a15163842fba870127dc9c8206")
    version("1.6.4", sha256="65dfbb95a30438ba247f01a58498862a37d2e71c8c950bcfcee459d079241a3c")

    depends_on("libgcrypt")

@@ -26,7 +26,7 @@ class FrontistrBase(CMakePackage):
    # depends_on('revocap-coupler')
    depends_on("metis")
    depends_on("mumps")
    depends_on("trilinos@:12.18.1")
    depends_on("trilinos@:13.0.1")

    def cmake_args(self):
        define = self.define

@@ -13,5 +13,6 @@ class Ftxui(CMakePackage):
    homepage = "https://arthursonzogni.github.io"
    url = "https://github.com/ArthurSonzogni/FTXUI/archive/refs/tags/v2.0.0.tar.gz"

    version("4.1.1", sha256="9009d093e48b3189487d67fc3e375a57c7b354c0e43fc554ad31bec74a4bc2dd")
    version("4.0.0", sha256="7276e4117429ebf8e34ea371c3ea4e66eb99e0f234cb4c5c85fca17174a53dfa")
    version("2.0.0", sha256="d891695ef22176f0c09f8261a37af9ad5b262dd670a81e6b83661a23abc2c54f")

@@ -17,6 +17,7 @@ class Fzf(MakefilePackage):

    executables = ["^fzf$"]

    version("0.41.1", sha256="982682eaac377c8a55ae8d7491fcd0e888d6c13915d01da9ebb6b7c434d7f4b5")
    version("0.40.0", sha256="9597f297a6811d300f619fff5aadab8003adbcc1566199a43886d2ea09109a65")

    depends_on("go@1.17:", type="build")

@@ -82,12 +82,7 @@ class Gaudi(CMakePackage):

    # Testing dependencies
    # Note: gaudi only builds examples when testing enabled
    for pv in (
        ["catch2", "@36.8:"],
        ["py-nose", "@35:"],
        ["py-pytest", "@36.2:"],
        ["py-qmtest", "@35:"],
    ):
    for pv in (["catch2", "@36.8:"], ["py-nose", "@35:"], ["py-pytest", "@36.2:"]):
        depends_on(pv[0], when=pv[1], type="test")
        depends_on(pv[0], when=pv[1] + " +examples")

@@ -84,7 +84,7 @@ class Gdb(AutotoolsPackage, GNUMirrorPackage):
    depends_on("source-highlight", when="+source-highlight")
    depends_on("ncurses", when="+tui")
    depends_on("gmp", when="@11.1:")
    depends_on("elfutils@0.178:+debuginfod", when="@11.1:+debuginfod")
    depends_on("elfutils@0.179:+debuginfod", when="@10.1:+debuginfod")

    build_directory = "spack-build"

@@ -232,6 +232,13 @@ class Gromacs(CMakePackage, CudaPackage):
    for gmx_ver, plumed_vers in plumed_patches.items():
        depends_on("plumed@{0}".format(plumed_vers), when="@{0}+plumed".format(gmx_ver))

    variant(
        "intel_provided_gcc",
        default=False,
description="Use this if Intel compiler is installed through spack."
|
||||
+ "The g++ location is written to icp{c,x}.cfg",
|
||||
    )

    depends_on("fftw-api@3")
    depends_on("cmake@2.8.8:3", type="build")
    depends_on("cmake@3.4.3:3", type="build", when="@2018:")

@@ -244,7 +251,8 @@ class Gromacs(CMakePackage, CudaPackage):
    depends_on("sycl", when="+sycl")
    depends_on("lapack", when="+lapack")
    depends_on("blas", when="+blas")
    depends_on("gcc", when="%oneapi")
    depends_on("gcc", when="%oneapi ~intel_provided_gcc")
    depends_on("gcc", when="%intel ~intel_provided_gcc")

    depends_on("hwloc@1.0:1", when="+hwloc@2016:2018")
    depends_on("hwloc", when="+hwloc@2019:")

@@ -254,6 +262,14 @@ class Gromacs(CMakePackage, CudaPackage):

    depends_on("nvhpc", when="+cufftmp")

    requires(
        "%intel",
        "%oneapi",
        policy="one_of",
        when="+intel_provided_gcc",
        msg="Only attempt to find gcc libs for Intel compiler if Intel compiler is used.",
    )

    patch("gmxDetectCpu-cmake-3.14.patch", when="@2018:2019.3^cmake@3.14.0:")
    patch("gmxDetectSimd-cmake-3.14.patch", when="@5.0:2017^cmake@3.14.0:")

@@ -431,8 +447,16 @@ def cmake_args(self):
        if self.spec.satisfies("@2020:"):
            options.append("-DGMX_INSTALL_LEGACY_API=ON")

        if self.spec.satisfies("%oneapi"):
            options.append("-DGMX_GPLUSPLUS_PATH=%s/g++" % self.spec["gcc"].prefix.bin)
        if self.spec.satisfies("%oneapi") or self.spec.satisfies("%intel"):
            # If intel-oneapi-compilers was installed through spack the gcc is added to the
            # configuration file.
            if self.spec.satisfies("+intel_provided_gcc") and os.path.exists(
                ".".join([os.environ["SPACK_CXX"], "cfg"])
            ):
                with open(".".join([os.environ["SPACK_CXX"], "cfg"]), "r") as f:
                    options.append("-DCMAKE_CXX_FLAGS={}".format(f.read()))
            else:
                options.append("-DGMX_GPLUSPLUS_PATH=%s/g++" % self.spec["gcc"].prefix.bin)

        if "+double" in self.spec:
            options.append("-DGMX_DOUBLE:BOOL=ON")

@@ -20,6 +20,9 @@ class Gxsview(QMakePackage):
    # Support email for questions ohnishi@m.mpat.go.jp
    maintainers("cessenat")

    version(
        "2023.05.29", sha256="1e768fd7afd22198b7f73adeb42f4ccf7e0ff68996a3843b1ea138225c4c1da3"
    )
    version(
        "2022.11.04", sha256="28c299e4f87836b93e4a42934777364a166e35d305050ee5623a1b7cbc0ab561"
    )

@@ -16,6 +16,7 @@ class Hashcat(MakefilePackage):
    homepage = "https://hashcat.net/hashcat/"
    url = "https://github.com/hashcat/hashcat/archive/v6.1.1.tar.gz"

    version("6.2.6", sha256="b25e1077bcf34908cc8f18c1a69a2ec98b047b2cbcf0f51144dcf3ba1e0b7b2a")
    version("6.1.1", sha256="39c140bbb3c0bdb1564bfa9b9a1cff49115a42f4c9c19e9b066b617aea309f80")
    version("6.1.0", sha256="916f92434e3b36a126be1d1247a95cd3b32b4d814604960a2ca325d4cc0542d1")
    version("6.0.0", sha256="e8e70f2a5a608a4e224ccf847ad2b8e4d68286900296afe00eb514d8c9ec1285")

@@ -18,6 +18,7 @@ class Hbase(Package):
    list_url = "https://archive.apache.org/dist/hbase"
    list_depth = 1

    version("2.5.4", sha256="ffb929d1ffcaf0309e1617833f3b36645a9c9d9c10b47d6baccc7e50c6d0ed33")
    version("2.5.3", sha256="874f239c341a6a4a646051c79fda9e838242481b70463bf8daa28ba7239576c2")
    version("2.4.9", sha256="ed282a165fe0910b27d143f3ea21d552110bc155fd5456250a05dc51b0f0b6bd")
    version("2.2.5", sha256="25d08f8f038d9de5beb43dfb0392e8a8b34eae7e0f2670d6c2c172abc3855194")

@@ -47,6 +47,8 @@ class Hpcviewer(Package):
    maintainers("mwkrentel")

    darwin_sha = {
        ("2023.05", "aarch64"): "b34e1ebc021e91c7260cc91a888e966a81913691de04c5e972da613d0dc34294",
        ("2023.05", "x86_64"): "689c2c18f70d53a8e1f27527f65d30c61b6f70db98f63378a97f236926ef1ac5",
        ("2023.04", "aarch64"): "85fc1c8823e2ef442666d60e98674a55315771e57205a0d2cef739d39fea699f",
        ("2023.04", "x86_64"): "6a2497d52414ca131089a4819006e5bfe1d4b35e28bf66874105bfe051d658d4",
        ("2023.02", "aarch64"): "05356fcd0a84f70b07f556b55a02954aae05419d9fa12f0f64f8e2399566e622",

@@ -65,6 +67,9 @@
    }

    viewer_sha = {
        ("2023.05", "aarch64"): "901b58b73890180b1cb7572d91c1b6cc205a5d3d50927c50d05d2b05554918c6",
        ("2023.05", "ppc64le"): "d948e4777aea3a0c06300aedd4ce04e28f97b3ac306f78d672a5f692152bbdef",
        ("2023.05", "x86_64"): "8c51df8b958ec600c9b7547461d7e9abb0e07a048d4031f58efd47df7ec79091",
        ("2023.04", "aarch64"): "826c6a83c88eda980f9106843863853804a74f117ef53bfdd0973429c121949a",
        ("2023.04", "ppc64le"): "4804ea59101d0301e9a2284b77757919ffc114330becc071bb69d3fc5f5df261",
        ("2023.04", "x86_64"): "24aad913a156996cd372439a4b2ae8a6d90aab0e2f5281f1fa81b5be9c9b9278",

Some files were not shown because too many files have changed in this diff.