Compare commits: features/r ... bugfix/bin

234 commits
Commits in this compare (abbreviated SHA1s):

    d82b537339  3d8d1fe924  f09f834a8f  147f39d7aa  9fad3ed5f1  1375b1975b
    c533612ab6  e5cc3c51d1  70650cacbd  d8a5638d9f  158f6ddb14  86d1a6f2fc
    ca10ff1649  19175b9bab  7d64a8e81f  a0b68a0baa  b7e0692dfa  8cc62ef916
    2363aba710  5a0c56229c  96bb72e412  7c977733f1  d48a760837  c64298a183
    31d89e80bc  2844f1fc45  8b85f8ec58  7ef52acfce  c3fecfb103  ef278c601c
    5e9a8d27f7  ffc90f944c  699c0cf9a0  55cac3b098  35d07a6f18  1f42a5e598
    967463432b  bc03b5caaf  649760dc1a  77118427b1  1a024963a2  aebb601b70
    d95f53e1ac  0fe57962b3  8ec451aa7d  14349cb882  bc438ed4e9  760a12c440
    9325c8e53f  fd5b7d6cce  b19a0744f1  75d0c0dff7  aedc41c3a0  1f39d6d916
    1bc3b0a926  62596fb796  15c35a3cff  249c90f909  043b2cbb7c  aedf215b90
    eed4a63be7  13b609b4b6  85dc20cb55  667c39987c  f35d8c4102  6ac3186132
    466572dc14  1c0bf12e5b  bf990bc8ec  267358a799  392b548312  8c3b82c140
    2e4b533420  3b393fe0eb  895ceeda38  6f1e3a4ad3  ce41b7457b  4f9f56630b
    e216ba1520  4fd5ee1d9d  89f32d51f3  3b3be70226  95888602f2  0205fefe0c
    b261b2a5ff  8c58c14c3d  f7b4d488c3  38a8d4d2fe  1ceee714db  5b3e4ba3f8
    37426e41cb  a05e729e1b  f8a6799e67  633ebd149c  bf6220821b  3db96aa892
    c7628d768a  e1ad926189  8705735c74  d82fa045d5  c72d51cb3a  d76f184430
    19ea24d2bd  83efea32f4  bc577f2dee  04529fbe80  cdfbe2c25d  8305742d75
    e0137b1566  be95699a55  7d96a0aa5a  874b713edf  3c0a98c5ab  998bf90b35
    2ca32fbc8c  e50d08ce48  696d81513d  b38afa7528  0f2786c9d3  f72c2ab583
    f6e6403fd1  5550271ead  a1b19345b1  e2aeb06c91  14ae0e0d94  516587a1da
    dc36fd87bb  06b5141d01  27610838dd  0c7fd9bd8c  bf2b30a5f5  f42680b785
    a2afd5b82f  11f1b371f7  cfc46504ac  163251aa65  6ab9f3a290  45043bcdf5
    7642fa3d99  1689f7fcbe  4f67afeb5f  9d8ecffed0  1ada7ea809  4a8db00691
    01f8236bf5  57822d3014  3fdb3f832a  bedad508a9  31f6dedecd  dc0c4959db
    8550b6cf49  0e5c4c9cbf  9bac72e818  a65a217c54  26e6aae8d4  d6fabd1533
    d245c46487  3eba28e383  0d91cb58dc  5f55abeecb  c3c2416672  9e2bc41e45
    8f80c5e6f7  a5e1367882  c724e26ba9  1b9a1992fb  815764bdef  adf073b53c
    449e885d4c  560472ce3a  0c66446437  6b4b1dacd9  3e90134e14  4ad0594c7b
    f13b760f10  0f6fb1a706  e74d85a524  ffd63c5de1  ed263615d7  b4e775a11a
    6a2844fdee  44d670a8ce  4a0ac87d07  2c6898c717  0a8083c604  5b45df5269
    9d7cc43673  932065beca  360192cbfe  7bc349c041  603ec40ab1  ed6695b9c9
    fb173f80b2  15d4262b9b  7116fb1b70  9b713fa6a6  8d4a5cb247  bf2d44c87e
    926e311f3c  8ef937032c  73ce789390  e4f3cfcc3a  7ac05485c6  13b0e73a4e
    03c54aebdd  f4a4b3fa87  6b3607287a  b4b2585d67  29855ae31e  44b9efa132
    f21e26b904  73865c38f9  38ccefbe84  1bd33d88bd  67ad23cc11  8640b50258
    043cc688ef  e52527029a  a7d2f76ac5  fbb134b1af  548e9ae88c  5728ba0122
    2bda10edb5  bd15ca4f16  f9dfd5fcb8  f3c4e1adbb  b2d3ed9096  dac5fec255
    9784b8f926  adef8f6ca7  c36f15e29e  03531ed904  54332b2d83  165f42b7ce
.github/workflows/bootstrap.yml (12 changed lines)
@@ -12,6 +12,7 @@ on:
       # built-in repository or documentation
       - 'var/spack/repos/builtin/**'
       - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
       - '!var/spack/repos/builtin/packages/clingo/**'
       - '!var/spack/repos/builtin/packages/python/**'
+      - '!var/spack/repos/builtin/packages/re2c/**'
       - 'lib/spack/docs/**'
@@ -19,6 +20,10 @@ on:
     # nightly at 2:16 AM
     - cron: '16 2 * * *'

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 jobs:

   fedora-clingo-sources:
@@ -175,10 +180,11 @@ jobs:
         tree ~/.spack/bootstrap/store/

   macos-clingo-binaries:
-    runs-on: macos-latest
+    runs-on: ${{ matrix.macos-version }}
     strategy:
       matrix:
         python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
+        macos-version: ['macos-10.15', 'macos-11', 'macos-12']
     if: github.repository == 'spack/spack'
     steps:
     - name: Install dependencies
@@ -186,7 +192,7 @@ jobs:
         brew install tree
     - name: Checkout
       uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
       with:
         python-version: ${{ matrix.python-version }}
     - name: Bootstrap clingo
@@ -205,7 +211,7 @@ jobs:
     steps:
     - name: Checkout
       uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
       with:
         python-version: ${{ matrix.python-version }}
     - name: Setup repo
.github/workflows/build-containers.yml (4 changed lines)
@@ -19,6 +19,10 @@ on:
   release:
     types: [published]

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 jobs:
   deploy-images:
     runs-on: ubuntu-latest
.github/workflows/macos_python.yml (10 changed lines)
@@ -16,6 +16,10 @@ on:
     - '.github/workflows/macos_python.yml'
   # TODO: run if we touch any of the recipes involved in this

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 # GitHub Action Limits
 # https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions

@@ -26,7 +30,7 @@ jobs:
     runs-on: macos-latest
     steps:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: 3.9
     - name: spack install
@@ -42,7 +46,7 @@ jobs:
     timeout-minutes: 700
     steps:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: 3.9
     - name: spack install
@@ -56,7 +60,7 @@ jobs:
     runs-on: macos-latest
     steps:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: 3.9
     - name: spack install
.github/workflows/unit_tests.yaml (21 changed lines)
@@ -9,6 +9,11 @@ on:
   branches:
     - develop
     - releases/**

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 jobs:
   # Validate that the code can be run on all the Python versions
   # supported by Spack
@@ -16,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: '3.10'
     - name: Install Python Packages
@@ -34,7 +39,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: '3.10'
     - name: Install Python packages
@@ -109,7 +114,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -174,7 +179,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: '3.10'
     - name: Install System packages
@@ -240,7 +245,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: '3.10'
     - name: Install System packages
@@ -289,7 +294,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install Python packages
@@ -332,7 +337,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
       with:
         python-version: '3.10'
     - name: Install Python packages
@@ -345,7 +350,7 @@ jobs:
         coverage run $(which spack) audit packages
         coverage combine
         coverage xml
-    - name: Package audits (wwithout coverage)
+    - name: Package audits (without coverage)
       if: ${{ needs.changes.outputs.with_coverage == 'false' }}
       run: |
         . share/spack/setup-env.sh
.github/workflows/windows_python.yml (19 changed lines)
@@ -9,6 +9,11 @@ on:
   branches:
     - develop
     - releases/**

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 defaults:
   run:
     shell:
@@ -18,7 +23,7 @@ jobs:
     runs-on: windows-latest
     steps:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
       with:
         python-version: 3.9
     - name: Install Python Packages
@@ -36,7 +41,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -58,7 +63,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -78,7 +83,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -98,7 +103,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -123,7 +128,7 @@ jobs:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -154,7 +159,7 @@ jobs:
     run:
       shell: pwsh
     steps:
-    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+    - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -84,8 +84,8 @@ build ``hdf5`` with Intel oneAPI MPI do::

    spack install hdf5 +mpi ^intel-oneapi-mpi

-Using an Externally Installed oneAPI
-====================================
+Using Externally Installed oneAPI Tools
+=======================================

 Spack can also use oneAPI tools that are manually installed with
 `Intel Installers`_. The procedures for configuring Spack to use
@@ -110,7 +110,7 @@ Another option is to manually add the configuration to
 Libraries
 ---------

-If you want Spack to use MKL that you have installed without Spack in
+If you want Spack to use oneMKL that you have installed without Spack in
 the default location, then add the following to
 ``~/.spack/packages.yaml``, adjusting the version as appropriate::

@@ -139,7 +139,7 @@ You can also use Spack-installed libraries. For example::

    spack load intel-oneapi-mkl

 Will update your environment CPATH, LIBRARY_PATH, and other
-environment variables for building an application with MKL.
+environment variables for building an application with oneMKL.

 More information
 ================
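For reference, a minimal sketch of the kind of ``packages.yaml`` entry the
docs above describe (the version and prefix here are hypothetical; adjust
them to your installation)::

    packages:
      intel-oneapi-mkl:
        externals:
        - spec: intel-oneapi-mkl@2022.1.0
          prefix: /opt/intel/oneapi
        buildable: false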
@@ -15,6 +15,9 @@ IntelPackage
 Intel packages in Spack
 ^^^^^^^^^^^^^^^^^^^^^^^^

+This is an earlier version of Intel software development tools and has
+now been replaced by Intel oneAPI Toolkits.
+
 Spack can install and use several software development products offered by Intel.
 Some of these are available under no-cost terms, others require a paid license.
 All share the same basic steps for configuration, installation, and, where
@@ -109,9 +109,10 @@ Spack Images on Docker Hub
 --------------------------

 Docker images with Spack preinstalled and ready to be used are
-built on `Docker Hub <https://hub.docker.com/u/spack>`_
-at every push to ``develop`` or to a release branch. The OS that
-are currently supported are summarized in the table below:
+built when a release is tagged, or nightly on ``develop``. The images
+are then pushed both to `Docker Hub <https://hub.docker.com/u/spack>`_
+and to `GitHub Container Registry <https://github.com/orgs/spack/packages?repo_name=spack>`_.
+The OS that are currently supported are summarized in the table below:

 .. _containers-supported-os:

@@ -121,22 +122,31 @@ are currently supported are summarized in the table below:
    * - Operating System
      - Base Image
      - Spack Image
    * - Ubuntu 16.04
      - ``ubuntu:16.04``
      - ``spack/ubuntu-xenial``
    * - Ubuntu 18.04
      - ``ubuntu:18.04``
      - ``spack/ubuntu-bionic``
    * - Ubuntu 20.04
      - ``ubuntu:20.04``
      - ``spack/ubuntu-focal``
+   * - Ubuntu 22.04
+     - ``ubuntu:22.04``
+     - ``spack/ubuntu-jammy``
    * - CentOS 7
      - ``centos:7``
      - ``spack/centos7``
+   * - CentOS Stream
+     - ``quay.io/centos/centos:stream``
+     - ``spack/centos-stream``
+   * - openSUSE Leap
+     - ``opensuse/leap``
+     - ``spack/leap15``
    * - Amazon Linux 2
      - ``amazonlinux:2``
      - ``spack/amazon-linux``

 All the images are tagged with the corresponding release of Spack:

-.. image:: dockerhub_spack.png
+.. image:: images/ghcr_spack.png

 with the exception of the ``latest`` tag that points to the HEAD
 of the ``develop`` branch. These images are available for anyone
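As a quick usage sketch, any Spack image name from the table above can be
pulled and run directly (tag availability varies by Spack release)::

    docker pull spack/ubuntu-jammy:latest
    docker run -it spack/ubuntu-jammy:latest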
Binary file not shown (removed image; before: 88 KiB).
@@ -545,8 +545,8 @@ environment and have a single view of it in the filesystem.

 The ``concretizer:unify`` config option was introduced in Spack 0.18 to
 replace the ``concretization`` property. For reference,
-``concretization: separately`` is replaced by ``concretizer:unify:true``,
-and ``concretization: together`` is replaced by ``concretizer:unify:false``.
+``concretization: together`` is replaced by ``concretizer:unify:true``,
+and ``concretization: separately`` is replaced by ``concretizer:unify:false``.

 .. admonition:: Re-concretization of user specs

@@ -799,7 +799,7 @@ directories.
       select: [^mpi]
       exclude: ['%pgi@18.5']
       projections:
-        all: {name}/{version}-{compiler.name}
+        all: '{name}/{version}-{compiler.name}'
       link: all
       link_type: symlink

@@ -1051,4 +1051,4 @@ the include is conditional.
    the ``--make-target-prefix`` flag and use the non-phony targets
    ``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
    prerequisites, instead of the phony targets ``<target-prefix>/all``
-   and ``<target-prefix>/fetch-all`` respectively.
+   and ``<target-prefix>/fetch-all`` respectively.
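For reference, a minimal sketch of the new option inside a ``spack.yaml``
environment (the spec is hypothetical; ``concretizer:unify`` is the option
named in the corrected text above)::

    spack:
      specs:
      - hdf5+mpi
      concretizer:
        unify: true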
lib/spack/docs/images/ghcr_spack.png (new binary file; not shown. After: 70 KiB)
@@ -1070,13 +1070,32 @@ Commits

 Submodules
-  You can supply ``submodules=True`` to cause Spack to fetch submodules
-  recursively along with the repository at fetch time. For more information
-  about git submodules see the manpage of git: ``man git-submodule``.
+  You can supply ``submodules=True`` to cause Spack to fetch submodules
+  recursively along with the repository at fetch time.

   .. code-block:: python

      version('1.0.1', tag='v1.0.1', submodules=True)

+  If a package needs more fine-grained control over submodules, define
+  ``submodules`` to be a callable function that takes the package instance as
+  its only argument. The function should return a list of submodules to be fetched.
+
+  .. code-block:: python
+
+     def submodules(package):
+         submodules = []
+         if "+variant-1" in package.spec:
+             submodules.append("submodule_for_variant_1")
+         if "+variant-2" in package.spec:
+             submodules.append("submodule_for_variant_2")
+         return submodules
+
+
+     class MyPackage(Package):
+         version("0.1.0", submodules=submodules)
+
+  For more information about git submodules see the manpage of git: ``man
+  git-submodule``.

 .. _github-fetch:
@@ -2775,6 +2794,256 @@ Suppose a user invokes ``spack install`` like this:

 Spack will fail with a constraint violation, because the version of
 MPICH requested is too low for the ``mpi`` requirement in ``foo``.

.. _custom-attributes:

------------------
Custom attributes
------------------

Often a package will need to provide attributes for dependents to query
various details about what it provides. While any number of custom defined
attributes can be implemented by a package, the four specific attributes
described below are always available on every package with default
implementations and the ability to customize with alternate implementations
in the case of virtual packages provided:

=========== ============================================ ======================
Attribute   Purpose                                      Default
=========== ============================================ ======================
``home``    The installation path for the package        ``spec.prefix``
``command`` An executable command for the package        | ``spec.name`` found
                                                         | in ``.home.bin``
``headers`` A list of headers provided by the package    | All headers searched
                                                         | recursively in
                                                         | ``.home.include``
``libs``    A list of libraries provided by the package  | ``lib{spec.name}``
                                                         | searched recursively
                                                         | in ``.home`` starting
                                                         | with ``lib``,
                                                         | ``lib64``, then the
                                                         | rest of ``.home``
=========== ============================================ ======================

Each of these can be customized by implementing the relevant attribute
as a ``@property`` in the package's class:

.. code-block:: python
   :linenos:

   class Foo(Package):
       ...
       @property
       def libs(self):
           # The library provided by Foo is libMyFoo.so
           return find_libraries('libMyFoo', root=self.home, recursive=True)

A package may also provide a custom implementation of each attribute
for the virtual packages it provides by implementing the
``virtualpackagename_attributename`` property in the package's class.
The implementation used is the first one found from:

#. Specialized virtual: ``Package.virtualpackagename_attributename``
#. Generic package: ``Package.attributename``
#. Default

The use of customized attributes is demonstrated in the next example.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Example: Customized attributes for virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Consider a package ``foo`` that can optionally provide two virtual
packages ``bar`` and ``baz``. When both are enabled the installation tree
appears as follows:

.. code-block:: console

   include/foo.h
   include/bar/bar.h
   lib64/libFoo.so
   lib64/libFooBar.so
   baz/include/baz/baz.h
   baz/lib/libFooBaz.so

The install tree shows that ``foo`` is providing the header ``include/foo.h``
and library ``lib64/libFoo.so`` in its install prefix. The virtual
package ``bar`` is providing ``include/bar/bar.h`` and library
``lib64/libFooBar.so``, also in ``foo``'s install prefix. The ``baz``
package, however, is provided in the ``baz`` subdirectory of ``foo``'s
prefix with the ``include/baz/baz.h`` header and ``lib/libFooBaz.so``
library. Such a package could implement the optional attributes as
follows:

.. code-block:: python
   :linenos:

   class Foo(Package):
       ...
       variant('bar', default=False, description='Enable the Foo implementation of bar')
       variant('baz', default=False, description='Enable the Foo implementation of baz')
       ...
       provides('bar', when='+bar')
       provides('baz', when='+baz')
       ...

       # Just the foo headers
       @property
       def headers(self):
           return find_headers('foo', root=self.home.include, recursive=False)

       # Just the foo libraries
       @property
       def libs(self):
           return find_libraries('libFoo', root=self.home, recursive=True)

       # The header provided by the bar virtual package
       @property
       def bar_headers(self):
           return find_headers('bar/bar.h', root=self.home.include, recursive=False)

       # The library provided by the bar virtual package
       @property
       def bar_libs(self):
           return find_libraries('libFooBar', root=self.home, recursive=True)

       # The baz virtual package home
       @property
       def baz_home(self):
           return self.prefix.baz

       # The header provided by the baz virtual package
       @property
       def baz_headers(self):
           return find_headers('baz/baz', root=self.baz_home.include, recursive=False)

       # The library provided by the baz virtual package
       @property
       def baz_libs(self):
           return find_libraries('libFooBaz', root=self.baz_home, recursive=True)

Now consider another package, ``foo-app``, depending on all three:

.. code-block:: python
   :linenos:

   class FooApp(CMakePackage):
       ...
       depends_on('foo')
       depends_on('bar')
       depends_on('baz')

The resulting spec objects for its dependencies show the result of
the above attribute implementations:

.. code-block:: python

   # The core headers and libraries of the foo package

   >>> spec['foo']
   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
   >>> spec['foo'].prefix
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # home defaults to the package install prefix without an explicit implementation
   >>> spec['foo'].home
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # foo headers from the foo prefix
   >>> spec['foo'].headers
   HeaderList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/foo.h',
   ])

   # foo include directories from the foo prefix
   >>> spec['foo'].headers.directories
   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']

   # foo libraries from the foo prefix
   >>> spec['foo'].libs
   LibraryList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFoo.so',
   ])

   # foo library directories from the foo prefix
   >>> spec['foo'].libs.directories
   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']

.. code-block:: python

   # The virtual bar package in the same prefix as foo

   # bar resolves to the foo package
   >>> spec['bar']
   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
   >>> spec['bar'].prefix
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # home defaults to the foo prefix without either a Foo.bar_home
   # or Foo.home implementation
   >>> spec['bar'].home
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # bar header in the foo prefix
   >>> spec['bar'].headers
   HeaderList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/bar/bar.h'
   ])

   # bar include dirs from the foo prefix
   >>> spec['bar'].headers.directories
   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']

   # bar library from the foo prefix
   >>> spec['bar'].libs
   LibraryList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFooBar.so'
   ])

   # bar library directories from the foo prefix
   >>> spec['bar'].libs.directories
   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']

.. code-block:: python

   # The virtual baz package in a subdirectory of foo's prefix

   # baz resolves to the foo package
   >>> spec['baz']
   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
   >>> spec['baz'].prefix
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # baz_home implementation provides the subdirectory inside the foo prefix
   >>> spec['baz'].home
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz'

   # baz headers in the baz subdirectory of the foo prefix
   >>> spec['baz'].headers
   HeaderList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include/baz/baz.h'
   ])

   # baz include directories in the baz subdirectory of the foo prefix
   >>> spec['baz'].headers.directories
   [
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include'
   ]

   # baz libraries in the baz subdirectory of the foo prefix
   >>> spec['baz'].libs
   LibraryList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib/libFooBaz.so'
   ])

   # baz library directories in the baz subdirectory of the foo prefix
   >>> spec['baz'].libs.directories
   [
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib'
   ]

.. _abstract-and-concrete:

-------------------------
@@ -5476,6 +5745,24 @@ Version Lists

 Spack packages should list supported versions with the newest first.

+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Using ``home`` vs ``prefix``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+``home`` and ``prefix`` are both attributes that can be queried on a
+package's dependencies, often when passing configure arguments pointing to the
+location of a dependency. The difference is that while ``prefix`` is the
+location on disk where a concrete package resides, ``home`` is the `logical`
+location that a package resides in, which may be different than ``prefix`` in
+the case of virtual packages or other special circumstances. For most use
+cases inside a package, its dependency locations can be accessed via either
+``self.spec['foo'].home`` or ``self.spec['foo'].prefix``. Specific packages
+that should be consumed by dependents via ``.home`` instead of ``.prefix``
+should be noted in their respective documentation.
+
+See :ref:`custom-attributes` for more details and an example implementing
+a custom ``home`` attribute.

 ---------------------------
 Packaging workflow commands
 ---------------------------
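A sketch of how a recipe might use the two attributes when wiring up
dependencies (hypothetical package and flag names)::

    def configure_args(self):
        spec = self.spec
        return [
            # 'home' resolves a virtual provider to its logical root,
            # which may be a subdirectory of the concrete prefix
            '--with-mkl={0}'.format(spec['mkl'].home),
            # 'prefix' is always the concrete installation directory
            '--with-zlib={0}'.format(spec['zlib'].prefix),
        ]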
@@ -7,7 +7,7 @@ bash, , , Compiler wrappers
 tar, , , Extract/create archives
 gzip, , , Compress/Decompress archives
 unzip, , , Compress/Decompress archives
-bzip, , , Compress/Decompress archives
+bzip2, , , Compress/Decompress archives
 xz, , , Compress/Decompress archives
 zstd, , Optional, Compress/Decompress archives
 file, , , Create/Use Buildcaches
@@ -15,4 +15,4 @@ gnupg2, , , Sign/Verify Buildcaches
 git, , , Manage Software Repositories
 svn, , Optional, Manage Software Repositories
 hg, , Optional, Manage Software Repositories
-Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources
+Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources
lib/spack/external/__init__.py (2 changed lines)
@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.1.4 (commit 53fc4ac91e9b4c5e4079f15772503a80bece72ad)
+* Version: 0.1.4 (commit b8eea9df2b4204ff27d204452cd46f5199a0b423)

 argparse
 --------
@@ -85,21 +85,21 @@
     "intel": [
       {
         "versions": ":",
-        "name": "pentium4",
+        "name": "x86-64",
         "flags": "-march={name} -mtune=generic"
       }
     ],
     "oneapi": [
       {
         "versions": ":",
-        "name": "pentium4",
+        "name": "x86-64",
         "flags": "-march={name} -mtune=generic"
       }
     ],
     "dpcpp": [
       {
         "versions": ":",
-        "name": "pentium4",
+        "name": "x86-64",
         "flags": "-march={name} -mtune=generic"
       }
     ]
@@ -143,6 +143,20 @@
         "name": "x86-64",
         "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
       }
     ],
+    "oneapi": [
+      {
+        "versions": "2021.2.0:",
+        "name": "x86-64-v2",
+        "flags": "-march={name} -mtune=generic"
+      }
+    ],
+    "dpcpp": [
+      {
+        "versions": "2021.2.0:",
+        "name": "x86-64-v2",
+        "flags": "-march={name} -mtune=generic"
+      }
+    ]
   }
 },
@@ -200,6 +214,20 @@
         "name": "x86-64",
         "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
       }
     ],
+    "oneapi": [
+      {
+        "versions": "2021.2.0:",
+        "name": "x86-64-v3",
+        "flags": "-march={name} -mtune=generic"
+      }
+    ],
+    "dpcpp": [
+      {
+        "versions": "2021.2.0:",
+        "name": "x86-64-v3",
+        "flags": "-march={name} -mtune=generic"
+      }
+    ]
   }
 },
@@ -262,6 +290,20 @@
         "name": "x86-64",
         "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
       }
     ],
+    "oneapi": [
+      {
+        "versions": "2021.2.0:",
+        "name": "x86-64-v4",
+        "flags": "-march={name} -mtune=generic"
+      }
+    ],
+    "dpcpp": [
+      {
+        "versions": "2021.2.0:",
+        "name": "x86-64-v4",
+        "flags": "-march={name} -mtune=generic"
+      }
+    ]
   }
 },
@@ -302,22 +344,19 @@
     "intel": [
       {
         "versions": "16.0:",
-        "name": "pentium4",
-        "flags": "-march={name} -mtune=generic"
+        "flags": "-march={name} -mtune={name}"
       }
     ],
     "oneapi": [
       {
         "versions": ":",
-        "name": "pentium4",
-        "flags": "-march={name} -mtune=generic"
+        "flags": "-march={name} -mtune={name}"
       }
     ],
     "dpcpp": [
       {
         "versions": ":",
-        "name": "pentium4",
-        "flags": "-march={name} -mtune=generic"
+        "flags": "-march={name} -mtune={name}"
       }
     ]
   }
@@ -308,6 +308,68 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
         filter_file(double_quoted, '"%s"' % repl, f)


+@contextmanager
+def exploding_archive_catch(stage):
+    # Check for an exploding tarball, i.e. one that doesn't expand to
+    # a single directory. If the tarball *didn't* explode, move its
+    # contents to the staging source directory & remove the container
+    # directory. If the tarball did explode, just rename the tarball
+    # directory to the staging source directory.
+    #
+    # NOTE: The tar program on Mac OS X will encode HFS metadata in
+    # hidden files, which can end up *alongside* a single top-level
+    # directory. We initially ignore presence of hidden files to
+    # accommodate these "semi-exploding" tarballs but ensure the files
+    # are copied to the source directory.
+
+    # Expand all tarballs in their own directory to contain
+    # exploding tarballs.
+    tarball_container = os.path.join(stage.path,
+                                     "spack-expanded-archive")
+    mkdirp(tarball_container)
+    orig_dir = os.getcwd()
+    os.chdir(tarball_container)
+    try:
+        yield
+        # catch an exploding archive on successful extraction
+        os.chdir(orig_dir)
+        exploding_archive_handler(tarball_container, stage)
+    except Exception as e:
+        # return current directory context to previous on failure
+        os.chdir(orig_dir)
+        raise e
+
+
+@system_path_filter
+def exploding_archive_handler(tarball_container, stage):
+    """
+    Args:
+        tarball_container: where the archive was expanded to
+        stage: Stage object referencing filesystem location
+            where archive is being expanded
+    """
+    files = os.listdir(tarball_container)
+    non_hidden = [f for f in files if not f.startswith('.')]
+    if len(non_hidden) == 1:
+        src = os.path.join(tarball_container, non_hidden[0])
+        if os.path.isdir(src):
+            stage.srcdir = non_hidden[0]
+            shutil.move(src, stage.source_path)
+            if len(files) > 1:
+                files.remove(non_hidden[0])
+                for f in files:
+                    src = os.path.join(tarball_container, f)
+                    dest = os.path.join(stage.path, f)
+                    shutil.move(src, dest)
+            os.rmdir(tarball_container)
+        else:
+            # This is a non-directory entry (e.g., a patch file) so simply
+            # rename the tarball container to be the source path.
+            shutil.move(tarball_container, stage.source_path)
+    else:
+        shutil.move(tarball_container, stage.source_path)
+
+
 @system_path_filter(arg_slice=slice(1))
 def get_owner_uid(path, err_msg=None):
     if not os.path.exists(path):
@@ -298,7 +298,8 @@ def _check_build_test_callbacks(pkgs, error_cls):
 def _check_patch_urls(pkgs, error_cls):
     """Ensure that patches fetched from GitHub have stable sha256 hashes."""
     github_patch_url_re = (
-        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
+        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
+        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
     )

     errors = []
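For context, this audit targets package ``patch`` directives of roughly the
following shape (the package, commit, and hash here are hypothetical; the
``?full_index=1`` query string is commonly appended to keep GitHub-generated
patch content, and therefore its sha256, stable)::

    patch(
        "https://github.com/org/project/commit/abc123def456.patch?full_index=1",
        sha256="0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
        when="@1.2.0",
    )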
@@ -624,14 +624,10 @@ def get_buildfile_manifest(spec):
     """
     data = {"text_to_relocate": [], "binary_to_relocate": [],
             "link_to_relocate": [], "other": [],
-            "binary_to_relocate_fullpath": [], "offsets": {}}
+            "binary_to_relocate_fullpath": []}

     blacklist = (".spack", "man")

-    # Get all the paths we will want to relocate in binaries
-    paths_to_relocate = [s.prefix for s in spec.traverse(root=True)]
-    paths_to_relocate.append(spack.store.layout.root)
-
     # Do this during tarball creation to save time when the tarball is unpacked.
     # Used by make_package_relative to determine binaries to change.
     for root, dirs, files in os.walk(spec.prefix, topdown=True):
@@ -666,11 +662,6 @@ def get_buildfile_manifest(spec):
                 (m_subtype in ('x-mach-binary')
                  and sys.platform == 'darwin') or
                 (not filename.endswith('.o'))):
-
-            # Last path to relocate is the layout root, which is a substring
-            # of the others
-            indices = relocate.compute_indices(path_name, paths_to_relocate)
-            data['offsets'][rel_path_name] = indices
             data['binary_to_relocate'].append(rel_path_name)
             data['binary_to_relocate_fullpath'].append(path_name)
             added = True
@@ -709,7 +700,6 @@ def write_buildinfo_file(spec, workdir, rel=False):
     buildinfo['relocate_binaries'] = manifest['binary_to_relocate']
     buildinfo['relocate_links'] = manifest['link_to_relocate']
     buildinfo['prefix_to_hash'] = prefix_to_hash
-    buildinfo['offsets'] = manifest['offsets']
     filename = buildinfo_file_name(workdir)
     with open(filename, 'w') as outfile:
         outfile.write(syaml.dump(buildinfo, default_flow_style=True))
@@ -1483,25 +1473,11 @@ def is_backup_file(file):

     # If we are not installing back to the same install tree do the relocation
     if old_prefix != new_prefix:
-        # Relocate links to the new install prefix
-        links = [link for link in buildinfo.get('relocate_links', [])]
-        relocate.relocate_links(
-            links, old_layout_root, old_prefix, new_prefix
-        )
-
-        # For all buildcaches
-        # relocate the install prefixes in text files including dependencies
-        relocate.relocate_text(text_names, prefix_to_prefix_text)
-
-        files_to_relocate = [os.path.join(workdir, filename)
-                             for filename in buildinfo.get('relocate_binaries')
-                             ]
         # If the buildcache was not created with relativized rpaths
         # do the relocation of rpaths in binaries
-        # TODO: Is this necessary? How are null-terminated strings handled
-        # in the rpath header?
-        files_to_relocate = [
-            os.path.join(workdir, filename)
-            for filename in buildinfo.get('relocate_binaries')
-        ]
+        files_to_relocate = [
+            os.path.join(workdir, filename)
+            for filename in buildinfo.get('relocate_binaries')
+        ]

         # do the relocation of path in binaries
         platform = spack.platforms.by_name(spec.platform)
         if 'macho' in platform.binary_formats:
             relocate.relocate_macho_binaries(files_to_relocate,
@@ -1517,11 +1493,25 @@ def is_backup_file(file):
                                              prefix_to_prefix_bin, rel,
                                              old_prefix,
                                              new_prefix)
+        # Relocate links to the new install prefix
+        links = [link for link in buildinfo.get('relocate_links', [])]
+        relocate.relocate_links(
+            links, old_layout_root, old_prefix, new_prefix
+        )

-        # If offsets is None, we will recompute offsets when needed
-        offsets = buildinfo.get('offsets', None)
-        relocate.relocate_text_bin(
-            files_to_relocate, prefix_to_prefix_bin, offsets, workdir)
+        # For all buildcaches
+        # relocate the install prefixes in text files including dependencies
+        relocate.relocate_text(text_names, prefix_to_prefix_text)

+        paths_to_relocate = [old_prefix, old_layout_root]
+        paths_to_relocate.extend(prefix_to_hash.keys())
+        files_to_relocate = list(filter(
+            lambda pathname: not relocate.file_is_relocatable(
+                pathname, paths_to_relocate=paths_to_relocate),
+            map(lambda filename: os.path.join(workdir, filename),
+                buildinfo['relocate_binaries'])))
+        # relocate the install prefixes in binary files including dependencies
+        relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)

         # If we are installing back to the same location
         # relocate the sbang location if the spack directory changed
@@ -80,32 +80,41 @@ def _try_import_from_store(module, query_spec, query_info=None):

     for candidate_spec in installed_specs:
         pkg = candidate_spec['python'].package
-        module_paths = {
+        module_paths = [
             os.path.join(candidate_spec.prefix, pkg.purelib),
             os.path.join(candidate_spec.prefix, pkg.platlib),
-        }
-        sys.path.extend(module_paths)
+        ]  # type: list[str]
+        path_before = list(sys.path)
+        # NOTE: try module_paths first and last, last allows an existing version in path
+        # to be picked up and used, possibly depending on something in the store, first
+        # allows the bootstrap version to work when an incompatible version is in
+        # sys.path
+        orders = [
+            module_paths + sys.path,
+            sys.path + module_paths,
+        ]
+        for path in orders:
+            sys.path = path
+            try:
+                _fix_ext_suffix(candidate_spec)
+                if _python_import(module):
+                    msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
+                           'provides the "{0}" Python module').format(
+                        module, query_spec, candidate_spec.dag_hash()
+                    )
+                    tty.debug(msg)
+                    if query_info is not None:
+                        query_info['spec'] = candidate_spec
+                    return True
+            except Exception as e:
+                msg = ('unexpected error while trying to import module '
+                       '"{0}" from spec "{1}" [error="{2}"]')
+                tty.warn(msg.format(module, candidate_spec, str(e)))
+            else:
+                msg = "Spec {0} did not provide module {1}"
+                tty.warn(msg.format(candidate_spec, module))

-        try:
-            _fix_ext_suffix(candidate_spec)
-            if _python_import(module):
-                msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
-                       'provides the "{0}" Python module').format(
-                    module, query_spec, candidate_spec.dag_hash()
-                )
-                tty.debug(msg)
-                if query_info is not None:
-                    query_info['spec'] = candidate_spec
-                return True
-        except Exception as e:
-            msg = ('unexpected error while trying to import module '
-                   '"{0}" from spec "{1}" [error="{2}"]')
-            tty.warn(msg.format(module, candidate_spec, str(e)))
-        else:
-            msg = "Spec {0} did not provide module {1}"
-            tty.warn(msg.format(candidate_spec, module))
-
-        sys.path = sys.path[:-3]
+        sys.path = path_before

     return False

@@ -456,9 +465,10 @@ def _make_bootstrapper(conf):
     return _bootstrap_methods[btype](conf)


-def _validate_source_is_trusted(conf):
+def source_is_enabled_or_raise(conf):
+    """Raise ValueError if the source is not enabled for bootstrapping"""
     trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
-    if name not in trusted:
+    if not trusted.get(name, False):
         raise ValueError('source is not trusted')

@@ -529,7 +539,7 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):

     for current_config in bootstrapping_sources():
         with h.forward(current_config['name']):
-            _validate_source_is_trusted(current_config)
+            source_is_enabled_or_raise(current_config)

             b = _make_bootstrapper(current_config)
             if b.try_import(module, abstract_spec):
@@ -571,7 +581,7 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):

     for current_config in bootstrapping_sources():
         with h.forward(current_config['name']):
-            _validate_source_is_trusted(current_config)
+            source_is_enabled_or_raise(current_config)

             b = _make_bootstrapper(current_config)
             if b.try_search_path(executables, abstract_spec):
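For context, the ``bootstrap:trusted`` section read by
``source_is_enabled_or_raise`` maps source names to booleans; a sketch of the
corresponding config (the values are hypothetical, and the names match
entries under ``bootstrap:sources``)::

    bootstrap:
      enable: true
      trusted:
        github-actions: true
        spack-install: false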
@@ -1259,6 +1259,14 @@ def install(self, spec, prefix):
         for f in glob.glob('%s/intel*log' % tmpdir):
             install(f, dst)

+    @run_after('install')
+    def validate_install(self):
+        # Sometimes the installer exits with an error but doesn't pass a
+        # non-zero exit code to spack. Check for the existence of a 'bin'
+        # directory to catch this error condition.
+        if not os.path.exists(self.prefix.bin):
+            raise InstallError('The installer has failed to install anything.')
+
     @run_after('install')
     def configure_rpath(self):
         if '+rpath' not in self.spec:
@@ -8,7 +8,10 @@
 import argparse
 import os
 import re
+import shlex
 import sys
+from textwrap import dedent
+from typing import List, Tuple

 import ruamel.yaml as yaml
 import six
@@ -147,6 +150,58 @@ def get_command(cmd_name):
     return getattr(get_module(cmd_name), pname)


+class _UnquotedFlags(object):
+    """Use a heuristic in `.extract()` to detect whether the user is trying to set
+    multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').
+
+    If the heuristic finds a match (which can be checked with `__bool__()`), a warning
+    message explaining how to quote multiple flags correctly can be generated with
+    `.report()`.
+    """
+
+    flags_arg_pattern = re.compile(
+        r'^({0})=([^\'"].*)$'.format(
+            '|'.join(spack.spec.FlagMap.valid_compiler_flags()),
+        ))
+
+    def __init__(self, all_unquoted_flag_pairs):
+        # type: (List[Tuple[re.Match, str]]) -> None
+        self._flag_pairs = all_unquoted_flag_pairs
+
+    def __bool__(self):
+        # type: () -> bool
+        return bool(self._flag_pairs)
+
+    @classmethod
+    def extract(cls, sargs):
+        # type: (str) -> _UnquotedFlags
+        all_unquoted_flag_pairs = []  # type: List[Tuple[re.Match, str]]
+        prev_flags_arg = None
+        for arg in shlex.split(sargs):
+            if prev_flags_arg is not None:
+                all_unquoted_flag_pairs.append((prev_flags_arg, arg))
+            prev_flags_arg = cls.flags_arg_pattern.match(arg)
+        return cls(all_unquoted_flag_pairs)
+
+    def report(self):
+        # type: () -> str
+        single_errors = [
+            '({0}) {1} {2} => {3}'.format(
+                i + 1, match.group(0), next_arg,
+                '{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
+            )
+            for i, (match, next_arg) in enumerate(self._flag_pairs)
+        ]
+        return dedent("""\
+            Some compiler or linker flags were provided without quoting their arguments,
+            which now causes spack to try to parse the *next* argument as a spec component
+            such as a variant instead of an additional compiler or linker flag. If the
+            intent was to set multiple flags, try quoting them together as described below.
+
+            Possible flag quotation errors (with the correctly-quoted version after the =>):
+            {0}""").format('\n'.join(single_errors))
+
+
 def parse_specs(args, **kwargs):
     """Convenience function for parsing arguments from specs. Handles common
     exceptions and dies if there are errors.
@@ -157,15 +212,28 @@ def parse_specs(args, **kwargs):

     sargs = args
     if not isinstance(args, six.string_types):
-        sargs = ' '.join(spack.util.string.quote(args))
-    specs = spack.spec.parse(sargs)
-    for spec in specs:
-        if concretize:
-            spec.concretize(tests=tests)  # implies normalize
-        elif normalize:
-            spec.normalize(tests=tests)
+        sargs = ' '.join(args)
+    unquoted_flags = _UnquotedFlags.extract(sargs)

-    return specs
+    try:
+        specs = spack.spec.parse(sargs)
+        for spec in specs:
+            if concretize:
+                spec.concretize(tests=tests)  # implies normalize
+            elif normalize:
+                spec.normalize(tests=tests)
+        return specs
+
+    except spack.error.SpecError as e:
+
+        msg = e.message
+        if e.long_message:
+            msg += e.long_message
+        if unquoted_flags:
+            msg += '\n\n'
+            msg += unquoted_flags.report()
+
+        raise spack.error.SpackError(msg)


 def matching_spec_from_env(spec):
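A console sketch of the mistake this heuristic reports (hypothetical spec;
unquoted, ``-pipe`` is parsed as the next spec token rather than as part of
``cflags``)::

    $ spack install hdf5 cflags=-Os -pipe      # triggers the quoting hint on parse failure
    $ spack install hdf5 cflags="-Os -pipe"    # correctly sets both flags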
@@ -379,7 +379,9 @@ def _remove(args):


 def _mirror(args):
-    mirror_dir = os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
+    mirror_dir = spack.util.path.canonicalize_path(
+        os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
+    )

     # TODO: Here we are adding gnuconfig manually, but this can be fixed
     # TODO: as soon as we have an option to add to a mirror all the possible
@@ -826,7 +826,7 @@ def get_versions(args, name):
         spack.util.url.require_url_format(args.url)
         if args.url.startswith('file://'):
             valid_url = False  # No point in spidering these
-    except ValueError:
+    except (ValueError, TypeError):
         valid_url = False

     if args.url is not None and args.template != 'bundle' and valid_url:
@@ -125,7 +125,7 @@ def external_find(args):

     # If the list of packages is empty, search for every possible package
     if not args.tags and not packages_to_check:
-        packages_to_check = spack.repo.path.all_packages()
+        packages_to_check = list(spack.repo.path.all_packages())

     detected_packages = spack.detection.by_executable(
         packages_to_check, path_hints=args.path)
@@ -177,7 +177,10 @@ def _collect_and_consume_cray_manifest_files(

     for directory in manifest_dirs:
         for fname in os.listdir(directory):
-            manifest_files.append(os.path.join(directory, fname))
+            if fname.endswith('.json'):
+                fpath = os.path.join(directory, fname)
+                tty.debug("Adding manifest file: {0}".format(fpath))
+                manifest_files.append(os.path.join(directory, fpath))

     if not manifest_files:
         raise NoManifestFileError(
@@ -185,6 +188,7 @@ def _collect_and_consume_cray_manifest_files(
         .format(cray_manifest.default_path))

     for path in manifest_files:
+        tty.debug("Reading manifest file: " + path)
         try:
             cray_manifest.read(path, not dry_run)
         except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
@@ -80,7 +80,8 @@ def spec(parser, args):
     # Use command line specified specs, otherwise try to use environment specs.
     if args.specs:
         input_specs = spack.cmd.parse_specs(args.specs)
-        specs = [(s, s.concretized()) for s in input_specs]
+        concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
+        specs = list(zip(input_specs, concretized_specs))
     else:
         env = ev.active_environment()
         if env:
@@ -94,16 +94,16 @@ def changed_files(base="develop", untracked=True, all_files=False, root=None):
     git = which("git", required=True)

     # ensure base is in the repo
-    git("show-ref", "--verify", "--quiet", "refs/heads/%s" % base,
-        fail_on_error=False)
+    base_sha = git("rev-parse", "--quiet", "--verify", "--revs-only", base,
+                   fail_on_error=False, output=str)
     if git.returncode != 0:
         tty.die(
-            "This repository does not have a '%s' branch." % base,
+            "This repository does not have a '%s' revision." % base,
             "spack style needs this branch to determine which files changed.",
             "Ensure that '%s' exists, or specify files to check explicitly." % base
         )

-    range = "{0}...".format(base)
+    range = "{0}...".format(base_sha.strip())

     git_args = [
         # Add changed files committed since branching off of develop
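As a sketch of why this change helps, ``git rev-parse`` resolves any revision
(remote-tracking branches, tags, or raw SHAs), while ``git show-ref --verify``
only accepts local heads::

    $ git show-ref --verify refs/heads/origin/develop            # fails: not a local branch
    $ git rev-parse --quiet --verify --revs-only origin/develop  # prints the commit SHA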
@@ -110,7 +110,7 @@

 #: metavar to use for commands that accept scopes
 #: this is shorter and more readable than listing all choices
-scopes_metavar = '{defaults,system,site,user}[/PLATFORM]'
+scopes_metavar = '{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT'

 #: Base name for the (internal) overrides scope.
 overrides_base_name = 'overrides-'
@@ -20,6 +20,7 @@
|
||||
|
||||
compiler_name_translation = {
|
||||
'nvidia': 'nvhpc',
|
||||
'rocm': 'rocmcc',
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
from llnl.util.lang import dedupe
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.bootstrap
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
@@ -1282,6 +1283,10 @@ def _concretize_separately(self, tests=False):
        # processes try to write the config file in parallel
        _ = spack.compilers.get_compiler_config()

+        # Ensure that buildcache index is updated if reuse is on
+        if spack.config.get('config:reuse', False):
+            spack.binary_distribution.binary_index.update()

        # Early return if there is nothing to do
        if len(arguments) == 0:
            return []

@@ -35,6 +35,7 @@
import six.moves.urllib.parse as urllib_parse

+import llnl.util
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.filesystem import (
    get_single_file,
@@ -119,6 +120,11 @@ def __init__(self, **kwargs):
        # 'no_cache' option from version directive.
        self.cache_enabled = not kwargs.pop('no_cache', False)

+        self.package = None
+
+    def set_package(self, package):
+        self.package = package

    # Subclasses need to implement these methods
    def fetch(self):
        """Fetch source code archive or repo.
@@ -242,6 +248,10 @@ def source_id(self):
        if all(component_ids):
            return component_ids

+    def set_package(self, package):
+        for item in self:
+            item.package = package


@fetcher
class URLFetchStrategy(FetchStrategy):
@@ -520,7 +530,7 @@ def expand(self):
                "Failed on expand() for URL %s" % self.url)

        if not self.extension:
-            self.extension = extension(self.archive_file)
+            self.extension = extension(self.url)

        if self.stage.expanded:
            tty.debug('Source already staged to %s' % self.stage.source_path)
@@ -528,50 +538,11 @@ def expand(self):

        decompress = decompressor_for(self.archive_file, self.extension)

        # Expand all tarballs in their own directory to contain
        # exploding tarballs.
        tarball_container = os.path.join(self.stage.path,
                                         "spack-expanded-archive")

        # Below we assume that the command to decompress expands the
        # archive in the current working directory
        mkdirp(tarball_container)
        with working_dir(tarball_container):
            with fs.exploding_archive_catch(self.stage):
                decompress(self.archive_file)

-        # Check for an exploding tarball, i.e. one that doesn't expand to
-        # a single directory. If the tarball *didn't* explode, move its
-        # contents to the staging source directory & remove the container
-        # directory. If the tarball did explode, just rename the tarball
-        # directory to the staging source directory.
-        #
-        # NOTE: The tar program on Mac OS X will encode HFS metadata in
-        # hidden files, which can end up *alongside* a single top-level
-        # directory. We initially ignore presence of hidden files to
-        # accommodate these "semi-exploding" tarballs but ensure the files
-        # are copied to the source directory.
-        files = os.listdir(tarball_container)
-        non_hidden = [f for f in files if not f.startswith('.')]
-        if len(non_hidden) == 1:
-            src = os.path.join(tarball_container, non_hidden[0])
-            if os.path.isdir(src):
-                self.stage.srcdir = non_hidden[0]
-                shutil.move(src, self.stage.source_path)
-                if len(files) > 1:
-                    files.remove(non_hidden[0])
-                    for f in files:
-                        src = os.path.join(tarball_container, f)
-                        dest = os.path.join(self.stage.path, f)
-                        shutil.move(src, dest)
-                os.rmdir(tarball_container)
-            else:
-                # This is a non-directory entry (e.g., a patch file) so simply
-                # rename the tarball container to be the source path.
-                shutil.move(tarball_container, self.stage.source_path)
-        else:
-            shutil.move(tarball_container, self.stage.source_path)

    def archive(self, destination):
        """Just moves this archive to the destination."""
        if not self.archive_file:
@@ -1014,9 +985,20 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
            git(*args)

        # Init submodules if the user asked for them.
-        if self.submodules:
-            with working_dir(dest):
-                args = ['submodule', 'update', '--init', '--recursive']
+        git_commands = []
+        submodules = self.submodules
+        if callable(submodules):
+            submodules = list(submodules(self.package))
+            git_commands.append(["submodule", "init", "--"] + submodules)
+            git_commands.append(['submodule', 'update', '--recursive'])
+        elif submodules:
+            git_commands.append(["submodule", "update", "--init", "--recursive"])
+
+        if not git_commands:
+            return
+
+        with working_dir(dest):
+            for args in git_commands:
                if not spack.config.get('config:debug'):
                    args.insert(1, '--quiet')
                git(*args)
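With this change a package can pass a callable as its `submodules` value: the callable receives the package and returns the subset of submodule paths to initialize, while a plain `True` keeps the old everything-recursive behavior. A runnable sketch that mirrors the planning logic above (the package class and variant names are hypothetical stand-ins, not Spack APIs):

class FakePackage(object):
    def __init__(self, spec):
        self.spec = spec  # a plain string stands in for a real Spec here

def submodules(package):
    # Hypothetical selector: only fetch heavyweight data submodules
    # when the spec asks for them.
    paths = ['src/core']
    if '+data' in package.spec:
        paths.append('data/testsuite')
    return paths

def plan_submodule_commands(submodules_opt, package):
    # Mirrors clone() above: callable -> init just the returned paths,
    # truthy -> init everything, falsy -> no submodule commands at all.
    git_commands = []
    if callable(submodules_opt):
        chosen = list(submodules_opt(package))
        git_commands.append(["submodule", "init", "--"] + chosen)
        git_commands.append(["submodule", "update", "--recursive"])
    elif submodules_opt:
        git_commands.append(["submodule", "update", "--init", "--recursive"])
    return git_commands

print(plan_submodule_commands(submodules, FakePackage('mysim+data')))
print(plan_submodule_commands(True, FakePackage('mysim')))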
@@ -95,7 +95,10 @@ def view_copy(src, dst, view, spec=None):
                view.get_projection_for_spec(dep)

    if spack.relocate.is_binary(dst):
-        spack.relocate.relocate_text_bin([dst], prefix_to_projection)
+        spack.relocate.relocate_text_bin(
+            binaries=[dst],
+            prefixes=prefix_to_projection
+        )
    else:
        prefix_to_projection[spack.store.layout.root] = view._root
        prefix_to_projection[orig_sbang] = new_sbang
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform as py_platform
import re
+from subprocess import check_output

from spack.version import Version

@@ -51,6 +52,17 @@ def __init__(self):

        if 'ubuntu' in distname:
            version = '.'.join(version[0:2])
+        # openSUSE Tumbleweed is a rolling release which can change
+        # more than once in a week, so set version to tumbleweed$GLIBVERS
+        elif 'opensuse-tumbleweed' in distname or 'opensusetumbleweed' in distname:
+            distname = 'opensuse'
+            output = check_output(["ldd", "--version"]).decode()
+            libcvers = re.findall(r'ldd \(GNU libc\) (.*)', output)
+            if len(libcvers) == 1:
+                version = 'tumbleweed' + libcvers[0]
+            else:
+                version = 'tumbleweed' + version[0]

        else:
            version = version[0]
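The Tumbleweed branch derives a pseudo-version from the glibc release, since the rolling distro has no stable version number. The regex pulls the release out of the first line of `ldd --version`; the exact banner varies between distros, which is why the code falls back to the distro's own version string when the pattern does not match exactly once. A standalone sketch against a canned banner:

import re

# Canned first line of `ldd --version` from a plain-glibc system (illustrative).
sample = "ldd (GNU libc) 2.35\nCopyright (C) 2022 Free Software Foundation, Inc.\n"

libcvers = re.findall(r'ldd \(GNU libc\) (.*)', sample)
if len(libcvers) == 1:
    version = 'tumbleweed' + libcvers[0]
else:
    version = 'tumbleweed' + '20220101'  # fallback: the distro's date-based version
print(version)  # tumbleweed2.35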
@@ -82,4 +82,4 @@
    conditional,
    disjoint_sets,
)
-from spack.version import Version, ver
+from spack.version import Version, ver

@@ -33,7 +33,7 @@

import llnl.util.filesystem as fsys
import llnl.util.tty as tty
-from llnl.util.lang import memoized, nullcontext
+from llnl.util.lang import match_predicate, memoized, nullcontext
from llnl.util.link_tree import LinkTree

import spack.compilers
@@ -1312,6 +1312,7 @@ def _make_fetcher(self):
        resources = self._get_needed_resources()
        for resource in resources:
            fetcher.append(resource.fetcher)
+        fetcher.set_package(self)
        return fetcher

    @property
@@ -1326,6 +1327,7 @@ def fetcher(self):
    @fetcher.setter
    def fetcher(self, f):
        self._fetcher = f
+        self._fetcher.set_package(self)

    def dependencies_of_type(self, *deptypes):
        """Get dependencies that can possibly have these deptypes.
@@ -1445,6 +1447,10 @@ def prefix(self):
        """Get the prefix into which this package should be installed."""
        return self.spec.prefix

+    @property
+    def home(self):
+        return self.prefix

    @property  # type: ignore[misc]
    @memoized
    def compiler(self):
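`home` defaults to the install prefix, but a package whose usable root lives below its prefix can now override the property. A minimal stand-in sketch with no Spack imports (the `Jdk` layout is hypothetical, purely for illustration):

import os

class BasePackage(object):
    def __init__(self, prefix):
        self.prefix = prefix

    @property
    def home(self):
        # Default: a package's "home" is simply its install prefix.
        return self.prefix

class Jdk(BasePackage):
    @property
    def home(self):
        # Hypothetical override: the usable JDK root is a subdirectory.
        return os.path.join(self.prefix, 'jdk')

print(BasePackage('/opt/foo').home)  # /opt/foo
print(Jdk('/opt/jdk-17').home)       # /opt/jdk-17/jdk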
@@ -1721,7 +1727,7 @@ def content_hash(self, content=None):
            # referenced by branch name rather than tag or commit ID.
            env = spack.environment.active_environment()
            from_local_sources = env and env.is_develop(self.spec)
-            if not self.spec.external and not from_local_sources:
+            if self.has_code and not self.spec.external and not from_local_sources:
                message = 'Missing a source id for {s.name}@{s.version}'
                tty.warn(message.format(s=self))
                hash_content.append(''.encode('utf-8'))
@@ -2178,10 +2184,8 @@ def check_paths(path_list, filetype, predicate):
        check_paths(self.sanity_check_is_file, 'file', os.path.isfile)
        check_paths(self.sanity_check_is_dir, 'directory', os.path.isdir)

-        installed = set(os.listdir(self.prefix))
-        installed.difference_update(
-            spack.store.layout.hidden_file_regexes)
-        if not installed:
+        ignore_file = match_predicate(spack.store.layout.hidden_file_regexes)
+        if all(map(ignore_file, os.listdir(self.prefix))):
            raise InstallError(
                "Install failed for %s. Nothing was installed!" % self.name)
@@ -433,8 +433,9 @@ def needs_binary_relocation(m_type, m_subtype):
        m_type (str): MIME type of the file
        m_subtype (str): MIME subtype of the file
    """
+    subtypes = ('x-executable', 'x-sharedlib', 'x-mach-binary', 'x-pie-executable')
    if m_type == 'application':
-        if m_subtype in ('x-executable', 'x-sharedlib', 'x-mach-binary'):
+        if m_subtype in subtypes:
            return True
    return False
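Position-independent executables report the MIME subtype `x-pie-executable` (the default output of most modern toolchains), so without the new entry those binaries were silently skipped during relocation. A quick check of the predicate as rewritten:

def needs_binary_relocation(m_type, m_subtype):
    subtypes = ('x-executable', 'x-sharedlib', 'x-mach-binary', 'x-pie-executable')
    return m_type == 'application' and m_subtype in subtypes

print(needs_binary_relocation('application', 'x-pie-executable'))  # True (newly covered)
print(needs_binary_relocation('text', 'x-python'))                 # False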
@@ -469,6 +470,47 @@ def _replace_prefix_text(filename, compiled_prefixes):
            f.truncate()


def _replace_prefix_bin(filename, byte_prefixes):
    """Replace all the occurrences of the old install prefix with a
    new install prefix in binary files.

    The new install prefix is prefixed with ``os.sep`` until the
    lengths of the prefixes are the same.

    Args:
        filename (str): target binary file
        byte_prefixes (OrderedDict): OrderedDictionary where the keys are
            precompiled regex of the old prefixes and the values are the new
            prefixes (utf-8 encoded)
    """

    with open(filename, 'rb+') as f:
        data = f.read()
        f.seek(0)
        for orig_bytes, new_bytes in byte_prefixes.items():
            original_data_len = len(data)
            # Skip this hassle if not found
            if orig_bytes not in data:
                continue
            # We only care about this problem if we are about to replace
            length_compatible = len(new_bytes) <= len(orig_bytes)
            if not length_compatible:
                tty.debug('Binary failing to relocate is %s' % filename)
                raise BinaryTextReplaceError(orig_bytes, new_bytes)
            pad_length = len(orig_bytes) - len(new_bytes)
            padding = os.sep * pad_length
            padding = padding.encode('utf-8')
            data = data.replace(orig_bytes, new_bytes + padding)
            # Really needs to be the same length
            if not len(data) == original_data_len:
                print('Length of pad:', pad_length, 'should be', len(padding))
                print(new_bytes, 'was to replace', orig_bytes)
                raise BinaryStringReplacementError(
                    filename, original_data_len, len(data))
        f.write(data)
        f.truncate()
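The padding trick only works because the replacement is never longer than the original: the new prefix is padded with `os.sep` bytes so the file size, and therefore every offset after the string, stays fixed. A byte-level illustration of the invariant:

import os

orig = b'/old/install/prefix'
new = b'/new/prefix'
pad = os.sep.encode('utf-8') * (len(orig) - len(new))

data = b'#!' + orig + b'/bin/python\x00more bytes'
patched = data.replace(orig, new + pad)

assert len(patched) == len(data)  # offsets after the string are unchanged
print(patched)  # note the run of separators padding out the new prefix

Padding with the path separator is the design choice that keeps the result usable: duplicate separators are ignored when the OS resolves a path, so the padded string still points at the right location.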

def relocate_macho_binaries(path_names, old_layout_root, new_layout_root,
                            prefix_to_prefix, rel, old_prefix, new_prefix):
    """
@@ -776,6 +818,49 @@ def relocate_text(files, prefixes, concurrency=32):
        tp.join()


def relocate_text_bin(binaries, prefixes, concurrency=32):
    """Replace null terminated path strings hard coded into binaries.

    The new install prefix must be shorter than the original one.

    Args:
        binaries (list): binaries to be relocated
        prefixes (OrderedDict): String prefixes which need to be changed.
        concurrency (int): Desired degree of parallelism.

    Raises:
        BinaryTextReplaceError: when the new path is longer than the old path
    """
    byte_prefixes = collections.OrderedDict({})

    for orig_prefix, new_prefix in prefixes.items():
        if orig_prefix != new_prefix:
            if isinstance(orig_prefix, bytes):
                orig_bytes = orig_prefix
            else:
                orig_bytes = orig_prefix.encode('utf-8')
            if isinstance(new_prefix, bytes):
                new_bytes = new_prefix
            else:
                new_bytes = new_prefix.encode('utf-8')
            byte_prefixes[orig_bytes] = new_bytes

    # Do relocations on text in binaries that refers to the install tree
    # multiprocessing.ThreadPool.map requires a single argument
    args = []

    for binary in binaries:
        args.append((binary, byte_prefixes))

    tp = multiprocessing.pool.ThreadPool(processes=concurrency)

    try:
        tp.map(llnl.util.lang.star(_replace_prefix_bin), args)
    finally:
        tp.terminate()
        tp.join()
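The first loop normalizes every prefix to bytes before any file is touched, so callers can mix `str` and `bytes` keys and identity mappings cost nothing. A standalone sketch of just that coercion step (mirroring the loop above, not Spack's API):

import collections

def to_byte_prefixes(prefixes):
    # Accept str or bytes keys/values, store everything utf-8 encoded,
    # and drop identity mappings that would be no-op replacements.
    byte_prefixes = collections.OrderedDict()
    for orig, new in prefixes.items():
        if orig == new:
            continue
        orig_b = orig if isinstance(orig, bytes) else orig.encode('utf-8')
        new_b = new if isinstance(new, bytes) else new.encode('utf-8')
        byte_prefixes[orig_b] = new_b
    return byte_prefixes

prefixes = collections.OrderedDict([
    ('/old/root/pkg', '/new/root/pkg'),  # changed: kept
    ('/old/root', '/old/root'),          # unchanged: dropped
])
print(to_byte_prefixes(prefixes))
# OrderedDict([(b'/old/root/pkg', b'/new/root/pkg')])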

def is_relocatable(spec):
    """Returns True if an installed spec is relocatable.
@@ -1042,120 +1127,3 @@ def fixup_macos_rpaths(spec):
        ))
    else:
        tty.debug('No rpath fixup needed for ' + specname)

-
-def compute_indices(filename, paths_to_relocate):
-    """
-    Compute the indices in filename at which each of paths_to_relocate occurs.
-
-    Arguments:
-        filename (str): file to compute indices for
-        paths_to_relocate (List[str]): paths to find indices of
-    Returns:
-        Dict
-    """
-    with open(filename, 'rb') as f:
-        contents = f.read()
-
-    substring_prefix = os.path.commonprefix(paths_to_relocate).encode('utf-8')
-
-    indices = {}
-    index = 0
-    max_length = max(len(path) for path in paths_to_relocate)
-    while True:
-        try:
-            # We search for the smallest substring of all paths we relocate
-            # In practice, this is the spack install root, and we relocate
-            # prefixes in the root and the root itself
-            index = contents.index(substring_prefix, index)
-        except ValueError:
-            # The string isn't found in the rest of the binary
-            break
-        else:
-            # only copy the smallest portion of the binary for comparisons
-            substring_to_check = contents[index:index + max_length]
-            for path in paths_to_relocate:
-                # We guarantee any substring in the list comes after any superstring
-                p = path.encode('utf-8')
-                if substring_to_check.startswith(p):
-                    indices[index] = str(path)
-                    index += len(path)
-                    break
-            else:
-                index += 1
-    return indices
-
-
-def _relocate_binary_text(filename, offsets, prefix_to_prefix):
-    """
-    Relocate the text of a single binary file, given the offsets at which the
-    replacements need to be made.
-
-    Arguments:
-        filename (str): file to modify
-        offsets (Dict[int, str]): locations of the strings to replace
-        prefix_to_prefix (Dict[str, str]): strings to replace and their replacements
-    """
-    with open(filename, 'rb+') as f:
-        for index, prefix in offsets.items():
-            replacement = prefix_to_prefix[prefix].encode('utf-8')
-            if len(replacement) > len(prefix):
-                raise BinaryTextReplaceError(prefix, replacement)
-
-            # read forward until we find the end of the string including
-            # the prefix and compute the replacement as we go
-            f.seek(index + len(prefix))
-            c = f.read(1)
-            while c not in (None, b'\x00'):
-                replacement += c
-                c = f.read(1)
-
-            # seek back to the index position and write the replacement in
-            # and add null-terminator
-            f.seek(index)
-            f.write(replacement)
-            f.write(b'\x00')
-
-
-def relocate_text_bin(
-    files_to_relocate, prefix_to_prefix, offsets=None,
-    relative_root=None, concurrency=32
-):
-    """
-    For each file given, replace all keys in the given translation dict with
-    the associated values. Optionally executes using precomputed memoized offsets
-    for the substitutions.
-
-    Arguments:
-        files_to_relocate (List[str]): The files to modify
-        prefix_to_prefix (Dict[str, str]): keys are strings to replace, values are
-            replacements
-        offsets (Dict[str, Dict[int, str]]): (optional) Mapping from relative filenames
-            to a mapping from indices to strings to replace found at each index
-        relative_root (str): (optional) prefix for relative paths in offsets
-    """
-    # defaults to the common prefix of all input files
-    rel_root = relative_root or os.path.commonprefix(files_to_relocate)
-
-    if offsets is None:
-        offsets = {}
-        for filename in files_to_relocate:
-            indices = compute_indices(
-                filename,
-                list(prefix_to_prefix.keys()),
-            )
-            relpath = os.path.relpath(filename, rel_root)
-            offsets[relpath] = indices
-
-    args = [
-        (filename, offsets[os.path.relpath(filename, rel_root)], prefix_to_prefix)
-        for filename in files_to_relocate
-    ]
-
-    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
-
-    try:
-        tp.map(llnl.util.lang.star(_relocate_binary_text), args)
-    finally:
-        tp.terminate()
-        tp.join()
@@ -93,10 +93,8 @@ def rewire_node(spec, explicit):
                                         False,
                                         spec.build_spec.prefix,
                                         spec.prefix)

-        # Relocate text strings of prefixes embedded in binaries
-        relocate.relocate_text_bin(bins_to_relocate, prefix_to_prefix)
+        relocate.relocate_text_bin(binaries=bins_to_relocate,
+                                   prefixes=prefix_to_prefix)
    # Copy package into place, except for spec.json (because spec.json
    # describes the old spec and not the new spliced spec).
    shutil.copytree(os.path.join(tempdir, spec.dag_hash()), spec.prefix,

@@ -198,9 +198,6 @@ def update(data):
                   " [files={0}]")
            warnings.warn(msg.format(', '.join(data['include'])))

-    if 'packages' in data:
-        updated |= spack.schema.packages.update(data['packages'])

    # Spack 0.19 drops support for `spack:concretization` in favor of
    # `spack:concretizer:unify`. Here we provide an upgrade path that changes the former
    # into the latter, or warns when there's an ambiguity. Note that Spack 0.17 is not
@@ -9,54 +9,6 @@
"""


-def deprecate_paths_and_modules(instance, deprecated_properties):
-    """Function to produce warning/error messages if "paths" and "modules" are
-    found in "packages.yaml"
-
-    Args:
-        instance: instance of the configuration file
-        deprecated_properties: deprecated properties in instance
-
-    Returns:
-        Warning/Error message to be printed
-    """
-    import copy
-    import os.path
-
-    import llnl.util.tty
-
-    import spack.util.spack_yaml as syaml
-
-    # Copy the instance to remove default attributes that are not related
-    # to the part that needs to be reported
-    instance_copy = copy.copy(instance)
-
-    # Check if this configuration comes from an environment or not
-    absolute_path = instance_copy._end_mark.name
-    command_to_suggest = '$ spack config update packages'
-    if os.path.basename(absolute_path) == 'spack.yaml':
-        command_to_suggest = '$ spack env update <environment>'
-
-    # Retrieve the relevant part of the configuration as YAML
-    keys_to_be_removed = [
-        x for x in instance_copy if x not in deprecated_properties
-    ]
-    for key in keys_to_be_removed:
-        instance_copy.pop(key)
-    yaml_as_str = syaml.dump_config(instance_copy, blame=True)
-
-    if llnl.util.tty.is_debug():
-        msg = 'OUTDATED CONFIGURATION FILE [file={0}]\n{1}'
-        llnl.util.tty.debug(msg.format(absolute_path, yaml_as_str))
-
-    msg = ('detected deprecated properties in {0}\nActivate the debug '
-           'flag to have more information on the deprecated parts or '
-           'run:\n\n\t{2}\n\nto update the file to the new format\n')
-    return msg.format(
-        absolute_path, yaml_as_str, command_to_suggest
-    )


#: Properties for inclusion in other schemas
properties = {
    'packages': {
@@ -136,16 +88,7 @@ def deprecate_paths_and_modules(instance, deprecated_properties):
                        'required': ['spec']
                    }
                },
-                # Deprecated properties, will trigger an error with a
-                # message telling how to update.
-                'paths': {'type': 'object'},
-                'modules': {'type': 'object'},
-            },
-            'deprecatedProperties': {
-                'properties': ['modules', 'paths'],
-                'message': deprecate_paths_and_modules,
-                'error': False
-            }
            },
        },
    },
@@ -160,41 +103,3 @@ def deprecate_paths_and_modules(instance, deprecated_properties):
    'additionalProperties': False,
    'properties': properties,
}

-
-def update(data):
-    """Update the data in place to remove deprecated properties.
-
-    Args:
-        data (dict): dictionary to be updated
-
-    Returns:
-        True if data was changed, False otherwise
-    """
-    changed = False
-    for cfg_object in data.values():
-        externals = []
-
-        # If we don't have these deprecated attributes, continue
-        if not any(x in cfg_object for x in ('paths', 'modules')):
-            continue
-
-        # If we arrive here we need to make some changes i.e.
-        # we need to remove and eventually convert some attributes
-        changed = True
-        paths = cfg_object.pop('paths', {})
-        for spec, prefix in paths.items():
-            externals.append({
-                'spec': str(spec),
-                'prefix': str(prefix)
-            })
-        modules = cfg_object.pop('modules', {})
-        for spec, module in modules.items():
-            externals.append({
-                'spec': str(spec),
-                'modules': [str(module)]
-            })
-        if externals:
-            cfg_object['externals'] = externals
-
-    return changed
@@ -631,6 +631,7 @@ def visit(node):

        # Load the file itself
        self.control.load(os.path.join(parent_dir, 'concretize.lp'))
+        self.control.load(os.path.join(parent_dir, "os_facts.lp"))
        self.control.load(os.path.join(parent_dir, "display.lp"))
        timer.phase("load")

@@ -716,7 +717,7 @@ def __init__(self, tests=False):
        self.variant_values_from_specs = set()
        self.version_constraints = set()
        self.target_constraints = set()
-        self.default_targets = {}
+        self.default_targets = []
        self.compiler_version_constraints = set()
        self.post_facts = []
@@ -748,7 +749,13 @@ def key_fn(version):

        pkg = packagize(pkg)
        declared_versions = self.declared_versions[pkg.name]
-        most_to_least_preferred = sorted(declared_versions, key=key_fn)
+        partially_sorted_versions = sorted(set(declared_versions), key=key_fn)
+
+        most_to_least_preferred = []
+        for _, group in itertools.groupby(partially_sorted_versions, key=key_fn):
+            most_to_least_preferred.extend(list(sorted(
+                group, reverse=True, key=lambda x: spack.version.ver(x.version)
+            )))

        for weight, declared_version in enumerate(most_to_least_preferred):
            self.gen.fact(fn.version_declared(
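The old single `sorted()` left ties between equally-preferred versions in whatever order the input happened to have; the new two-level sort orders by preference key first, then breaks ties inside each preference group by descending version, so the enumerated weights are deterministic. A standalone sketch with a toy key (tuples stand in for declared-version objects):

import itertools

# (version, preference_key): lower key = more preferred
declared = [(1.0, 0), (0.9, 0), (2.1, 1), (2.0, 1)]
key_fn = lambda v: v[1]

partially_sorted = sorted(set(declared), key=key_fn)  # set order is arbitrary
most_to_least_preferred = []
for _, group in itertools.groupby(partially_sorted, key=key_fn):
    # within a preference group, newest version first -- deterministic
    most_to_least_preferred.extend(sorted(group, reverse=True, key=lambda x: x[0]))

print(most_to_least_preferred)  # [(1.0, 0), (0.9, 0), (2.1, 1), (2.0, 1)]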
@@ -1171,29 +1178,19 @@ def target_preferences(self, pkg_name):
        if not self.target_specs_cache:
            self.target_specs_cache = [
                spack.spec.Spec('target={0}'.format(target_name))
-                for target_name in archspec.cpu.TARGETS
+                for _, target_name in self.default_targets
            ]

-        target_specs = self.target_specs_cache
-        preferred_targets = [x for x in target_specs if key_fn(x) < 0]
+        package_targets = self.target_specs_cache[:]
+        package_targets.sort(key=key_fn)

-        for i, preferred in enumerate(preferred_targets):
-            self.gen.fact(fn.package_target_weight(
-                str(preferred.architecture.target), pkg_name, i
-            ))
-
-        # generate weights for non-preferred targets on a per-package basis
-        default_targets = {
-            name: weight for
-            name, weight in self.default_targets.items()
-            if not any(preferred.architecture.target.name == name
-                       for preferred in preferred_targets)
-        }
-
-        num_preferred = len(preferred_targets)
-        for name, weight in default_targets.items():
-            self.gen.fact(fn.default_target_weight(
-                name, pkg_name, weight + num_preferred + 30
+        offset = 0
+        best_default = self.default_targets[0][1]
+        for i, preferred in enumerate(package_targets):
+            if str(preferred.architecture.target) == best_default and i != 0:
+                offset = 100
+            self.gen.fact(fn.target_weight(
+                pkg_name, str(preferred.architecture.target), i + offset
            ))

    def flag_defaults(self):
@@ -1597,11 +1594,12 @@ def target_defaults(self, specs):
            # these are stored to be generated as facts later offset by the
            # number of preferred targets
            if target.name in best_targets:
-                self.default_targets[target.name] = i
+                self.default_targets.append((i, target.name))
                i += 1
            else:
-                self.default_targets[target.name] = 100
+                self.default_targets.append((100, target.name))

+        self.default_targets = list(sorted(set(self.default_targets)))
        self.gen.newline()

    def virtual_providers(self):
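Storing `(weight, name)` tuples instead of a `name: weight` dict lets `sorted(set(...))` yield one deterministic, duplicate-free ordering with the best (lowest-weight) target first, which is exactly what `target_preferences` relies on when it reads `self.default_targets[0][1]`. A compact illustration:

default_targets = []
default_targets.append((0, 'skylake'))
default_targets.append((1, 'haswell'))
default_targets.append((100, 'x86_64'))
default_targets.append((1, 'haswell'))  # duplicate from a second spec

default_targets = list(sorted(set(default_targets)))
print(default_targets)        # [(0, 'skylake'), (1, 'haswell'), (100, 'x86_64')]
print(default_targets[0][1])  # 'skylake' -- the best default target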
@@ -107,10 +107,28 @@ possible_version_weight(Package, Weight)
  :- version(Package, Version),
     version_declared(Package, Version, Weight).

-version_weight(Package, Weight)
+% we can't use the weight for an external version if we don't use the
+% corresponding external spec.
+:- version(Package, Version),
+   version_weight(Package, Weight),
+   version_declared(Package, Version, Weight, "external"),
+   not external(Package).
+
+% we can't use a weight from an installed spec if we are building it
+% and vice-versa
+:- version(Package, Version),
+   version_weight(Package, Weight),
+   version_declared(Package, Version, Weight, "installed"),
+   build(Package).
+
+:- version(Package, Version),
+   version_weight(Package, Weight),
+   not version_declared(Package, Version, Weight, "installed"),
+   not build(Package).
+
+1 { version_weight(Package, Weight) : version_declared(Package, Version, Weight) } 1
  :- version(Package, Version),
-     node(Package),
-     Weight = #min{W : version_declared(Package, Version, W)}.
+     node(Package).

% node_version_satisfies implies that exactly one of the satisfying versions
% is the package's version, and vice versa.
@@ -120,6 +138,11 @@ version_weight(Package, Weight)
{ version(Package, Version) : version_satisfies(Package, Constraint, Version) }
  :- node_version_satisfies(Package, Constraint).

+% If there is at least one version that satisfies the constraint, impose a lower
+% bound on the choice rule to avoid false positives with the error below
+1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) }
+  :- node_version_satisfies(Package, Constraint), version_satisfies(Package, Constraint, _).

% More specific error message if the version cannot satisfy some constraint
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
error(1, "No valid version for '{0}' satisfies '@{1}'", Package, Constraint)
@@ -481,13 +504,13 @@ variant(Package, Variant) :- variant_condition(ID, Package, Variant),
    condition_holds(ID).

% a variant cannot be set if it is not a variant on the package
-error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
+error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
  :- variant_set(Package, Variant),
     not variant(Package, Variant),
     build(Package).

% a variant cannot take on a value if it is not a variant of the package
-error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
+error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
  :- variant_value(Package, Variant, _),
     not variant(Package, Variant),
     build(Package).
@@ -714,9 +737,14 @@ node_os_mismatch(Package, Dependency) :-
% every OS is compatible with itself. We can use `os_compatible` to declare
os_compatible(OS, OS) :- os(OS).

-% OS compatibility rules for reusing solves.
-% catalina binaries can be used on bigsur. Direction is package -> dependency.
-os_compatible("bigsur", "catalina").
+% Transitive compatibility among operating systems
+os_compatible(OS1, OS3) :- os_compatible(OS1, OS2), os_compatible(OS2, OS3).
+
+% We can select only operating systems compatible with the ones
+% for which we can build software. We need a cardinality constraint
+% since we might have more than one "buildable_os(OS)" fact.
+:- not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) },
+   node_os(Package, ReusedOS).

% If an OS is set explicitly respect the value
node_os(Package, OS) :- node_os_set(Package, OS), node(Package).
@@ -779,27 +807,6 @@ target_compatible(Descendent, Ancestor)
#defined target_satisfies/2.
#defined target_parent/2.

-% If the package does not have any specific weight for this
-% target, offset the default weights by the number of specific
-% weights and use that. We additionally offset by 30 to ensure
-% preferences are propagated even against large numbers of
-% otherwise "better" matches.
-target_weight(Target, Package, Weight)
-  :- default_target_weight(Target, Package, Weight),
-     node(Package),
-     not derive_target_from_parent(_, Package),
-     not package_target_weight(Target, Package, _).
-
-% TODO: Need to account for the case of more than one parent
-% TODO: each of which sets different targets
-target_weight(Target, Dependency, Weight)
-  :- depends_on(Package, Dependency),
-     derive_target_from_parent(Package, Dependency),
-     target_weight(Target, Package, Weight).
-
-target_weight(Target, Package, Weight)
-  :- package_target_weight(Target, Package, Weight).

% can't use targets on node if the compiler for the node doesn't support them
error(2, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
  :- node_target(Package, Target),
@@ -816,11 +823,7 @@ node_target(Package, Target)
node_target_weight(Package, Weight)
  :- node(Package),
     node_target(Package, Target),
-     target_weight(Target, Package, Weight).
-
-derive_target_from_parent(Parent, Package)
-  :- depends_on(Parent, Package),
-     not package_target_weight(_, Package, _).
+     target_weight(Package, Target, Weight).

% compatibility rules for targets among nodes
node_target_match(Parent, Dependency)
@@ -938,6 +941,9 @@ compiler_weight(Package, 100)
    not node_compiler_preference(Package, Compiler, Version, _),
    not default_compiler_preference(Compiler, Version, _).

+% For the time being, be strict and reuse only if the compiler matches one we have on the system
+:- node_compiler_version(Package, Compiler, Version), not compiler_version(Compiler, Version).

#defined node_compiler_preference/4.
#defined default_compiler_preference/3.

@@ -1246,6 +1252,7 @@ opt_criterion(1, "non-preferred targets").
#heuristic variant_value(Package, Variant, Value) : variant_default_value(Package, Variant, Value), node(Package). [10, true]
#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]
#heuristic node(Package) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]
+#heuristic node_os(Package, OS) : buildable_os(OS). [10, true]

%-----------
% Notes
22  lib/spack/spack/solver/os_facts.lp  Normal file
@@ -0,0 +1,22 @@
% Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

% OS compatibility rules for reusing solves.
% os_compatible(RecentOS, OlderOS)
% OlderOS binaries can be used on RecentOS

% macOS
os_compatible("monterey", "bigsur").
os_compatible("bigsur", "catalina").

% Ubuntu
os_compatible("ubuntu22.04", "ubuntu21.10").
os_compatible("ubuntu21.10", "ubuntu21.04").
os_compatible("ubuntu21.04", "ubuntu20.10").
os_compatible("ubuntu20.10", "ubuntu20.04").
os_compatible("ubuntu20.04", "ubuntu19.10").
os_compatible("ubuntu19.10", "ubuntu19.04").
os_compatible("ubuntu19.04", "ubuntu18.10").
os_compatible("ubuntu18.10", "ubuntu18.04").
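These per-pair facts only chain up because of the transitivity rule added to concretize.lp above (`os_compatible(OS1, OS3) :- os_compatible(OS1, OS2), os_compatible(OS2, OS3).`), so only adjacent releases need to be listed. A small Python sketch of the closure those rules imply:

def transitive_closure(pairs):
    # Fixed point of the ASP transitivity rule over os_compatible/2 facts.
    compat = set(pairs)
    changed = True
    while changed:
        changed = False
        for (a, b) in list(compat):
            for (c, d) in list(compat):
                if b == c and (a, d) not in compat:
                    compat.add((a, d))
                    changed = True
    return compat

facts = {('monterey', 'bigsur'), ('bigsur', 'catalina'),
         ('ubuntu22.04', 'ubuntu21.10'), ('ubuntu21.10', 'ubuntu21.04')}
closure = transitive_closure(facts)
print(('monterey', 'catalina') in closure)       # True: chained through bigsur
print(('ubuntu22.04', 'ubuntu21.04') in closure)  # True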
@@ -896,7 +896,7 @@ def clear(self):
def _command_default_handler(descriptor, spec, cls):
    """Default handler when looking for the 'command' attribute.

-    Tries to search for ``spec.name`` in the ``spec.prefix.bin`` directory.
+    Tries to search for ``spec.name`` in the ``spec.home.bin`` directory.

    Parameters:
        descriptor (ForwardQueryToPackage): descriptor that triggered the call
@@ -910,20 +910,21 @@ def _command_default_handler(descriptor, spec, cls):
    Raises:
        RuntimeError: If the command is not found
    """
-    path = os.path.join(spec.prefix.bin, spec.name)
+    home = getattr(spec.package, 'home')
+    path = os.path.join(home.bin, spec.name)

    if fs.is_exe(path):
        return spack.util.executable.Executable(path)
    else:
        msg = 'Unable to locate {0} command in {1}'
-        raise RuntimeError(msg.format(spec.name, spec.prefix.bin))
+        raise RuntimeError(msg.format(spec.name, home.bin))


def _headers_default_handler(descriptor, spec, cls):
    """Default handler when looking for the 'headers' attribute.

    Tries to search for ``*.h`` files recursively starting from
-    ``spec.prefix.include``.
+    ``spec.package.home.include``.

    Parameters:
        descriptor (ForwardQueryToPackage): descriptor that triggered the call
@@ -937,21 +938,22 @@ def _headers_default_handler(descriptor, spec, cls):
    Raises:
        NoHeadersError: If no headers are found
    """
-    headers = fs.find_headers('*', root=spec.prefix.include, recursive=True)
+    home = getattr(spec.package, 'home')
+    headers = fs.find_headers('*', root=home.include, recursive=True)

    if headers:
        return headers
    else:
        msg = 'Unable to locate {0} headers in {1}'
        raise spack.error.NoHeadersError(
-            msg.format(spec.name, spec.prefix.include))
+            msg.format(spec.name, home))


def _libs_default_handler(descriptor, spec, cls):
    """Default handler when looking for the 'libs' attribute.

    Tries to search for ``lib{spec.name}`` recursively starting from
-    ``spec.prefix``. If ``spec.name`` starts with ``lib``, searches for
+    ``spec.package.home``. If ``spec.name`` starts with ``lib``, searches for
    ``{spec.name}`` instead.

    Parameters:
@@ -978,6 +980,7 @@ def _libs_default_handler(descriptor, spec, cls):
    # get something like 'libabcXabc.so, but for now we consider this
    # unlikely).
    name = spec.name.replace('-', '?')
+    home = getattr(spec.package, 'home')

    # Avoid double 'lib' for packages whose names already start with lib
    if not name.startswith('lib'):
@@ -990,12 +993,12 @@

    for shared in search_shared:
        libs = fs.find_libraries(
-            name, spec.prefix, shared=shared, recursive=True)
+            name, home, shared=shared, recursive=True)
        if libs:
            return libs

    msg = 'Unable to recursively locate {0} libraries in {1}'
-    raise spack.error.NoLibrariesError(msg.format(spec.name, spec.prefix))
+    raise spack.error.NoLibrariesError(msg.format(spec.name, home))


class ForwardQueryToPackage(object):
@@ -1116,6 +1119,9 @@ def __set__(self, instance, value):


class SpecBuildInterface(lang.ObjectWrapper):
+    # home is available in the base Package so no default is needed
+    home = ForwardQueryToPackage('home', default_handler=None)

    command = ForwardQueryToPackage(
        'command',
        default_handler=_command_default_handler
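Dependents query `home` through the spec build interface, so build code can write something like `spec['java'].home` and get the package's home even when it differs from the prefix; `command`, `headers`, and `libs` now all resolve relative to it. A stand-in sketch of what the forwarding boils down to (no Spack imports, illustrative classes only):

class FakePackage(object):
    def __init__(self, prefix, home=None):
        self.prefix = prefix
        self._home = home

    @property
    def home(self):
        # Falls back to prefix, exactly like the base Package.home property.
        return self._home or self.prefix

def query_home(spec_package):
    # What ForwardQueryToPackage('home', default_handler=None) boils down to:
    # forward the attribute lookup to the package; no default handler is
    # needed because every package inherits a home property.
    return getattr(spec_package, 'home')

print(query_home(FakePackage('/opt/openjdk-17')))                      # prefix
print(query_home(FakePackage('/opt/openjdk-17', '/opt/openjdk-17/jdk')))  # custom home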
@@ -363,12 +363,13 @@ def __exit__(self, exc_type, exc_val, exc_tb):
    def expected_archive_files(self):
        """Possible archive file paths."""
        paths = []

        fnames = []
        expanded = True
        if isinstance(self.default_fetcher, fs.URLFetchStrategy):
            expanded = self.default_fetcher.expand_archive
-            fnames.append(os.path.basename(self.default_fetcher.url))
+            clean_url = os.path.basename(
+                sup.sanitize_file_path(self.default_fetcher.url))
+            fnames.append(clean_url)

        if self.mirror_paths:
            fnames.extend(os.path.basename(x) for x in self.mirror_paths)
@@ -180,3 +180,20 @@ def test_status_function_find_files(

    _, missing = spack.bootstrap.status_message('optional')
    assert missing is expected_missing


+@pytest.mark.regression('31042')
+def test_source_is_disabled(mutable_config):
+    # Get the configuration dictionary of the current bootstrapping source
+    conf = next(iter(spack.bootstrap.bootstrapping_sources()))
+
+    # The source is not explicitly enabled or disabled, so the following
+    # call should raise to skip using it for bootstrapping
+    with pytest.raises(ValueError):
+        spack.bootstrap.source_is_enabled_or_raise(conf)
+
+    # Try to explicitly disable the source and verify that the behavior
+    # is the same as above
+    spack.config.add('bootstrap:trusted:{0}:{1}'.format(conf['name'], False))
+    with pytest.raises(ValueError):
+        spack.bootstrap.source_is_enabled_or_raise(conf)
@@ -45,21 +45,22 @@ def test_negative_integers_not_allowed_for_parallel_jobs(job_parser):
    assert 'expected a positive integer' in str(exc_info.value)


-@pytest.mark.parametrize('specs,expected_variants,unexpected_variants', [
-    (['coreutils', 'cflags=-O3 -g'], [], ['g']),
-    (['coreutils', 'cflags=-O3', '-g'], ['g'], []),
+@pytest.mark.parametrize('specs,cflags,negated_variants', [
+    (['coreutils cflags="-O3 -g"'], ['-O3', '-g'], []),
+    (['coreutils', 'cflags=-O3 -g'], ['-O3'], ['g']),
+    (['coreutils', 'cflags=-O3', '-g'], ['-O3'], ['g']),
])
@pytest.mark.regression('12951')
-def test_parse_spec_flags_with_spaces(
-    specs, expected_variants, unexpected_variants
-):
+def test_parse_spec_flags_with_spaces(specs, cflags, negated_variants):
    spec_list = spack.cmd.parse_specs(specs)
    assert len(spec_list) == 1

    s = spec_list.pop()

-    assert all(x not in s.variants for x in unexpected_variants)
-    assert all(x in s.variants for x in expected_variants)
+    assert s.compiler_flags['cflags'] == cflags
+    assert list(s.variants.keys()) == negated_variants
+    for v in negated_variants:
+        assert '~{0}'.format(v) in s


@pytest.mark.usefixtures('config')
@@ -29,22 +29,6 @@ def _create_config(scope=None, data={}, section='packages'):
    return cfg_file


-@pytest.fixture()
-def packages_yaml_v015(mutable_config):
-    """Create a packages.yaml in the old format"""
-    old_data = {
-        'packages': {
-            'cmake': {
-                'paths': {'cmake@3.14.0': '/usr'}
-            },
-            'gcc': {
-                'modules': {'gcc@8.3.0': 'gcc-8'}
-            }
-        }
-    }
-    return functools.partial(_create_config, data=old_data, section='packages')


@pytest.fixture()
def config_yaml_v015(mutable_config):
    """Create a config.yaml in the old format"""
@@ -493,19 +477,6 @@ def test_config_remove_from_env(mutable_empty_config, mutable_mock_env_path):
    assert output == expected


-def test_config_update_packages(packages_yaml_v015):
-    """Test Spack updating old packages.yaml format for externals
-    to new format. Ensure that data is preserved and converted
-    properly.
-    """
-    packages_yaml_v015()
-    config('update', '-y', 'packages')
-
-    # Check the entries have been transformed
-    data = spack.config.get('packages')
-    check_packages_updated(data)


def test_config_update_config(config_yaml_v015):
    config_yaml_v015()
    config('update', '-y', 'config')
@@ -522,100 +493,26 @@ def test_config_update_not_needed(mutable_config):
    assert data_before == data_after


def test_config_update_fail_on_permission_issue(
    packages_yaml_v015, monkeypatch
):
    # The first time it will update and create the backup file
    packages_yaml_v015()
    # Mock a global scope where we cannot write
    monkeypatch.setattr(
        spack.cmd.config, '_can_update_config_file', lambda x, y: False
    )
    with pytest.raises(spack.main.SpackCommandError):
        config('update', '-y', 'packages')


def test_config_revert(packages_yaml_v015):
    cfg_file = packages_yaml_v015()
    bkp_file = cfg_file + '.bkp'

    config('update', '-y', 'packages')

    # Check that the backup file exists, compute its md5 sum
    assert os.path.exists(bkp_file)
    md5bkp = fs.md5sum(bkp_file)

    config('revert', '-y', 'packages')

    # Check that the backup file does not exist anymore and
    # that the md5 sum of the configuration file is the same
    # as that of the old backup file
    assert not os.path.exists(bkp_file)
    assert md5bkp == fs.md5sum(cfg_file)


def test_config_revert_raise_if_cant_write(packages_yaml_v015, monkeypatch):
    packages_yaml_v015()
    config('update', '-y', 'packages')

    # Mock a global scope where we cannot write
    monkeypatch.setattr(
        spack.cmd.config, '_can_revert_update', lambda x, y, z: False
    )
    # The command raises with a helpful error if a configuration
    # file is to be deleted and we don't have sufficient permissions
    with pytest.raises(spack.main.SpackCommandError):
        config('revert', '-y', 'packages')


def test_updating_config_implicitly_raises(packages_yaml_v015):
    # Trying to write implicitly to a scope with a configuration file
    # in the old format raises an exception
    packages_yaml_v015()
    with pytest.raises(RuntimeError):
        config('add', 'packages:cmake:buildable:false')


def test_updating_multiple_scopes_at_once(packages_yaml_v015):
    # Create 2 config files in the old format
    packages_yaml_v015(scope='user')
    packages_yaml_v015(scope='site')

    # Update both of them at once
    config('update', '-y', 'packages')

    for scope in ('user', 'site'):
        data = spack.config.get('packages', scope=scope)
        check_packages_updated(data)


@pytest.mark.regression('18031')
def test_config_update_can_handle_comments(mutable_config):
    # Create an outdated config file with comments
    scope = spack.config.default_modify_scope()
-    cfg_file = spack.config.config.get_config_filename(scope, 'packages')
+    cfg_file = spack.config.config.get_config_filename(scope, 'config')
    with open(cfg_file, mode='w') as f:
        f.write("""
-packages:
-  # system cmake in /usr
-  cmake:
-    paths:
-      cmake@3.14.0: /usr
-    # Another comment after the outdated section
-    buildable: False
+config:
+  install_tree: './foo'
+  # Another comment after the outdated section
+  install_hash_length: 7
""")

    # Try to update it, it should not raise errors
-    config('update', '-y', 'packages')
+    config('update', '-y', 'config')

    # Check data
-    data = spack.config.get('packages', scope=scope)
-    assert 'paths' not in data['cmake']
-    assert 'externals' in data['cmake']
-    externals = data['cmake']['externals']
-    assert len(externals) == 1
-    assert externals[0]['spec'] == 'cmake@3.14.0'
-    assert externals[0]['prefix'] == '/usr'
+    data = spack.config.get('config', scope=scope)
+    assert 'root' in data['install_tree']

    # Check the comment is there
    with open(cfg_file) as f:
@@ -627,39 +524,21 @@ def test_config_update_can_handle_comments(mutable_config):

@pytest.mark.regression('18050')
def test_config_update_works_for_empty_paths(mutable_config):
    # Create an outdated config file with empty "paths" and "modules"
    scope = spack.config.default_modify_scope()
-    cfg_file = spack.config.config.get_config_filename(scope, 'packages')
+    cfg_file = spack.config.config.get_config_filename(scope, 'config')
    with open(cfg_file, mode='w') as f:
        f.write("""
-packages:
-  cmake:
-    paths: {}
-    modules: {}
-    buildable: False
+config:
+  install_tree: ''
""")

    # Try to update it, it should not raise errors
-    output = config('update', '-y', 'packages')
+    output = config('update', '-y', 'config')

    # This ensures that we updated the configuration
    assert '[backup=' in output


def check_packages_updated(data):
    """Check that the data from the packages_yaml_v015
    has been updated.
    """
    assert 'externals' in data['cmake']
    externals = data['cmake']['externals']
    assert {'spec': 'cmake@3.14.0', 'prefix': '/usr'} in externals
    assert 'paths' not in data['cmake']
    assert 'externals' in data['gcc']
    externals = data['gcc']['externals']
    assert {'spec': 'gcc@8.3.0', 'modules': ['gcc-8']} in externals
    assert 'modules' not in data['gcc']


def check_config_updated(data):
    assert isinstance(data['install_tree'], dict)
    assert data['install_tree']['root'] == '/fake/path'
@@ -160,3 +160,9 @@ def _parse_name_offset(path, v):
    spack.cmd.create.get_name(args)
    captured = capsys.readouterr()
    assert "Couldn't guess a name" in str(captured)


+def test_no_url(parser):
+    """Test creation of package without a URL."""
+    args = parser.parse_args(['--skip-editor', '-n', 'create-new-package'])
+    spack.cmd.create.create(parser, args)
@@ -2288,76 +2288,6 @@ def test_env_write_only_non_default_nested(tmpdir):
    assert manifest == contents


-@pytest.fixture
-def packages_yaml_v015(tmpdir):
-    """Return the path to an existing manifest in the v0.15.x format
-    and the path to a not-yet-existing backup file.
-    """
-    raw_yaml = """
-spack:
-  specs:
-  - mpich
-  packages:
-    cmake:
-      paths:
-        cmake@3.17.3: /usr
-"""
-    manifest = tmpdir.ensure('spack.yaml')
-    backup_file = tmpdir.join('spack.yaml.bkp')
-    manifest.write(raw_yaml)
-    return manifest, backup_file
-
-
-def test_update_anonymous_env(packages_yaml_v015):
-    manifest, backup_file = packages_yaml_v015
-    env('update', '-y', str(manifest.dirname))
-
-    # The environment is now at the latest format
-    assert ev.is_latest_format(str(manifest))
-    # A backup file has been created and it's not at the latest format
-    assert os.path.exists(str(backup_file))
-    assert not ev.is_latest_format(str(backup_file))
-
-
-def test_double_update(packages_yaml_v015):
-    manifest, backup_file = packages_yaml_v015
-
-    # Update the environment
-    env('update', '-y', str(manifest.dirname))
-    # Try to read the environment (it should not error)
-    ev.create('test', str(manifest))
-    # Updating again does nothing since the manifest is up-to-date
-    env('update', '-y', str(manifest.dirname))
-
-    # The environment is at the latest format
-    assert ev.is_latest_format(str(manifest))
-    # A backup file has been created and it's not at the latest format
-    assert os.path.exists(str(backup_file))
-    assert not ev.is_latest_format(str(backup_file))
-
-
-def test_update_and_revert(packages_yaml_v015):
-    manifest, backup_file = packages_yaml_v015
-
-    # Update the environment
-    env('update', '-y', str(manifest.dirname))
-    assert os.path.exists(str(backup_file))
-    assert not ev.is_latest_format(str(backup_file))
-    assert ev.is_latest_format(str(manifest))
-
-    # Revert to previous state
-    env('revert', '-y', str(manifest.dirname))
-    assert not os.path.exists(str(backup_file))
-    assert not ev.is_latest_format(str(manifest))
-
-
-def test_old_format_cant_be_updated_implicitly(packages_yaml_v015):
-    manifest, backup_file = packages_yaml_v015
-    env('activate', str(manifest.dirname))
-    with pytest.raises(spack.main.SpackCommandError):
-        add('hdf5')


@pytest.mark.parametrize('concretization,unify', [
    ('together', 'true'),
    ('separately', 'false')
@@ -4,10 +4,12 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import re
+from textwrap import dedent

import pytest

import spack.environment as ev
+import spack.error
import spack.spec
import spack.store
from spack.main import SpackCommand, SpackCommandError
@@ -55,6 +57,54 @@ def test_spec_concretizer_args(mutable_config, mutable_database):
    assert h in output


+def test_spec_parse_dependency_variant_value():
+    """Verify that we can provide multiple key=value variants to multiple separate
+    packages within a spec string."""
+    output = spec('multivalue-variant fee=barbaz ^ a foobar=baz')
+
+    assert 'fee=barbaz' in output
+    assert 'foobar=baz' in output
+
+
+def test_spec_parse_cflags_quoting():
+    """Verify that compiler flags can be provided to a spec from the command line."""
+    output = spec('--yaml', 'gcc cflags="-Os -pipe" cxxflags="-flto -Os"')
+    gh_flagged = spack.spec.Spec.from_yaml(output)
+
+    assert ['-Os', '-pipe'] == gh_flagged.compiler_flags['cflags']
+    assert ['-flto', '-Os'] == gh_flagged.compiler_flags['cxxflags']
+
+
+def test_spec_parse_unquoted_flags_report():
+    """Verify that a useful error message is produced if unquoted compiler flags are
+    provided."""
+    # This should fail during parsing, since /usr/include is interpreted as a spec hash.
+    with pytest.raises(spack.error.SpackError) as cm:
+        # We don't try to figure out how many following args were intended to be part of
+        # cflags, we just explain how to fix it for the immediate next arg.
+        spec('gcc cflags=-Os -pipe -other-arg-that-gets-ignored cflags=-I /usr/include')
+    # Verify that the generated error message is nicely formatted.
+    assert str(cm.value) == dedent('''\
+        No installed spec matches the hash: 'usr'
+
+        Some compiler or linker flags were provided without quoting their arguments,
+        which now causes spack to try to parse the *next* argument as a spec component
+        such as a variant instead of an additional compiler or linker flag. If the
+        intent was to set multiple flags, try quoting them together as described below.
+
+        Possible flag quotation errors (with the correctly-quoted version after the =>):
+        (1) cflags=-Os -pipe => cflags="-Os -pipe"
+        (2) cflags=-I /usr/include => cflags="-I /usr/include"''')
+
+    # Verify that the same unquoted cflags report is generated in the error message even
+    # if it fails during concretization, not just during parsing.
+    with pytest.raises(spack.error.SpackError) as cm:
+        spec('gcc cflags=-Os -pipe')
+    cm = str(cm.value)
+    assert cm.startswith('trying to set variant "pipe" in package "gcc", but the package has no such variant [happened during concretization of gcc cflags="-Os" ~pipe]')  # noqa: E501
+    assert cm.endswith('(1) cflags=-Os -pipe => cflags="-Os -pipe"')


def test_spec_yaml():
    output = spec('--yaml', 'mpileaks')

@@ -125,14 +175,14 @@ def test_spec_returncode():


def test_spec_parse_error():
-    with pytest.raises(spack.spec.SpecParseError) as e:
+    with pytest.raises(spack.error.SpackError) as e:
        spec("1.15:")

    # make sure the error is formatted properly
    error_msg = """\
1.15:
     ^"""
-    assert error_msg in e.value.long_message
+    assert error_msg in str(e.value)


def test_env_aware_spec(mutable_mock_env_path):
@@ -98,6 +98,26 @@ def test_changed_files(flake8_package):
    assert flake8_package in files


+def test_changed_files_from_git_rev_base(tmpdir, capfd):
+    """Test arbitrary git ref as base."""
+    git = which("git", required=True)
+    with tmpdir.as_cwd():
+        git("init")
+        git("checkout", "-b", "main")
+        git("config", "user.name", "test user")
+        git("config", "user.email", "test@user.com")
+        git("commit", "--allow-empty", "-m", "initial commit")
+
+        tmpdir.ensure('bin/spack')
+        assert changed_files(base="HEAD") == ['bin/spack']
+        assert changed_files(base="main") == ['bin/spack']
+
+        git("add", 'bin/spack')
+        git("commit", "-m", "v1")
+        assert changed_files(base="HEAD") == []
+        assert changed_files(base="HEAD~") == ["bin/spack"]


def test_changed_no_base(tmpdir, capfd):
    """Ensure that we fail gracefully with no base branch."""
    tmpdir.join("bin").ensure("spack")
@@ -113,7 +133,7 @@ def test_changed_no_base(tmpdir, capfd):
        changed_files(base="foobar")

    out, err = capfd.readouterr()
-    assert "This repository does not have a 'foobar' branch." in err
+    assert "This repository does not have a 'foobar'" in err


def test_changed_files_all_files(flake8_package):
@@ -1732,3 +1732,79 @@ def test_best_effort_coconcretize_preferences(
            if expected_spec in spec:
                counter += 1
        assert counter == occurances, concrete_specs

+    @pytest.mark.regression('30864')
+    def test_misleading_error_message_on_version(self, mutable_database):
+        # For this bug to be triggered we need a reusable dependency
+        # that is not optimal in terms of optimization scores.
+        # We pick an old version of "b"
+        import spack.solver.asp
+        if spack.config.get('config:concretizer') == 'original':
+            pytest.skip('Original concretizer cannot reuse')
+
+        reusable_specs = [
+            spack.spec.Spec('non-existing-conditional-dep@1.0').concretized()
+        ]
+        root_spec = spack.spec.Spec('non-existing-conditional-dep@2.0')
+
+        with spack.config.override("concretizer:reuse", True):
+            solver = spack.solver.asp.Solver()
+            setup = spack.solver.asp.SpackSolverSetup()
+            with pytest.raises(spack.solver.asp.UnsatisfiableSpecError,
+                               match="'dep-with-variants' satisfies '@999'"):
+                solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
+
+    @pytest.mark.regression('31148')
+    def test_version_weight_and_provenance(self):
+        """Test package preferences during coconcretization."""
+        import spack.solver.asp
+        if spack.config.get('config:concretizer') == 'original':
+            pytest.skip('Original concretizer cannot reuse')
+
+        reusable_specs = [
+            spack.spec.Spec(spec_str).concretized()
+            for spec_str in ('b@0.9', 'b@1.0')
+        ]
+        root_spec = spack.spec.Spec('a foobar=bar')
+
+        with spack.config.override("concretizer:reuse", True):
+            solver = spack.solver.asp.Solver()
+            setup = spack.solver.asp.SpackSolverSetup()
+            result = solver.driver.solve(
+                setup, [root_spec], reuse=reusable_specs, out=sys.stdout
+            )
+            # The result here should have a single spec to build ('a')
+            # and it should be using b@1.0 with a version badness of 2
+            # The provenance is:
+            # version_declared("b","1.0",0,"package_py").
+            # version_declared("b","0.9",1,"package_py").
+            # version_declared("b","1.0",2,"installed").
+            # version_declared("b","0.9",3,"installed").
+            for criterion in [
+                (1, None, 'number of packages to build (vs. reuse)'),
+                (2, 0, 'version badness')
+            ]:
+                assert criterion in result.criteria
+            assert result.specs[0].satisfies('^b@1.0')
+
+    @pytest.mark.regression('31169')
+    def test_not_reusing_incompatible_os_or_compiler(self):
+        import spack.solver.asp
+        if spack.config.get('config:concretizer') == 'original':
+            pytest.skip('Original concretizer cannot reuse')
+
+        root_spec = spack.spec.Spec('b')
+        s = root_spec.concretized()
+        wrong_compiler, wrong_os = s.copy(), s.copy()
+        wrong_compiler.compiler = spack.spec.CompilerSpec('gcc@12.1.0')
+        wrong_os.architecture = spack.spec.ArchSpec('test-ubuntu2204-x86_64')
+        reusable_specs = [wrong_compiler, wrong_os]
+        with spack.config.override("concretizer:reuse", True):
+            solver = spack.solver.asp.Solver()
+            setup = spack.solver.asp.SpackSolverSetup()
+            result = solver.driver.solve(
+                setup, [root_spec], reuse=reusable_specs, out=sys.stdout
+            )
+        concrete_spec = result.specs[0]
+        assert concrete_spec.satisfies('%gcc@4.5.0')
+        assert concrete_spec.satisfies('os=debian6')
1  lib/spack/spack/test/data/compression/Foo  (new file)
@@ -0,0 +1 @@
TEST
BIN  lib/spack/spack/test/data/compression/Foo.Z  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.bz2  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.gz  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.tar  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.tar.Z  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.tar.bz2  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.tar.gz  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.tar.xz  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.tbz  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.tbz2  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.tgz  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.txz  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.xz  (new file; binary file not shown)
BIN  lib/spack/spack/test/data/compression/Foo.zip  (new file; binary file not shown)
@@ -326,6 +326,37 @@ def test_gitsubmodule(submodules, mock_git_repository, config,
    assert not os.path.isfile(file_path)


@pytest.mark.disable_clean_stage_check
def test_gitsubmodules_callable(
        mock_git_repository, config, mutable_mock_repo, monkeypatch
):
    """
    Test GitFetchStrategy behavior with submodules selected after concretization
    """
    def submodules_callback(package):
        name = 'third_party/submodule0'
        return [name]

    type_of_test = 'tag-branch'
    t = mock_git_repository.checks[type_of_test]

    # Construct the package under test
    spec = Spec('git-test')
    spec.concretize()
    pkg = spack.repo.get(spec)
    args = copy.copy(t.args)
    args['submodules'] = submodules_callback
    monkeypatch.setitem(pkg.versions, ver('git'), args)
    pkg.do_stage()
    with working_dir(pkg.stage.source_path):
        file_path = os.path.join(pkg.stage.source_path,
                                 'third_party/submodule0/r0_file_0')
        assert os.path.isfile(file_path)
        file_path = os.path.join(pkg.stage.source_path,
                                 'third_party/submodule1/r0_file_1')
        assert not os.path.isfile(file_path)


@pytest.mark.disable_clean_stage_check
def test_gitsubmodules_delete(
        mock_git_repository, config, mutable_mock_repo, monkeypatch
@@ -53,6 +53,50 @@ def test_install_and_uninstall(install_mockery, mock_fetch, monkeypatch):
        raise


def test_pkg_attributes(install_mockery, mock_fetch, monkeypatch):
    # Get a basic concrete spec for the dummy package.
    spec = Spec('attributes-foo-app ^attributes-foo')
    spec.concretize()
    assert spec.concrete

    pkg = spec.package
    pkg.do_install()
    foo = 'attributes-foo'
    assert spec['bar'].prefix == spec[foo].prefix
    assert spec['baz'].prefix == spec[foo].prefix

    assert spec[foo].home == spec[foo].prefix
    assert spec['bar'].home == spec[foo].home
    assert spec['baz'].home == spec[foo].prefix.baz

    foo_headers = spec[foo].headers
    # assert foo_headers.basenames == ['foo.h']
    assert foo_headers.directories == [spec[foo].home.include]
    bar_headers = spec['bar'].headers
    # assert bar_headers.basenames == ['bar.h']
    assert bar_headers.directories == [spec['bar'].home.include]
    baz_headers = spec['baz'].headers
    # assert baz_headers.basenames == ['baz.h']
    assert baz_headers.directories == [spec['baz'].home.include]

    if 'platform=windows' in spec:
        lib_suffix = '.lib'
    elif 'platform=darwin' in spec:
        lib_suffix = '.dylib'
    else:
        lib_suffix = '.so'

    foo_libs = spec[foo].libs
    assert foo_libs.basenames == ['libFoo' + lib_suffix]
    assert foo_libs.directories == [spec[foo].home.lib64]
    bar_libs = spec['bar'].libs
    assert bar_libs.basenames == ['libFooBar' + lib_suffix]
    assert bar_libs.directories == [spec['bar'].home.lib64]
    baz_libs = spec['baz'].libs
    assert baz_libs.basenames == ['libFooBaz' + lib_suffix]
    assert baz_libs.directories == [spec['baz'].home.lib]


def mock_remove_prefix(*args):
    raise MockInstallError(
        "Intentional error",
@@ -379,9 +423,8 @@ def test_failing_build(install_mockery, mock_fetch, capfd):
    spec = Spec('failing-build').concretized()
    pkg = spec.package

    with pytest.raises(spack.build_environment.ChildError):
    with pytest.raises(spack.build_environment.ChildError, match='Expected failure'):
        pkg.do_install()
    assert 'InstallError: Expected Failure' in capfd.readouterr()[0]


class MockInstallError(spack.error.SpackError):
@@ -414,9 +457,27 @@ def test_nosource_pkg_install(
        pkg.do_install()
    out = capfd.readouterr()
    assert "Installing dependency-install" in out[0]

    # Make sure a warning for missing code is issued
    assert "Missing a source id for nosource" in out[1]


@pytest.mark.disable_clean_stage_check
def test_nosource_bundle_pkg_install(
        install_mockery, mock_fetch, mock_packages, capfd):
    """Test install phases with the nosource-bundle package."""
    spec = Spec('nosource-bundle').concretized()
    pkg = spec.package

    # Make sure install works even though there is no associated code.
    pkg.do_install()
    out = capfd.readouterr()
    assert "Installing dependency-install" in out[0]

    # Make sure a warning for missing code is *not* issued
    assert "Missing a source id for nosource" not in out[1]


def test_nosource_pkg_install_post_install(
        install_mockery, mock_fetch, mock_packages):
    """Test install phases with the nosource package with post-install."""
@@ -594,3 +655,16 @@ def test_install_error():
    assert exc.__class__.__name__ == 'InstallError'
    assert exc.message == msg
    assert exc.long_message == long_msg


@pytest.mark.disable_clean_stage_check
def test_empty_install_sanity_check_prefix(
        monkeypatch, install_mockery, mock_fetch, mock_packages
):
    """Test empty install triggers sanity_check_prefix."""
    spec = Spec('failing-empty-install').concretized()
    with pytest.raises(
            spack.build_environment.ChildError,
            match='Nothing was installed'
    ):
        spec.package.do_install()
@@ -294,37 +294,14 @@ def test_set_elf_rpaths_warning(mock_patchelf):
    assert output is None


def test_relocate_binary_text(tmpdir):
    filename = str(tmpdir.join('binary'))
    with open(filename, 'wb') as f:
        f.write(b'somebinarytext')
        f.write(b'/usr/relpath')
        f.write(b'\0')
        f.write(b'morebinarytext')

    spack.relocate._relocate_binary_text(filename, {14: '/usr'}, {'/usr': '/foo'})
    with open(filename, 'rb') as f:
        contents = f.read()
    assert b'/foo/relpath\0' in contents


@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc')
@skip_unless_linux
def test_relocate_binary(hello_world):
def test_replace_prefix_bin(hello_world):
    # Compile a "Hello world!" executable and set RPATHs
    executable = hello_world(rpaths=['/usr/lib', '/usr/lib64'])

    with open(str(executable), 'rb') as f:
        contents = f.read()
    index_0 = contents.index(b'/usr')
    index_1 = contents.index(b'/usr', index_0 + 1)
    offsets = {index_0: '/usr/lib', index_1: '/usr/lib64'}

    # Relocate the RPATHs
    spack.relocate._relocate_binary_text(
        str(executable), offsets,
        {'/usr/lib': '/foo/lib', '/usr/lib64': '/foo/lib64'}
    )
    spack.relocate._replace_prefix_bin(str(executable), {b'/usr': b'/foo'})

    # Some compilers add rpaths, so ensure the changes are included in the final result
    assert '/foo/lib:/foo/lib64' in rpaths_for(executable)
@@ -413,11 +390,13 @@ def test_relocate_text_bin(hello_world, copy_binary, tmpdir):
    assert not text_in_bin(str(new_binary.dirpath()), new_binary)

    # Check this call succeeds
    orig_path_bytes = str(orig_binary.dirpath())
    new_path_bytes = str(new_binary.dirpath())
    orig_path_bytes = str(orig_binary.dirpath()).encode('utf-8')
    new_path_bytes = str(new_binary.dirpath()).encode('utf-8')

    spack.relocate.relocate_text_bin(
        [str(new_binary)], {orig_path_bytes: new_path_bytes})
        [str(new_binary)],
        {orig_path_bytes: new_path_bytes}
    )

    # Check the original directory is not there anymore and was
    # substituted with the new one
@@ -426,14 +405,15 @@ def test_relocate_text_bin(hello_world, copy_binary, tmpdir):


def test_relocate_text_bin_raise_if_new_prefix_is_longer(tmpdir):
    short_prefix = '/short'
    long_prefix = '/much/longer'
    short_prefix = b'/short'
    long_prefix = b'/much/longer'
    fpath = str(tmpdir.join('fakebin'))
    with open(fpath, 'w') as f:
        f.write('/short')
    with pytest.raises(spack.relocate.BinaryTextReplaceError):
        spack.relocate.relocate_text_bin(
            [fpath], {short_prefix: long_prefix})
            [fpath], {short_prefix: long_prefix}
        )


@pytest.mark.requires_executables('install_name_tool', 'file', 'cc')
@@ -15,6 +15,7 @@
    SpecFormatSigilError,
    SpecFormatStringError,
    UnconstrainableDependencySpecError,
    UnsupportedCompilerError,
)
from spack.variant import (
    InvalidVariantValueError,
@@ -1320,3 +1321,8 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):

    # make sure package hash is NOT recomputed
    assert not getattr(spec["dt-diamond-bottom"], '_package_hash', None)


def test_unsupported_compiler():
    with pytest.raises(UnsupportedCompilerError):
        Spec('gcc%fake-compiler').validate_or_raise()
@@ -8,7 +8,10 @@
The YAML and JSON formats preserve DAG information in the spec.

"""
from __future__ import print_function

import ast
import collections
import inspect
import os

@@ -433,3 +436,75 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
    spec = Spec.from_yaml(yaml)
    concrete_spec = spec.concretized()
    assert concrete_spec.eq_dag(spec)


#: A well-ordered Spec dictionary, using ``OrderedDict``.
#: Any operation that transforms Spec dictionaries should
#: preserve this order.
ordered_spec = collections.OrderedDict([
    ("arch", collections.OrderedDict([
        ("platform", "darwin"),
        ("platform_os", "bigsur"),
        ("target", collections.OrderedDict([
            ("features", [
                "adx",
                "aes",
                "avx",
                "avx2",
                "bmi1",
                "bmi2",
                "clflushopt",
                "f16c",
                "fma",
                "mmx",
                "movbe",
                "pclmulqdq",
                "popcnt",
                "rdrand",
                "rdseed",
                "sse",
                "sse2",
                "sse4_1",
                "sse4_2",
                "ssse3",
                "xsavec",
                "xsaveopt"
            ]),
            ("generation", 0),
            ("name", "skylake"),
            ("parents", ["broadwell"]),
            ("vendor", "GenuineIntel"),
        ])),
    ])),
    ("compiler", collections.OrderedDict([
        ("name", "apple-clang"),
        ("version", "13.0.0"),
    ])),
    ("name", "zlib"),
    ("namespace", "builtin"),
    ("parameters", collections.OrderedDict([
        ("cflags", []),
        ("cppflags", []),
        ("cxxflags", []),
        ("fflags", []),
        ("ldflags", []),
        ("ldlibs", []),
        ("optimize", True),
        ("pic", True),
        ("shared", True),
    ])),
    ("version", "1.2.11"),
])


@pytest.mark.regression("31092")
def test_strify_preserves_order():
    """Ensure that ``spack_json._strify()`` dumps dictionaries in the right order.

    ``_strify()`` is used in ``spack_json.dump()``, which is used in
    ``Spec.dag_hash()``, so if this goes wrong, ``Spec`` hashes can vary between python
    versions.

    """
    strified = sjson._strify(ordered_spec)
    assert list(ordered_spec.items()) == list(strified.items())
98  lib/spack/spack/test/util/compression.py  (new file)
@@ -0,0 +1,98 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import shutil

import pytest

from llnl.util.filesystem import working_dir

from spack.paths import spack_root
from spack.util import compression as scomp
from spack.util.executable import CommandNotFoundError

datadir = os.path.join(spack_root, 'lib', 'spack',
                       'spack', 'test', 'data', 'compression')

ext_archive = {}
[ext_archive.update({ext: '.'.join(['Foo', ext])}) for
 ext in scomp.ALLOWED_ARCHIVE_TYPES if 'TAR' not in ext]


def support_stub():
    return False


@pytest.fixture
def compr_support_check(monkeypatch):
    monkeypatch.setattr(scomp, 'lzma_support', support_stub)
    monkeypatch.setattr(scomp, 'tar_support', support_stub)
    monkeypatch.setattr(scomp, 'gzip_support', support_stub)
    monkeypatch.setattr(scomp, 'bz2_support', support_stub)


@pytest.fixture
def archive_file(tmpdir_factory, request):
    """Copy example archive to temp directory for test"""
    archive_file_stub = os.path.join(datadir, 'Foo')
    extension = request.param
    tmpdir = tmpdir_factory.mktemp('compression')
    shutil.copy(archive_file_stub + '.' + extension, str(tmpdir))
    return os.path.join(str(tmpdir), 'Foo.%s' % extension)


@pytest.mark.parametrize('archive_file', ext_archive.keys(), indirect=True)
def test_native_unpacking(tmpdir_factory, archive_file):
    extension = scomp.extension(archive_file)
    util = scomp.decompressor_for(archive_file, extension)
    tmpdir = tmpdir_factory.mktemp("comp_test")
    with working_dir(str(tmpdir)):
        assert not os.listdir(os.getcwd())
        util(archive_file)
        files = os.listdir(os.getcwd())
        assert len(files) == 1
        with open(files[0], 'r') as f:
            contents = f.read()
        assert 'TEST' in contents


@pytest.mark.parametrize('archive_file', ext_archive.keys(), indirect=True)
def test_system_unpacking(tmpdir_factory, archive_file, compr_support_check):
    extension = scomp.extension(archive_file)
    # actually run test
    util = scomp.decompressor_for(archive_file, extension)
    tmpdir = tmpdir_factory.mktemp("system_comp_test")
    with working_dir(str(tmpdir)):
        assert not os.listdir(os.getcwd())
        util(archive_file)
        files = os.listdir(os.getcwd())
        assert len(files) == 1
        with open(files[0], 'r') as f:
            contents = f.read()
        assert 'TEST' in contents


def test_unallowed_extension():
    bad_ext_archive = 'Foo.py'
    with pytest.raises(CommandNotFoundError):
        scomp.decompressor_for(bad_ext_archive, 'py')


@pytest.mark.parametrize('archive', ext_archive.values())
def test_get_extension(archive):
    ext = scomp.extension(archive)
    assert ext_archive[ext] == archive


def test_get_bad_extension():
    archive = 'Foo.py'
    ext = scomp.extension(archive)
    assert ext is None


@pytest.mark.parametrize('path', ext_archive.values())
def test_allowed_archive(path):
    assert scomp.allowed_archive(path)
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import sys

import pytest
@@ -12,11 +13,8 @@
import spack.config
import spack.util.path as sup

# This module pertains to path string padding manipulation specifically,
# which is used for binary caching. This functionality is not supported
# on Windows as of yet.
pytestmark = pytest.mark.skipif(sys.platform == 'win32',
                                reason="Tests fail on Windows")
is_windows = sys.platform == 'win32'


#: Some lines with lots of placeholders
padded_lines = [
@@ -34,74 +32,87 @@
]


@pytest.mark.parametrize("padded,fixed", zip(padded_lines, fixed_lines))
def test_padding_substitution(padded, fixed):
    """Ensure that all padded lines are unpadded correctly."""
    assert fixed == sup.padding_filter(padded)
def test_sanitize_file_path(tmpdir):
    """Test filtering illegal characters out of potential file paths"""
    # *nix illegal file characters are '/' and none others
    illegal_file_path = str(tmpdir) + '//' + 'abcdefghi.txt'
    if is_windows:
        # Windows has a larger set of illegal characters
        illegal_file_path = os.path.join(tmpdir, 'a<b>cd?e:f"g|h*i.txt')
    real_path = sup.sanitize_file_path(illegal_file_path)
    assert real_path == os.path.join(str(tmpdir), 'abcdefghi.txt')


def test_no_substitution():
    """Ensure that a line not containing one full path placeholder is not modified."""
    partial = "--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_pla/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
    assert sup.padding_filter(partial) == partial
# This class pertains to path string padding manipulation specifically,
# which is used for binary caching. This functionality is not supported
# on Windows as of yet.
@pytest.mark.skipif(is_windows,
                    reason='Padding functionality unsupported on Windows')
class TestPathPadding():
    @pytest.mark.parametrize("padded,fixed", zip(padded_lines, fixed_lines))
    def test_padding_substitution(self, padded, fixed):
        """Ensure that all padded lines are unpadded correctly."""
        assert fixed == sup.padding_filter(padded)

    def test_no_substitution(self):
        """Ensure that a line not containing one full path placeholder
        is not modified."""
        partial = "--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_pla/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
        assert sup.padding_filter(partial) == partial

def test_short_substitution():
    """Ensure that a single placeholder path component is replaced"""
    short = "--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_placeholder__/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
    short_subst = "--prefix=/Users/gamblin2/padding-log-test/opt/[padded-to-63-chars]/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
    assert short_subst == sup.padding_filter(short)
    def test_short_substitution(self):
        """Ensure that a single placeholder path component is replaced"""
        short = "--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_placeholder__/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
        short_subst = "--prefix=/Users/gamblin2/padding-log-test/opt/[padded-to-63-chars]/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
        assert short_subst == sup.padding_filter(short)

    def test_partial_substitution(self):
        """Ensure that a single placeholder path component is replaced"""
        short = "--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_placeholder__/__spack_p/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
        short_subst = "--prefix=/Users/gamblin2/padding-log-test/opt/[padded-to-73-chars]/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
        assert short_subst == sup.padding_filter(short)

def test_partial_substitution():
    """Ensure that a single placeholder path component is replaced"""
    short = "--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_placeholder__/__spack_p/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
    short_subst = "--prefix=/Users/gamblin2/padding-log-test/opt/[padded-to-73-chars]/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"  # noqa: E501
    assert short_subst == sup.padding_filter(short)
    def test_longest_prefix_re(self):
        """Test that longest_prefix_re generates correct regular expressions."""
        assert "(s(?:t(?:r(?:i(?:ng?)?)?)?)?)" == sup.longest_prefix_re(
            "string", capture=True
        )
        assert "(?:s(?:t(?:r(?:i(?:ng?)?)?)?)?)" == sup.longest_prefix_re(
            "string", capture=False
        )

    def test_output_filtering(self, capfd, install_mockery, mutable_config):
        """Test filtering padding out of tty messages."""
        long_path = "/" + "/".join([sup.SPACK_PATH_PADDING_CHARS] * 200)
        padding_string = "[padded-to-%d-chars]" % len(long_path)

def test_longest_prefix_re():
    """Test that longest_prefix_re generates correct regular expressions."""
    assert "(s(?:t(?:r(?:i(?:ng?)?)?)?)?)" == sup.longest_prefix_re(
        "string", capture=True
    )
    assert "(?:s(?:t(?:r(?:i(?:ng?)?)?)?)?)" == sup.longest_prefix_re(
        "string", capture=False
    )
        # test filtering when padding is enabled
        with spack.config.override('config:install_tree', {"padded_length": 256}):
            # tty.msg with filtering on the first argument
            with sup.filter_padding():
                tty.msg("here is a long path: %s/with/a/suffix" % long_path)
            out, err = capfd.readouterr()
            assert padding_string in out

            # tty.msg with filtering on a later argument
            with sup.filter_padding():
                tty.msg("here is a long path:", "%s/with/a/suffix" % long_path)
            out, err = capfd.readouterr()
            assert padding_string in out

def test_output_filtering(capfd, install_mockery, mutable_config):
    """Test filtering padding out of tty messages."""
    long_path = "/" + "/".join([sup.SPACK_PATH_PADDING_CHARS] * 200)
    padding_string = "[padded-to-%d-chars]" % len(long_path)
            # tty.error with filtering on the first argument
            with sup.filter_padding():
                tty.error("here is a long path: %s/with/a/suffix" % long_path)
            out, err = capfd.readouterr()
            assert padding_string in err

    # test filtering when padding is enabled
    with spack.config.override('config:install_tree', {"padded_length": 256}):
        # tty.msg with filtering on the first argument
        with sup.filter_padding():
            tty.msg("here is a long path: %s/with/a/suffix" % long_path)
            # tty.error with filtering on a later argument
            with sup.filter_padding():
                tty.error("here is a long path:", "%s/with/a/suffix" % long_path)
            out, err = capfd.readouterr()
            assert padding_string in err

        # test no filtering
        tty.msg("here is a long path: %s/with/a/suffix" % long_path)
        out, err = capfd.readouterr()
        assert padding_string in out

        # tty.msg with filtering on a later argument
        with sup.filter_padding():
            tty.msg("here is a long path:", "%s/with/a/suffix" % long_path)
        out, err = capfd.readouterr()
        assert padding_string in out

        # tty.error with filtering on the first argument
        with sup.filter_padding():
            tty.error("here is a long path: %s/with/a/suffix" % long_path)
        out, err = capfd.readouterr()
        assert padding_string in err

        # tty.error with filtering on a later argument
        with sup.filter_padding():
            tty.error("here is a long path:", "%s/with/a/suffix" % long_path)
        out, err = capfd.readouterr()
        assert padding_string in err

        # test no filtering
        tty.msg("here is a long path: %s/with/a/suffix" % long_path)
        out, err = capfd.readouterr()
        assert padding_string not in out
@@ -5,10 +5,11 @@

import os
import re
import shutil
import sys
from itertools import product

from spack.util.executable import which
from spack.util.executable import CommandNotFoundError, which

# Supported archive extensions.
PRE_EXTS = ["tar", "TAR"]
@@ -22,35 +23,146 @@
is_windows = sys.platform == 'win32'


def bz2_support():
    try:
        import bz2  # noqa
        return True
    except ImportError:
        return False


def gzip_support():
    try:
        import gzip  # noqa
        return True
    except ImportError:
        return False


def lzma_support():
    try:
        import lzma  # noqa # novermin
        return True
    except ImportError:
        return False


def tar_support():
    try:
        import tarfile  # noqa
        return True
    except ImportError:
        return False


def allowed_archive(path):
    return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
    return False if not path else \
        any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
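

# Illustrative sketch (an assumption, not asserted by this diff) of the
# None-guard added above: allowed_archive() can now be passed a missing path
# without raising.
#
#   assert allowed_archive('Foo.tar.gz')
#   assert not allowed_archive(None)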


def _untar(archive_file):
    """Untar archive. Prefer native Python ``tarfile``,
    but fall back to the system utility if the native Python
    module cannot be found (tar on Unix).
    Filters archives through the natively supported gzip and xz
    compression formats.

    Args:
        archive_file (str): absolute path to the archive to be extracted.
            Can be one of .tar(.[gz|bz2|xz|Z]) or .(tgz|tbz|tbz2|txz).
    """
    _, ext = os.path.splitext(archive_file)
    outfile = os.path.basename(archive_file.strip(ext))
    uncompress_required = 'Z' in ext
    lzma_required = 'xz' in ext
    lzma_needed_and_not_available = not lzma_support() and lzma_required
    if tar_support() and not uncompress_required and\
            not lzma_needed_and_not_available:
        import tarfile
        tar = tarfile.open(archive_file)
        tar.extractall()
        tar.close()
    else:
        tar = which('tar', required=True)
        tar.add_default_arg('-oxf')
        tar(archive_file)
    return outfile


def _bunzip2(archive_file):
    """Use Python's bz2 module to decompress bz2 compressed archives.
    Fall back to the system utility if the Python module ``bz2`` cannot be found.

    Args:
        archive_file (str): absolute path to the bz2 archive to be decompressed
    """
    _, ext = os.path.splitext(archive_file)
    compressed_file_name = os.path.basename(archive_file)
    decompressed_file = os.path.basename(archive_file.strip(ext))
    working_dir = os.getcwd()
    archive_out = os.path.join(working_dir, decompressed_file)
    copy_path = os.path.join(working_dir, compressed_file_name)
    if bz2_support():
        import bz2
        f_bz = bz2.BZ2File(archive_file, mode='rb')
        with open(archive_out, 'wb') as ar:
            ar.write(f_bz.read())
        f_bz.close()
    else:
        shutil.copy(archive_file, copy_path)
        bunzip2 = which('bunzip2', required=True)
        bunzip2.add_default_arg('-q')
        return bunzip2(copy_path)
    return archive_out


def _gunzip(archive_file):
    """Like gunzip, but extracts in the current working directory
    """Decompress ``.gz`` extensions. Prefer the native Python ``gzip``
    module, falling back to the system utility gunzip.
    Like gunzip, but extracts in the current working directory
    instead of in-place.

    Args:
        archive_file (str): absolute path of the file to be decompressed
    """
    import gzip
    decompressed_file = os.path.basename(archive_file.strip('.gz'))
    _, ext = os.path.splitext(archive_file)
    decompressed_file = os.path.basename(archive_file.strip(ext))
    working_dir = os.getcwd()
    destination_abspath = os.path.join(working_dir, decompressed_file)
    with gzip.open(archive_file, "rb") as f_in:
    if gzip_support():
        import gzip
        f_in = gzip.open(archive_file, "rb")
        with open(destination_abspath, "wb") as f_out:
            f_out.write(f_in.read())
    else:
        _system_gunzip(archive_file)
    return destination_abspath


def _system_gunzip(archive_file):
    _, ext = os.path.splitext(archive_file)
    decompressed_file = os.path.basename(archive_file.strip(ext))
    working_dir = os.getcwd()
    destination_abspath = os.path.join(working_dir, decompressed_file)
    compressed_file = os.path.basename(archive_file)
    copy_path = os.path.join(working_dir, compressed_file)
    shutil.copy(archive_file, copy_path)
    gzip = which("gzip")
    gzip.add_default_arg("-d")
    gzip(copy_path)
    return destination_abspath


def _unzip(archive_file):
    """Try to use Python's zipfile, but extract in the current working
    directory instead of in-place.

    If unavailable, search for the 'unzip' executable on the system and use it instead
    """
    """
    Extract a Zipfile, searching for the unzip system executable.
    If unavailable, search for the 'tar' executable on the system and use it instead.

    Args:
        archive_file (str): absolute path of the file to be decompressed
    """

    destination_abspath = os.getcwd()
    exe = 'unzip'
    arg = '-q'
    if is_windows:
@@ -59,21 +171,119 @@ def _unzip(archive_file):
    unzip = which(exe, required=True)
    unzip.add_default_arg(arg)
    unzip(archive_file)
    return destination_abspath


def decompressor_for(path, extension=None):
    """Get the appropriate decompressor for a path."""
    if ((extension and re.match(r'\.?zip$', extension)) or
            path.endswith('.zip')):
def _unZ(archive_file):
    if is_windows:
        result = _7zip(archive_file)
    else:
        result = _system_gunzip(archive_file)
    return result


def _lzma_decomp(archive_file):
    """Decompress lzma compressed files. Prefer the native Python
    lzma module, but fall back on command-line xz tooling when no
    Python support is available. This is the xz command
    on Unix and 7z on Windows."""
    if lzma_support():
        import lzma  # novermin
        _, ext = os.path.splitext(archive_file)
        decompressed_file = os.path.basename(archive_file.strip(ext))
        archive_out = os.path.join(os.getcwd(), decompressed_file)
        with open(archive_out, 'wb') as ar:
            with lzma.open(archive_file) as lar:
                ar.write(lar.read())
    else:
        if is_windows:
            return _7zip(archive_file)
        else:
            return _xz(archive_file)


def _xz(archive_file):
    """Decompress lzma compressed .xz files via the xz command-line
    tool. Available only on Unix.
    """
    if is_windows:
        raise RuntimeError('XZ tool unavailable on Windows')
    _, ext = os.path.splitext(archive_file)
    decompressed_file = os.path.basename(archive_file.strip(ext))
    working_dir = os.getcwd()
    destination_abspath = os.path.join(working_dir, decompressed_file)
    compressed_file = os.path.basename(archive_file)
    copy_path = os.path.join(working_dir, compressed_file)
    shutil.copy(archive_file, copy_path)
    xz = which('xz', required=True)
    xz.add_default_arg('-d')
    xz(copy_path)
    return destination_abspath


def _7zip(archive_file):
    """Unpack/decompress with the 7z executable.
    7z is able to handle a number of file extensions, but
    may not be available on the system.

    Without 7z, Windows users with certain versions of Python may
    be unable to extract .xz files, and all Windows users will be unable
    to extract .Z files. If we can find neither an external 7z nor a
    Spack-installed copy, we fail, but inform the user that 7z can
    be installed via `spack install 7zip`.

    Args:
        archive_file (str): absolute path of file to be unarchived
    """
    _, ext = os.path.splitext(archive_file)
    outfile = os.path.basename(archive_file.strip(ext))
    _7z = which('7z')
    if not _7z:
        raise CommandNotFoundError("7z unavailable,\
            unable to extract %s files. 7z can be installed via Spack" % ext)
    _7z.add_default_arg('e')
    _7z(archive_file)
    return outfile


def decompressor_for(path, ext):
    """Returns a function pointer to the appropriate decompression
    algorithm based on the extension type.

    Args:
        path (str): path of the archive file requiring decompression
        ext (str): extension of the archive file
    """
    if not allowed_archive(ext):
        raise CommandNotFoundError("Cannot extract archive, \
            unrecognized file extension: '%s'" % ext)

    if re.match(r'\.?zip$', ext) or path.endswith('.zip'):
        return _unzip
    if extension and re.match(r'gz', extension):

    if re.match(r'gz', ext):
        return _gunzip
    if extension and re.match(r'bz2', extension):
        bunzip2 = which('bunzip2', required=True)
        return bunzip2
    tar = which('tar', required=True)
    tar.add_default_arg('-oxf')
    return tar

    if re.match(r'bz2', ext):
        return _bunzip2

    # Python does not have native support
    # of any kind for .Z files. In these cases,
    # we rely on external tools such as tar,
    # 7z, or uncompressZ
    if re.match(r'Z$', ext):
        return _unZ

    # Python and the platform may not have support for lzma
    # compression. If there is no lzma support, use the tools available on
    # systems: 7zip on Windows and the xz tool on Unix systems.
    if re.match(r'xz', ext):
        return _lzma_decomp

    if ('xz' in ext or 'Z' in ext) and is_windows:
        return _7zip

    return _untar
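

# Illustrative usage sketch of the rewritten API (the paths and stage_dir
# below are hypothetical; working_dir comes from llnl.util.filesystem): pick
# the decompression callable for an archive, then invoke it in the directory
# where the contents should land, since all of the helpers extract into the
# current working directory.
#
#   ext = extension('/tmp/Foo.tar.gz')
#   decompress = decompressor_for('/tmp/Foo.tar.gz', ext)
#   with working_dir(stage_dir):
#       decompress('/tmp/Foo.tar.gz')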


def strip_extension(path):

@@ -174,6 +174,8 @@ def remove(self, key):
        try:
            lock.acquire_write()
            os.unlink(self.cache_path(key))
        except FileNotFoundError:
            pass  # for thread safety when locks=False
        finally:
            lock.release_write()
            lock.cleanup()
@@ -87,6 +87,42 @@ def path_to_os_path(*pths):
    return ret_pths


def sanitize_file_path(pth):
    """
    Formats strings to contain only characters that can
    be used to generate legal file paths.

    Criteria for legal files based on
    https://en.wikipedia.org/wiki/Filename#Comparison_of_filename_limitations

    Args:
        pth: string containing path to be created
            on the host filesystem

    Return:
        sanitized string that can legally be made into a path
    """
    # on unix, splitting the path by separators will remove
    # instances of illegal characters on join
    pth_cmpnts = pth.split(os.path.sep)

    if is_windows:
        drive_match = r'[a-zA-Z]:'
        is_abs = bool(re.match(drive_match, pth_cmpnts[0]))
        drive = pth_cmpnts[0] + os.path.sep if is_abs else ''
        pth_cmpnts = pth_cmpnts[1:] if drive else pth_cmpnts
        illegal_chars = r'[<>?:"|*\\]'
    else:
        drive = '/' if not pth_cmpnts[0] else ''
        illegal_chars = r'[/]'

    pth = []
    for cmp in pth_cmpnts:
        san_cmp = re.sub(illegal_chars, '', cmp)
        pth.append(san_cmp)
    return drive + os.path.join(*pth)
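

# Illustrative behavior sketch (the strings are hypothetical): on Windows the
# characters <>?:"|* are stripped from each path component, e.g.
#
#   sanitize_file_path(r'C:\tmp\a<b>c?.txt')   # -> r'C:\tmp\abc.txt'
#
# while on POSIX only '/' is illegal within a component, so a path such as
# '/tmp/abc.txt' comes back unchanged.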


def system_path_filter(_func=None, arg_slice=None):
    """
    Filters function arguments to account for platform path separators.

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Simple wrapper around JSON to guarantee consistent use of load/dump. """
import collections
import json
from typing import Any, Dict, Optional  # novm

@@ -72,9 +73,10 @@ def _strify(data, ignore_dicts=False):
    # if this is a dictionary, return dictionary of byteified keys and values
    # but only if we haven't already byteified it
    if isinstance(data, dict) and not ignore_dicts:
        return dict((_strify(key, ignore_dicts=True),
                     _strify(value, ignore_dicts=True)) for key, value in
                    iteritems(data))
        return collections.OrderedDict(
            (_strify(key, ignore_dicts=True), _strify(value, ignore_dicts=True))
            for key, value in iteritems(data)
        )

    # if it's anything else, return it in its original form
    return data
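
# Illustrative sketch (hypothetical data): with an OrderedDict return value,
# key order survives _strify(), which keeps Spec.dag_hash() stable across
# Python versions.
#
#   data = collections.OrderedDict([("b", 1), ("a", 2)])
#   assert list(_strify(data).keys()) == ["b", "a"]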
@@ -0,0 +1,12 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class AttributesFooApp(BundlePackage):
    version('1.0')
    depends_on('bar')
    depends_on('baz')
@@ -0,0 +1,69 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class AttributesFoo(BundlePackage):
    phases = ['install']
    version('1.0')

    provides('bar')
    provides('baz')

    def install(self, spec, prefix):
        if 'platform=windows' in spec:
            lib_suffix = '.lib'
        elif 'platform=darwin' in spec:
            lib_suffix = '.dylib'
        else:
            lib_suffix = '.so'

        mkdirp(prefix.include)
        touch(prefix.include.join('foo.h'))
        mkdirp(prefix.include.bar)
        touch(prefix.include.bar.join('bar.h'))
        mkdirp(prefix.lib64)
        touch(prefix.lib64.join('libFoo' + lib_suffix))
        touch(prefix.lib64.join('libFooBar' + lib_suffix))
        mkdirp(prefix.baz.include.baz)
        touch(prefix.baz.include.baz.join('baz.h'))
        mkdirp(prefix.baz.lib)
        touch(prefix.baz.lib.join('libFooBaz' + lib_suffix))

    # Headers provided by Foo
    @property
    def headers(self):
        return find_headers('foo', root=self.home.include, recursive=False)

    # Libraries provided by Foo
    @property
    def libs(self):
        return find_libraries('libFoo', root=self.home, recursive=True)

    # Header provided by the bar virtual package
    @property
    def bar_headers(self):
        return find_headers('bar/bar', root=self.home.include, recursive=False)

    # Library provided by the bar virtual package
    @property
    def bar_libs(self):
        return find_libraries('libFooBar', root=self.home, recursive=True)

    # The baz virtual package home
    @property
    def baz_home(self):
        return self.home.baz

    # Header provided by the baz virtual package
    @property
    def baz_headers(self):
        return find_headers('baz/baz', root=self.baz_home.include, recursive=False)

    # Library provided by the baz virtual package
    @property
    def baz_libs(self):
        return find_libraries('libFooBaz', root=self.baz_home, recursive=True)
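

# Illustrative dependent-side sketch (mirrors test_pkg_attributes earlier in
# this diff): querying the 'baz' virtual on a concretized spec dispatches to
# this provider's baz_home / baz_headers / baz_libs properties.
#
#   spec = Spec('attributes-foo-app ^attributes-foo').concretized()
#   assert spec['baz'].home == spec['attributes-foo'].prefix.baz
#   assert spec['baz'].libs.directories == [spec['baz'].home.lib]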
@@ -0,0 +1,16 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *


class FailingEmptyInstall(Package):
    """This package installs nothing; install should fail."""
    homepage = "http://www.example.com/trivial_install"
    url = "http://www.unit-test-should-replace-this-url/trivial_install-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    def install(self, spec, prefix):
        pass
@@ -0,0 +1,16 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *


class NonExistingConditionalDep(Package):
    """Simple package with no source and one dependency"""

    homepage = "http://www.example.com"

    version('2.0')
    version('1.0')

    depends_on('dep-with-variants@999', when='@2.0')
@@ -0,0 +1,17 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


from spack.package import *


class NosourceBundle(BundlePackage):
    """Simple bundle package with one dependency"""

    homepage = "http://www.example.com"

    version('1.0')

    depends_on('dependency-install')
@@ -7,8 +7,8 @@
from spack.package import *


class Nosource(BundlePackage):
    """Simple bundle package with one dependency"""
class Nosource(Package):
    """Simple package with no source and one dependency"""

    homepage = "http://www.example.com"

@@ -1,41 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class Atompaw(AutotoolsPackage):
    """atompaw generates projector augmented wave (PAW)
    potentials for electronic structure calculations."""

    homepage = "https://users.wfu.edu/natalie/papers/pwpaw/"
    url = "https://users.wfu.edu/natalie/papers/pwpaw/atompaw-4.2.0.0.tar.gz"

    version('4.2.0.0', sha256='9ab4f4ab78a720fbcd95bbbc1403e8ff348d15570e7c694932a56be15985e93d')
    version('4.1.1.0', sha256='b1ee2b53720066655d98523ef337e54850cb1e68b3a2da04ff5a1576d3893891')
    version('4.1.0.6', sha256='42a46c0569367c0b971fbc3dcaf5eaec7020bdff111022b6f320de9f11c41c2c')

    variant('libxc', default=False, description='Compile with libxc')
    variant('shared', default=True)

    depends_on('libxc', when='+libxc')
    depends_on('blas')

    def configure_args(self):
        spec = self.spec

        args = ['--with-linalg-prefix=' + spec['blas'].prefix]
        args += ['--with-linalg-libs=' + ' '.join(spec['blas'].libs)]

        if '+libxc' in spec:
            args += ['--enable-libxc']
            args += ['--with-libxc-prefix=' + spec['libxc'].prefix]

        if '+shared' in spec:
            args += ['--enable-shared']
        else:
            args += ['--enable-static']

        return args
16  var/spack/repos/builtin/packages/7zip/noexcept_typedef.patch  (new file)
@@ -0,0 +1,16 @@
diff --git a/CPP/7zip/UI/Common/Update.cpp b/CPP/7zip/UI/Common/Update.prev.cpp
index 451b12c..3be3781 100644
--- a/CPP/7zip/UI/Common/Update.cpp
+++ b/CPP/7zip/UI/Common/Update.prev.cpp
@@ -1075,11 +1075,7 @@ static HRESULT EnumerateInArchiveItems(

 #if defined(_WIN32) && !defined(UNDER_CE)

-#pragma push_macro("WIN_NOEXCEPT")
-#undef WIN_NOEXCEPT
-#define WIN_NOEXCEPT
 #include <MAPI.h>
-#pragma pop_macro("WIN_NOEXCEPT")

 #endif

94  var/spack/repos/builtin/packages/7zip/package.py  (new file)
@@ -0,0 +1,94 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import glob
import os
import platform
import re
import shutil

from spack.package import *


class _7zip(SourceforgePackage, Package):
    """7-Zip is a file archiver for Windows"""

    homepage = "https://sourceforge.net/projects/sevenzip"
    sourceforge_mirror_path = 'sevenzip/files/7z2107-src.tar.xz'

    executables = ['7z']

    version('21.07', sha256='213d594407cb8efcba36610b152ca4921eda14163310b43903d13e68313e1e39')

    variant('link_type', default='shared',
            description='build shared and/or static libraries',
            values=('static', 'shared'), multi=True)

    phases = ['build', 'install']

    conflicts('platform=linux')
    conflicts('platform=darwin')
    conflicts('platform=cray')

    # TODO: Patch on WinSDK version 10.0.20348.0 when SDK is introduced to Spack
    # This patch solves a known bug in that SDK version on the 7zip side;
    # right now we patch all versions to prevent build errors
    patch('noexcept_typedef.patch', when='platform=windows')

    @classmethod
    def determine_version(cls, exe):
        output = Executable(exe)('--help', output=str, error=str)
        match = re.search(r'7-Zip ([0-9][0-9]*.[0-9][0-9])', output)
        return match.group(1) if match else None

    def url_version(self, version):
        ver_str = str(version).replace('.', '')
        return '7z' + ver_str

    @property
    def _7z_src_dir(self):
        return os.path.join(self.stage.source_path, 'CPP', '7zip')

    @property
    def plat_arch(self):
        """
        String referencing the platform architecture,
        filtered through 7zip's Windows build file
        """
        arch = platform.machine()
        if arch.lower() == 'amd64':
            arch = 'x64'
        elif arch.lower() == 'i386':
            arch = 'x86'
        return arch

    def is_64bit(self):
        return platform.machine().endswith('64')

    def build(self, spec, prefix):
        link_type = '1' if 'static' in spec.variants['link_type'].value else '0'
        nmake_args = ['PLATFORM=%s' % self.plat_arch,
                      'MY_STATIC_LINK=%s' % link_type,
                      'NEW_COMPILER=1']
        # 7zip's makefile is configured in such a way that, if this value is
        # set, compiler paths with spaces are incorrectly parsed. The compiler
        # will be inferred from VCVARS on Windows.
        os.environ.pop('CC', None)
        with working_dir(self._7z_src_dir):
            nmake(*nmake_args)

    def install(self, spec, prefix):
        """7Zip exports no install target, so we must install by hand"""
        arch_prefix = 'x64' if self.is_64bit() else 'x86'
        path_roots = ['Bundles', 'UI']
        exts = ['*.exe', '*.dll']
        with working_dir(self._7z_src_dir):
            for root in path_roots:
                pth = os.path.join(root, '*', arch_prefix)
                for ext in exts:
                    glob_str = os.path.join(pth, ext)
                    files = glob.glob(glob_str)
                    # copy each matched binary into the install prefix
                    for x in files:
                        shutil.copy(os.path.join(self._7z_src_dir, x),
                                    os.path.join(prefix, os.path.basename(x)))
@@ -22,6 +22,14 @@ class Abacus(MakefilePackage):
    url = "https://github.com/abacusmodeling/abacus-develop/archive/refs/tags/v2.2.1.tar.gz"

    version("develop", branch="develop")
    version(
        "2.2.3",
        sha256="88dbf6a3bdd907df3e097637ec8e51fde13e2f5e0b44f3667443195481320edf",
    )
    version(
        "2.2.2",
        sha256="4a7cf2ec6e43dd5c53d5f877a941367074f4714d93c1977a719782957916169e",
    )
    version(
        "2.2.1",
        sha256="14feca1d8d1ce025d3f263b85ebfbebc1a1efff704b6490e95b07603c55c1d63",
@@ -62,16 +70,15 @@ def edit(self, spec, prefix):
    FORTRAN = ifort\n\
    CPLUSPLUS = icpc\n\
    CPLUSPLUS_MPI = mpiicpc\n\
    LAPACK_DIR = %s\n\
    LAPACK_DIR = $(MKLROOT)\n\
    FFTW_DIR = %s\n\
    ELPA_DIR = %s\n\
    ELPA_INCLUDE = -I${ELPA_DIR}/include/elpa%s%s\n\
    CEREAL_DIR = %s\n\
    OBJ_DIR = obj\n\
    OBJ_DIR_serial = obj\n\
    NP = 14"
    NP = 14\n"
    % (
        spec["mkl"].prefix,
        spec["fftw"].prefix,
        spec["elpa"].prefix,
        inc_var,
@@ -39,6 +39,7 @@ class Acts(CMakePackage, CudaPackage):
|
||||
# Supported Acts versions
|
||||
version('main', branch='main')
|
||||
version('master', branch='main', deprecated=True) # For compatibility
|
||||
version('19.2.0', commit='adf079e0f7e278837093bf53988da73730804e22', submodules=True)
|
||||
version('19.1.0', commit='82f42a2cc80d4259db251275c09b84ee97a7bd22', submodules=True)
|
||||
version('19.0.0', commit='1ce9c583150060ba8388051685433899713d56d9', submodules=True)
|
||||
version('18.0.0', commit='fe03b5af6ca2b092dec87c4cef77dd552bbbe719', submodules=True)
|
||||
|
||||
@@ -21,8 +21,8 @@ class Adios2(CMakePackage, CudaPackage):
|
||||
tags = ['e4s']
|
||||
|
||||
version('master', branch='master')
|
||||
version('2.8.0', sha256='5af3d950e616989133955c2430bd09bcf6bad3a04cf62317b401eaf6e7c2d479',
|
||||
preferred=True)
|
||||
version('2.8.1', sha256='3f515b442bbd52e3189866b121613fe3b59edb8845692ea86fad83d1eba35d93')
|
||||
version('2.8.0', sha256='5af3d950e616989133955c2430bd09bcf6bad3a04cf62317b401eaf6e7c2d479')
|
||||
version('2.7.1', sha256='c8e237fd51f49d8a62a0660db12b72ea5067512aa7970f3fcf80b70e3f87ca3e')
|
||||
version('2.7.0', sha256='4b5df1a1f92d7ff380416dec7511cfcfe3dc44da27e486ed63c3e6cffb173924')
|
||||
version('2.6.0', sha256='45b41889065f8b840725928db092848b8a8b8d1bfae1b92e72f8868d1c76216c')
|
||||
|
||||
@@ -19,6 +19,7 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage):
|
||||
homepage = "https://github.com/LLNL/Aluminum"
|
||||
url = "https://github.com/LLNL/Aluminum/archive/v0.1.tar.gz"
|
||||
git = "https://github.com/LLNL/Aluminum.git"
|
||||
tags = ['ecp', 'radiuss']
|
||||
|
||||
maintainers = ['bvanessen']
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ class Amdlibflame(LibflameBase):
|
||||
|
||||
maintainers = ['amd-toolchain-support']
|
||||
|
||||
version('3.1', sha256='97c74086306fa6dea9233a3730407c400c196b55f4461d4861364b1ac131ca42')
|
||||
version('3.1', sha256='4520fb93fcc89161f65a40810cae0fa1f87cecb242da4a69655f502545a53426')
|
||||
version('3.0.1', sha256='5859e7b39ffbe73115dd598b035f212d36310462cf3a45e555a5087301710776')
|
||||
version('3.0', sha256='d94e08b688539748571e6d4c1ec1ce42732eac18bd75de989234983c33f01ced')
|
||||
version('2.2', sha256='12b9c1f92d2c2fa637305aaa15cf706652406f210eaa5cbc17aaea9fcfa576dc')
|
||||
|
||||
@@ -115,6 +115,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
|
||||
depends_on('cmake@3.14:', type='build', when='@19.04:')
|
||||
# cmake @3.17: is necessary to handle cuda @11: correctly
|
||||
depends_on('cmake@3.17:', type='build', when='^cuda @11:')
|
||||
depends_on('cmake@3.17:', type='build', when='@22.06:')
|
||||
depends_on('cmake@3.20:', type='build', when='+rocm')
|
||||
depends_on('hdf5@1.10.4: +mpi', when='+hdf5')
|
||||
depends_on('rocrand', type='build', when='+rocm')
|
||||
|
@@ -16,6 +16,8 @@ class Amrvis(MakefilePackage):
     homepage = "https://github.com/AMReX-Codes/Amrvis"
     git = "https://github.com/AMReX-Codes/Amrvis.git"

+    maintainers = ['etpalmer63']
+
     version('main', tag='main')

     variant(
@@ -158,6 +160,14 @@ def edit(self, spec, prefix):
             '# Spack removed INCLUDE_LOCATIONS and LIBRARY_LOCATIONS'
         )

+        # Rewrite configuration file with location of
+        # the color palette after install
+        configfile = FileFilter("amrvis.defaults")
+        configfile.filter(
+            r'^palette\s*Palette\s*',
+            'palette {0}/etc/Palette\n'.format(prefix)
+        )
+
         # Read GNUmakefile into array
         with open('GNUmakefile', 'r') as file:
             contents = file.readlines()
@@ -194,8 +204,15 @@ def setup_build_environment(self, env):
         env.set('CXX', self.spec['mpi'].mpicxx)
         env.set('F77', self.spec['mpi'].mpif77)
         env.set('FC', self.spec['mpi'].mpifc)
+        # Set CONFIG_FILEPATH so Amrvis can find the configuration
+        # file, amrvis.defaults.
+        env.set('CONFIG_FILEPATH', self.spec.prefix.etc)

     def install(self, spec, prefix):
         # Install exe manually
         mkdirp(prefix.bin)
         install('*.ex', prefix.bin)
+        # Install configuration file and default color Palette
+        mkdirp(prefix.etc)
+        install('amrvis.defaults', prefix.etc)
+        install('Palette', prefix.etc)

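The Amrvis hunks combine three common recipe moves: a `FileFilter` regex rewrite that points the installed config at the real Palette location, an environment variable set for the build, and a manual `install()` phase. `FileFilter.filter(regex, replacement)` edits the named file in place; a sketch with an illustrative filename and target path (the import is explicit here, though packages get the name from their namespace):

    from llnl.util.filesystem import FileFilter

    # Replace a bare 'palette Palette' default with an absolute path.
    config = FileFilter('app.defaults')
    config.filter(r'^palette\s*Palette\s*',
                  'palette /opt/app/etc/Palette\n')
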
@@ -55,6 +55,7 @@ class Apex(CMakePackage):
     depends_on('zlib')
     depends_on('cmake@3.10.0:', type='build')
     depends_on('binutils@2.33:+libiberty+headers', when='+binutils')
+    depends_on('gettext', when='+binutils ^binutils+nls')
     depends_on('activeharmony@4.6:', when='+activeharmony')
     depends_on('activeharmony@4.6:', when='+plugins')
     depends_on('otf2@2.1:', when='+otf2')
@@ -113,6 +114,9 @@ def cmake_args(self):
         if '+binutils' in spec:
             args.append('-DBFD_ROOT={0}'.format(spec['binutils'].prefix))

+        if '+binutils ^binutils+nls' in spec:
+            args.append('-DCMAKE_SHARED_LINKER_FLAGS=-lintl')
+
         if '+otf2' in spec:
             args.append('-DOTF2_ROOT={0}'.format(spec['otf2'].prefix))

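The Apex condition `'+binutils ^binutils+nls' in spec` is an anonymous-spec containment check: it holds only when this build enables `+binutils` and the binutils node in the DAG was itself built `+nls`, in which case the extra `-lintl` link flag (and the `gettext` dependency added above) is needed. A sketch of the gating pattern, with an illustrative variant and flag:

    from spack.package import *


    class Demo(CMakePackage):
        variant('binutils', default=False, description='Enable binutils support')

        def cmake_args(self):
            args = []
            # True only if this package has +binutils AND its binutils
            # dependency carries +nls (gettext-based translations).
            if '+binutils ^binutils+nls' in self.spec:
                args.append('-DCMAKE_SHARED_LINKER_FLAGS=-lintl')
            return args
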
@@ -26,8 +26,8 @@ class Assimp(CMakePackage):
     version('5.0.1', sha256='11310ec1f2ad2cd46b95ba88faca8f7aaa1efe9aa12605c55e3de2b977b3dbfc')
     version('4.0.1', sha256='60080d8ab4daaab309f65b3cffd99f19eb1af8d05623fff469b9b652818e286e')

-    patch('https://patch-diff.githubusercontent.com/raw/assimp/assimp/pull/4203.patch',
-          sha256='a227714a215023184536e38b4bc7f8341f635e16bfb3b0ea029d420c29aacd2d',
+    patch('https://patch-diff.githubusercontent.com/raw/assimp/assimp/pull/4203.patch?full_index=1',
+          sha256='24135e88bcef205e118f7a3f99948851c78d3f3e16684104dc603439dd790d74',
           when='@5.1:5.2.2')

     variant('shared', default=True,

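The Assimp change fixes a known gotcha with GitHub-generated patches: the default `.patch` rendering abbreviates object hashes, and the abbreviation length can change as the repository grows, silently altering the downloaded bytes and breaking the recorded sha256. Appending `?full_index=1` requests full-length hashes, making the file stable; the checksum is then updated once to match. A sketch with placeholder repository, PR number, range, and checksum:

    from spack.package import *


    class Demo(CMakePackage):
        # Stable form of a GitHub PR patch: full_index=1 keeps the bytes
        # (and therefore the sha256) from drifting over time.
        patch('https://patch-diff.githubusercontent.com/raw/org/repo/pull/1234.patch?full_index=1',
              sha256='...',      # placeholder checksum of the full-index form
              when='@1.0:1.2')   # apply only to the affected releases
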
@@ -16,6 +16,7 @@ class Atmi(CMakePackage):
     homepage = "https://github.com/RadeonOpenCompute/atmi"
     git = "https://github.com/RadeonOpenCompute/atmi.git"
+    url = "https://github.com/RadeonOpenCompute/atmi/archive/rocm-5.1.3.tar.gz"
     tags = ['rocm']

     maintainers = ['srekolam', 'arjun-raj-kuppala']

@@ -18,6 +18,7 @@ class Atompaw(AutotoolsPackage):
     homepage = "https://users.wfu.edu/natalie/papers/pwpaw/man.html"
     url = "https://users.wfu.edu/natalie/papers/pwpaw/atompaw-4.0.0.13.tar.gz"

+    version('4.2.0.1', sha256='d3476a5aa5f80f9430b81f28273c2c2a9b6e7d9c3d08c65544247bb76cd5a114')
     version('4.2.0.0', sha256='9ab4f4ab78a720fbcd95bbbc1403e8ff348d15570e7c694932a56be15985e93d')
     version('4.1.1.0', sha256='b1ee2b53720066655d98523ef337e54850cb1e68b3a2da04ff5a1576d3893891')
     version('4.0.0.13', sha256='cbd73f11f3e9cc3ff2e5f3ec87498aeaf439555903d0b95a72f3b0a021902020')
@@ -30,7 +31,7 @@ class Atompaw(AutotoolsPackage):
     depends_on('libxc')
     depends_on('libxc@:2', when='@:4.0')

-    patch('atompaw-4.1.1.0-fix-ifort.patch', when='@4.1.1.0:')
+    patch('atompaw-4.1.1.0-fix-ifort.patch', when='@4.1.1.0:4.2.0.0')
     patch('atompaw-4.1.1.0-fix-fujitsu.patch', when='@4.1.1.0 %fj')

     parallel = False

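Capping the open-ended range on the Atompaw ifort patch matters because `when='@4.1.1.0:'` applies to every future release; with 4.2.0.1 added above (where the fix is presumably upstream), the range is closed so the patch stops applying. Spack version ranges are inclusive on both ends; a sketch with a hypothetical patch file:

    from spack.package import *


    class Demo(AutotoolsPackage):
        # Before: open-ended range, applies to 1.1 and every later release.
        #   patch('fix-compiler.patch', when='@1.1:')
        # After: capped range, no longer applied beyond 1.2.
        patch('fix-compiler.patch', when='@1.1:1.2')
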
@@ -27,6 +27,8 @@ class Axl(CMakePackage):
     maintainers = ['CamStan', 'gonsie']

+    version('main', branch='main')
+    version('0.7.0', sha256='840ef61eadc9aa277d128df08db4cdf6cfa46b8fcf47b0eee0972582a61fbc50')
     version('0.6.0', sha256='86edb35f99b63c0ffb9dd644a019a63b062923b4efc95c377e92a1b13e79f537')
     version('0.5.0', sha256='9f3bbb4de563896551bdb68e889ba93ea1984586961ad8c627ed766bff020acf')
     version('0.4.0', sha256='0530142629d77406a00643be32492760c2cf12d1b56c6b6416791c8ff5298db2')
     version('0.3.0', sha256='737d616b669109805f7aed1858baac36c97bf0016e1115b5c56ded05d792613e')
@@ -36,11 +38,16 @@ class Axl(CMakePackage):
     depends_on('kvtree')
     depends_on('zlib', type='link')

+    depends_on('kvtree@main', when='@main')
+    depends_on('kvtree@1.3.0', when='@0.6.0')
+
     variant('async_api', default='daemon',
             description='Set of async transfer APIs to enable',
             values=['cray_dw', 'intel_cppr', 'daemon', 'none'], multi=True,
             validator=async_api_validator)

+    variant('pthreads', default=True, description='Enable Pthread support', when='@0.6:')
+
     variant('bbapi', default=True, description='Enable IBM BBAPI support')

     variant('bbapi_fallback', default=False,
@@ -80,4 +87,7 @@ def cmake_args(self):
         if spec.satisfies('platform=cray'):
             args.append(self.define('AXL_LINK_STATIC', True))

+        if spec.satisfies('@0.6.0:'):
+            args.append(self.define_from_variant('ENABLE_PTHREADS', 'pthreads'))
+
         return args

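Axl shows a version-gated variant wired through to CMake: `when='@0.6:'` means the `pthreads` variant only exists on new enough versions, `define_from_variant` renders it as `-DENABLE_PTHREADS=ON` or `=OFF`, and the `spec.satisfies('@0.6.0:')` guard keeps the flag away from versions where the variant is absent. A condensed sketch (hypothetical package, same mechanism):

    from spack.package import *


    class Demo(CMakePackage):
        # The variant exists only for versions 0.6 and later.
        variant('pthreads', default=True, description='Enable Pthread support',
                when='@0.6:')

        def cmake_args(self):
            args = []
            if self.spec.satisfies('@0.6:'):
                # Expands to -DENABLE_PTHREADS=ON or =OFF from the variant.
                args.append(self.define_from_variant('ENABLE_PTHREADS', 'pthreads'))
            return args
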
@@ -25,8 +25,8 @@ class Babelflow(CMakePackage):

     variant("shared", default=True, description="Build Babelflow as shared libs")

-    # The C++ headers of gcc-11 don't provide <limits> as side effect of others
-    @when('%gcc@11:')
+    # Fix missing implicit includes
+    @when('%gcc@7:')
     def setup_build_environment(self, env):
         env.append_flags('CXXFLAGS', '-include limits')

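The Babelflow edit widens a `@when` multimethod: Spack runs whichever definition of the method matches the concretized spec, so moving the bound from `%gcc@11:` to `%gcc@7:` applies the `-include limits` workaround to far more GCC builds. A sketch of the mechanism (compiler bounds illustrative):

    from spack.package import *


    class Demo(CMakePackage):
        @when('%gcc@7:')
        def setup_build_environment(self, env):
            # Force-include <limits> where GCC headers no longer
            # pull it in transitively.
            env.append_flags('CXXFLAGS', '-include limits')

        @when('%clang')
        def setup_build_environment(self, env):
            pass  # no workaround needed for clang
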
@@ -10,10 +10,19 @@ class Bear(CMakePackage):
     """Bear is a tool that generates a compilation database for clang tooling
     from non-cmake build systems."""
     homepage = "https://github.com/rizsotto/Bear"
+    git = "git@github.com:rizsotto/Bear.git"
     url = "https://github.com/rizsotto/Bear/archive/2.0.4.tar.gz"
     maintainers = ['vmiheer', 'trws']
+
+    version('3.0.19', sha256='2fcfe2c6e029182cfc54ed26b3505c0ef12b0f43df03fb587f335afdc2ca9431')
+    version('3.0.18', sha256='ae94047c79b4f48462b66981f66a67b6a833d75d4c40e7afead491b1865f1142')
+    version('3.0.0', sha256='7b68aad69e887d945ad20f8e9f3a8c33cf2d59cc80da7e52d931d8c685fe2f79')
     version('2.2.0', sha256='6bd61a6d64a24a61eab17e7f2950e688820c72635e1cf7ea8ea7bf9482f3b612')
     version('2.0.4', sha256='33ea117b09068aa2cd59c0f0f7535ad82c5ee473133779f1cc20f6f99793a63e')

-    depends_on('python')
+    depends_on('fmt', when='@3.0.0:')
+    depends_on('grpc', when='@3.0.0:')
+    depends_on('nlohmann-json', when='@3.0.0:')
+    depends_on('spdlog', when='@3.0.0:')
     depends_on('cmake@2.8:', type='build')
+    depends_on('python', type='build')

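Two details in the Bear hunk deserve a second look. The `git@github.com:` form is an SSH URL, which only works for users with GitHub SSH keys configured; recipes normally use the anonymous `https://github.com/rizsotto/Bear.git` form instead. And the python dependency is narrowed to `type='build'`: Bear 3 needs Python to build but neither links against it nor runs it, whereas the unqualified form defaults to ('build', 'link'). A sketch of both points (hypothetical package):

    from spack.package import *


    class Demo(CMakePackage):
        git = "https://example.com/demo.git"  # anonymous HTTPS, not ssh git@...

        depends_on('cmake@2.8:', type='build')  # needed only while building
        depends_on('python', type='build')      # build-time tool, neither linked nor run
        depends_on('fmt', when='@3.0.0:')       # default type is ('build', 'link')
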
Some files were not shown because too many files have changed in this diff.