Compare commits

441 commits: 2021.08.19...features/k
@@ -14,3 +14,8 @@ ignore:
- share/spack/qa/.*

comment: off

# Inline codecov annotations make the code hard to read, and they add
# annotations in files that seemingly have nothing to do with the PR.
github_checks:
  annotations: false
@@ -1,6 +0,0 @@
FROM python:3.7-alpine

RUN pip install pygithub

ADD entrypoint.py /entrypoint.py
ENTRYPOINT ["/entrypoint.py"]
@@ -1,85 +0,0 @@
#!/usr/bin/env python
#
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Maintainer review action.

This action checks which packages have changed in a PR, and adds their
maintainers to the pull request for review.
"""

import json
import os
import re
import subprocess

from github import Github


def spack(*args):
    """Run the spack executable with arguments, and return the output split.

    This does just enough to run `spack pkg` and `spack maintainers`, the
    two commands used by this action.
    """
    github_workspace = os.environ['GITHUB_WORKSPACE']
    spack = os.path.join(github_workspace, 'bin', 'spack')
    output = subprocess.check_output([spack] + list(args))
    split = re.split(r'\s*', output.decode('utf-8').strip())
    return [s for s in split if s]


def main():
    # get these first so that we'll fail early
    token = os.environ['GITHUB_TOKEN']
    event_path = os.environ['GITHUB_EVENT_PATH']

    with open(event_path) as file:
        data = json.load(file)

    # make sure it's a pull_request event
    assert 'pull_request' in data

    # only request reviews on open, edit, or reopen
    action = data['action']
    if action not in ('opened', 'edited', 'reopened'):
        return

    # get data from the event payload
    pr_data = data['pull_request']
    base_branch_name = pr_data['base']['ref']
    full_repo_name = pr_data['base']['repo']['full_name']
    pr_number = pr_data['number']
    requested_reviewers = pr_data['requested_reviewers']
    author = pr_data['user']['login']

    # get a list of packages that this PR modified
    changed_pkgs = spack(
        'pkg', 'changed', '--type', 'ac', '%s...' % base_branch_name)

    # get maintainers for all modified packages
    maintainers = set()
    for pkg in changed_pkgs:
        pkg_maintainers = set(spack('maintainers', pkg))
        maintainers |= pkg_maintainers

    # remove any maintainers who are already on the PR, and the author,
    # as you can't review your own PR
    maintainers -= set(requested_reviewers)
    maintainers -= set([author])

    if not maintainers:
        return

    # request reviews from each maintainer
    gh = Github(token)
    repo = gh.get_repo(full_repo_name)
    pr = repo.get_pull(pr_number)
    pr.create_review_request(list(maintainers))


if __name__ == "__main__":
    main()
24 .github/workflows/unit_tests.yaml vendored
@@ -24,9 +24,9 @@ jobs:
pip install --upgrade pip
pip install --upgrade vermin
- name: vermin (Spack's Core)
run: vermin --backport argparse --backport typing -t=2.6- -t=3.5- -v lib/spack/spack/ lib/spack/llnl/ bin/
run: vermin --backport argparse --violations --backport typing -t=2.6- -t=3.5- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
run: vermin --backport argparse --backport typing -t=2.6- -t=3.5- -v var/spack/repos
run: vermin --backport argparse --violations --backport typing -t=2.6- -t=3.5- -vvv var/spack/repos
# Run style checks on the files that have been changed
style:
runs-on: ubuntu-latest
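For context, vermin scans the given paths for syntax newer than the minimum versions pinned above (2.6 and 3.5), and ``--violations`` makes it report the offending constructs. A minimal sketch of code it would flag, assuming vermin's standard f-string detection:

.. code-block:: python

   # f-strings require Python 3.6+, so vermin reports a violation against
   # the -t=2.6- and -t=3.5- targets used in the steps above
   name = "spack"
   print(f"hello {name}")

   # a 2.6/3.5-compatible spelling that passes the same check
   print("hello {0}".format(name))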
@@ -39,7 +39,7 @@ jobs:
python-version: 3.9
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools flake8 mypy>=0.800 black
pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black types-six
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@@ -129,8 +129,9 @@ jobs:
run: |
sudo apt-get -y update
# Needed for unit tests
sudo apt-get install -y coreutils gfortran graphviz gnupg2 mercurial
sudo apt-get install -y ninja-build patchelf
sudo apt-get -y install \
  coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
  patchelf
# Needed for kcov
sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
@@ -155,6 +156,8 @@ jobs:
make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
- name: Bootstrap clingo from sources
if: ${{ matrix.concretizer == 'clingo' }}
env:
SPACK_PYTHON: python
run: |
. share/spack/setup-env.sh
spack external find --not-buildable cmake bison
@@ -162,6 +165,7 @@ jobs:
- name: Run unit tests (full suite with coverage)
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
env:
SPACK_PYTHON: python
COVERAGE: true
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
run: |
@@ -171,6 +175,7 @@ jobs:
- name: Run unit tests (reduced suite without coverage)
if: ${{ needs.changes.outputs.with_coverage == 'false' }}
env:
SPACK_PYTHON: python
ONLY_PACKAGES: true
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
run: |
@@ -286,7 +291,7 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack unit-test -k 'not svn and not hg' -x --verbose
spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
# Test for the clingo based solver (using clingo-cffi)
clingo-cffi:
needs: [ validate, style, documentation, changes ]
@@ -302,8 +307,9 @@ jobs:
run: |
sudo apt-get -y update
# Needed for unit tests
sudo apt-get install -y coreutils gfortran graphviz gnupg2 mercurial
sudo apt-get install -y ninja-build patchelf
sudo apt-get -y install \
  coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
  patchelf
# Needed for kcov
sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
@@ -364,7 +370,7 @@ jobs:
run: |
pip install --upgrade pip six setuptools
pip install --upgrade codecov coverage
pip install --upgrade flake8 pep8-naming mypy
pip install --upgrade flake8 isort>=4.3.5 mypy>=0.800
- name: Setup Homebrew packages
run: |
brew install dash fish gcc gnupg2 kcov
2 .gitignore vendored
@@ -508,4 +508,4 @@ $RECYCLE.BIN/
*.msp

# Windows shortcuts
*.lnk
*.lnk
@@ -14,9 +14,8 @@
# ~/.spack/modules.yaml
# -------------------------------------------------------------------------
modules:
  default:
    prefix_inspections:
      lib:
      - LD_LIBRARY_PATH
      lib64:
      - LD_LIBRARY_PATH
  prefix_inspections:
    lib:
    - LD_LIBRARY_PATH
    lib64:
    - LD_LIBRARY_PATH
@@ -34,19 +34,21 @@ packages:
java: [openjdk, jdk, ibm-java]
jpeg: [libjpeg-turbo, libjpeg]
lapack: [openblas, amdlibflame]
lua-lang: [lua, lua-luajit]
mariadb-client: [mariadb-c-client, mariadb]
mkl: [intel-mkl]
mpe: [mpe2]
mpi: [openmpi, mpich]
mysql-client: [mysql, mariadb-c-client]
opencl: [pocl]
onedal: [intel-oneapi-dal]
osmesa: [mesa+osmesa, mesa18+osmesa]
pil: [py-pillow]
pkgconfig: [pkgconf, pkg-config]
rpc: [libtirpc]
scalapack: [netlib-scalapack, amdscalapack]
sycl: [hipsycl]
szip: [libszip, libaec]
szip: [libaec, libszip]
tbb: [intel-tbb]
unwind: [libunwind]
uuid: [util-linux-uuid, libuuid]
@@ -1730,6 +1730,39 @@ This issue typically manifests with the error below:

A nicer error message is TBD in future versions of Spack.

---------------
Troubleshooting
---------------

The ``spack audit`` command:

.. command-output:: spack audit -h

can be used to detect a number of configuration issues. This command detects
configuration settings which might not be strictly wrong but are not likely
to be useful outside of special cases.

It can also be used to detect dependency issues with packages - for example
cases where a package constrains a dependency with a variant that doesn't
exist (in this case Spack could report the problem ahead of time but
automatically performing the check would slow down most runs of Spack).

A detailed list of the checks currently implemented for each subcommand can be
printed with:

.. command-output:: spack -v audit list

Depending on the use case, users might run the appropriate subcommands to obtain
diagnostics. Issues, if found, are reported to stdout:

.. code-block:: console

   % spack audit packages lammps
   PKG-DIRECTIVES: 1 issue found
   1. lammps: wrong variant in "conflicts" directive
       the variant 'adios' does not exist
       in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py
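The audit checks can also be driven from Python. A minimal sketch, assuming the ``spack.audit`` module added by these commits is importable and using only the ``run_check``/``run_group`` entry points it defines:

.. code-block:: python

   import spack.audit

   # run a single class of checks by its tag; a list of errors is returned
   for error in spack.audit.run_check('CFG-COMPILER'):
       print(error)

   # or run every check in a group; run_group returns (tag, errors) pairs
   for tag, errors in spack.audit.run_group('configs'):
       print(tag, '-', len(errors), 'issue(s) found')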

------------
Getting Help
@@ -31,9 +31,25 @@ Build caches are created via:

.. code-block:: console

   $ spack buildcache create spec
   $ spack buildcache create <spec>


If you wanted to create a build cache in a local directory, you would provide
the ``-d`` argument to target that directory, again also specifying the spec.
Here is an example creating a local directory, "spack-cache", and creating
build cache files for the "ninja" spec:

.. code-block:: console

   $ mkdir -p ./spack-cache
   $ spack buildcache create -d ./spack-cache ninja
   ==> Buildcache files will be output to file:///home/spackuser/spack/spack-cache/build_cache
   gpgconf: socketdir is '/run/user/1000/gnupg'
   gpg: using "E6DF6A8BD43208E4D6F392F23777740B7DBD643D" as default secret key for signing

Note that the targeted spec must already be installed. Once you have a build cache,
you can add it as a mirror, discussed next.

---------------------------------------
Finding or installing build cache files
---------------------------------------
@@ -43,19 +59,98 @@ with:

.. code-block:: console

   $ spack mirror add <name> <url>
   $ spack mirror add <name> <url>


Note that the url can be a web url _or_ a local filesystem location. In the previous
example, you might add the directory "spack-cache" and call it ``mymirror``:

Build caches are found via:

.. code-block:: console

   $ spack buildcache list
   $ spack mirror add mymirror ./spack-cache

Build caches are installed via:

You can see that the mirror is added with ``spack mirror list`` as follows:

.. code-block:: console

   $ spack buildcache install

   $ spack mirror list
   mymirror file:///home/spackuser/spack/spack-cache
   spack-public https://spack-llnl-mirror.s3-us-west-2.amazonaws.com/


At this point, you've created a buildcache, but spack hasn't indexed it, so if
you run ``spack buildcache list`` you won't see any results. You need to index
this new build cache as follows:

.. code-block:: console

   $ spack buildcache update-index -d spack-cache/

Now you can use list:

.. code-block:: console

   $ spack buildcache list
   ==> 1 cached build.
   -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
   ninja@1.10.2


Great! So now let's say you have a different spack installation, or perhaps just
a different environment for the same one, and you want to install a package from
that build cache. Let's first uninstall the actual library "ninja" to see if we can
re-install it from the cache.

.. code-block:: console

   $ spack uninstall ninja


And now reinstall from the buildcache:

.. code-block:: console

   $ spack buildcache install ninja
   ==> buildcache spec(s) matching ninja
   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-i4e5luour7jxdpc3bkiykd4imke3mkym.spack
   ######################################################################## 100.0%
   ==> Installing buildcache for spec ninja@1.10.2%gcc@9.3.0 arch=linux-ubuntu20.04-skylake
   gpgconf: socketdir is '/run/user/1000/gnupg'
   gpg: Signature made Tue 23 Mar 2021 10:16:29 PM MDT
   gpg: using RSA key E6DF6A8BD43208E4D6F392F23777740B7DBD643D
   gpg: Good signature from "spackuser (GPG created for Spack) <spackuser@noreply.users.github.com>" [ultimate]


It worked! You've just completed a full example of creating a build cache with
a spec of interest, adding it as a mirror, updating its index, listing the contents,
and finally, installing from it.


Note that the above command is intended to install a particular package to a
build cache you have created, and not to install a package from a build cache.
For the latter, once a mirror is added, by default when you do ``spack install`` the ``--use-cache``
flag is set, and you will install a package from a build cache if it is available.
If you want to always use the cache, you can do:

.. code-block:: console

   $ spack install --cache-only <package>

For example, to combine all of the commands above to add the E4S build cache
and then install from it exclusively, you would do:

.. code-block:: console

   $ spack mirror add E4S https://cache.e4s.io
   $ spack buildcache keys --install --trust
   $ spack install --cache-only <package>

We use ``--install`` and ``--trust`` to say that we are installing keys to our
keyring, and trusting all downloaded keys.
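The same workflow can be scripted. A minimal sketch that drives the CLI commands shown above from Python (it assumes ``spack`` is on ``PATH``; ``ninja`` stands in for ``<package>``):

.. code-block:: python

   import subprocess

   def spack(*args):
       # thin wrapper over the spack CLI commands used throughout this section
       subprocess.check_call(('spack',) + args)

   spack('mirror', 'add', 'E4S', 'https://cache.e4s.io')
   spack('buildcache', 'keys', '--install', '--trust')
   spack('install', '--cache-only', 'ninja')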

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
List of popular build caches
@@ -17,10 +17,10 @@
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os
import re
import subprocess
import sys
from glob import glob

from sphinx.ext.apidoc import main as sphinx_apidoc
@@ -82,6 +82,8 @@
# Disable duplicate cross-reference warnings.
#
from sphinx.domains.python import PythonDomain


class PatchedPythonDomain(PythonDomain):
    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        if 'refspecific' in node:
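The hunk above is cut off inside ``resolve_xref``; a sketch of how such a Sphinx-domain patch typically continues (the ``del`` and the ``setup`` wiring are assumptions for illustration, not necessarily Spack's exact code):

.. code-block:: python

   from sphinx.domains.python import PythonDomain

   class PatchedPythonDomain(PythonDomain):
       def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
           if 'refspecific' in node:
               # drop the flag that causes duplicate cross-reference warnings
               del node['refspecific']
           return super(PatchedPythonDomain, self).resolve_xref(
               env, fromdocname, builder, typ, target, node, contnode)

   def setup(sphinx):
       # swap in the patched domain for the built-in Python domain
       sphinx.add_domain(PatchedPythonDomain, override=True)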
@@ -136,6 +138,7 @@ def setup(sphinx):
#
# The short X.Y version.
import spack

version = '.'.join(str(s) for s in spack.spack_version_info[:2])
# The full version, including alpha/beta/rc tags.
release = spack.spack_version
@@ -179,7 +182,8 @@ def setup(sphinx):
# We use our own extension of the default style with a few modifications
from pygments.style import Style
from pygments.styles.default import DefaultStyle
from pygments.token import Generic, Comment, Text
from pygments.token import Comment, Generic, Text


class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
@@ -188,6 +192,7 @@ class SpackStyle(DefaultStyle):
    styles[Generic.Prompt] = "bold #346ec9"

import pkg_resources

dist = pkg_resources.Distribution(__file__)
sys.path.append('.')  # make 'conf' module findable
ep = pkg_resources.EntryPoint.parse('spack = conf:SpackStyle', dist=dist)
@@ -363,7 +363,7 @@ to ``spack install`` on the command line, ``--no-add`` is the default,
while for dependency specs on the other hand, it is optional. In other
words, if there is an unambiguous match in the active concrete environment
for a root spec provided to ``spack install`` on the command line, spack
does not require you to specify the ``--no-add` option to prevent the spec
does not require you to specify the ``--no-add`` option to prevent the spec
from being added again. At the same time, a spec that already exists in the
environment, but only as a dependency, will be added to the environment as a
root spec without the ``--no-add`` option.
@@ -70,7 +70,13 @@ Sourcing these files will put the ``spack`` command in your ``PATH``, set
up your ``MODULEPATH`` to use Spack's packages, and add other useful
shell integration for :ref:`certain commands <packaging-shell-support>`,
:ref:`environments <environments>`, and :ref:`modules <modules>`. For
``bash``, it also sets up tab completion.
``bash`` and ``zsh``, it also sets up tab completion.

In order to know which directory to add to your ``MODULEPATH``, these scripts
query the ``spack`` command. On shared filesystems, this can be a bit slow,
especially if you log in frequently. If you don't use modules, or want to set
``MODULEPATH`` manually instead, you can set the ``SPACK_SKIP_MODULES``
environment variable to skip this step and speed up sourcing the file.

If you do not want to use Spack's shell support, you can always just run
the ``spack`` command directly from ``spack/bin/spack``.
@@ -1166,7 +1172,7 @@ the key that we just created:

   60D2685DAB647AD4DB54125961E09BB6F2A0ADCB
   uid [ultimate] dinosaur (GPG created for Spack) <dinosaur@thedinosaurthings.com>

Note that the name "dinosaur" can be seen under the uid, which is the unique
id. We might need this reference if we want to export or otherwise reference the key.

@@ -1205,7 +1211,7 @@ If you want to include the private key, then just add `--secret`:

   $ spack gpg export --secret dinosaur.priv dinosaur

This will write the private key to the file `dinosaur.priv`.
This will write the private key to the file `dinosaur.priv`.

.. warning::
@@ -103,6 +103,140 @@ more tags to your build, you can do:

   $ spack install --monitor --monitor-tags pizza,pasta hdf5


----------------------------
Monitoring with Containerize
----------------------------

The same argument group is available to add to a containerize command.

^^^^^^
Docker
^^^^^^

To add monitoring to a Docker container recipe generation using the defaults,
and assuming a monitor server running on localhost, you would
start with a spack.yaml in your present working directory:

.. code-block:: yaml

   spack:
     specs:
       - samtools

And then do:

.. code-block:: console

   # preview first
   spack containerize --monitor

   # and then write to a Dockerfile
   spack containerize --monitor > Dockerfile


The install command will be edited to include commands for enabling monitoring.
However, getting secrets into the container for your monitor server is something
that should be done carefully. Specifically you should:

- Never try to define secrets as ENV, ARG, or using ``--build-arg``
- Do not try to get the secret into the container via a "temporary" file that you remove (it in fact will still exist in a layer)

Instead, it's recommended to use buildkit `as explained here <https://pythonspeed.com/articles/docker-build-secrets/>`_.
You'll need to again export environment variables for your spack monitor server:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky

And then use buildkit with your build, identifying the names of the secrets:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .

The secrets are expected to come from your environment, and then will be temporarily mounted and available
at ``/run/secrets/<name>``. If you forget to supply them (and authentication is required) the build
will fail. If you need to build on your host (and interact with a spack monitor at localhost) you'll
need to tell Docker to use the host network:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --network="host" --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .
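Inside the image, a build step can then read the mounted secrets. A minimal sketch, assuming the secret ids ``st`` and ``su`` used in the commands above:

.. code-block:: python

   from pathlib import Path

   def read_secret(name):
       # BuildKit mounts each requested secret at /run/secrets/<id> for
       # the duration of the RUN step that asks for it
       return Path('/run/secrets', name).read_text().strip()

   token = read_secret('st')  # SPACKMON_TOKEN
   user = read_secret('su')   # SPACKMON_USER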
^^^^^^^^^^^
Singularity
^^^^^^^^^^^

To add monitoring to a Singularity container build, the spack.yaml needs to
be modified slightly to specify wanting a different format:

.. code-block:: yaml

   spack:
     specs:
       - samtools
     container:
       format: singularity

Again, generate the recipe:

.. code-block:: console

   # preview first
   $ spack containerize --monitor

   # then write to a Singularity recipe
   $ spack containerize --monitor > Singularity


Singularity doesn't have a direct way to define secrets at build time, so we have
to do a bit of manual work to add a file, source secrets in it, and remove it.
Since Singularity doesn't have layers like Docker, deleting a file will truly
remove it from the container and history. So let's say we have this file,
``secrets.sh``:

.. code-block:: console

   # secrets.sh
   export SPACKMON_USER=spack
   export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438


We would then generate the Singularity recipe, and add a files section,
a source of that file at the start of ``%post``, and **importantly**
a removal of the file at the end of that same section.

.. code-block::

   Bootstrap: docker
   From: spack/ubuntu-bionic:latest
   Stage: build

   %files
       secrets.sh /opt/secrets.sh

   %post
       . /opt/secrets.sh

       # spack install commands are here
       ...

       # Don't forget to remove here!
       rm /opt/secrets.sh


You can then build the container as you normally would.

.. code-block:: console

   $ sudo singularity build container.sif Singularity


------------------
Monitoring Offline
------------------
@@ -117,4 +251,15 @@ flag.

   $ spack install --monitor --monitor-save-local hdf5

This will save results in a subfolder, "monitor", in your designated spack
reports folder, which defaults to ``$HOME/.spack/reports/monitor``.
reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When
you are ready to upload them to a spack monitor server:

.. code-block:: console

   $ spack monitor upload ~/.spack/reports/monitor

You can choose the root directory of results as shown above, or a specific
subdirectory. The command accepts other arguments to specify configuration
for the monitor.
@@ -920,12 +920,13 @@ For some packages, source code is provided in a Version Control System
(VCS) repository rather than in a tarball. Spack can fetch packages
from VCS repositories. Currently, Spack supports fetching with `Git
<git-fetch_>`_, `Mercurial (hg) <hg-fetch_>`_, `Subversion (svn)
<svn-fetch_>`_, and `Go <go-fetch_>`_. In all cases, the destination
<svn-fetch_>`_, `CVS (cvs) <cvs-fetch_>`_, and `Go <go-fetch_>`_.
In all cases, the destination
is the standard stage source path.

To fetch a package from a source repository, Spack needs to know which
VCS to use and where to download from. Much like with ``url``, package
authors can specify a class-level ``git``, ``hg``, ``svn``, or ``go``
authors can specify a class-level ``git``, ``hg``, ``svn``, ``cvs``, or ``go``
attribute containing the correct download location.

Many packages developed with Git have both a Git repository as well as
@@ -1173,6 +1174,55 @@ you can check out a branch or tag by changing the URL. If you want to
package multiple branches, simply add a ``svn`` argument to each
version directive.

.. _cvs-fetch:

^^^
CVS
^^^

CVS (Concurrent Versions System) is an old centralized version control
system. It is a predecessor of Subversion.

To fetch with CVS, use the ``cvs``, ``branch``, and ``date`` parameters.
The destination directory will be the standard stage source path.

Fetching the head
   Simply add a ``cvs`` parameter to the package:

   .. code-block:: python

      class Example(Package):

          cvs = ":pserver:outreach.scidac.gov/cvsroot%module=modulename"

          version('1.1.2.4')

   CVS repository locations are described using an older syntax that
   is different from today's ubiquitous URL syntax. ``:pserver:``
   denotes the transport method. CVS servers can host multiple
   repositories (called "modules") at the same location, and one needs
   to specify both the server location and the module name to access.
   Spack combines both into one string using the ``%module=modulename``
   suffix shown above.

   This download method is untrusted.

Fetching a date
   Versions in CVS are commonly specified by date. To fetch a
   particular branch or date, add a ``branch`` and/or ``date`` argument
   to the version directive:

   .. code-block:: python

      version('2021.4.22', branch='branchname', date='2021-04-22')

   Unfortunately, CVS does not identify repository-wide commits via a
   revision or hash like Subversion, Git, or Mercurial do. This makes
   it impossible to specify an exact commit to check out.

CVS has more features, but since CVS is rarely used these days, Spack
does not support all of them.
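Putting the two pieces together, a package that fetches both the head and a dated branch from one CVS module could look like the sketch below (the version names and branch are illustrative, reusing only the parameters documented above):

.. code-block:: python

   class Example(Package):

       cvs = ":pserver:outreach.scidac.gov/cvsroot%module=modulename"

       # head of the repository
       version('1.1.2.4')
       # a dated checkout on a specific branch
       version('2021.4.22', branch='branchname', date='2021-04-22')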

.. _go-fetch:

^^
@@ -1207,7 +1257,7 @@ Variants

Many software packages can be configured to enable optional
features, which often come at the expense of additional dependencies or
longer build times. To be flexible enough and support a wide variety of
use cases, Spack permits to expose to the end-user the ability to choose
use cases, Spack allows you to expose to the end-user the ability to choose
which features should be activated in a package at the time it is installed.
The mechanism to be employed is the :py:func:`spack.directives.variant` directive.
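For reference, a minimal use of that directive (the package and variant names here are illustrative, not from a real package):

.. code-block:: python

   class Example(AutotoolsPackage):

       # expose an optional feature the user can toggle at install time
       variant('shared', default=True, description='Build shared libraries')

       def configure_args(self):
           # translate the chosen variant into a configure flag
           if '+shared' in self.spec:
               return ['--enable-shared']
           return ['--disable-shared']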
@@ -2725,6 +2775,57 @@ packages be built with MVAPICH and GCC.

See the :ref:`concretization-preferences` section for more details.


.. _group_when_spec:

----------------------------
Common ``when=`` constraints
----------------------------

In case a package needs many directives to share the whole ``when=``
argument, or just part of it, Spack allows you to group the common part
under a context manager:

.. code-block:: python

   class Gcc(AutotoolsPackage):

       with when('+nvptx'):
           depends_on('cuda')
           conflicts('@:6', msg='NVPTX only supported in gcc 7 and above')
           conflicts('languages=ada')
           conflicts('languages=brig')
           conflicts('languages=go')

The snippet above is equivalent to the more verbose:

.. code-block:: python

   class Gcc(AutotoolsPackage):

       depends_on('cuda', when='+nvptx')
       conflicts('@:6', when='+nvptx', msg='NVPTX only supported in gcc 7 and above')
       conflicts('languages=ada', when='+nvptx')
       conflicts('languages=brig', when='+nvptx')
       conflicts('languages=go', when='+nvptx')

Constraints stemming from the context are added to what is explicitly present in the
``when=`` argument of a directive, so:

.. code-block:: python

   with when('+elpa'):
       depends_on('elpa+openmp', when='+openmp')

is equivalent to:

.. code-block:: python

   depends_on('elpa+openmp', when='+openmp+elpa')

Constraints from nested context managers are also added together, but they are rarely
needed or recommended.
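For illustration, a sketch of what such nesting would look like (this example is ours, not from the Spack docs):

.. code-block:: python

   class Example(Package):

       with when('+mpi'):
           with when('@2.0:'):
               # effective constraint: when='@2.0: +mpi'
               depends_on('mpich@3:')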

.. _install-method:

------------------
@@ -169,11 +169,28 @@ have disabled it (using ``rebuild-index: False``) because the index would only be
generated in the artifacts mirror anyway, and consequently would not be available
during subsequent pipeline runs.

.. note::
   With the addition of reproducible builds (#22887) a previously working
   pipeline will require some changes:

   * In the build jobs (``runner-attributes``), the environment location changed.
     This will typically show as a ``KeyError`` in the failing job. Be sure to
     point to ``${SPACK_CONCRETE_ENV_DIR}``.

   * When using ``include`` in your environment, be sure to make the included
     files available in the build jobs. This means adding those files to the
     artifact directory. Those files will also be missing in the reproducibility
     artifact.

   * Because the location of the environment changed, including files with
     relative paths may have to be adapted to work both in the project context
     (generation job) and in the concrete env dir context (build job).

-----------------------------------
Spack commands supporting pipelines
-----------------------------------

Spack provides a command ``ci`` command with a few sub-commands supporting spack
Spack provides a ``ci`` command with a few sub-commands supporting spack
ci pipelines. These commands are covered in more detail in this section.

.. _cmd-spack-ci:
@@ -543,7 +543,8 @@ specified from the command line using the ``--projection-file`` option
to the ``spack view`` command.

The projections configuration file is a mapping of partial specs to
spec format strings, as shown in the example below.
spec format strings, defined by the :meth:`~spack.spec.Spec.format`
function, as shown in the example below.

.. code-block:: yaml
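The YAML example itself is truncated here, but the spec format strings it maps to can be explored directly. A minimal sketch, assuming a concretizable spec and the documented ``format`` tokens:

.. code-block:: python

   import spack.spec

   spec = spack.spec.Spec('zlib').concretized()
   # each {token} names an attribute of the spec
   print(spec.format('{name}-{version}-{compiler.name}'))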
2 lib/spack/external/__init__.py vendored
@@ -11,7 +11,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.1.2 (commit 130607c373fd88cd3c43da94c0d3afd3a44084b0)
* Version: 0.1.2 (commit 26dec9d47e509daf8c970de4c89da200da52ad20)

argparse
--------
@@ -1725,6 +1725,12 @@
      "versions": ":",
      "flags": "-march=armv8-a -mtune=generic"
    }
  ],
  "arm": [
    {
      "versions": ":",
      "flags": "-march=armv8-a -mtune=generic"
    }
  ]
}
},
@@ -1828,6 +1834,12 @@
      "versions": "5:",
      "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
    }
  ],
  "arm": [
    {
      "versions": "20:",
      "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
    }
  ]
}
},
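These JSON entries drive archspec's compiler-flag selection. A minimal sketch of querying them from Python (``host()`` and ``optimization_flags`` are part of archspec's public API; the exact flags returned depend on entries like the ones above):

.. code-block:: python

   import archspec.cpu

   uarch = archspec.cpu.host()  # microarchitecture of this machine
   # raises if the compiler/version pair cannot target this microarchitecture
   print(uarch.optimization_flags('arm', '20.3'))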
@@ -5,9 +5,9 @@

from __future__ import print_function

import re
import argparse
import errno
import re
import sys

from six import StringIO
@@ -4,9 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import errno
import hashlib
import glob
import grp
import hashlib
import itertools
import numbers
import os
@@ -19,10 +19,11 @@
from contextlib import contextmanager

import six

from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from spack.util.executable import Executable

from spack.util.executable import Executable

if sys.version_info >= (3, 3):
    from collections.abc import Sequence  # novm
@@ -5,14 +5,15 @@

from __future__ import division

import functools
import inspect
import multiprocessing
import os
import re
import functools
import inspect
from datetime import datetime, timedelta
from six import string_types
import sys
from datetime import datetime, timedelta

from six import string_types

if sys.version_info < (3, 0):
    from itertools import izip_longest  # novm
@@ -7,12 +7,12 @@

from __future__ import print_function

import filecmp
import os
import shutil
import filecmp

from llnl.util.filesystem import traverse_tree, mkdirp, touch
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, touch, traverse_tree

__all__ = ['LinkTree']
@@ -3,16 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import fcntl
import errno
import time
import fcntl
import os
import socket
import time
from datetime import datetime

import llnl.util.tty as tty
import spack.util.string

import spack.util.string

__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
           'LockError', 'LockTimeoutError',
@@ -12,12 +12,13 @@
import termios
import textwrap
import traceback
import six
from datetime import datetime

import six
from six import StringIO
from six.moves import input

from llnl.util.tty.color import cprint, cwrite, cescape, clen
from llnl.util.tty.color import cescape, clen, cprint, cwrite

# Globals
_debug = 0
@@ -10,10 +10,11 @@

import os
import sys

from six import StringIO, text_type

from llnl.util.tty import terminal_size
from llnl.util.tty.color import clen, cextra
from llnl.util.tty.color import cextra, clen


class ColumnConfig:
@@ -60,9 +60,9 @@
To output an @, use '@@'. To output a } inside braces, use '}}'.
"""
from __future__ import unicode_literals

import re
import sys
from contextlib import contextmanager

import six
@@ -13,15 +13,14 @@
import os
import re
import select
import signal
import sys
import traceback
import signal
from contextlib import contextmanager
from six import string_types
from six import StringIO

from typing import Optional  # novm
from types import ModuleType  # novm
from typing import Optional  # novm

from six import StringIO, string_types

import llnl.util.tty as tty
@@ -14,10 +14,10 @@
"""
from __future__ import print_function

import os
import signal
import multiprocessing
import os
import re
import signal
import sys
import termios
import time
@@ -8,9 +8,9 @@
from llnl.util.lang import memoized

import spack.spec
from spack.compilers.clang import Clang
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError
from spack.compilers.clang import Clang


class ABI(object):
@@ -10,11 +10,10 @@

from __future__ import absolute_import

import spack.util.classes
import spack.paths

import llnl.util.tty as tty

import spack.paths
import spack.util.classes

mod_path = spack.paths.analyzers_path
analyzers = spack.util.classes.list_classes("spack.analyzers", mod_path)
@@ -7,14 +7,15 @@
and (optionally) interact with a Spack Monitor
"""

import spack.monitor
import spack.hooks
import llnl.util.tty as tty
import spack.util.path
import spack.config

import os

import llnl.util.tty as tty

import spack.config
import spack.hooks
import spack.monitor
import spack.util.path


def get_analyzer_dir(spec, analyzer_dir=None):
    """
@@ -8,11 +8,12 @@
directory."""


import spack.monitor
from .analyzer_base import AnalyzerBase

import os

import spack.monitor

from .analyzer_base import AnalyzerBase


class ConfigArgs(AnalyzerBase):
@@ -8,11 +8,11 @@
an index of key, value pairs for environment variables."""


from .analyzer_base import AnalyzerBase
import os

from spack.util.environment import EnvironmentModifications


import os
from .analyzer_base import AnalyzerBase


class EnvironmentVariables(AnalyzerBase):
@@ -8,11 +8,12 @@
analyzer folder for further processing."""


import spack.monitor
from .analyzer_base import AnalyzerBase

import os

import spack.monitor

from .analyzer_base import AnalyzerBase


class InstallFiles(AnalyzerBase):
@@ -4,20 +4,20 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


import spack
import spack.error
import spack.bootstrap
import spack.hooks
import spack.monitor
import spack.binary_distribution
import spack.package
import spack.repo
import os

import llnl.util.tty as tty

from .analyzer_base import AnalyzerBase
import spack
import spack.binary_distribution
import spack.bootstrap
import spack.error
import spack.hooks
import spack.monitor
import spack.package
import spack.repo

import os
from .analyzer_base import AnalyzerBase


class Libabigail(AnalyzerBase):
@@ -60,20 +60,21 @@
import functools
import warnings

import archspec.cpu
import six

import llnl.util.tty as tty
import archspec.cpu

import llnl.util.lang as lang
import llnl.util.tty as tty

import spack.compiler
import spack.compilers
import spack.config
import spack.paths
import spack.error as serr
import spack.paths
import spack.util.classes
import spack.util.executable
import spack.version
import spack.util.classes
from spack.util.spack_yaml import syaml_dict
395 lib/spack/spack/audit.py Normal file
@@ -0,0 +1,395 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Classes and functions to register audit checks for various parts of
|
||||
Spack and run them on-demand.
|
||||
|
||||
To register a new class of sanity checks (e.g. sanity checks for
|
||||
compilers.yaml), the first action required is to create a new AuditClass
|
||||
object:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
audit_cfgcmp = AuditClass(
|
||||
tag='CFG-COMPILER',
|
||||
description='Sanity checks on compilers.yaml',
|
||||
kwargs=()
|
||||
)
|
||||
|
||||
This object is to be used as a decorator to register functions
|
||||
that will perform each a single check:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@audit_cfgcmp
|
||||
def _search_duplicate_compilers(error_cls):
|
||||
pass
|
||||
|
||||
These functions need to take as argument the keywords declared when
|
||||
creating the decorator object plus an ``error_cls`` argument at the
|
||||
end, acting as a factory to create Error objects. It should return a
|
||||
(possibly empty) list of errors.
|
||||
|
||||
Calls to each of these functions are triggered by the ``run`` method of
|
||||
the decorator object, that will forward the keyword arguments passed
|
||||
as input.
|
||||
"""
|
||||
import collections
|
||||
import itertools
|
||||
|
||||
try:
|
||||
from collections.abc import Sequence # novm
|
||||
except ImportError:
|
||||
from collections import Sequence
|
||||
|
||||
#: Map an audit tag to a list of callables implementing checks
|
||||
CALLBACKS = {}
|
||||
|
||||
#: Map a group of checks to the list of related audit tags
|
||||
GROUPS = collections.defaultdict(list)
|
||||
|
||||
|
||||
class Error(object):
|
||||
"""Information on an error reported in a test."""
|
||||
def __init__(self, summary, details):
|
||||
self.summary = summary
|
||||
self.details = tuple(details)
|
||||
|
||||
def __str__(self):
|
||||
return self.summary + '\n' + '\n'.join([
|
||||
' ' + detail for detail in self.details
|
||||
])
|
||||
|
||||
def __eq__(self, other):
|
||||
if self.summary != other.summary or self.details != other.details:
|
||||
return False
|
||||
return True
|
||||
|
||||
def __hash__(self):
|
||||
value = (self.summary, self.details)
|
||||
return hash(value)
|
||||
|
||||
|
||||
class AuditClass(Sequence):
|
||||
def __init__(self, group, tag, description, kwargs):
|
||||
"""Return an object that acts as a decorator to register functions
|
||||
associated with a specific class of sanity checks.
|
||||
|
||||
Args:
|
||||
group (str): group in which this check is to be inserted
|
||||
tag (str): tag uniquely identifying the class of sanity checks
|
||||
description (str): description of the sanity checks performed
|
||||
by this tag
|
||||
kwargs (tuple of str): keyword arguments that each registered
|
||||
function needs to accept
|
||||
"""
|
||||
if tag in CALLBACKS:
|
||||
msg = 'audit class "{0}" already registered'
|
||||
raise ValueError(msg.format(tag))
|
||||
|
||||
self.group = group
|
||||
self.tag = tag
|
||||
self.description = description
|
||||
self.kwargs = kwargs
|
||||
self.callbacks = []
|
||||
|
||||
# Init the list of hooks
|
||||
CALLBACKS[self.tag] = self
|
||||
|
||||
# Update the list of tags in the group
|
||||
GROUPS[self.group].append(self.tag)
|
||||
|
||||
def __call__(self, func):
|
||||
self.callbacks.append(func)
|
||||
|
||||
def __getitem__(self, item):
|
||||
return self.callbacks[item]
|
||||
|
||||
def __len__(self):
|
||||
return len(self.callbacks)
|
||||
|
||||
def run(self, **kwargs):
|
||||
msg = 'please pass "{0}" as keyword arguments'
|
||||
msg = msg.format(', '.join(self.kwargs))
|
||||
assert set(self.kwargs) == set(kwargs), msg
|
||||
|
||||
errors = []
|
||||
kwargs['error_cls'] = Error
|
||||
for fn in self.callbacks:
|
||||
errors.extend(fn(**kwargs))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
def run_group(group, **kwargs):
    """Run the checks that are part of the group passed as argument.

    Args:
        group (str): group of checks to be run
        **kwargs: keyword arguments forwarded to the checks

    Returns:
        List of (tag, errors) tuples, one for each check in the group.
    """
    reports = []
    for check in GROUPS[group]:
        errors = run_check(check, **kwargs)
        reports.append((check, errors))
    return reports


def run_check(tag, **kwargs):
    """Run the checks associated with a single tag.

    Args:
        tag (str): tag of the check
        **kwargs: keyword arguments forwarded to the checks

    Returns:
        Errors that occurred during the checks
    """
    return CALLBACKS[tag].run(**kwargs)
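
End to end, the API above is used by instantiating an ``AuditClass``,
decorating check functions with it, and calling ``run_group`` or
``run_check``. A minimal sketch, assuming a hypothetical 'demo' group and
'DEMO' tag:

demo = AuditClass(
    group='demo',
    tag='DEMO',
    description='Demonstration checks',
    kwargs=('pkgs',)
)


@demo
def _non_empty_names(pkgs, error_cls):
    # Callbacks receive the forwarded kwargs plus error_cls and return a
    # (possibly empty) list of errors
    return [error_cls(summary='empty package name', details=[])
            for name in pkgs if not name]


reports = run_group('demo', pkgs=['zlib', ''])  # -> [('DEMO', [<one Error>])]
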

# TODO: For the generic check to be useful for end users,
# TODO: we need to implement hooks as described in
# TODO: https://github.com/spack/spack/pull/23053/files#r630265011
#: Generic checks relying on global state
generic = AuditClass(
    group='generic',
    tag='GENERIC',
    description='Generic checks relying on global variables',
    kwargs=()
)


#: Sanity checks on compilers.yaml
config_compiler = AuditClass(
    group='configs',
    tag='CFG-COMPILER',
    description='Sanity checks on compilers.yaml',
    kwargs=()
)


@config_compiler
def _search_duplicate_compilers(error_cls):
    """Report compilers with the same spec and two different definitions"""
    import spack.config

    errors = []
    compilers = list(sorted(
        spack.config.get('compilers'), key=lambda x: x['compiler']['spec']
    ))
    for spec, group in itertools.groupby(
            compilers, key=lambda x: x['compiler']['spec']
    ):
        group = list(group)
        if len(group) == 1:
            continue

        error_msg = 'Compiler defined multiple times: {0}'
        try:
            details = [str(x._start_mark).strip() for x in group]
        except Exception:
            details = []
        errors.append(error_cls(
            summary=error_msg.format(spec), details=details
        ))

    return errors
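
The sort-then-group idiom above is what makes the duplicate detection work:
``itertools.groupby`` only merges adjacent items, so the input must first be
sorted on the same key. A standalone sketch:

import itertools

entries = [{'spec': 'gcc@9.3.0'}, {'spec': 'clang@12.0'}, {'spec': 'gcc@9.3.0'}]
entries = sorted(entries, key=lambda x: x['spec'])
for spec, group in itertools.groupby(entries, key=lambda x: x['spec']):
    if len(list(group)) > 1:
        print('defined multiple times:', spec)  # -> gcc@9.3.0
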

#: Sanity checks on packages.yaml
config_packages = AuditClass(
    group='configs',
    tag='CFG-PACKAGES',
    description='Sanity checks on packages.yaml',
    kwargs=()
)


@config_packages
def _search_duplicate_specs_in_externals(error_cls):
    """Search for duplicate specs declared as externals"""
    import spack.config
    import spack.spec  # needed below to normalize the external specs

    errors, externals = [], collections.defaultdict(list)
    packages_yaml = spack.config.get('packages')

    for name, pkg_config in packages_yaml.items():
        # No externals can be declared under all
        if name == 'all' or 'externals' not in pkg_config:
            continue

        current_externals = pkg_config['externals']
        for entry in current_externals:
            # Ask for the string representation of the spec to normalize
            # aspects of the spec that may be represented in multiple ways
            # e.g. +foo or foo=true
            key = str(spack.spec.Spec(entry['spec']))
            externals[key].append(entry)

    for spec, entries in sorted(externals.items()):
        # If there's a single external for a spec we are fine
        if len(entries) < 2:
            continue

        # Otherwise we need to report an error
        error_msg = 'Multiple externals share the same spec: {0}'.format(spec)
        try:
            lines = [str(x._start_mark).strip() for x in entries]
            details = [
                'Please remove all but one of the following entries:'
            ] + lines + [
                'as they might result in non-deterministic hashes'
            ]
        except TypeError:
            details = []

        errors.append(error_cls(summary=error_msg, details=details))

    return errors
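
Normalizing the key before grouping is what catches duplicates written in
different but equivalent spellings; in the check above the normalizer is
``str(spack.spec.Spec(...))``. A self-contained sketch with a stand-in
normalizer (not Spack's API):

import collections


def normalize(spec_string):
    # Stand-in for str(spack.spec.Spec(...)): fold 'foo=true' into '+foo'
    return spec_string.replace('foo=true', '+foo')


externals = collections.defaultdict(list)
for entry in ['libelf@0.8.13 +foo', 'libelf@0.8.13 foo=true']:
    externals[normalize(entry)].append(entry)

duplicates = {k: v for k, v in externals.items() if len(v) > 1}
assert list(duplicates) == ['libelf@0.8.13 +foo']
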

#: Sanity checks on package directives
package_directives = AuditClass(
    group='packages',
    tag='PKG-DIRECTIVES',
    description='Sanity checks on specs used in directives',
    kwargs=('pkgs',)
)


@package_directives
def _unknown_variants_in_directives(pkgs, error_cls):
    """Report unknown or wrong variants in directives for this package"""
    import llnl.util.lang

    import spack.repo
    import spack.spec

    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)

        # Check "conflicts" directive
        for conflict, triggers in pkg.conflicts.items():
            for trigger, _ in triggers:
                vrn = spack.spec.Spec(conflict)
                try:
                    vrn.constrain(trigger)
                except Exception as e:
                    msg = 'Generic error in conflict for package "{0}": '
                    errors.append(error_cls(msg.format(pkg.name), [str(e)]))
                    continue
                errors.extend(_analyze_variants_in_directive(
                    pkg, vrn, directive='conflicts', error_cls=error_cls
                ))

        # Check "depends_on" directive
        for _, triggers in pkg.dependencies.items():
            triggers = list(triggers)
            for trigger in triggers:
                vrn = spack.spec.Spec(trigger)
                errors.extend(_analyze_variants_in_directive(
                    pkg, vrn, directive='depends_on', error_cls=error_cls
                ))

        # Check "patch" directive
        for _, triggers in pkg.provided.items():
            triggers = [spack.spec.Spec(x) for x in triggers]
            for vrn in triggers:
                errors.extend(_analyze_variants_in_directive(
                    pkg, vrn, directive='patch', error_cls=error_cls
                ))

        # Check "resource" directive
        for vrn in pkg.resources:
            errors.extend(_analyze_variants_in_directive(
                pkg, vrn, directive='resource', error_cls=error_cls
            ))

    return llnl.util.lang.dedupe(errors)


@package_directives
def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    import spack.repo
    import spack.spec

    errors = []
    for pkg_name in pkgs:
        pkg = spack.repo.get(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        for dependency_name, dependency_data in pkg.dependencies.items():
            # No need to analyze virtual packages
            if spack.repo.path.is_virtual(dependency_name):
                continue

            try:
                dependency_pkg = spack.repo.get(dependency_name)
            except spack.repo.UnknownPackageError:
                # This dependency is completely missing, so report
                # and continue the analysis
                summary = (pkg_name + ": unknown package '{0}' in "
                           "'depends_on' directive".format(dependency_name))
                details = [
                    " in " + filename
                ]
                errors.append(error_cls(summary=summary, details=details))
                continue

            for _, dependency_edge in dependency_data.items():
                dependency_variants = dependency_edge.spec.variants
                for name, value in dependency_variants.items():
                    try:
                        dependency_pkg.variants[name].validate_or_raise(
                            value, pkg=dependency_pkg
                        )
                    except Exception as e:
                        summary = (pkg_name + ": wrong variant used for a "
                                   "dependency in a 'depends_on' directive")
                        error_msg = str(e).strip()
                        if isinstance(e, KeyError):
                            error_msg = ('the variant {0} does not '
                                         'exist'.format(error_msg))
                        error_msg += " in package '" + dependency_name + "'"

                        errors.append(error_cls(
                            summary=summary,
                            details=[error_msg, 'in ' + filename]
                        ))

    return errors


def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    import spack.repo
    import spack.variant

    variant_exceptions = (
        spack.variant.InconsistentValidationError,
        spack.variant.MultipleValuesInExclusiveVariantError,
        spack.variant.InvalidVariantValueError,
        KeyError
    )
    errors = []
    for name, v in constraint.variants.items():
        try:
            pkg.variants[name].validate_or_raise(v, pkg=pkg)
        except variant_exceptions as e:
            summary = pkg.name + ': wrong variant in "{0}" directive'
            summary = summary.format(directive)
            filename = spack.repo.path.filename_for_package_name(pkg.name)

            error_msg = str(e).strip()
            if isinstance(e, KeyError):
                error_msg = 'the variant {0} does not exist'.format(error_msg)

            err = error_cls(summary=summary, details=[
                error_msg, 'in ' + filename
            ])

            errors.append(err)

    return errors
@@ -4,22 +4,20 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import codecs
import glob
import hashlib
import json
import os
import re
import shutil
import sys
import tarfile
import tempfile
from contextlib import closing

import ruamel.yaml as yaml
from ordereddict_backport import OrderedDict
from six.moves.urllib.error import HTTPError, URLError

import llnl.util.lang
import llnl.util.tty as tty
@@ -29,19 +27,18 @@
import spack.config as config
import spack.database as spack_db
import spack.fetch_strategy as fs
import spack.mirror
import spack.relocate as relocate
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
from spack.spec import Spec
from spack.stage import Stage


_build_cache_relative_path = 'build_cache'
_build_cache_keys_relative_path = '_pgp'

@@ -5,6 +5,7 @@
import contextlib
import os
import sys

try:
    import sysconfig  # novm
except ImportError:
@@ -33,44 +33,52 @@
calls you can make from within the install() function.
"""
import inspect
import multiprocessing
import os
import re
import shutil
import sys
import traceback
import types

from six import StringIO

import llnl.util.tty as tty
from llnl.util.filesystem import install, install_tree, mkdirp
from llnl.util.lang import dedupe
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd

import spack.architecture as arch
import spack.build_systems.cmake
import spack.build_systems.meson
import spack.config
import spack.install_test
import spack.main
import spack.package
import spack.paths
import spack.repo
import spack.schema.environment
import spack.store
import spack.subprocess_context
import spack.util.path
from spack.error import NoHeadersError, NoLibrariesError
from spack.util.cpus import cpus_available
from spack.util.environment import (
    EnvironmentModifications,
    env_flag,
    filter_system_paths,
    get_path,
    is_system_path,
    preserve_environment,
    system_dirs,
    validate,
)
from spack.util.executable import Executable
from spack.util.log_parse import make_log_context, parse_log_events
from spack.util.module_cmd import load_module, module, path_from_modules
from spack.util.string import plural

#
# This can be set by the user to globally disable parallel builds.
#
@@ -78,7 +86,7 @@

#
# These environment variables are set by
# set_wrapper_variables and used to pass parameters to
# Spack's compiler wrappers.
#
SPACK_ENV_PATH = 'SPACK_ENV_PATH'
@@ -159,6 +167,12 @@ def clean_environment():
    env.unset('CPLUS_INCLUDE_PATH')
    env.unset('OBJC_INCLUDE_PATH')

    env.unset('CMAKE_PREFIX_PATH')

    # Avoid having libraries of build dependencies hijacked via preloading
    env.unset('LD_PRELOAD')
    env.unset('DYLD_INSERT_LIBRARIES')

    # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
    # interference with Spack dependencies.
    # CNL requires these variables to be set (or at least some of them,
@@ -306,111 +320,20 @@ def set_compiler_environment_variables(pkg, env):
    return env


def set_wrapper_variables(pkg, env):
    """Set environment variables used by the Spack compiler wrapper
    (which have the prefix `SPACK_`) and also add the compiler wrappers
    to PATH.

    This determines the injected -L/-I/-rpath options; each
    of these specifies a search order and this function computes these
    options in a manner that is intended to match the DAG traversal order
    in `modifications_from_dependencies`: that method uses a post-order
    traversal so that `PrependPath` actions from dependencies take lower
    precedence; we use a post-order traversal here to match the visitation
    order of `modifications_from_dependencies` (so we are visiting the
    lowest priority packages first).
    """
    # Set environment variables if specified for
    # the given compiler
    compiler = pkg.compiler
@@ -420,16 +343,6 @@ def set_build_environment_variables(pkg, env, dirty):
    extra_rpaths = ':'.join(compiler.extra_rpaths)
    env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)

    # Add spack build environment path with compiler wrappers first in
    # the path. We add the compiler wrapper path, which includes default
    # wrappers (cc, c++, f77, f90), AND a subdirectory containing
@@ -449,6 +362,7 @@ def set_build_environment_variables(pkg, env, dirty):
        if os.path.isdir(ci):
            env_paths.append(ci)

    tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
    for item in env_paths:
        env.prepend_path('PATH', item)
    env.set_path(SPACK_ENV_PATH, env_paths)
@@ -467,14 +381,69 @@ def set_build_environment_variables(pkg, env, dirty):
        raise RuntimeError("No ccache binary found in PATH")
    env.set(SPACK_CCACHE_BINARY, ccache)

    # Gather information about various types of dependencies
    link_deps = set(pkg.spec.traverse(root=False, deptype=('link')))
    rpath_deps = get_rpath_deps(pkg)

    link_dirs = []
    include_dirs = []
    rpath_dirs = []

    def _prepend_all(list_to_modify, items_to_add):
        # Update the original list (creating a new list would be faster but
        # may not be convenient)
        for item in reversed(list(items_to_add)):
            list_to_modify.insert(0, item)

    def update_compiler_args_for_dep(dep):
        if dep in link_deps and (not is_system_path(dep.prefix)):
            query = pkg.spec[dep.name]
            dep_link_dirs = list()
            try:
                dep_link_dirs.extend(query.libs.directories)
            except NoLibrariesError:
                tty.debug("No libraries found for {0}".format(dep.name))

            for default_lib_dir in ['lib', 'lib64']:
                default_lib_prefix = os.path.join(
                    dep.prefix, default_lib_dir)
                if os.path.isdir(default_lib_prefix):
                    dep_link_dirs.append(default_lib_prefix)

            _prepend_all(link_dirs, dep_link_dirs)
            if dep in rpath_deps:
                _prepend_all(rpath_dirs, dep_link_dirs)

            try:
                _prepend_all(include_dirs, query.headers.directories)
            except NoHeadersError:
                tty.debug("No headers found for {0}".format(dep.name))

    for dspec in pkg.spec.traverse(root=False, order='post'):
        if dspec.external:
            update_compiler_args_for_dep(dspec)

    # Just above, we prepended entries for -L/-rpath for externals. We
    # now do this for non-external packages so that Spack-built packages
    # are searched first for libraries etc.
    for dspec in pkg.spec.traverse(root=False, order='post'):
        if not dspec.external:
            update_compiler_args_for_dep(dspec)

    # The top-level package is always RPATHed. It hasn't been installed yet
    # so the RPATHs are added unconditionally (e.g. even though lib64/ may
    # not be created for the install).
    for libdir in ['lib64', 'lib']:
        lib_path = os.path.join(pkg.prefix, libdir)
        rpath_dirs.insert(0, lib_path)

    link_dirs = list(dedupe(filter_system_paths(link_dirs)))
    include_dirs = list(dedupe(filter_system_paths(include_dirs)))
    rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))

    env.set(SPACK_LINK_DIRS, ':'.join(link_dirs))
    env.set(SPACK_INCLUDE_DIRS, ':'.join(include_dirs))
    env.set(SPACK_RPATH_DIRS, ':'.join(rpath_dirs))
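
The reversed insertion in ``_prepend_all`` keeps each batch of directories in
its original relative order while still pushing the whole batch ahead of
everything collected so far; combined with the two post-order traversals
above, later (higher priority) visits therefore end up earlier in the final
search order. A standalone sketch:

def prepend_all(list_to_modify, items_to_add):
    # Inserting in reverse keeps items_to_add in order at the front
    for item in reversed(list(items_to_add)):
        list_to_modify.insert(0, item)


dirs = ['/spack/zlib/lib']
prepend_all(dirs, ['/dep/a/lib', '/dep/b/lib'])
assert dirs == ['/dep/a/lib', '/dep/b/lib', '/spack/zlib/lib']
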
def determine_number_of_jobs(
@@ -712,15 +681,6 @@ def get_rpaths(pkg):
    return list(dedupe(filter_system_paths(rpaths)))


def get_std_cmake_args(pkg):
    """List of standard arguments used if a package is a CMakePackage.
@@ -788,42 +748,40 @@ def load_external_modules(pkg):

def setup_package(pkg, dirty, context='build'):
    """Execute all environment setup routines."""
    if context not in ['build', 'test']:
        raise ValueError(
            "'context' must be one of ['build', 'test'] - got: {0}"
            .format(context))

    set_module_variables_for_package(pkg)

    env = EnvironmentModifications()

    if not dirty:
        clean_environment()

    # setup compilers for build contexts
    need_compiler = context == 'build' or (context == 'test' and
                                           pkg.test_requires_compiler)
    if need_compiler:
        set_compiler_environment_variables(pkg, env)
        set_wrapper_variables(pkg, env)

    env.extend(modifications_from_dependencies(
        pkg.spec, context, custom_mods_only=False))

    # architecture specific setup
    pkg.architecture.platform.setup_platform_environment(pkg, env)

    if context == 'build':
        pkg.setup_build_environment(env)

        if (not dirty) and (not env.is_unset('CPATH')):
            tty.debug("A dependency has updated CPATH, this may lead pkg-"
                      "config to assume that the package is part of the system"
                      " includes and omit it when invoked with '--cflags'.")
    elif context == 'test':
        import spack.user_environment as uenv  # avoid circular import
        env.extend(uenv.environment_modifications_for_spec(pkg.spec))
        pkg.setup_run_environment(env)
        env.prepend_path('PATH', '.')

    # Loading modules, in particular if they are meant to be used outside
@@ -865,39 +823,173 @@ def setup_package(pkg, dirty, context='build'):
    env.apply_modifications()


def _make_runnable(pkg, env):
    # Helper method which prepends a Package's bin/ prefix to the PATH
    # environment variable
    prefix = pkg.prefix

    for dirname in ['bin', 'bin64']:
        bin_dir = os.path.join(prefix, dirname)
        if os.path.isdir(bin_dir):
            env.prepend_path('PATH', bin_dir)


def modifications_from_dependencies(spec, context, custom_mods_only=True):
    """Returns the environment modifications that are required by
    the dependencies of a spec and also applies modifications
    to this spec's package at module scope, if need be.

    Environment modifications include:

    - Updating PATH so that executables can be found
    - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
      tools can find Spack-built dependencies
    - Running custom package environment modifications

    Custom package modifications can conflict with the default PATH changes
    we make (specifically for the PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH
    environment variables), so this applies changes in a fixed order:

    - All modifications (custom and default) from external deps first
    - All modifications from non-external deps afterwards

    With that order, `PrependPath` actions from non-external default
    environment modifications will take precedence over custom modifications
    from external packages.

    A secondary constraint is that custom and default modifications are
    grouped on a per-package basis: combined with the post-order traversal this
    means that default modifications of dependents can override custom
    modifications of dependencies (again, this would only occur for PATH,
    CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).

    Args:
        spec (Spec): spec for which we want the modifications
        context (str): either 'build' for build-time modifications or 'run'
            for run-time modifications
    """
    if context not in ['build', 'run', 'test']:
        raise ValueError(
            "Expecting context to be one of ['build', 'run', 'test'], "
            "got: {0}".format(context))

    env = EnvironmentModifications()

    # Note: see computation of 'custom_mod_deps' and 'exe_deps' later in this
    # function; these sets form the building blocks of those collections.
    build_deps = set(spec.dependencies(deptype=('build', 'test')))
    link_deps = set(spec.traverse(root=False, deptype='link'))
    build_link_deps = build_deps | link_deps
    build_and_supporting_deps = set()
    for build_dep in build_deps:
        build_and_supporting_deps.update(build_dep.traverse(deptype='run'))
    run_and_supporting_deps = set(
        spec.traverse(root=False, deptype=('run', 'link')))
    test_and_supporting_deps = set()
    for test_dep in set(spec.dependencies(deptype='test')):
        test_and_supporting_deps.update(test_dep.traverse(deptype='run'))

    # All dependencies that might have environment modifications to apply
    custom_mod_deps = set()
    if context == 'build':
        custom_mod_deps.update(build_and_supporting_deps)
        # Tests may be performed after build
        custom_mod_deps.update(test_and_supporting_deps)
    else:
        # test/run context
        custom_mod_deps.update(run_and_supporting_deps)
        if context == 'test':
            custom_mod_deps.update(test_and_supporting_deps)
    custom_mod_deps.update(link_deps)

    # Determine 'exe_deps': the set of packages with binaries we want to use
    if context == 'build':
        exe_deps = build_and_supporting_deps | test_and_supporting_deps
    elif context == 'run':
        exe_deps = set(spec.traverse(deptype='run'))
    elif context == 'test':
        exe_deps = test_and_supporting_deps

    def default_modifications_for_dep(dep):
        if (dep in build_link_deps and
                not is_system_path(dep.prefix) and
                context == 'build'):
            prefix = dep.prefix

            env.prepend_path('CMAKE_PREFIX_PATH', prefix)

            for directory in ('lib', 'lib64', 'share'):
                pcdir = os.path.join(prefix, directory, 'pkgconfig')
                if os.path.isdir(pcdir):
                    env.prepend_path('PKG_CONFIG_PATH', pcdir)

        if dep in exe_deps and not is_system_path(dep.prefix):
            _make_runnable(dep, env)

    def add_modifications_for_dep(dep):
        # Some callers of this function only want the custom modifications.
        # For callers that want both custom and default modifications, we want
        # to perform the default modifications here (this groups custom
        # and default modifications together on a per-package basis).
        if not custom_mods_only:
            default_modifications_for_dep(dep)

        # Perform custom modifications here (PrependPath actions performed in
        # the custom method override the default environment modifications
        # we do to help the build, namely for PATH, CMAKE_PREFIX_PATH, and
        # PKG_CONFIG_PATH)
        if dep in custom_mod_deps:
            dpkg = dep.package
            set_module_variables_for_package(dpkg)
            # Allow dependencies to modify the module
            dpkg.setup_dependent_package(spec.package.module, spec)
            if context == 'build':
                dpkg.setup_dependent_build_environment(env, spec)
            else:
                dpkg.setup_dependent_run_environment(env, spec)

    # Note that we want to perform environment modifications in a fixed order.
    # The Spec.traverse method provides this: i.e. in addition to
    # the post-order semantics, it also guarantees a fixed traversal order
    # among dependencies which are not constrained by post-order semantics.
    for dspec in spec.traverse(root=False, order='post'):
        if dspec.external:
            add_modifications_for_dep(dspec)

    for dspec in spec.traverse(root=False, order='post'):
        # Default env modifications for non-external packages can override
        # custom modifications of external packages (this can only occur
        # for modifications to PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH)
        if not dspec.external:
            add_modifications_for_dep(dspec)

    return env


def get_cmake_prefix_path(pkg):
    # Note that unlike modifications_from_dependencies, this does not include
    # any edits to CMAKE_PREFIX_PATH defined in custom
    # setup_dependent_build_environment implementations of dependency packages
    build_deps = set(pkg.spec.dependencies(deptype=('build', 'test')))
    link_deps = set(pkg.spec.traverse(root=False, deptype=('link')))
    build_link_deps = build_deps | link_deps
    spack_built = []
    externals = []
    # modifications_from_dependencies updates CMAKE_PREFIX_PATH by first
    # prepending all externals and then all non-externals
    for dspec in pkg.spec.traverse(root=False, order='post'):
        if dspec in build_link_deps:
            if dspec.external:
                externals.insert(0, dspec)
            else:
                spack_built.insert(0, dspec)

    ordered_build_link_deps = spack_built + externals
    build_link_prefixes = filter_system_paths(
        x.prefix for x in ordered_build_link_deps)
    return build_link_prefixes
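
The two ``insert(0, ...)`` branches above reproduce, for CMAKE_PREFIX_PATH,
the precedence that ``modifications_from_dependencies`` establishes:
Spack-built prefixes first, externals last, each group in reverse post-order.
A sketch with hypothetical spec names:

post_order = ['ext-a', 'built-a', 'ext-b', 'built-b']  # hypothetical traversal
spack_built, externals = [], []
for name in post_order:
    (externals if name.startswith('ext') else spack_built).insert(0, name)

assert spack_built + externals == ['built-b', 'built-a', 'ext-b', 'ext-a']
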

def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
                       input_multiprocess_fd):

@@ -6,6 +6,7 @@
# Why doesn't this work for me?
# from spack import *
from llnl.util.filesystem import filter_file

from spack.build_systems.autotools import AutotoolsPackage
from spack.directives import extends
from spack.package import ExtensionError
@@ -7,13 +7,13 @@
import os
import os.path
import stat
from subprocess import PIPE, check_call
from typing import List  # novm

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.filesystem import force_remove, working_dir

from spack.package import PackageBase, run_after, run_before
from spack.util.executable import Executable
@@ -345,8 +345,11 @@ def build(self, spec, prefix):
        """Makes the build targets specified by
        :py:attr:`~.AutotoolsPackage.build_targets`
        """
        # See https://autotools.io/automake/silent.html
        params = ['V=1']
        params += self.build_targets
        with working_dir(self.build_directory):
            inspect.getmodule(self).make(*params)

    def install(self, spec, prefix):
        """Makes the install targets specified by
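
With the change above the generated command becomes ``make V=1 <targets>``;
``V=1`` defeats automake's silent rules so full compiler command lines appear
in build logs. A sketch of the resulting invocation, assuming
``build_targets == ['all']``:

params = ['V=1'] + ['all']
print('make ' + ' '.join(params))  # -> make V=1 all
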
@@ -4,8 +4,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

import llnl.util.tty as tty
from llnl.util.filesystem import install, mkdirp

from spack.build_systems.cmake import CMakePackage
from spack.package import run_after
@@ -10,10 +10,11 @@
import re
from typing import List  # novm

from llnl.util.filesystem import working_dir

import spack.build_environment
from spack.directives import conflicts, depends_on, variant
from spack.package import InstallError, PackageBase, run_after

# Regex to extract the primary generator from the CMake generator
# string.
@@ -3,10 +3,9 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.package import PackageBase


class CudaPackage(PackageBase):
@@ -79,47 +78,46 @@ def cuda_flags(arch_list):
    depends_on('cuda@11.0:', when='cuda_arch=80')
    depends_on('cuda@11.1:', when='cuda_arch=86')

    # From the NVIDIA install guide we know of conflicts for particular
    # platforms (linux, darwin), architectures (x86, powerpc) and compilers
    # (gcc, clang). We don't restrict %gcc and %clang conflicts to
    # platform=linux, since they should also apply to platform=cray, and may
    # apply to platform=darwin. We currently do not provide conflicts for
    # platform=darwin with %apple-clang.

    # Linux x86_64 compiler conflicts from here:
    # https://gist.github.com/ax3l/9489132
    conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
    conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
    conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
    conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
    conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
    conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=x86_64:')
    conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
    conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')
    conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
    conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5 target=x86_64:')
    conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
    conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1 target=x86_64:')
    conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10 target=x86_64:')
    conflicts('%pgi@:17,20:',
              when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
    conflicts('%pgi@:17,21:',
              when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
    conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
    conflicts('%clang@:3.7,4:',
              when='+cuda ^cuda@8.0:9.0 target=x86_64:')
    conflicts('%clang@:3.7,4.1:',
              when='+cuda ^cuda@9.1 target=x86_64:')
    conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2 target=x86_64:')
    conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130 target=x86_64:')
    conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105 target=x86_64:')
    conflicts('%clang@:3.7,8.1:',
              when='+cuda ^cuda@10.1.105:10.1.243 target=x86_64:')
    conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89 target=x86_64:')
    conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=x86_64:')
    conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
    conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')

    # x86_64 vs. ppc64le differ according to NVidia docs
    # Linux ppc64le compiler conflicts from Table from the docs below:
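
    # Reading the version ranges in these conflict tables (existing Spack
    # syntax, examples only):
    #   '@:8'      matches any version <= 8 (so cuda@:8 is "CUDA 8 or older")
    #   '@11.0.2:' matches any version >= 11.0.2
    #   '%gcc@6:'  matches builds using gcc 6 or newer
    # e.g. conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:') forbids
    # gcc >= 6 when building a +cuda package against CUDA 8 or older on x86_64.
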
@@ -129,27 +127,26 @@ def cuda_flags(arch_list):
    # https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html

    # information prior to CUDA 9 difficult to find
    conflicts('%gcc@6:', when='+cuda ^cuda@:9 target=ppc64le:')
    conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
    conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
    # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
    conflicts('%gcc@:4', when='+cuda ^cuda@11.0.2: target=ppc64le:')
    conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.3 target=ppc64le:')
    conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
    conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
    conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
    conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
    conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176 target=ppc64le:')
    conflicts('%clang@5:', when='+cuda ^cuda@:9.1 target=ppc64le:')
    conflicts('%clang@6:', when='+cuda ^cuda@:9.2 target=ppc64le:')
    conflicts('%clang@7:', when='+cuda ^cuda@10.0.130 target=ppc64le:')
    conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
    conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
    conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=ppc64le:')
    conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=ppc64le:')
    conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')

    # Intel is mostly relevant for x86_64 Linux, even though it also
    # exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
@@ -171,15 +168,8 @@ def cuda_flags(arch_list):
    conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
    conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')

    # Darwin.
    # TODO: add missing conflicts for %apple-clang cuda@:10
    conflicts('platform=darwin', when='+cuda ^cuda@11.0.2:')

    # Make sure cuda_arch can not be used without +cuda
@@ -3,8 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.package
import spack.util.url


class GNUMirrorPackage(spack.package.PackageBase):
@@ -4,26 +4,32 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


import glob
import inspect
import os
import re
import sys
import tempfile
import xml.etree.ElementTree as ElementTree

import llnl.util.tty as tty
from llnl.util.filesystem import (
    HeaderList,
    LibraryList,
    ancestor,
    filter_file,
    find_headers,
    find_libraries,
    find_system_libraries,
    install,
)

from spack.build_environment import dso_suffix
from spack.package import InstallError, PackageBase, run_after
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
from spack.util.prefix import Prefix
from spack.version import Version, ver

# A couple of utility functions that might be useful in general. If so, they
# should really be defined elsewhere, unless deemed heretical.
@@ -1089,7 +1095,7 @@ def _setup_dependent_env_callback(
        # Intel MPI since 2019 depends on libfabric which is not in the
        # lib directory but in a directory of its own which should be
        # included in the rpath
        if self.version_yearlike >= ver('2019'):
            d = ancestor(self.component_lib_dir('mpi'))
            libfabrics_path = os.path.join(d, 'libfabric', 'lib')
            env.append_path('SPACK_COMPILER_EXTRA_RPATHS',
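
The guard above now compares a normalized, year-style version. Presumably
this is because Intel components carry both compiler-style (e.g. 19.0.4) and
year-style (e.g. 2019.4) numbering; a sketch of why the raw form can miss the
branch (illustrative tuples, not Spack's Version type):

raw = (19, 0, 4)       # compiler-style numbering of a 2019 release
yearlike = (2019, 4)   # the same release in year-style form
threshold = (2019,)

assert not raw >= threshold   # raw comparison would skip the libfabric rpath
assert yearlike >= threshold  # year-like comparison takes the branch
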
@@ -9,6 +9,7 @@

import llnl.util.tty as tty
from llnl.util.filesystem import working_dir

from spack.package import PackageBase, run_after


@@ -5,6 +5,7 @@


from llnl.util.filesystem import install_tree, working_dir

from spack.directives import depends_on
from spack.package import PackageBase, run_after
from spack.util.executable import which
@@ -9,6 +9,7 @@
from typing import List  # novm

from llnl.util.filesystem import working_dir

from spack.directives import depends_on, variant
from spack.package import PackageBase, run_after

@@ -101,9 +102,9 @@ def _std_args(pkg):

        strip = 'true' if '+strip' in pkg.spec else 'false'

        if 'default_library=static,shared' in pkg.spec:
            default_library = 'both'
        elif 'default_library=static' in pkg.spec:
            default_library = 'static'
        else:
            default_library = 'shared'
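
The rename above (``libs`` to ``default_library``) keeps the same mapping
onto Meson's ``default_library`` option; note that the combined value has to
be tested first, since ``'default_library=static'`` is also a substring of
``'default_library=static,shared'``. A standalone sketch:

def meson_default_library(spec_string):
    # Order matters: test the combined value before the plain 'static' one
    if 'default_library=static,shared' in spec_string:
        return 'both'
    elif 'default_library=static' in spec_string:
        return 'static'
    return 'shared'


assert meson_default_library('meson default_library=static,shared') == 'both'
assert meson_default_library('meson default_library=static') == 'static'
assert meson_default_library('meson default_library=shared') == 'shared'
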
@@ -8,16 +8,16 @@
"""

import getpass
import platform
import shutil
from os.path import basename, dirname, isdir

from llnl.util.filesystem import find_headers, find_libraries, join_path

from spack.package import Package
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable


class IntelOneApiPackage(Package):
    """Base class for Intel oneAPI packages."""
@@ -48,7 +48,7 @@ def install(self, spec, prefix, installer_path=None):
        if installer_path is None:
            installer_path = basename(self.url_for_version(spec.version))

        if platform.system() == 'Linux':
            # Intel installer assumes and enforces that all components
            # are installed into a single prefix. Spack wants to
            # install each component in a separate prefix. The
@@ -7,10 +7,11 @@
import inspect
import os

from llnl.util.filesystem import filter_file

from spack.directives import extends
from spack.package import PackageBase, run_after
from spack.util.executable import Executable


class PerlPackage(PackageBase):
@@ -6,14 +6,20 @@
import os
import shutil

import llnl.util.tty as tty
from llnl.util.filesystem import (
    filter_file,
    find,
    get_filetype,
    path_contains_subdirectory,
    same_path,
    working_dir,
)
from llnl.util.lang import match_predicate

from spack.directives import extends
from spack.package import PackageBase, run_after


class PythonPackage(PackageBase):
    """Specialized class for packages that are built using Python
@@ -7,6 +7,7 @@
import inspect

from llnl.util.filesystem import working_dir

from spack.directives import depends_on
from spack.package import PackageBase, run_after
@@ -75,10 +75,9 @@
# does not like its directory structure.
#

import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.package import PackageBase


class ROCmPackage(PackageBase):
@@ -6,10 +6,11 @@
import inspect
import os

import llnl.util.tty as tty
from llnl.util.filesystem import find, join_path, working_dir

from spack.directives import depends_on, extends
from spack.package import PackageBase, run_after


class SIPPackage(PackageBase):
@@ -3,8 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.package
import spack.util.url


class SourceforgePackage(spack.package.PackageBase):
@@ -3,8 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.package
import spack.util.url


class SourcewarePackage(spack.package.PackageBase):
@@ -6,11 +6,11 @@

import inspect

from llnl.util.filesystem import working_dir

from spack.directives import depends_on
from spack.package import PackageBase, run_after


class WafPackage(PackageBase):
    """Specialized class for packages that are built using the
@@ -3,8 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.package
import spack.util.url


class XorgPackage(spack.package.PackageBase):
@@ -9,10 +9,10 @@
import llnl.util.lang
from llnl.util.filesystem import mkdirp

import spack.config
import spack.error
import spack.fetch_strategy
import spack.paths
import spack.util.file_cache
import spack.util.path

@@ -17,10 +17,10 @@
from six import iteritems
from six.moves.urllib.error import HTTPError, URLError
from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import HTTPHandler, Request, build_opener

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack
import spack.binary_distribution as bindist
@@ -28,18 +28,17 @@
import spack.compilers as compilers
import spack.config as cfg
import spack.environment as ev
import spack.main
import spack.mirror
import spack.paths
import spack.repo
import spack.util.executable as exe
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.error import SpackError
from spack.spec import Spec

JOB_RETRY_CONDITIONS = [
    'always',
@@ -81,7 +80,8 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
    if response_code != 200 and response_code != 201:
        msg = 'Creating buildgroup failed (response code = {0})'.format(
            response_code)
        tty.warn(msg)
        return None

    response_text = response.read()
    response_json = json.loads(response_text)
@@ -110,7 +110,8 @@ def populate_buildgroup(job_names, group_name, project, site,
    if not parent_group_id or not group_id:
        msg = 'Failed to create or retrieve buildgroups for {0}'.format(
            group_name)
        tty.warn(msg)
        return

    data = {
        'project': project,
@@ -133,7 +134,7 @@ def populate_buildgroup(job_names, group_name, project, site,
     if response_code != 200:
         msg = 'Error response code ({0}) in populate_buildgroup'.format(
             response_code)
-        raise SpackError(msg)
+        tty.warn(msg)


 def is_main_phase(phase_name):
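The common thread in the three hunks above: the CDash reporting helpers stop raising SpackError and instead warn and bail out, so a CDash hiccup can no longer abort pipeline generation. A minimal standalone sketch of that soft-fail shape (the names here are illustrative stand-ins, not spack's API):

```python
import warnings


def create_buildgroup(post, url, payload):
    """Soft-fail sketch: warn and return None instead of raising."""
    code, body = post(url, payload)  # 'post' is a stand-in HTTP helper
    if code not in (200, 201):
        # Before: raise an error; after: warn and let callers treat a
        # None result as "reporting unavailable".
        warnings.warn('Creating buildgroup failed (response code = {0})'.format(code))
        return None
    return body


if __name__ == '__main__':
    failing_post = lambda url, payload: (500, None)
    assert create_buildgroup(failing_post, 'https://cdash.example/api', {}) is None
```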
@@ -507,7 +508,7 @@ def format_job_needs(phase_name, strip_compilers, dep_jobs,
             'job': get_job_name(phase_name,
                                 strip_compilers,
                                 dep_job,
-                                osname,
+                                dep_job.architecture,
                                 build_group),
             'artifacts': enable_artifacts_buildcache,
         })
@@ -549,9 +550,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
     generate_job_name = os.environ.get('CI_JOB_NAME', None)
     parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)

-    is_pr_pipeline = (
-        os.environ.get('SPACK_IS_PR_PIPELINE', '').lower() == 'true'
-    )
+    spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
+    is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'

     spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
     pr_mirror_url = None
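The two ad-hoc environment flags above give way to a single SPACK_PIPELINE_TYPE variable from which the booleans are derived. A sketch of that derivation; the variable name and its 'spack_pull_request' value come from this hunk, 'spack_protected_branch' from the ci_rebuild hunk further down:

```python
import os

# One source of truth; unset or unknown values leave both flags False,
# e.g. for pipelines run on some other environment repository.
spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'
is_develop_pipeline = spack_pipeline_type == 'spack_protected_branch'

print(is_pr_pipeline, is_develop_pipeline)
```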
@@ -706,14 +706,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
             root_spec = spec_record['rootSpec']
             pkg_name = pkg_name_from_spec_label(spec_label)
             release_spec = root_spec[pkg_name]

-            # Check if this spec is in our list of known failures.
-            if broken_specs_url:
-                full_hash = release_spec.full_hash()
-                broken_spec_path = url_util.join(broken_specs_url, full_hash)
-                if web_util.url_exists(broken_spec_path):
-                    known_broken_specs_encountered.append('{0} ({1})'.format(
-                        release_spec, full_hash))
+            release_spec_full_hash = release_spec.full_hash()
+            release_spec_dag_hash = release_spec.dag_hash()
+            release_spec_build_hash = release_spec.build_hash()

             runner_attribs = find_matching_config(
                 release_spec, gitlab_ci)
@@ -746,8 +741,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                 job_script.insert(0, 'cd {0}'.format(concrete_env_dir))

             job_script.extend([
-                'spack ci rebuild --prepare',
-                './install.sh'
+                'spack ci rebuild'
             ])

             if 'script' in runner_attribs:
@@ -776,7 +770,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
             job_vars = {
                 'SPACK_ROOT_SPEC': format_root_spec(
                     root_spec, main_phase, strip_compilers),
-                'SPACK_JOB_SPEC_DAG_HASH': release_spec.dag_hash(),
+                'SPACK_JOB_SPEC_DAG_HASH': release_spec_dag_hash,
+                'SPACK_JOB_SPEC_BUILD_HASH': release_spec_build_hash,
+                'SPACK_JOB_SPEC_FULL_HASH': release_spec_full_hash,
                 'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
                 'SPACK_COMPILER_ACTION': compiler_action
             }
@@ -877,6 +873,15 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                 if prune_dag and not rebuild_spec:
                     continue

+                # Check if this spec is in our list of known failures, now that
+                # we know this spec needs a rebuild
+                if broken_specs_url:
+                    broken_spec_path = url_util.join(
+                        broken_specs_url, release_spec_full_hash)
+                    if web_util.url_exists(broken_spec_path):
+                        known_broken_specs_encountered.append('{0} ({1})'.format(
+                            release_spec, release_spec_full_hash))
+
             if artifacts_root:
                 job_dependencies.append({
                     'job': generate_job_name,
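Note the ordering change: the known-failures probe now runs only after pruning decides a spec actually needs a rebuild, so the URL round-trip is skipped for pruned specs. A standalone sketch of the existence check, using a stdlib HEAD request where spack uses web_util.url_exists:

```python
import posixpath
from urllib.error import URLError
from urllib.request import Request, urlopen


def is_known_broken(broken_specs_url, full_hash):
    """Probe <broken_specs_url>/<full_hash>; a hit marks a known failure."""
    probe = posixpath.join(broken_specs_url, full_hash)
    try:
        urlopen(Request(probe, method='HEAD'), timeout=10)
        return True
    except URLError:  # includes HTTPError, e.g. a 404 for unknown hashes
        return False


if __name__ == '__main__':
    print(is_known_broken('https://example.invalid/broken-specs', 'abc123'))
```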
@@ -1069,7 +1074,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
             'SPACK_JOB_LOG_DIR': rel_job_log_dir,
             'SPACK_JOB_REPRO_DIR': rel_job_repro_dir,
             'SPACK_LOCAL_MIRROR_DIR': rel_local_mirror_dir,
-            'SPACK_IS_PR_PIPELINE': str(is_pr_pipeline)
+            'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
         }

         if pr_mirror_url:
@@ -1256,7 +1261,8 @@ def register_cdash_build(build_name, base_url, project, site, track):

     if response_code != 200 and response_code != 201:
         msg = 'Adding build failed (response code = {0}'.format(response_code)
-        raise SpackError(msg)
+        tty.warn(msg)
+        return (None, None)

     response_text = response.read()
     response_json = json.loads(response_text)
@@ -1293,8 +1299,9 @@ def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,
                 tty.debug('Did not find cdashid for {0} on {1}'.format(
                     dep_pkg_name, url))
         else:
-            raise SpackError('Did not find cdashid for {0} anywhere'.format(
+            tty.warn('Did not find cdashid for {0} anywhere'.format(
                 dep_pkg_name))
+            return

         payload = {
             "project": cdash_project,
@@ -1315,7 +1322,8 @@ def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,
         if response_code != 200 and response_code != 201:
             msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
                 job_build_id, dep_build_id, response_code)
-            raise SpackError(msg)
+            tty.warn(msg)
+            return

         response_text = response.read()
         tty.debug('Relate builds response: {0}'.format(response_text))
@@ -1338,7 +1346,16 @@ def write_cdashid_to_mirror(cdashid, spec, mirror_url):
         tty.debug('pushing cdashid to url')
         tty.debug('  local file path: {0}'.format(local_cdash_path))
         tty.debug('  remote url: {0}'.format(remote_url))
-        web_util.push_to_url(local_cdash_path, remote_url)
+
+        try:
+            web_util.push_to_url(local_cdash_path, remote_url)
+        except Exception as inst:
+            # No matter what went wrong here, don't allow the pipeline to fail
+            # just because there was an issue storing the cdashid on the mirror
+            msg = 'Failed to write cdashid {0} to mirror {1}'.format(
+                cdashid, mirror_url)
+            tty.warn(inst)
+            tty.warn(msg)


 def read_cdashid_from_mirror(spec, mirror_url):
@@ -1356,40 +1373,34 @@ def read_cdashid_from_mirror(spec, mirror_url):
     return int(contents)


-def push_mirror_contents(env, spec, yaml_path, mirror_url, build_id,
-                         sign_binaries):
-    if mirror_url:
-        try:
-            unsigned = not sign_binaries
-            tty.debug('Creating buildcache ({0})'.format(
-                'unsigned' if unsigned else 'signed'))
-            spack.cmd.buildcache._createtarball(
-                env, spec_yaml=yaml_path, add_deps=False,
-                output_location=mirror_url, force=True, allow_root=True,
-                unsigned=unsigned)
-            if build_id:
-                tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
-                    build_id, mirror_url))
-                write_cdashid_to_mirror(build_id, spec, mirror_url)
-        except Exception as inst:
-            # If the mirror we're pushing to is on S3 and there's some
-            # permissions problem, for example, we can't just target
-            # that exception type here, since users of the
-            # `spack ci rebuild' may not need or want any dependency
-            # on boto3.  So we use the first non-boto exception type
-            # in the heirarchy:
-            #     boto3.exceptions.S3UploadFailedError
-            #     boto3.exceptions.Boto3Error
-            #     Exception
-            #     BaseException
-            #     object
-            err_msg = 'Error msg: {0}'.format(inst)
-            if 'Access Denied' in err_msg:
-                tty.msg('Permission problem writing to {0}'.format(
-                    mirror_url))
-                tty.msg(err_msg)
-            else:
-                raise inst
+def push_mirror_contents(env, spec, yaml_path, mirror_url, sign_binaries):
+    try:
+        unsigned = not sign_binaries
+        tty.debug('Creating buildcache ({0})'.format(
+            'unsigned' if unsigned else 'signed'))
+        spack.cmd.buildcache._createtarball(
+            env, spec_yaml=yaml_path, add_deps=False,
+            output_location=mirror_url, force=True, allow_root=True,
+            unsigned=unsigned)
+    except Exception as inst:
+        # If the mirror we're pushing to is on S3 and there's some
+        # permissions problem, for example, we can't just target
+        # that exception type here, since users of the
+        # `spack ci rebuild' may not need or want any dependency
+        # on boto3.  So we use the first non-boto exception type
+        # in the heirarchy:
+        #     boto3.exceptions.S3UploadFailedError
+        #     boto3.exceptions.Boto3Error
+        #     Exception
+        #     BaseException
+        #     object
+        err_msg = 'Error msg: {0}'.format(inst)
+        if 'Access Denied' in err_msg:
+            tty.msg('Permission problem writing to {0}'.format(
+                mirror_url))
+            tty.msg(err_msg)
+        else:
+            raise inst


 def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
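After this rewrite push_mirror_contents only pushes: the empty-URL guard and the cdashid write both move to the caller (see the ci_rebuild hunks below). A sketch of that caller-side composition, with plain callables standing in for the spack helpers:

```python
def push_and_record(push, write_cdashid, mirror_url, build_id):
    """Guard the URL, push, then optionally record the CDash build id;
    each step is now independent rather than hidden inside the push."""
    if not mirror_url:  # guard moved out of the push helper
        return
    push(mirror_url)  # create the buildcache entry
    if build_id:  # cdashid handling is the caller's job now
        write_cdashid(build_id, mirror_url)


if __name__ == '__main__':
    log = []
    push_and_record(log.append, lambda b, m: log.append((b, m)),
                    's3://mirror', '42')
    print(log)  # ['s3://mirror', ('42', 's3://mirror')]
```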
@@ -5,19 +5,20 @@

 from __future__ import print_function

+import argparse
 import os
 import re
 import sys
-import argparse
-import ruamel.yaml as yaml

+import ruamel.yaml as yaml
 import six
+from ruamel.yaml.error import MarkedYAMLError

 import llnl.util.tty as tty
+from llnl.util.filesystem import join_path
 from llnl.util.lang import attr_setdefault, index_by
 from llnl.util.tty.colify import colify
 from llnl.util.tty.color import colorize
-from llnl.util.filesystem import join_path

 import spack.config
 import spack.error

@@ -27,8 +28,6 @@
 import spack.store
 import spack.util.spack_json as sjson
 import spack.util.string
-from ruamel.yaml.error import MarkedYAMLError
-

 # cmd has a submodule called "list" so preserve the python list module
 python_list = list
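From here on, most hunks are one mechanical change: imports regrouped and alphabetized across lib/spack/spack/cmd, apparently by isort. The convention these diffs converge on is standard library first, then third-party, then llnl, then spack, with plain import and from-import lines sorted together inside each group. An illustrative header in that style (runnable only where spack's own tree is on sys.path):

```python
import os    # 1. standard library
import sys

import six   # 2. third-party packages

import llnl.util.tty as tty                    # 3. llnl utilities
from llnl.util.filesystem import working_dir

import spack.config                  # 4. spack itself; note 'import' and
from spack.error import SpackError   #    'from' lines sort together
```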
@@ -9,7 +9,6 @@
 import spack.cmd.common.arguments as arguments
 import spack.environment as ev

-
 description = 'add a spec to an environment'
 section = "environments"
 level = "long"
@@ -17,7 +17,6 @@
 import spack.paths
 import spack.report

-
 description = "run analyzers on installed packages"
 section = "analysis"
 level = "long"
@@ -8,8 +8,10 @@
 import collections

+import archspec.cpu
+
 import llnl.util.tty.colify as colify
 import llnl.util.tty.color as color

 import spack.architecture as architecture

 description = "print architecture information about this machine"
lib/spack/spack/cmd/audit.py (new file, 80 lines)
@@ -0,0 +1,80 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import llnl.util.tty.color as cl
+
+import spack.audit
+import spack.repo
+
+description = "audit configuration files, packages, etc."
+section = "system"
+level = "short"
+
+
+def setup_parser(subparser):
+    # Top level flags, valid for every audit class
+    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subcommand')
+
+    # Audit configuration files
+    sp.add_parser('configs', help='audit configuration files')
+
+    # Audit package recipes
+    pkg_parser = sp.add_parser('packages', help='audit package recipes')
+    pkg_parser.add_argument(
+        'name', metavar='PKG', nargs='*',
+        help='package to be analyzed (if none all packages will be processed)',
+    )
+
+    # List all checks
+    sp.add_parser('list', help='list available checks and exits')
+
+
+def configs(parser, args):
+    reports = spack.audit.run_group(args.subcommand)
+    _process_reports(reports)
+
+
+def packages(parser, args):
+    pkgs = args.name or spack.repo.path.all_package_names()
+    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
+    _process_reports(reports)
+
+
+def list(parser, args):
+    for subcommand, check_tags in spack.audit.GROUPS.items():
+        print(cl.colorize('@*b{' + subcommand + '}:'))
+        for tag in check_tags:
+            audit_obj = spack.audit.CALLBACKS[tag]
+            print('  ' + audit_obj.description)
+            if args.verbose:
+                for idx, fn in enumerate(audit_obj.callbacks):
+                    print('    {0}. '.format(idx + 1) + fn.__doc__)
+                print()
+        print()
+
+
+def audit(parser, args):
+    subcommands = {
+        'configs': configs,
+        'packages': packages,
+        'list': list
+    }
+    subcommands[args.subcommand](parser, args)
+
+
+def _process_reports(reports):
+    for check, errors in reports:
+        if errors:
+            msg = '{0}: {1} issue{2} found'.format(
+                check, len(errors), '' if len(errors) == 1 else 's'
+            )
+            header = '@*b{' + msg + '}'
+            print(cl.colorize(header))
+            for idx, error in enumerate(errors):
+                print(str(idx + 1) + '. ' + str(error))
+            raise SystemExit(1)
+        else:
+            msg = '{0}: 0 issues found.'.format(check)
+            header = '@*b{' + msg + '}'
+            print(cl.colorize(header))
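A hedged sketch of driving the new audit command programmatically. The argparse wiring below is assumed (spack's real entry point builds it in spack.main), but setup_parser, audit and friends are the functions defined in the new file above:

```python
import argparse

import spack.cmd.audit as audit_cmd  # the new module shown above

parser = argparse.ArgumentParser(prog='spack audit')
audit_cmd.setup_parser(parser)

args = parser.parse_args(['packages', 'zlib'])
args.verbose = False  # list() reads args.verbose; the real CLI defines it globally

# Dispatches to packages(), which runs the 'packages' checks on zlib and
# exits non-zero via _process_reports() if any check finds issues.
audit_cmd.audit(parser, args)
```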
@@ -8,16 +8,15 @@
 import sys

 import llnl.util.tty as tty
-from llnl.util.lang import pretty_date
 from llnl.util.filesystem import working_dir
+from llnl.util.lang import pretty_date
 from llnl.util.tty.colify import colify_table
-import spack.util.spack_json as sjson

 import spack.paths
 import spack.repo
-from spack.util.executable import which
+import spack.util.spack_json as sjson
 from spack.cmd import spack_is_git_repo
-
+from spack.util.executable import which

 description = "show contributors to packages"
 section = "developer"
@@ -8,10 +8,12 @@
 import sys

 import llnl.util.tty as tty

+import spack.architecture
 import spack.binary_distribution as bindist
 import spack.cmd
 import spack.cmd.common.arguments as arguments
+import spack.config
 import spack.environment as ev
 import spack.hash_types as ht
 import spack.mirror

@@ -19,17 +21,12 @@
 import spack.repo
 import spack.spec
 import spack.store
-import spack.config
-import spack.repo
-import spack.store
 import spack.util.url as url_util
-
+from spack.cmd import display_specs
 from spack.error import SpecError
 from spack.spec import Spec, save_dependency_spec_yamls
 from spack.util.string import plural
-
-from spack.cmd import display_specs

 description = "create, download and install binary packages"
 section = "packaging"
 level = "long"
@@ -15,7 +15,7 @@
 import spack.stage
 import spack.util.crypto
 from spack.util.naming import valid_fully_qualified_module_name
-from spack.version import ver, Version
+from spack.version import Version, ver

 description = "checksum available versions of a package"
 section = "packaging"
@@ -17,15 +17,14 @@

 import spack.binary_distribution as bindist
 import spack.ci as spack_ci
-import spack.config as cfg
 import spack.cmd.buildcache as buildcache
+import spack.config as cfg
 import spack.environment as ev
 import spack.hash_types as ht
 import spack.mirror
 import spack.util.url as url_util
-import spack.util.web as web_util


 description = "manage continuous integration pipelines"
 section = "build"
 level = "long"
@@ -196,8 +195,7 @@ def ci_rebuild(args):
     compiler_action = get_env_var('SPACK_COMPILER_ACTION')
     cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
     related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
-    pr_env_var = get_env_var('SPACK_IS_PR_PIPELINE')
-    dev_env_var = get_env_var('SPACK_IS_DEVELOP_PIPELINE')
+    spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
     pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
     remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')

@@ -231,7 +229,6 @@ def ci_rebuild(args):
         eq_idx = proj_enc.find('=') + 1
         cdash_project_enc = proj_enc[eq_idx:]
         cdash_site = ci_cdash['site']
-        cdash_id_path = os.path.join(repro_dir, 'cdash_id.txt')
         tty.debug('cdash_base_url = {0}'.format(cdash_base_url))
         tty.debug('cdash_project = {0}'.format(cdash_project))
         tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
@@ -242,8 +239,11 @@ def ci_rebuild(args):

     # Is this a pipeline run on a spack PR or a merge to develop?  It might
     # be neither, e.g. a pipeline run on some environment repository.
-    spack_is_pr_pipeline = True if pr_env_var == 'True' else False
-    spack_is_develop_pipeline = True if dev_env_var == 'True' else False
+    spack_is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'
+    spack_is_develop_pipeline = spack_pipeline_type == 'spack_protected_branch'
+
+    tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
+        spack_is_pr_pipeline, spack_is_develop_pipeline))

     # Figure out what is our temporary storage mirror: Is it artifacts
     # buildcache? Or temporary-storage-url-prefix? In some cases we need to
@@ -396,7 +396,7 @@ def ci_rebuild(args):
                 job_spec_pkg_name, matching_mirror))
             tty.debug('Downloading to {0}'.format(build_cache_dir))
             buildcache.download_buildcache_files(
-                job_spec, build_cache_dir, True, matching_mirror)
+                job_spec, build_cache_dir, False, matching_mirror)

             # Now we are done and successful
             sys.exit(0)
@@ -431,24 +431,21 @@ def ci_rebuild(args):
                 cdash_build_name, cdash_base_url, cdash_project,
                 cdash_site, job_spec_buildgroup)

-        cdash_upload_url = '{0}/submit.php?project={1}'.format(
-            cdash_base_url, cdash_project_enc)
+        if cdash_build_id is not None:
+            cdash_upload_url = '{0}/submit.php?project={1}'.format(
+                cdash_base_url, cdash_project_enc)

-        install_args.extend([
-            '--cdash-upload-url', cdash_upload_url,
-            '--cdash-build', cdash_build_name,
-            '--cdash-site', cdash_site,
-            '--cdash-buildstamp', cdash_build_stamp,
-        ])
+            install_args.extend([
+                '--cdash-upload-url', cdash_upload_url,
+                '--cdash-build', cdash_build_name,
+                '--cdash-site', cdash_site,
+                '--cdash-buildstamp', cdash_build_stamp,
+            ])

+            tty.debug('CDash: Relating build with dependency builds')
+            spack_ci.relate_cdash_builds(
+                spec_map, cdash_base_url, cdash_build_id, cdash_project,
+                [pipeline_mirror_url, pr_mirror_url, remote_mirror_url])

-        # store the cdash build id on disk for later
-        with open(cdash_id_path, 'w') as fd:
-            fd.write(cdash_build_id)
-
-        tty.debug('CDash: Relating build with dependency builds')
-        spack_ci.relate_cdash_builds(
-            spec_map, cdash_base_url, cdash_build_id, cdash_project,
-            [pipeline_mirror_url, pr_mirror_url, remote_mirror_url])

     # A compiler action of 'FIND_ANY' means we are building a bootstrap
     # compiler or one of its deps.
@@ -494,11 +491,14 @@ def ci_rebuild(args):
     # If a spec fails to build in a spack develop pipeline, we add it to a
     # list of known broken full hashes.  This allows spack PR pipelines to
     # avoid wasting compute cycles attempting to build those hashes.
-    if install_exit_code != 0 and spack_is_develop_pipeline:
+    if install_exit_code == 1 and spack_is_develop_pipeline:
+        tty.debug('Install failed on develop')
         if 'broken-specs-url' in gitlab_ci:
             broken_specs_url = gitlab_ci['broken-specs-url']
             dev_fail_hash = job_spec.full_hash()
             broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
+            tty.msg('Reporting broken develop build as: {0}'.format(
+                broken_spec_path))
             tmpdir = tempfile.mkdtemp()
             empty_file_path = os.path.join(tmpdir, 'empty.txt')
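When a develop build fails, the spec's full hash is published to the broken-specs list by pushing an empty marker file, which PR pipelines can then probe. A sketch of the marker mechanics against a local directory standing in for the real mirror URL:

```python
import os
import tempfile


def report_broken(broken_specs_dir, full_hash):
    """Create <broken_specs_dir>/<full_hash> as an empty marker file."""
    tmpdir = tempfile.mkdtemp()
    empty_file_path = os.path.join(tmpdir, 'empty.txt')
    open(empty_file_path, 'w').close()
    # Stand-in for pushing the empty file to the broken-specs URL:
    os.makedirs(broken_specs_dir, exist_ok=True)
    os.rename(empty_file_path, os.path.join(broken_specs_dir, full_hash))


if __name__ == '__main__':
    report_broken(os.path.join(tempfile.gettempdir(), 'broken-specs'), 'deadbeef')
```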
@@ -541,17 +541,31 @@ def ci_rebuild(args):

         # Create buildcache in either the main remote mirror, or in the
         # per-PR mirror, if this is a PR pipeline
-        spack_ci.push_mirror_contents(
-            env, job_spec, job_spec_yaml_path, buildcache_mirror_url,
-            cdash_build_id, sign_binaries)
+        if buildcache_mirror_url:
+            spack_ci.push_mirror_contents(
+                env, job_spec, job_spec_yaml_path, buildcache_mirror_url,
+                sign_binaries)
+
+            if cdash_build_id:
+                tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
+                    cdash_build_id, buildcache_mirror_url))
+                spack_ci.write_cdashid_to_mirror(
+                    cdash_build_id, job_spec, buildcache_mirror_url)

         # Create another copy of that buildcache in the per-pipeline
         # temporary storage mirror (this is only done if either
         # artifacts buildcache is enabled or a temporary storage url
         # prefix is set)
-        spack_ci.push_mirror_contents(
-            env, job_spec, job_spec_yaml_path, pipeline_mirror_url,
-            cdash_build_id, sign_binaries)
+        if pipeline_mirror_url:
+            spack_ci.push_mirror_contents(
+                env, job_spec, job_spec_yaml_path, pipeline_mirror_url,
+                sign_binaries)
+
+            if cdash_build_id:
+                tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
+                    cdash_build_id, pipeline_mirror_url))
+                spack_ci.write_cdashid_to_mirror(
+                    cdash_build_id, job_spec, pipeline_mirror_url)
     else:
         tty.debug('spack install exited non-zero, will not create buildcache')
@@ -580,16 +594,16 @@ def ci_rebuild(args):
         print(reproduce_msg)

     # Tie job success/failure to the success/failure of building the spec
-    sys.exit(install_exit_code)
+    return install_exit_code


 def ci_reproduce(args):
     job_url = args.job_url
     work_dir = args.working_dir

-    spack_ci.reproduce_ci_job(job_url, work_dir)
+    return spack_ci.reproduce_ci_job(job_url, work_dir)


 def ci(parser, args):
     if args.func:
-        args.func(args)
+        return args.func(args)
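The final hunks of this file swap sys.exit() calls for return values, so ci_rebuild and ci_reproduce hand their status back to the ci() dispatcher and a single place can decide the process exit code. A standalone sketch of that shape:

```python
import sys


def rebuild(args):
    install_exit_code = 0  # ... do the work ...
    return install_exit_code  # was: sys.exit(install_exit_code)


def dispatch(func, args):
    # Single choke point: propagate whatever the subcommand returned.
    return func(args)


if __name__ == '__main__':
    sys.exit(dispatch(rebuild, None) or 0)
```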
@@ -10,15 +10,14 @@
 import llnl.util.tty as tty

 import spack.caches
-import spack.config
-import spack.cmd.test
 import spack.cmd.common.arguments as arguments
+import spack.cmd.test
+import spack.config
 import spack.main
 import spack.repo
 import spack.stage
 from spack.paths import lib_path, var_path
-

 description = "remove temporary build files and/or downloaded archives"
 section = "build"
 level = "long"
@@ -14,7 +14,9 @@
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.argparsewriter import (
-    ArgparseWriter, ArgparseRstWriter, ArgparseCompletionWriter
+    ArgparseCompletionWriter,
+    ArgparseRstWriter,
+    ArgparseWriter,
 )
 from llnl.util.tty.colify import colify

@@ -23,7 +25,6 @@
 import spack.paths
 from spack.main import section_descriptions

-
 description = "list available spack commands"
 section = "developer"
 level = "long"
@@ -8,10 +8,11 @@
 import os

 import llnl.util.tty as tty

 import spack.build_environment as build_environment
-import spack.paths
 import spack.cmd
 import spack.cmd.common.arguments as arguments
+import spack.paths
 from spack.util.environment import dump_environment, pickle_environment

@@ -7,16 +7,18 @@

 import argparse
 import sys

 from six import iteritems

 import llnl.util.tty as tty
-import spack.compilers
-import spack.config
-import spack.spec
 from llnl.util.lang import index_by
 from llnl.util.tty.colify import colify
 from llnl.util.tty.color import colorize
-from spack.spec import CompilerSpec, ArchSpec

+import spack.compilers
+import spack.config
+import spack.spec
+from spack.spec import ArchSpec, CompilerSpec

 description = "manage compilers"
 section = "system"
@@ -10,15 +10,16 @@

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-import spack.config

 import spack.cmd.common.arguments
-import spack.schema.env
+import spack.config
 import spack.environment as ev
+import spack.repo
+import spack.schema.env
 import spack.schema.packages
+import spack.store
 import spack.util.spack_yaml as syaml
 from spack.util.editor import editor
-import spack.store
-import spack.repo

 description = "get and set configuration options"
 section = "config"
@@ -4,7 +4,9 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+import os.path

 import spack.container
+import spack.monitor

 description = ("creates recipes to build images for different"
                " container runtimes")

@@ -12,6 +14,10 @@
 level = "long"


+def setup_parser(subparser):
+    monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa
+
+
 def containerize(parser, args):
     config_dir = args.env_dir or os.getcwd()
     config_file = os.path.abspath(os.path.join(config_dir, 'spack.yaml'))

@@ -21,5 +27,12 @@ def containerize(parser, args):

     config = spack.container.validate(config_file)

+    # If we have a monitor request, add monitor metadata to config
+    if args.use_monitor:
+        config['spack']['monitor'] = {"disable_auth": args.monitor_disable_auth,
+                                      "host": args.monitor_host,
+                                      "keep_going": args.monitor_keep_going,
+                                      "prefix": args.monitor_prefix,
+                                      "tags": args.monitor_tags}
     recipe = spack.container.recipe(config)
     print(recipe)
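The containerize hook simply folds the monitor flags into the validated spack.yaml mapping before the recipe is rendered. A pure-dict sketch of that injection; the attribute names come from the hunk, while the minimal config shape is an assumption:

```python
from types import SimpleNamespace

# Stand-in for parsed CLI arguments, named as in the hunk above.
args = SimpleNamespace(use_monitor=True, monitor_disable_auth=False,
                       monitor_host='http://127.0.0.1', monitor_keep_going=True,
                       monitor_prefix='/ms1', monitor_tags=None)

config = {'spack': {}}  # assumed minimal shape of the validated config

if args.use_monitor:
    config['spack']['monitor'] = {"disable_auth": args.monitor_disable_auth,
                                  "host": args.monitor_host,
                                  "keep_going": args.monitor_keep_going,
                                  "prefix": args.monitor_prefix,
                                  "tags": args.monitor_tags}

print(config)
```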
@@ -11,16 +11,23 @@
 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp

-import spack.util.web
 import spack.repo
 import spack.stage
+import spack.util.web
 from spack.spec import Spec
+from spack.url import (
+    UndetectableNameError,
+    UndetectableVersionError,
+    parse_name,
+    parse_version,
+)
 from spack.util.editor import editor
-from spack.util.executable import which, ProcessError
-from spack.util.naming import mod_to_class
-from spack.util.naming import simplify_name, valid_fully_qualified_module_name
-from spack.url import UndetectableNameError, UndetectableVersionError
-from spack.url import parse_name, parse_version
+from spack.util.executable import ProcessError, which
+from spack.util.naming import (
+    mod_to_class,
+    simplify_name,
+    valid_fully_qualified_module_name,
+)

 description = "create a new package file"
 section = "packaging"
@@ -14,18 +14,18 @@
 installation and its deprecator.
 '''
 from __future__ import print_function

 import argparse
 import os

 import llnl.util.tty as tty

 import spack.cmd
-import spack.store
 import spack.cmd.common.arguments as arguments
 import spack.environment as ev
-
-from spack.error import SpackError
+import spack.store
 from spack.database import InstallStatuses
+from spack.error import SpackError

 description = "Replace one package with another via symlinks"
 section = "admin"
@@ -3,14 +3,14 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys
 import os
+import sys

 import llnl.util.tty as tty

-import spack.config
 import spack.cmd
 import spack.cmd.common.arguments as arguments
+import spack.config
 import spack.repo

 description = "developer build: build from code in current working directory"
@@ -10,7 +10,6 @@
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.environment as ev
-
 from spack.error import SpackError

 description = "add a spec to an environment's dev-build information"
@@ -3,8 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os
 import glob
+import os

 import llnl.util.tty as tty

@@ -8,22 +8,21 @@
 import sys
 from collections import namedtuple

-import llnl.util.tty as tty
 import llnl.util.filesystem as fs
+import llnl.util.tty as tty
 from llnl.util.tty.colify import colify
 from llnl.util.tty.color import colorize

-import spack.config
-import spack.schema.env
 import spack.cmd.common.arguments
-import spack.cmd.install
-import spack.cmd.uninstall
-import spack.cmd.modules
+import spack.cmd.common.arguments as arguments
+import spack.cmd.install
+import spack.cmd.modules
+import spack.cmd.uninstall
+import spack.config
 import spack.environment as ev
+import spack.schema.env
 import spack.util.string as string


 description = "manage virtual environments"
 section = "environments"
 level = "short"
@@ -9,9 +9,9 @@
 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify

-import spack.environment as ev
 import spack.cmd as cmd
 import spack.cmd.common.arguments as arguments
+import spack.environment as ev
 import spack.repo
 import spack.store
 from spack.filesystem_view import YamlFilesystemView
@@ -10,10 +10,12 @@
 import sys
 from collections import defaultdict, namedtuple

+import six
+
 import llnl.util.filesystem
 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify
-import six

 import spack
 import spack.cmd
 import spack.cmd.common.arguments
@@ -9,17 +9,17 @@
 import os
 import sys

+import llnl.util.lang
 import llnl.util.tty as tty
 import llnl.util.tty.color as color
-import llnl.util.lang

-import spack.environment as ev
-import spack.repo
 import spack.cmd as cmd
 import spack.cmd.common.arguments as arguments
+import spack.environment as ev
+import spack.repo
 import spack.user_environment as uenv
-from spack.util.string import plural
 from spack.database import InstallStatuses
+from spack.util.string import plural

 description = "list and search installed packages"
 section = "basic"
@@ -9,7 +9,6 @@

 import spack.cmd.style

-
 description = "alias for spack style (deprecated)"
 section = spack.cmd.style.section
 level = spack.cmd.style.level
@@ -3,8 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os
 import argparse
+import os

 import spack.binary_distribution
 import spack.cmd.common.arguments as arguments
@@ -11,7 +11,7 @@
 import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.store
-from spack.graph import graph_dot, graph_ascii
+from spack.graph import graph_ascii, graph_dot

 description = "generate graphs of package dependency relationships"
 section = "basic"
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import sys
+
 from llnl.util.tty.color import colorize

 description = "get help on spack and its commands"
@@ -6,6 +6,7 @@
 from __future__ import print_function

 import textwrap
+
 from six.moves import zip_longest

 import llnl.util.tty as tty

@@ -13,10 +14,9 @@
 from llnl.util.tty.colify import colify

 import spack.cmd.common.arguments as arguments
+import spack.fetch_strategy as fs
 import spack.repo
 import spack.spec
-import spack.fetch_strategy as fs

 description = 'get detailed information on a particular package'
 section = 'basic'
@@ -155,6 +155,26 @@ def print_text_info(pkg):
         color.cprint('')
         color.cprint(section_title('Maintainers: ') + mnt)

+    color.cprint('')
+    color.cprint(section_title('Externally Detectable: '))
+
+    # If the package has an 'executables' field, it can detect an installation
+    if hasattr(pkg, 'executables'):
+        find_attributes = []
+        if hasattr(pkg, 'determine_version'):
+            find_attributes.append('version')
+
+        if hasattr(pkg, 'determine_variants'):
+            find_attributes.append('variants')
+
+        # If the package does not define 'determine_version' nor
+        # 'determine_variants', then it must use some custom detection
+        # mechanism. In this case, just inform the user it's detectable somehow.
+        color.cprint('    True{0}'.format(
+            ' (' + ', '.join(find_attributes) + ')' if find_attributes else ''))
+    else:
+        color.cprint('    False')
+
     color.cprint('')
     color.cprint(section_title("Tags: "))
     if hasattr(pkg, 'tags'):
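A toy reproduction of the new "Externally Detectable" report, with a stand-in class instead of a real spack package:

```python
class ToyPackage(object):
    """Stand-in for a spack package supporting external detection."""
    executables = ['toy*']

    @classmethod
    def determine_version(cls, exe):
        return '1.0'


def detectable_summary(pkg):
    # Mirrors the hasattr probes above: 'executables' gates detection;
    # determine_version / determine_variants refine what is detectable.
    if not hasattr(pkg, 'executables'):
        return 'False'
    find_attributes = [name for probe, name in
                       (('determine_version', 'version'),
                        ('determine_variants', 'variants'))
                       if hasattr(pkg, probe)]
    return 'True' + (' ({0})'.format(', '.join(find_attributes))
                     if find_attributes else '')


print(detectable_summary(ToyPackage))  # -> True (version)
```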
@@ -23,7 +23,6 @@
 from spack.error import SpackError
 from spack.installer import PackageInstaller

-
 description = "build and install packages"
 section = "build"
 level = "short"

@@ -347,6 +346,10 @@ def get_tests(specs):
             reporter.filename = default_log_file(specs[0])
         reporter.specs = specs

+        # Tell the monitor about the specs
+        if args.use_monitor and specs:
+            monitor.new_configuration(specs)
+
         tty.msg("Installing environment {0}".format(env.name))
         with reporter('build'):
             env.install_all(args, **kwargs)
@@ -3,23 +3,22 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from __future__ import print_function
-from __future__ import division
+from __future__ import division, print_function

 import argparse
 import fnmatch
+import json
+import math
 import os
 import re
 import sys
-import math
-import json

 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify

+import spack.cmd.common.arguments as arguments
 import spack.dependency
 import spack.repo
-import spack.cmd.common.arguments as arguments
 from spack.version import VersionList

 if sys.version_info > (3, 1):
@@ -8,9 +8,9 @@
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.environment as ev
-import spack.util.environment
-import spack.user_environment as uenv
 import spack.store
+import spack.user_environment as uenv
+import spack.util.environment

 description = "add package to the user environment"
 section = "user environment"
@@ -6,12 +6,13 @@
 from __future__ import print_function

 import os

 import llnl.util.tty as tty

-import spack.environment as ev
 import spack.cmd
 import spack.cmd.common.arguments as arguments
+import spack.environment
+import spack.environment as ev
 import spack.paths
 import spack.repo
 import spack.stage
@@ -6,7 +6,8 @@
 import sys

 import llnl.util.tty as tty
-from spack.util.log_parse import parse_log_events, make_log_context
+
+from spack.util.log_parse import make_log_context, parse_log_events

 description = "filter errors and warnings from build logs"
 section = "build"
@@ -12,7 +12,6 @@
 import llnl.util.tty.color as color
 from llnl.util.tty.colify import colify

-
 import spack.repo

 description = "get information about package maintainers"
@@ -7,16 +7,16 @@

 import sys

+from llnl.util import tty
+
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.error
 import spack.package
-import spack.cmd.common.arguments as arguments
 import spack.repo
 import spack.store
 from spack.database import InstallStatuses

-from llnl.util import tty
-
 description = "mark packages as explicitly or implicitly installed"
 section = "admin"
 level = "long"
@@ -17,9 +17,8 @@
 import spack.repo
 import spack.util.url as url_util
 import spack.util.web as web_util
-
-from spack.spec import Spec
 from spack.error import SpackError
+from spack.spec import Spec
 from spack.util.spack_yaml import syaml_dict

 description = "manage mirrors (source and binary)"
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import argparse
-from typing import Dict, Callable  # novm
+from typing import Callable, Dict  # novm

 import llnl.util.tty as tty

Some files were not shown because too many files have changed in this diff.