Compare commits: v0.9...efischer/d (1672 commits)
[Commit listing omitted: the compare view tabulates 1672 commits (Author / SHA1 / Date), but only the abbreviated SHA1 column was captured, spanning 30e563bd23 through c6cd8ae243; author, date, and message cells were empty in the captured page.]
.flake8 (5 changes)

```diff
@@ -8,6 +8,9 @@
 # - E221: multiple spaces before operator
 # - E241: multiple spaces after ','
 #
+# Let people use terse Python features:
+# - E731 : lambda expressions
+#
 # Spack allows wildcard imports:
 # - F403: disable wildcard import
 #
@@ -16,5 +19,5 @@
 # - F999: name name be undefined or undefined from star imports.
 #
 [flake8]
-ignore = E221,E241,F403,F821,F999
+ignore = E129,E221,E241,E272,E731,F403,F821,F999,F405
 max-line-length = 79
```
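The new `ignore` line adds E731 so that assigning a lambda to a name is allowed, as the new comment block explains. A minimal Python illustration of the construct E731 would otherwise flag (example code only, not taken from the Spack sources):

```python
# E731 normally flags assigning a lambda expression to a name and asks
# for a full `def`; ignoring it keeps one-line helpers like this legal.
is_debug_variant = lambda name: name.startswith('+debug')

print(is_debug_variant('+debug'))   # True
print(is_debug_variant('~debug'))   # False
```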
.gitignore (vendored, 8 changes)

```diff
@@ -1,10 +1,14 @@
 /var/spack/stage
+/var/spack/cache
+/var/spack/repos/*/index.yaml
+/var/spack/repos/*/lock
 *.pyc
-/opt/
+/opt
 *~
 .DS_Store
 .idea
-/etc/spack/*
+/etc/spack/licenses
+/etc/spack/*.yaml
 /etc/spackconfig
 /share/spack/dotkit
 /share/spack/modules
```
.travis.yml (25 changes)

```diff
@@ -1,7 +1,17 @@
 language: python
+
 python:
   - "2.6"
   - "2.7"
+env:
+  - TEST_TYPE=unit
+  - TEST_TYPE=flake8
+
+# Exclude flake8 from python 2.6
+matrix:
+  exclude:
+    - python: "2.6"
+      env: TEST_TYPE=flake8
 
 # Use new Travis infrastructure (Docker can't sudo yet)
 sudo: false
@@ -20,20 +30,13 @@ before_install:
   - git fetch origin develop:develop
 
 script:
-  # Regular spack setup and tests
-  - . share/spack/setup-env.sh
-  - spack compilers
-  - spack config get compilers
-  - spack install -v libdwarf
-
-  # Run unit tests with code coverage
-  - coverage run bin/spack test
-
+  # Run unit tests with code coverage plus install libdwarf
+  - 'if [ "$TEST_TYPE" = "unit" ]; then share/spack/qa/run-unit-tests; fi'
   # Run flake8 code style checks.
-  - share/spack/qa/run-flake8
+  - 'if [ "$TEST_TYPE" = "flake8" ]; then share/spack/qa/run-flake8; fi'
 
 after_success:
-  - coveralls
+  - 'if [ "$TEST_TYPE" = "unit" ] && [ "$TRAVIS_PYTHON_VERSION" = "2.7" ]; then coveralls; fi'
 
 notifications:
   email:
```
README.md (21 changes)

```diff
@@ -58,17 +58,24 @@ can join it here:
 
 ### Contributions
 
-At the moment, contributing to Spack is relatively simple. Just send us
-a [pull request](https://help.github.com/articles/using-pull-requests/).
+Contributing to Spack is relatively easy. Just send us a
+[pull request](https://help.github.com/articles/using-pull-requests/).
 When you send your request, make ``develop`` the destination branch on the
 [Spack repository](https://github.com/LLNL/spack).
 
-Your contribution will need to pass all the tests run by the `spack test`
-command, as well as the formatting checks in `share/spack/qa/run-flake8`.
-You should run both of these before submitting your pull request, to
-ensure that the online checks succeed.
+Before you send a PR, your code should pass the following checks:
+
+* Your contribution will need to pass the `spack test` command.
+Run this before submitting your PR.
+
+* Also run the `share/spack/qa/run-flake8` script to check for PEP8 compliance.
+To encourage contributions and readability by a broad audience,
+Spack uses the [PEP8](https://www.python.org/dev/peps/pep-0008/) coding
+standard with [a few exceptions](https://github.com/LLNL/spack/blob/develop/.flake8).
+
+We enforce these guidelines with [Travis CI](https://travis-ci.org/LLNL/spack).
 
-Spack is using a rough approximation of the [Git
+Spack uses a rough approximation of the [Git
 Flow](http://nvie.com/posts/a-successful-git-branching-model/)
 branching model. The ``develop`` branch contains the latest
 contributions, and ``master`` is always tagged and points to the
```
bin/sbang (11 changes)

```diff
@@ -79,6 +79,15 @@
 # Obviously, for this to work, `sbang` needs to have a short enough
 # path that *it* will run without hitting OS limits.
 #
+# For Lua, scripts the second line can't start with #!, as # is not
+# the comment character in lua (even though lua ignores #! on the
+# *first* line of a script). So, instrument a lua script like this,
+# using -- instead of # on the second line:
+#
+# 1 #!/bin/bash /path/to/sbang
+# 2 --!/long/path/to/lua with arguments
+# 3
+# 4 print "success!"
 #
 # How it works
 # -----------------------------
@@ -95,6 +104,8 @@ lines=0
 while read line && ((lines < 2)) ; do
     if [[ "$line" = '#!'* ]]; then
         interpreter="${line#\#!}"
+    elif [[ "$line" = '--!'*lua* ]]; then
+        interpreter="${line#--!}"
     fi
     lines=$((lines+1))
 done < "$script"
```
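The loop shown in the second hunk reads at most the first two lines of the wrapped script and records either a `#!` interpreter line or, with this change, a Lua-style `--!` line. A rough Python re-expression of that detection logic, for illustration only (`sbang` itself is the shell script above):

```python
def detect_interpreter(script_path):
    """Scan the first two lines of a script, sbang-style: accept a '#!'
    line, or a Lua-style '--!' line that mentions lua."""
    interpreter = None
    with open(script_path) as script:
        for lineno, line in enumerate(script):
            if lineno >= 2:          # only the first two lines matter
                break
            line = line.rstrip('\n')
            if line.startswith('#!'):
                interpreter = line[2:]
            elif line.startswith('--!') and 'lua' in line:
                interpreter = line[3:]
    return interpreter
```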
bin/spack (22 changes)

```diff
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+# flake8: noqa
 ##############################################################################
 # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
 # Produced at the Lawrence Livermore National Laboratory.
@@ -24,9 +25,10 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import sys
-if not sys.version_info[:2] >= (2,6):
+if not sys.version_info[:2] >= (2, 6):
     v_info = sys.version_info[:3]
-    sys.exit("Spack requires Python 2.6 or higher. This is Python %d.%d.%d." % v_info)
+    sys.exit("Spack requires Python 2.6 or higher. "
+             "This is Python %d.%d.%d." % v_info)
 
 import os
 
@@ -62,7 +64,8 @@ for pyc_file in orphaned_pyc_files:
     try:
         os.remove(pyc_file)
     except OSError as e:
-        print "WARNING: Spack may fail mysteriously. Couldn't remove orphaned .pyc file: %s" % pyc_file
+        print ("WARNING: Spack may fail mysteriously. "
+               "Couldn't remove orphaned .pyc file: %s" % pyc_file)
 
 # If there is no working directory, use the spack prefix.
 try:
@@ -77,7 +80,7 @@ import llnl.util.tty as tty
 from llnl.util.tty.color import *
 import spack
 from spack.error import SpackError
-from external import argparse
+import argparse
 
 # Command parsing
 parser = argparse.ArgumentParser(
@@ -128,6 +131,7 @@ if len(sys.argv) == 1:
 # actually parse the args.
 args = parser.parse_args()
 
+
 def main():
     # Set up environment based on args.
     tty.set_verbose(args.verbose)
@@ -138,6 +142,9 @@ def main():
     import spack.util.debug as debug
     debug.register_interrupt_handler()
 
+    from spack.yaml_version_check import check_yaml_versions
+    check_yaml_versions()
+
     spack.spack_working_dir = working_dir
     if args.mock:
         from spack.repository import RepoPath
@@ -145,7 +152,7 @@ def main():
 
     # If the user asked for it, don't check ssl certs.
     if args.insecure:
-        tty.warn("You asked for --insecure, which does not check SSL certificates.")
+        tty.warn("You asked for --insecure. Will NOT check SSL certificates.")
         spack.curl.add_default_arg('-k')
 
     # Try to load the particular command asked for and run it
@@ -164,11 +171,12 @@ def main():
     elif isinstance(return_val, int):
         sys.exit(return_val)
     else:
-        tty.die("Bad return value from command %s: %s" % (args.command, return_val))
+        tty.die("Bad return value from command %s: %s"
+                % (args.command, return_val))
 
 if args.profile:
     import cProfile
-    cProfile.run('main()', sort='tottime')
+    cProfile.run('main()', sort='time')
 elif args.pdb:
     import pdb
     pdb.run('main()')
```
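Several of these hunks exist only to satisfy the new 79-column flake8 limit: a long message is split across two source lines using Python's implicit concatenation of adjacent string literals, as in the `sys.exit` and `tty.die` calls above. A standalone sketch of the same pattern:

```python
import sys

if not sys.version_info[:2] >= (2, 6):
    v_info = sys.version_info[:3]
    # Adjacent string literals are joined at compile time, so this is one
    # message even though each source line now fits within 79 columns.
    sys.exit("Spack requires Python 2.6 or higher. "
             "This is Python %d.%d.%d." % v_info)
```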
etc/spack/defaults/modules.yaml (new file, 40 lines)

```diff
@@ -0,0 +1,40 @@
+# -------------------------------------------------------------------------
+# This is the default configuration for Spack's module file generation.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/modules.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/modules.yaml
+# -------------------------------------------------------------------------
+modules:
+  enable:
+    - tcl
+    - dotkit
+  prefix_inspections:
+    bin:
+      - PATH
+    man:
+      - MANPATH
+    share/man:
+      - MANPATH
+    lib:
+      - LIBRARY_PATH
+      - LD_LIBRARY_PATH
+    lib64:
+      - LIBRARY_PATH
+      - LD_LIBRARY_PATH
+    include:
+      - CPATH
+    lib/pkgconfig:
+      - PKG_CONFIG_PATH
+    lib64/pkgconfig:
+      - PKG_CONFIG_PATH
+    '':
+      - CMAKE_PREFIX_PATH
```
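`prefix_inspections` maps subdirectories of an install prefix to the environment variables that generated module files should prepend them to. A hypothetical sketch of how such a mapping could be applied; the names here are illustrative and this is not Spack's actual module-generation code:

```python
import os

# Subset of the prefix_inspections mapping from the YAML above.
PREFIX_INSPECTIONS = {
    'bin': ['PATH'],
    'man': ['MANPATH'],
    'lib': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
    'lib64': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
    'include': ['CPATH'],
    '': ['CMAKE_PREFIX_PATH'],
}

def env_updates(prefix):
    """Return {ENV_VAR: [paths]} for the prefix subdirectories that exist."""
    updates = {}
    for subdir, variables in PREFIX_INSPECTIONS.items():
        path = os.path.join(prefix, subdir) if subdir else prefix
        if os.path.isdir(path):
            for var in variables:
                updates.setdefault(var, []).append(path)
    return updates

# e.g. env_updates('/opt/spack/linux-debian7-x86_64/gcc@4.4.7/zlib@1.2.8')
```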
etc/spack/defaults/packages.yaml (new file, 21 lines)

```diff
@@ -0,0 +1,21 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+  all:
+    providers:
+      mpi: [openmpi, mpich]
+      blas: [openblas]
+      lapack: [openblas]
```
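The `providers` entries read as ordered preference lists: when a spec depends on a virtual package such as `mpi`, the first listed provider that can satisfy it is preferred. A toy illustration of that ordering rule with hypothetical names; this is not the concretizer's real logic:

```python
PROVIDER_PREFERENCES = {
    'mpi': ['openmpi', 'mpich'],
    'blas': ['openblas'],
    'lapack': ['openblas'],
}

def pick_provider(virtual, available):
    """Pick the first preferred provider of `virtual` that is available."""
    for candidate in PROVIDER_PREFERENCES.get(virtual, []):
        if candidate in available:
            return candidate
    return None

print(pick_provider('mpi', {'mpich', 'mvapich2'}))   # -> 'mpich'
```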
etc/spack/defaults/repos.yaml (new file, 14 lines)

```diff
@@ -0,0 +1,14 @@
+# -------------------------------------------------------------------------
+# This is the default spack repository configuration. It includes the
+# builtin spack package repository.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/repos.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/repos.yaml
+# -------------------------------------------------------------------------
+repos:
+  - $spack/var/spack/repos/builtin
```
(deleted file; name not shown in the captured page — the previous module-generation configuration under etc/spack)

```diff
@@ -1,29 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default spack module files generation configuration.
-#
-# Changes to this file will affect all users of this spack install,
-# although users can override these settings in their ~/.spack/modules.yaml.
-# -------------------------------------------------------------------------
-modules:
-  enable:
-    - tcl
-    - dotkit
-  prefix_inspections:
-    bin:
-      - PATH
-    man:
-      - MANPATH
-    lib:
-      - LIBRARY_PATH
-      - LD_LIBRARY_PATH
-    lib64:
-      - LIBRARY_PATH
-      - LD_LIBRARY_PATH
-    include:
-      - CPATH
-    lib/pkgconfig:
-      - PKGCONFIG
-    lib64/pkgconfig:
-      - PKGCONFIG
-    '':
-      - CMAKE_PREFIX_PATH
```
(deleted file; name not shown in the captured page — the previous repository configuration under etc/spack)

```diff
@@ -1,8 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default spack repository configuration.
-#
-# Changes to this file will affect all users of this spack install,
-# although users can override these settings in their ~/.spack/repos.yaml.
-# -------------------------------------------------------------------------
-repos:
-  - $spack/var/spack/repos/builtin
```
(documentation file; name not shown in the captured page)

```diff
@@ -6,6 +6,15 @@ Basic usage
 The ``spack`` command has many *subcommands*. You'll only need a
 small subset of them for typical usage.
 
+Note that Spack colorizes output. ``less -R`` should be used with
+Spack to maintian this colorization. Eg::
+
+   spack find | less -R
+
+It is recommend that the following be put in your ``.bashrc`` file::
+
+   alias less='less -R'
+
 
 Listing available packages
 ------------------------------
```
```diff
@@ -24,12 +33,29 @@ Spack can install:
 
 .. command-output:: spack list
 
-The packages are listed by name in alphabetical order. You can also
-do wildcats searches using ``*``:
+The packages are listed by name in alphabetical order. If you specify a
+pattern to match, it will follow this set of rules. A pattern with no
+wildcards, ``*`` or ``?``, will be treated as though it started and ended with
+``*``, so ``util`` is equivalent to ``*util*``. A pattern with no capital
+letters will be treated as case-insensitive. You can also add the ``-i`` flag
+to specify a case insensitive search, or ``-d`` to search the description of
+the package in addition to the name. Some examples:
 
-.. command-output:: spack list m*
+All packages whose names contain "sql" case insensitive:
 
-.. command-output:: spack list *util*
+.. command-output:: spack list sql
+
+All packages whose names start with a capital M:
+
+.. command-output:: spack list 'M*'
+
+All packages whose names or descriptions contain Documentation:
+
+.. command-output:: spack list -d Documentation
+
+All packages whose names contain documentation case insensitive:
+
+.. command-output:: spack list -d documentation
 
 .. _spack-info:
 
```
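The matching rules described in the new paragraph (a pattern without wildcards is wrapped in `*`, and an all-lowercase pattern matches case-insensitively) can be expressed compactly. A sketch of that behavior in Python, assuming the rules exactly as documented; this is not the actual implementation of the `spack list` command:

```python
import fnmatch

def matches(pattern, name, search_description=False, description=''):
    """Approximate the documented ``spack list`` pattern rules."""
    # A pattern with no wildcards is treated as '*pattern*'.
    if not any(c in pattern for c in '*?'):
        pattern = '*%s*' % pattern
    # A pattern with no capital letters is case-insensitive.
    if pattern == pattern.lower():
        name, description = name.lower(), description.lower()
    haystacks = [name] + ([description] if search_description else [])
    return any(fnmatch.fnmatchcase(h, pattern) for h in haystacks)

print(matches('util', 'libutil'))   # True: 'util' behaves like '*util*'
print(matches('M*', 'mpileaks'))    # False: a capital keeps the match case-sensitive
```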
```diff
@@ -97,13 +123,13 @@ that the packages is installed:
 
    $ spack install mpileaks
    ==> Installing mpileaks
-   ==> mpich is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4.
-   ==> callpath is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318.
-   ==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
+   ==> mpich is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4.
+   ==> callpath is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318.
+   ==> adept-utils is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da.
    ==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
    ######################################################################## 100.0%
-   ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
-   ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23.
+   ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23/mpileaks-1.0.tar.gz
+   ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23.
    ==> No patches needed for mpileaks.
    ==> Building mpileaks.
 
@@ -111,7 +137,7 @@ that the packages is installed:
 
    ==> Successfully installed mpileaks.
    Fetch: 2.16s. Build: 9.82s. Total: 11.98s.
-   [+] /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpileaks@1.0-59f6ad23
+   [+] /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpileaks@1.0-59f6ad23
 
 The last line, with the ``[+]``, indicates where the package is
 installed.
```
```diff
@@ -132,10 +158,10 @@ sites, as installing a version that one user needs will not disrupt
 existing installations for other users.
 
 In addition to different versions, Spack can customize the compiler,
-compile-time options (variants), and platform (for cross compiles) of
-an installation. Spack is unique in that it can also configure the
-*dependencies* a package is built with. For example, two
-configurations of the same version of a package, one built with boost
+compile-time options (variants), compiler flags, and platform (for
+cross compiles) of an installation. Spack is unique in that it can
+also configure the *dependencies* a package is built with. For example,
+two configurations of the same version of a package, one built with boost
 1.39.0, and the other version built with version 1.43.0, can coexist.
 
 This can all be done on the command line using the *spec* syntax.
@@ -166,7 +192,7 @@ To uninstall a package and every package that depends on it, you may give the
 
    spack uninstall --dependents mpich
 
-will display a list of all the packages that depends on `mpich` and, upon confirmation,
+will display a list of all the packages that depend on `mpich` and, upon confirmation,
 will uninstall them in the right order.
 
 A line like
```
```diff
@@ -213,7 +239,7 @@ Running ``spack find`` with no arguments lists installed packages:
 
    $ spack find
    ==> 74 installed packages.
-   -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
    ImageMagick@6.8.9-10 libdwarf@20130729 py-dateutil@2.4.0
    adept-utils@1.0 libdwarf@20130729 py-ipython@2.3.1
    atk@2.14.0 libelf@0.8.12 py-matplotlib@1.4.2
@@ -239,13 +265,19 @@
    lcms@2.6 pixman@0.32.6 xz@5.2.0
    libdrm@2.4.33 py-dateutil@2.4.0 zlib@1.2.8
 
-   -- chaos_5_x86_64_ib / gcc@4.9.2 --------------------------------
+   -- linux-debian7-x86_64 / gcc@4.9.2 --------------------------------
    libelf@0.8.10 mpich@3.0.4
 
 Packages are divided into groups according to their architecture and
 compiler. Within each group, Spack tries to keep the view simple, and
 only shows the version of installed packages.
 
+``spack find`` can filter the package list based on the package name, spec, or
+a number of properties of their installation status. For example, missing
+dependencies of a spec can be shown with ``-m``, packages which were
+explicitly installed with ``spack install <package>`` can be singled out with
+``-e`` and those which have been pulled in only as dependencies with ``-E``.
+
 In some cases, there may be different configurations of the *same*
 version of a package installed. For example, there are two
 installations of of ``libdwarf@20130729`` above. We can look at them
@@ -256,7 +288,7 @@ in more detail using ``spack find -d``, and by asking only to show
 
    $ spack find --deps libdwarf
    ==> 2 installed packages.
-   -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
    libdwarf@20130729-d9b90962
        ^libelf@0.8.12
    libdwarf@20130729-b52fac98
@@ -272,7 +304,7 @@ want to know whether two packages' dependencies differ, you can use
 
    $ spack find -l libdwarf
    ==> 2 installed packages.
-   -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
    libdwarf@20130729-d9b90962 libdwarf@20130729-b52fac98
 
 Now the ``libwarf`` installs have hashes after their names. These are
```
```diff
@@ -286,14 +318,14 @@ use ``spack find -p``:
 
    $ spack find -p
    ==> 74 installed packages.
-   -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-   ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
-   adept-utils@1.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da
-   atk@2.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/atk@2.14.0-3d09ac09
-   boost@1.55.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/boost@1.55.0
-   bzip2@1.0.6 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/bzip2@1.0.6
-   cairo@1.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/cairo@1.14.0-fcc2ab44
-   callpath@1.0.2 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+   ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
+   adept-utils@1.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da
+   atk@2.14.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/atk@2.14.0-3d09ac09
+   boost@1.55.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/boost@1.55.0
+   bzip2@1.0.6 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/bzip2@1.0.6
+   cairo@1.14.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/cairo@1.14.0-fcc2ab44
+   callpath@1.0.2 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318
    ...
 
 And, finally, you can restrict your search to a particular package
@@ -302,10 +334,10 @@ by supplying its name:
 .. code-block:: sh
 
    $ spack find -p libelf
-   -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-   libelf@0.8.11 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
-   libelf@0.8.12 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
-   libelf@0.8.13 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+   libelf@0.8.11 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.11
+   libelf@0.8.12 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.12
+   libelf@0.8.13 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.13
 
 ``spack find`` actually does a lot more than this. You can use
 *specs* to query for specific configurations and builds of each
@@ -315,7 +347,7 @@ package. If you want to find only libelf versions greater than version
 .. code-block:: sh
 
    $ spack find libelf@0.8.12:
-   -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
    libelf@0.8.12 libelf@0.8.13
 
 Finding just the versions of libdwarf built with a particular version
```
@@ -325,139 +357,17 @@ of libelf would look like this:
|
||||
|
||||
$ spack find -l libdwarf ^libelf@0.8.12
|
||||
==> 1 installed packages.
|
||||
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
|
||||
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
|
||||
libdwarf@20130729-d9b90962
|
||||
|
||||
We can also search for packages that have a certain attribute. For example,
|
||||
``spack find -l libdwarf +debug`` will show only installations of libdwarf
|
||||
with the 'debug' compile-time option enabled, while ``spack find -l +debug``
|
||||
will find every installed package with a 'debug' compile-time option enabled.
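
For quick reference, those two queries look like this on the command line
(output omitted here):

.. code-block:: sh

   $ spack find -l libdwarf +debug   # libdwarf installs built with the debug option
   $ spack find -l +debug            # every installed package built with a debug option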
|
||||
|
||||
The full spec syntax is discussed in detail in :ref:`sec-specs`.
|
||||
|
||||
|
||||
Compiler configuration
|
||||
-----------------------------------
|
||||
|
||||
Spack has the ability to build packages with multiple compilers and
|
||||
compiler versions. Spack searches for compilers on your machine
|
||||
automatically the first time it is run. It does this by inspecting
|
||||
your path.
|
||||
|
||||
.. _spack-compilers:
|
||||
|
||||
``spack compilers``
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
You can see which compilers spack has found by running ``spack
|
||||
compilers`` or ``spack compiler list``::
|
||||
|
||||
$ spack compilers
|
||||
==> Available compilers
|
||||
-- gcc ---------------------------------------------------------
|
||||
gcc@4.9.0 gcc@4.8.0 gcc@4.7.0 gcc@4.6.2 gcc@4.4.7
|
||||
gcc@4.8.2 gcc@4.7.1 gcc@4.6.3 gcc@4.6.1 gcc@4.1.2
|
||||
-- intel -------------------------------------------------------
|
||||
intel@15.0.0 intel@14.0.0 intel@13.0.0 intel@12.1.0 intel@10.0
|
||||
intel@14.0.3 intel@13.1.1 intel@12.1.5 intel@12.0.4 intel@9.1
|
||||
intel@14.0.2 intel@13.1.0 intel@12.1.3 intel@11.1
|
||||
intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
|
||||
-- clang -------------------------------------------------------
|
||||
clang@3.4 clang@3.3 clang@3.2 clang@3.1
|
||||
-- pgi ---------------------------------------------------------
|
||||
pgi@14.3-0 pgi@13.2-0 pgi@12.1-0 pgi@10.9-0 pgi@8.0-1
|
||||
pgi@13.10-0 pgi@13.1-1 pgi@11.10-0 pgi@10.2-0 pgi@7.1-3
|
||||
pgi@13.6-0 pgi@12.8-0 pgi@11.1-0 pgi@9.0-4 pgi@7.0-6
|
||||
|
||||
Any of these compilers can be used to build Spack packages. More on
|
||||
how this is done is in :ref:`sec-specs`.
|
||||
|
||||
.. _spack-compiler-add:
|
||||
|
||||
``spack compiler add``
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
An alias for ``spack compiler find``.
|
||||
|
||||
.. _spack-compiler-find:
|
||||
|
||||
``spack compiler find``
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If you do not see a compiler in this list, but you want to use it with
|
||||
Spack, you can simply run ``spack compiler find`` with the path to
|
||||
where the compiler is installed. For example::
|
||||
|
||||
$ spack compiler find /usr/local/tools/ic-13.0.079
|
||||
==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
|
||||
intel@13.0.079
|
||||
|
||||
Or you can run ``spack compiler find`` with no arguments to force
|
||||
auto-detection. This is useful if you do not know where compilers are
|
||||
installed, but you know that new compilers have been added to your
|
||||
``PATH``. For example, using environment modules, you might do this::
|
||||
|
||||
$ module load gcc-4.9.0
|
||||
$ spack compiler find
|
||||
==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
|
||||
gcc@4.9.0
|
||||
|
||||
This loads the environment module for gcc-4.9.0 to add it to
|
||||
``PATH``, and then it adds the compiler to Spack.
|
||||
|
||||
.. _spack-compiler-info:
|
||||
|
||||
``spack compiler info``
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If you want to see specifics on a particular compiler, you can run
|
||||
``spack compiler info`` on it::
|
||||
|
||||
$ spack compiler info intel@15
|
||||
intel@15.0.0:
|
||||
cc = /usr/local/bin/icc-15.0.090
|
||||
cxx = /usr/local/bin/icpc-15.0.090
|
||||
f77 = /usr/local/bin/ifort-15.0.090
|
||||
fc = /usr/local/bin/ifort-15.0.090
|
||||
|
||||
This shows which C, C++, and Fortran compilers were detected by Spack.
|
||||
Notice also that we didn't have to be too specific about the
|
||||
version. We just said ``intel@15``, and information about the only
|
||||
matching Intel compiler was displayed.
|
||||
|
||||
|
||||
Manual compiler configuration
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If auto-detection fails, you can manually configure a compiler by
|
||||
editing your ``~/.spack/compilers.yaml`` file. You can do this by running
|
||||
``spack config edit compilers``, which will open the file in your ``$EDITOR``.
|
||||
|
||||
Each compiler configuration in the file looks like this::
|
||||
|
||||
...
|
||||
chaos_5_x86_64_ib:
|
||||
...
|
||||
intel@15.0.0:
|
||||
cc: /usr/local/bin/icc-15.0.024-beta
|
||||
cxx: /usr/local/bin/icpc-15.0.024-beta
|
||||
f77: /usr/local/bin/ifort-15.0.024-beta
|
||||
fc: /usr/local/bin/ifort-15.0.024-beta
|
||||
...
|
||||
|
||||
The chaos_5_x86_64_ib string is an architecture string, and multiple
|
||||
compilers can be listed underneath an architecture. The architecture
|
||||
string may be replaced with the string 'all' to signify compilers that
|
||||
work on all architectures.
|
||||
|
||||
For compilers, like ``clang``, that do not support Fortran, put
|
||||
``None`` for ``f77`` and ``fc``::
|
||||
|
||||
clang@3.3svn:
|
||||
cc: /usr/bin/clang
|
||||
cxx: /usr/bin/clang++
|
||||
f77: None
|
||||
fc: None
|
||||
|
||||
Once you save the file, the configured compilers will show up in the
|
||||
list displayed by ``spack compilers``.
|
||||
|
||||
|
||||
.. _sec-specs:
|
||||
|
||||
Specs & dependencies
|
||||
@@ -469,12 +379,12 @@ Spack, that descriptor is called a *spec*. Spack uses specs to refer
|
||||
to a particular build configuration (or configurations) of a package.
|
||||
Specs are more than a package name and a version; you can use them to
|
||||
specify the compiler, compiler version, architecture, compile options,
|
||||
and dependency options for a build. In this section, we'll go over
|
||||
and dependency options for a build. In this section, we'll go over
|
||||
the full syntax of specs.
|
||||
|
||||
Here is an example of a much longer spec than we've seen thus far::
|
||||
Here is an example of a much longer spec than we've seen thus far::
|
||||
|
||||
mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt =bgqos_0 ^callpath @1.1 %gcc@4.7.2
|
||||
mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt arch=bgq_os ^callpath @1.1 %gcc@4.7.2
|
||||
|
||||
If provided to ``spack install``, this will install the ``mpileaks``
|
||||
library at some version between ``1.2`` and ``1.4`` (inclusive),
|
||||
@@ -492,8 +402,13 @@ More formally, a spec consists of the following pieces:
|
||||
* ``%`` Optional compiler specifier, with an optional compiler version
|
||||
(``gcc`` or ``gcc@4.7.3``)
|
||||
* ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``,
|
||||
``-qt``, or ``~qt``)
|
||||
* ``=`` Optional architecture specifier (``bgqos_0``)
|
||||
``-qt``, or ``~qt``) for boolean variants
|
||||
* ``name=<value>`` Optional variant specifiers that are not restricted to
|
||||
boolean variants
|
||||
* ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
|
||||
``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
|
||||
* ``target=<value> os=<value>`` Optional architecture specifier
|
||||
(``target=haswell os=CNL10``)
|
||||
* ``^`` Dependency specs (``^callpath@1.1``)
|
||||
|
||||
There are two things to notice here. The first is that specs are
|
||||
@@ -573,7 +488,7 @@ compilers, variants, and architectures just like any other spec.
|
||||
Specifiers are associated with the nearest package name to their left.
|
||||
For example, above, ``@1.1`` and ``%gcc@4.7.2`` associates with the
|
||||
``callpath`` package, while ``@1.2:1.4``, ``%gcc@4.7.5``, ``+debug``,
|
||||
``-qt``, and ``=bgqos_0`` all associate with the ``mpileaks`` package.
|
||||
``-qt``, and ``target=haswell os=CNL10`` all associate with the ``mpileaks`` package.
|
||||
|
||||
In the diagram above, ``mpileaks`` depends on ``mpich`` with an
|
||||
unspecified version, but packages can depend on other packages with
|
||||
@@ -629,22 +544,25 @@ based on site policies.
|
||||
Variants
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
.. Note::
|
||||
|
||||
Variants are not yet supported, but will be in the next Spack
|
||||
release (0.9), due in Q2 2015.
|
||||
|
||||
Variants are named options associated with a particular package, and
|
||||
they can be turned on or off. For example, above, supplying
|
||||
``+debug`` causes ``mpileaks`` to be built with debug flags. The
|
||||
names of particular variants available for a package depend on what
|
||||
was provided by the package author. ``spack info <package>`` will
|
||||
Variants are named options associated with a particular package. They are
|
||||
optional, as each package must provide default values for each variant it
|
||||
makes available. Variants can be specified using
|
||||
a flexible parameter syntax ``name=<value>``. For example,
|
||||
``spack install libelf debug=True`` will install libelf built with debug
|
||||
flags. The names of particular variants available for a package depend on
|
||||
what was provided by the package author. ``spack info <package>`` will
|
||||
provide information on what build variants are available.
|
||||
|
||||
Depending on the package a variant may be on or off by default. For
|
||||
``mpileaks`` here, ``debug`` is off by default, and we turned it on
|
||||
with ``+debug``. If a package is on by default you can turn it off by
|
||||
either adding ``-name`` or ``~name`` to the spec.
|
||||
For compatibility with earlier versions, variants which happen to be
|
||||
boolean in nature can be specified by a syntax that represents turning
|
||||
options on and off. For example, in the previous spec we could have
|
||||
supplied ``libelf +debug`` with the same effect of enabling the debug
|
||||
compile time option for the libelf package.
|
||||
|
||||
Depending on the package a variant may have any default value. For
|
||||
``libelf`` here, ``debug`` is ``False`` by default, and we turned it on
|
||||
with ``debug=True`` or ``+debug``. If a variant is ``True`` by default
|
||||
you can turn it off by either adding ``-name`` or ``~name`` to the spec.
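
For example, the following commands are equivalent ways of enabling and
disabling the variant (a small illustration based on the libelf example above):

.. code-block:: sh

   $ spack install libelf debug=True    # keyword syntax
   $ spack install libelf +debug        # boolean shorthand, same effect

   $ spack install libelf debug=False   # keyword syntax
   $ spack install libelf ~debug        # boolean shorthand, same effect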
|
||||
|
||||
There are two syntaxes here because, depending on context, ``~`` and
|
||||
``-`` may mean different things. In most shells, the following will
|
||||
@@ -656,7 +574,7 @@ result in the shell performing home directory substitution:
|
||||
mpileaks~debug # use this instead
|
||||
|
||||
If there is a user called ``debug``, the ``~`` will be incorrectly
|
||||
expanded. In this situation, you would want to write ``mpileaks
|
||||
expanded. In this situation, you would want to write ``libelf
|
||||
-debug``. However, ``-`` can be ambiguous when included after a
|
||||
package name without spaces:
|
||||
|
||||
@@ -671,25 +589,49 @@ package, not a request for ``mpileaks`` built without ``debug``
|
||||
options. In this scenario, you should write ``mpileaks~debug`` to
|
||||
avoid ambiguity.
|
||||
|
||||
When spack normalizes specs, it prints them out with no spaces and
|
||||
uses only ``~`` for disabled variants. Support for ``-`` and spaces on
the command line is provided for convenience and legibility.
|
||||
When Spack normalizes specs, it prints them out with no spaces, writes
boolean variants using the backwards-compatible syntax, and uses only ``~``
for disabled boolean variants. Support for ``-`` and spaces on the command
line is provided for convenience and legibility.
|
||||
|
||||
|
||||
Architecture specifier
|
||||
Compiler Flags
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
.. Note::
|
||||
Compiler flags are specified using the same syntax as non-boolean variants,
|
||||
but fulfill a different purpose. While the function of a variant is set by
|
||||
the package, compiler flags are used by the compiler wrappers to inject
|
||||
flags into the compile line of the build. Additionally, compiler flags are
|
||||
inherited by dependencies. ``spack install libdwarf cppflags=\"-g\"`` will
|
||||
install both libdwarf and libelf with the ``-g`` flag injected into their
|
||||
compile line.
|
||||
|
||||
Architecture specifiers are part of specs but are not yet
|
||||
functional. They will be in Spack version 1.0, due in Q3 2015.
|
||||
Notice that the value of the compiler flags must be escape quoted on the
|
||||
command line. From within python files, the same spec would be specified
|
||||
``libdwarf cppflags="-g"``. This is necessary because of how the shell
|
||||
handles the quote symbols.
|
||||
|
||||
The architecture specifier starts with a ``=`` and also comes after
|
||||
some package name within a spec. It allows a user to specify a
|
||||
particular architecture for the package to be built. This is mostly
|
||||
used for architectures that need cross-compilation, and in most cases,
|
||||
users will not need to specify the architecture when they install a
|
||||
package.
|
||||
The six compiler flags are injected in the order of implicit make commands
|
||||
in GNU Autotools. If all flags are set, the order is
``$cppflags $cflags|$cxxflags $ldflags command $ldlibs`` for C and C++ and
``$fflags $cppflags $ldflags command $ldlibs`` for Fortran.
|
||||
|
||||
|
||||
Architecture specifiers
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The architecture can be specified by using the reserved
|
||||
words ``target`` and/or ``os`` (``target=x86-64 os=debian7``). You can also
|
||||
use the triplet form of platform, operating system and processor.
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
spack install libelf arch=cray_xc-CNL10-haswell
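
The reserved-word form expresses the same idea piece by piece; for example
(values here are illustrative, taken from the parenthetical above):

.. code-block:: sh

   spack install libelf target=x86-64 os=debian7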
|
||||
|
||||
Users on non-Cray systems won't have to worry about specifying the architecture.
|
||||
Spack will autodetect what kind of operating system is on your machine as well
|
||||
as the processor. For more information on how the architecture can be
|
||||
used on Cray machines, see :ref:`spack-cray`.
|
||||
|
||||
|
||||
.. _sec-virtual-dependencies:
|
||||
@@ -767,6 +709,23 @@ any MPI implementation will do. If another package depends on
|
||||
error. Likewise, if you try to plug in some package that doesn't
|
||||
provide MPI, Spack will raise an error.
|
||||
|
||||
Specifying Specs by Hash
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Complicated specs can become cumbersome to enter on the command line,
|
||||
especially when many of the qualifications are necessary to
|
||||
distinguish between similar installs, for example when using the
|
||||
``uninstall`` command. To avoid this, when referencing an existing spec,
|
||||
Spack allows you to reference specs by their hash. We previously
|
||||
discussed the spec hash that Spack computes. In place of a spec in any
|
||||
command, substitute ``/<hash>`` where ``<hash>`` is any number of characters from
|
||||
the beginning of a spec hash. If the given spec hash is sufficient
|
||||
to be unique, Spack will replace the reference with the spec to which
|
||||
it refers. Otherwise, it will prompt for a more qualified hash.
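
For example, using the libdwarf hash shown in the ``spack find -l`` output
earlier (a sketch; any unique leading portion of the hash works):

.. code-block:: sh

   $ spack uninstall /d9b9096   # refers to libdwarf@20130729-d9b90962 by hash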
|
||||
|
||||
Note that this will not work to reinstall a dependency uninstalled by
|
||||
``spack uninstall -f``.
|
||||
|
||||
.. _spack-providers:
|
||||
|
||||
``spack providers``
|
||||
@@ -798,47 +757,18 @@ Integration with module systems
|
||||
interface and/or generated module names may change in future
|
||||
versions.
|
||||
|
||||
Spack provides some integration with
|
||||
`Environment Modules <http://modules.sourceforge.net/>`_
|
||||
and `Dotkit <https://computing.llnl.gov/?set=jobs&page=dotkit>`_ to make
|
||||
it easier to use the packages it installed.
|
||||
Spack provides some integration with `Environment Modules
|
||||
<http://modules.sourceforge.net/>`_ to make it easier to use the
|
||||
packages it installs. If your system does not already have
|
||||
Environment Modules, see InstallEnvironmentModules_.
|
||||
|
||||
.. note::
|
||||
|
||||
Spack also supports `Dotkit
|
||||
<https://computing.llnl.gov/?set=jobs&page=dotkit>`_, which is used
|
||||
by some systems. If your system does not already have a module
|
||||
system installed, you should use Environment Modules or LMod.
|
||||
|
||||
Installing Environment Modules
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
In order to use Spack's generated environment modules, you must have
|
||||
installed the *Environment Modules* package. On many Linux
|
||||
distributions, this can be installed from the vendor's repository:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
yum install environment-modules # (Fedora/RHEL/CentOS)
|
||||
apt-get install environment-modules # (Ubuntu/Debian)
|
||||
|
||||
If your Linux distribution does not have
|
||||
Environment Modules, you can get it with Spack:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
spack install environment-modules
|
||||
|
||||
|
||||
In this case, to activate it automatically, you need to add the following two
|
||||
lines to your ``.bashrc`` profile (or similar):
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
MODULES_HOME=`spack location -i environment-modules`
|
||||
source ${MODULES_HOME}/Modules/init/bash
|
||||
|
||||
If you use a Unix shell other than ``bash``, modify the commands above
|
||||
accordingly and source the appropriate file in
|
||||
``${MODULES_HOME}/Modules/init/``.
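
For example, ``zsh`` users might source the corresponding init file instead
(a sketch; the exact file names depend on your Environment Modules installation):

.. code-block:: sh

   MODULES_HOME=`spack location -i environment-modules`
   source ${MODULES_HOME}/Modules/init/zsh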
|
||||
|
||||
|
||||
.. TODO : Add a similar section on how to install dotkit ?
|
||||
|
||||
Spack and module systems
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
@@ -892,7 +822,7 @@ of installed packages.
|
||||
|
||||
$ module avail
|
||||
|
||||
------- /home/gamblin2/spack/share/spack/modules/chaos_5_x86_64_ib --------
|
||||
------- /home/gamblin2/spack/share/spack/modules/linux-debian7-x86_64 --------
|
||||
adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
|
||||
automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
|
||||
boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
|
||||
@@ -963,7 +893,7 @@ Spack. For example, this will add the ``mpich`` package built with
|
||||
$ spack use mpich %gcc@4.4.7
|
||||
Prepending: mpich@3.0.4%gcc@4.4.7 (ok)
|
||||
$ which mpicc
|
||||
~/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4/bin/mpicc
|
||||
~/src/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4/bin/mpicc
|
||||
|
||||
Or, similarly with modules, you could type:
|
||||
|
||||
@@ -996,8 +926,8 @@ than one installed package matches it), then Spack will warn you:
|
||||
|
||||
$ spack load libelf
|
||||
==> Error: Multiple matches for spec libelf. Choose one:
|
||||
libelf@0.8.13%gcc@4.4.7=chaos_5_x86_64_ib
|
||||
libelf@0.8.13%intel@15.0.0=chaos_5_x86_64_ib
|
||||
libelf@0.8.13%gcc@4.4.7 arch=linux-debian7-x86_64
|
||||
libelf@0.8.13%intel@15.0.0 arch=linux-debian7-x86_64
|
||||
|
||||
You can either type the ``spack load`` command again with a fully
|
||||
qualified argument, or you can add just enough extra constraints to
|
||||
@@ -1038,6 +968,80 @@ of module files:
|
||||
"""Set up the compile and runtime environments for a package."""
|
||||
pass
|
||||
|
||||
|
||||
Recursive Modules
|
||||
``````````````````
|
||||
|
||||
In some cases, it is desirable to load not just a module, but also all
|
||||
the modules it depends on. This is not required for most modules
|
||||
because Spack builds binaries with RPATH support. However, not all
|
||||
packages use RPATH to find their dependencies: this can be true in
|
||||
particular for Python extensions, which are currently *not* built with
|
||||
RPATH.
|
||||
|
||||
Modules may be loaded recursively with the ``load`` command's
|
||||
``--dependencies`` or ``-r`` argument:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack load --dependencies <spec> ...
|
||||
|
||||
More than one spec may be placed on the command line here.
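
For example (illustrative; substitute any installed specs):

.. code-block:: sh

   $ spack load --dependencies py-numpy git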
|
||||
|
||||
Module Commands for Shell Scripts
|
||||
``````````````````````````````````
|
||||
|
||||
Although Spack is flexible, the ``module`` command is much faster than invoking ``spack load`` repeatedly.
|
||||
This could become an issue when emitting a series of ``spack load``
|
||||
commands inside a shell script. The ``spack module loads`` command may also
|
||||
be used to generate code that can be cut-and-pasted into a shell
|
||||
script. For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack module loads --dependencies py-numpy git
|
||||
# bzip2@1.0.6%gcc@4.9.3=linux-x86_64
|
||||
module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
|
||||
# ncurses@6.0%gcc@4.9.3=linux-x86_64
|
||||
module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
|
||||
# zlib@1.2.8%gcc@4.9.3=linux-x86_64
|
||||
module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
|
||||
# sqlite@3.8.5%gcc@4.9.3=linux-x86_64
|
||||
module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
|
||||
# readline@6.3%gcc@4.9.3=linux-x86_64
|
||||
module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
|
||||
# python@3.5.1%gcc@4.9.3=linux-x86_64
|
||||
module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
|
||||
# py-setuptools@20.5%gcc@4.9.3=linux-x86_64
|
||||
module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
|
||||
# py-nose@1.3.7%gcc@4.9.3=linux-x86_64
|
||||
module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
|
||||
# openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
|
||||
module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
|
||||
# py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
|
||||
module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
|
||||
# curl@7.47.1%gcc@4.9.3=linux-x86_64
|
||||
module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
|
||||
# autoconf@2.69%gcc@4.9.3=linux-x86_64
|
||||
module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
|
||||
# cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
|
||||
module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
|
||||
# expat@2.1.0%gcc@4.9.3=linux-x86_64
|
||||
module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
|
||||
# git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
|
||||
module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
|
||||
|
||||
The script may be further edited by removing unnecessary modules.
|
||||
This script may be directly executed in bash via
|
||||
|
||||
.. code-block :: sh
|
||||
|
||||
source <( spack module loads --dependencies py-numpy git )
|
||||
|
||||
|
||||
Regenerating Module files
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
|
||||
@@ -1065,6 +1069,108 @@ overrides ``setup_dependent_environment`` in the following way:
|
||||
to insert the appropriate ``PYTHONPATH`` modifications in the module
|
||||
files of python packages.
|
||||
|
||||
|
||||
Recursive Modules
|
||||
``````````````````
|
||||
|
||||
In some cases, it is desirable to load not just a module, but also all
|
||||
the modules it depends on. This is not required for most modules
|
||||
because Spack builds binaries with RPATH support. However, not all
|
||||
packages use RPATH to find their dependencies: this can be true in
|
||||
particular for Python extensions, which are currently *not* built with
|
||||
RPATH.
|
||||
|
||||
Modules may be loaded recursively with the command:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ module load `spack module tcl --dependencies <spec>...`
|
||||
|
||||
More than one spec may be placed on the command line here.
|
||||
|
||||
Module Commands for Shell Scripts
|
||||
``````````````````````````````````
|
||||
|
||||
Although Spack is flexible, the ``module`` command is much faster than invoking ``spack load`` repeatedly.
|
||||
This could become an issue when emitting a series of ``spack load``
|
||||
commands inside a shell script. By adding the ``--shell`` flag,
|
||||
``spack module find`` may also be used to generate code that can be
|
||||
cut-and-pasted into a shell script. For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack module loads --dependencies py-numpy git
|
||||
# bzip2@1.0.6%gcc@4.9.3=linux-x86_64
|
||||
module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
|
||||
# ncurses@6.0%gcc@4.9.3=linux-x86_64
|
||||
module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
|
||||
# zlib@1.2.8%gcc@4.9.3=linux-x86_64
|
||||
module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
|
||||
# sqlite@3.8.5%gcc@4.9.3=linux-x86_64
|
||||
module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
|
||||
# readline@6.3%gcc@4.9.3=linux-x86_64
|
||||
module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
|
||||
# python@3.5.1%gcc@4.9.3=linux-x86_64
|
||||
module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
|
||||
# py-setuptools@20.5%gcc@4.9.3=linux-x86_64
|
||||
module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
|
||||
# py-nose@1.3.7%gcc@4.9.3=linux-x86_64
|
||||
module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
|
||||
# openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
|
||||
module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
|
||||
# py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
|
||||
module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
|
||||
# curl@7.47.1%gcc@4.9.3=linux-x86_64
|
||||
module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
|
||||
# autoconf@2.69%gcc@4.9.3=linux-x86_64
|
||||
module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
|
||||
# cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
|
||||
module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
|
||||
# expat@2.1.0%gcc@4.9.3=linux-x86_64
|
||||
module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
|
||||
# git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
|
||||
module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
|
||||
|
||||
The script may be further edited by removing unnecessary modules.
|
||||
This script may be directly executed in bash via
|
||||
|
||||
.. code-block :: sh
|
||||
|
||||
source <( spack module loads --dependencies py-numpy git )
|
||||
|
||||
|
||||
Module Prefixes
|
||||
````````````````
|
||||
|
||||
On some systems, modules are automatically prefixed with a certain
|
||||
string; ``spack module loads`` needs to know about that prefix when it
|
||||
issues ``module load`` commands. Add the ``--prefix`` option to your
|
||||
``spack module loads`` commands if this is necessary.
|
||||
|
||||
For example, consider the following on one system:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ module avail
|
||||
linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||
|
||||
$ spack module loads antlr # WRONG!
|
||||
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
||||
module load antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||
|
||||
$ spack module loads --prefix linux-SuSE11-x86_64/ antlr
|
||||
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||
|
||||
Regenerating Module files
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Module and dotkit files are generated when packages are installed, and
|
||||
are placed in the directory ``share/spack/modules`` under the Spack
|
||||
root. The command ``spack refresh`` will regenerate them all without
|
||||
re-building the packages; for example, if module format or options
|
||||
have changed.
|
||||
|
||||
Configuration files
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
@@ -1245,6 +1351,10 @@ regenerate all module and dotkit files from scratch:
|
||||
|
||||
.. _extensions:
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
Extensions & Python support
|
||||
------------------------------------
|
||||
|
||||
@@ -1263,7 +1373,7 @@ an *extension*. Suppose you have Python installed like so:
|
||||
|
||||
$ spack find python
|
||||
==> 1 installed packages.
|
||||
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
|
||||
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
|
||||
python@2.7.8
|
||||
|
||||
.. _spack-extensions:
|
||||
@@ -1276,7 +1386,7 @@ You can find extensions for your Python installation like this:
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack extensions python
|
||||
==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
|
||||
==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
|
||||
==> 36 extensions:
|
||||
geos py-ipython py-pexpect py-pyside py-sip
|
||||
py-basemap py-libxml2 py-pil py-pytz py-six
|
||||
@@ -1288,7 +1398,7 @@ You can find extensions for your Python installation like this:
|
||||
py-h5py py-numpy py-pyqt py-shiboken
|
||||
|
||||
==> 12 installed:
|
||||
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
|
||||
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
|
||||
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
|
||||
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
|
||||
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
|
||||
@@ -1304,8 +1414,8 @@ prefixes, and you can see this with ``spack find -p``:
|
||||
|
||||
$ spack find -p py-numpy
|
||||
==> 1 installed packages.
|
||||
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
|
||||
py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/py-numpy@1.9.1-66733244
|
||||
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
|
||||
py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
|
||||
|
||||
However, even though this package is installed, you cannot use it
|
||||
directly when you run ``python``:
|
||||
@@ -1366,9 +1476,9 @@ installation:
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack activate py-numpy
|
||||
==> Activated extension py-setuptools@11.3.1%gcc@4.4.7=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
|
||||
==> Activated extension py-nose@1.3.4%gcc@4.4.7=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
|
||||
==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
|
||||
==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=linux-debian7-x86_64-3c74eb69 for python@2.7.8%gcc@4.4.7.
|
||||
==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=linux-debian7-x86_64-5f70f816 for python@2.7.8%gcc@4.4.7.
|
||||
==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
|
||||
|
||||
Several things have happened here. The user requested that
|
||||
``py-numpy`` be activated in the ``python`` installation it was built
|
||||
@@ -1383,7 +1493,7 @@ packages listed as activated:
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack extensions python
|
||||
==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
|
||||
==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
|
||||
==> 36 extensions:
|
||||
geos py-ipython py-pexpect py-pyside py-sip
|
||||
py-basemap py-libxml2 py-pil py-pytz py-six
|
||||
@@ -1395,14 +1505,14 @@ packages listed as activated:
|
||||
py-h5py py-numpy py-pyqt py-shiboken
|
||||
|
||||
==> 12 installed:
|
||||
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
|
||||
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
|
||||
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
|
||||
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
|
||||
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
|
||||
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
|
||||
|
||||
==> 3 currently activated:
|
||||
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
|
||||
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
|
||||
py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
|
||||
|
||||
|
||||
@@ -1431,7 +1541,7 @@ dependencies, you can use ``spack activate -f``:
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack activate -f py-numpy
|
||||
==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
|
||||
==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
|
||||
|
||||
.. _spack-deactivate:
|
||||
|
||||
@@ -1463,7 +1573,7 @@ Spack currently needs to be run from a filesystem that supports
|
||||
``flock`` locking semantics. Nearly all local filesystems and recent
|
||||
versions of NFS support this, but parallel filesystems may be mounted
|
||||
without ``flock`` support enabled. You can determine how your
|
||||
filesystems are mounted with ``mount -p``. The output for a Lustre
|
||||
filesystems are mounted with ``mount -p``. The output for a Lustre
|
||||
filesystem might look like this:
|
||||
|
||||
.. code-block:: sh
|
||||
@@ -1484,7 +1594,7 @@ This issue typically manifests with the error below:
|
||||
Traceback (most recent call last):
|
||||
File "./spack", line 176, in <module>
|
||||
main()
|
||||
File "./spack", line 154, in main
|
||||
File "./spack", line 154, in main
|
||||
return_val = command(parser, args)
|
||||
File "./spack/lib/spack/spack/cmd/find.py", line 170, in find
|
||||
specs = set(spack.installed_db.query(**q_args))
|
||||
@@ -1502,6 +1612,10 @@ This issue typically manifests with the error below:
|
||||
|
||||
A nicer error message is TBD in future versions of Spack.
|
||||
|
||||
|
||||
.. _spack-cray:
|
||||
|
||||
|
||||
Getting Help
|
||||
-----------------------
|
||||
|
||||
|
||||
167
lib/spack/docs/case_studies.rst
Normal file
@@ -0,0 +1,167 @@
|
||||
Using Spack for CMake-based Development
|
||||
==========================================
|
||||
|
||||
These are instructions on how to use Spack to aid in the development
|
||||
of a CMake-based project. Spack is used to help find the dependencies
|
||||
for the project, configure it at development time, and then package it
|
||||
in a way that others can install. Using Spack for CMake-based
|
||||
development consists of three parts:
|
||||
|
||||
1. Setting up the CMake build in your software
|
||||
2. Writing the Spack Package
|
||||
3. Using it from Spack.
|
||||
|
||||
|
||||
Setting Up the CMake Build
|
||||
---------------------------------------
|
||||
|
||||
You should follow standard CMake conventions in setting up your
|
||||
software; your CMake build should NOT depend on or require Spack to
|
||||
build. See here for an example:
|
||||
https://github.com/citibeth/icebin
|
||||
|
||||
Note that there's one exception here to the rule I mentioned above.
|
||||
In ``CMakeLists.txt``, I have the following line::
|
||||
|
||||
include_directories($ENV{CMAKE_TRANSITIVE_INCLUDE_PATH})
|
||||
|
||||
|
||||
This is a hook into Spack, and it ensures that all transitive
|
||||
dependencies are included in the include path. It's not needed if
|
||||
everything is in one tree, but it is (sometimes) needed in the Spack world;
|
||||
when running without Spack, it has no effect.
|
||||
|
||||
Note that this "feature" is controversial, could break with future
|
||||
versions of GNU ld, and is probably not the best approach to use. The best
|
||||
practice is that you make sure that anything you #include is listed as
|
||||
a dependency in your CMakeLists.txt.
|
||||
|
||||
To be more specific: if you #include something from package A and an
|
||||
installed HEADER FILE in A #includes something from package B, then
|
||||
you should also list B as a dependency in your CMake build. If you
|
||||
depend on A but header files exported by A do NOT #include things from
|
||||
B, then you do NOT need to list B as a dependency --- even if linking
|
||||
to A links in libB.so as well.
|
||||
|
||||
I also recommend that you set up your CMake build to use RPATHs
|
||||
correctly. Not only is this good practice in its own right, but it also ensures
|
||||
that your package will build the same with or without ``spack
|
||||
install``.
|
||||
|
||||
Writing the Spack Package
|
||||
---------------------------------------
|
||||
|
||||
Now that you have a CMake build, you want to tell Spack how to
|
||||
configure it. This is done by writing a Spack package for your
|
||||
software. See here for example:
|
||||
https://github.com/citibeth/spack/blob/efischer/develop/var/spack/repos/builtin/packages/icebin/package.py
|
||||
|
||||
You need to subclass ``CMakePackage``, as is done in this example.
|
||||
This enables advanced features of Spack for helping you in configuring
|
||||
your software (keep reading...). Instead of an ``install()`` method
|
||||
used when subclassing ``Package``, you write ``configure_args()``.
|
||||
See here for more info on how this works:
|
||||
https://github.com/LLNL/spack/pull/543/files
|
||||
|
||||
NOTE: if your software is not publicly available, you do not need to
|
||||
set the URL or version. Or you can set up bogus URLs and
|
||||
versions... whatever causes Spack to not crash.
|
||||
|
||||
|
||||
Using it from Spack
|
||||
--------------------------------
|
||||
|
||||
Now that you have a Spack package, you can get Spack to setup your
|
||||
CMake project for you. Use the following to setup, configure and
|
||||
build your project::
|
||||
|
||||
cd myproject
|
||||
spack spconfig myproject@local
|
||||
mkdir build; cd build
|
||||
../spconfig.py ..
|
||||
make
|
||||
make install
|
||||
|
||||
|
||||
Everything here should look pretty familiar from a CMake
|
||||
perspective, except that ``spack spconfig`` creates the file
|
||||
``spconfig.py``, which calls CMake with arguments appropriate for your
|
||||
Spack configuration. Think of it as the equivalent to running a bunch
|
||||
of ``spack location -i`` commands. You will run ``spconfig.py``
|
||||
instead of running CMake directly.
|
||||
|
||||
If your project is publicly available (e.g. on GitHub), then you can
|
||||
ALSO use this setup to "just install" a release version without going
|
||||
through the manual configuration/build step. Just do:
|
||||
|
||||
1. Put tag(s) on the version(s) in your GitHub repo you want to be release versions.
|
||||
|
||||
2. Set the ``url`` in your ``package.py`` to download a tarball for
|
||||
the appropriate version. (GitHub will give you a tarball for any
|
||||
version in the repo, if you tickle it the right way). For example::
|
||||
|
||||
https://github.com/citibeth/icebin/tarball/v0.1.0
|
||||
|
||||
Set up versions as appropriate in your ``package.py``. (Manually
download the tarball and run ``md5sum`` to determine the
appropriate checksum for it; one way to do this is sketched just after
this list.)
|
||||
|
||||
3. Now you should be able to say ``spack install myproject@version``
|
||||
and things "just work."
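
For step 2, one way to fetch the tarball and compute its checksum is the
following (a sketch using standard tools and the illustrative URL above):

.. code-block:: sh

   curl -L -o icebin-0.1.0.tar.gz https://github.com/citibeth/icebin/tarball/v0.1.0
   md5sum icebin-0.1.0.tar.gz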
|
||||
|
||||
NOTE... in order to use the features outlined in this post, you
|
||||
currently need to use the following branch of Spack:
|
||||
https://github.com/citibeth/spack/tree/efischer/develop
|
||||
|
||||
There is a pull request open on this branch (
|
||||
https://github.com/LLNL/spack/pull/543 ) and we are working to get it
|
||||
integrated into the main ``develop`` branch.
|
||||
|
||||
|
||||
Activating your Software
|
||||
-------------------------------------
|
||||
|
||||
Once you've built your software, you will want to load it up. You can
|
||||
use ``spack load mypackage@local`` for that in your ``.bashrc``, but
|
||||
that is slow. Try stuff like the following instead:
|
||||
|
||||
The following commands will load the Spack-installed packages needed
|
||||
for basic Python use of IceBin::
|
||||
|
||||
module load `spack module find tcl icebin netcdf cmake@3.5.1`
|
||||
module load `spack module find --dependencies tcl py-basemap py-giss`
|
||||
|
||||
|
||||
You can speed up shell startup by generating these ``module load`` commands once and sourcing them from a file:
|
||||
|
||||
1. Cut-n-paste the script ``make_spackenv``::
|
||||
|
||||
#!/bin/sh
|
||||
#
|
||||
# Generate commands to load the Spack environment
|
||||
|
||||
SPACKENV=$HOME/spackenv.sh
|
||||
|
||||
spack module find --shell tcl git icebin@local ibmisc netcdf cmake@3.5.1 >$SPACKENV
|
||||
spack module find --dependencies --shell tcl py-basemap py-giss >>$SPACKENV
|
||||
|
||||
2. Add the following to your ``.bashrc`` file::
|
||||
|
||||
source $HOME/spackenv.sh
|
||||
# Preferentially use your checked-out Python source
|
||||
export PYTHONPATH=$HOME/icebin/pylib:$PYTHONPATH
|
||||
|
||||
3. Run ``sh make_spackenv`` whenever your Spack installation changes (including right now).
|
||||
|
||||
|
||||
Giving Back
|
||||
-------------------
|
||||
|
||||
If your software is publicly available, you should submit the
|
||||
``package.py`` for it as a pull request to the main Spack GitHub
|
||||
project. This will ensure that anyone can install your software
|
||||
(almost) painlessly with a simple ``spack install`` command. See here
|
||||
for how that has turned into detailed instructions that have
|
||||
successfully enabled collaborators to install complex software:
|
||||
|
||||
https://github.com/citibeth/icebin/blob/develop/README.rst
|
||||
@@ -51,7 +51,8 @@
|
||||
os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin'
|
||||
|
||||
spack_version = subprocess.Popen(
|
||||
['spack', '-V'], stderr=subprocess.PIPE).communicate()[1].strip().split('.')
|
||||
[spack_root + '/bin/spack', '-V'],
|
||||
stderr=subprocess.PIPE).communicate()[1].strip().split('.')
|
||||
|
||||
# Set an environment variable so that colify will print output like it would to
|
||||
# a terminal.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
.. _site-configuration:
|
||||
.. _configuration:
|
||||
|
||||
Site configuration
|
||||
Configuration
|
||||
===================================
|
||||
|
||||
.. _temp-space:
|
||||
@@ -53,9 +53,10 @@ in the first directory it finds to which it has write access. Add
|
||||
more elements to the list to indicate where your own site's temporary
|
||||
directory is.
|
||||
|
||||
.. _sec-external_packages:
|
||||
|
||||
External Packages
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
----------------------------
|
||||
Spack can be configured to use externally-installed
|
||||
packages rather than building its own packages. This may be desirable
|
||||
if machines ship with system packages, such as a customized MPI
|
||||
@@ -70,20 +71,20 @@ directory. Here's an example of an external configuration:
|
||||
packages:
|
||||
openmpi:
|
||||
paths:
|
||||
openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
|
||||
openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
|
||||
openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
|
||||
openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
|
||||
|
||||
This example lists three installations of OpenMPI, one built with gcc,
|
||||
one built with gcc and debug information, and another built with Intel.
|
||||
If Spack is asked to build a package that uses one of these MPIs as a
|
||||
dependency, it will use the pre-installed OpenMPI in
|
||||
the given directory.
|
||||
the given directory. ``packages.yaml`` can also be used to specify modules for external packages.
|
||||
|
||||
Each ``packages.yaml`` begins with a ``packages:`` token, followed
|
||||
by a list of package names. To specify externals, add a ``paths``
|
||||
by a list of package names. To specify externals, add a ``paths`` or ``modules``
|
||||
token under the package name, which lists externals in a
|
||||
``spec : /path`` format. Each spec should be as
|
||||
``spec: /path`` or ``spec: module-name`` format. Each spec should be as
|
||||
well-defined as reasonably possible. If a
|
||||
package lacks a spec component, such as missing a compiler or
|
||||
package version, then Spack will guess the missing component based
|
||||
@@ -108,9 +109,9 @@ be:
|
||||
packages:
|
||||
openmpi:
|
||||
paths:
|
||||
openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
|
||||
openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
|
||||
openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
|
||||
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
|
||||
openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
|
||||
buildable: False
|
||||
|
||||
The addition of the ``buildable`` flag tells Spack that it should never build
|
||||
@@ -118,13 +119,75 @@ its own version of OpenMPI, and it will instead always rely on a pre-built
|
||||
OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
|
||||
a package name.
|
||||
|
||||
The ``buildable`` does not need to be paired with external packages.
|
||||
It could also be used alone to forbid packages that may be
|
||||
If an external module is specified as not buildable, then Spack will load the
|
||||
external module into the build environment, which can be used for linking.
|
||||
|
||||
The ``buildable`` does not need to be paired with external packages.
|
||||
It could also be used alone to forbid packages that may be
|
||||
buggy or otherwise undesirable.
|
||||
|
||||
|
||||
Concretization Preferences
|
||||
--------------------------------
|
||||
|
||||
Spack can be configured to prefer certain compilers, package
|
||||
versions, dependencies, and variants during concretization.
|
||||
The preferred configuration can be controlled via the
|
||||
``~/.spack/packages.yaml`` file for user configurations, or the
|
||||
``etc/spack/packages.yaml`` site configuration.
|
||||
|
||||
|
||||
Here's an example packages.yaml file that sets preferred packages:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
packages:
|
||||
opencv:
|
||||
compiler: [gcc@4.9]
|
||||
variants: +debug
|
||||
gperftools:
|
||||
version: [2.2, 2.4, 2.3]
|
||||
all:
|
||||
compiler: [gcc@4.4.7, gcc@4.6:, intel, clang, pgi]
|
||||
providers:
|
||||
mpi: [mvapich, mpich, openmpi]
|
||||
|
||||
|
||||
At a high level, this example is specifying how packages should be
|
||||
concretized. The opencv package should prefer using gcc 4.9 and
|
||||
be built with debug options. The gperftools package should prefer version
|
||||
2.2 over 2.4. Every package on the system should prefer mvapich for
|
||||
its MPI and gcc 4.4.7 (except for opencv, which overrides this by preferring gcc 4.9).
|
||||
These options are used to fill in implicit defaults. Any of them can be overwritten
|
||||
on the command line if explicitly requested.
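
For example, a user can still override any of these preferences explicitly
(an illustrative command, not from the original text):

.. code-block:: sh

   $ spack install opencv %clang ~debug   # overrides the preferred gcc@4.9 and +debug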
|
||||
|
||||
Each packages.yaml file begins with the string ``packages:`` and
|
||||
package names are specified on the next level. The special string ``all``
|
||||
applies settings to each package. Underneath each package name is
|
||||
one or more components: ``compiler``, ``variants``, ``version``,
|
||||
or ``providers``. Each component has an ordered list of spec
|
||||
``constraints``, with earlier entries in the list being preferred over
|
||||
later entries.
|
||||
|
||||
Sometimes a package installation may have constraints that forbid
|
||||
the first concretization rule, in which case Spack will use the first
|
||||
legal concretization rule. Going back to the example, if a user
|
||||
requests gperftools 2.3 or later, then Spack will install version 2.4
|
||||
as the 2.4 version of gperftools is preferred over 2.3.
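
In command form, that scenario looks like this (illustrative):

.. code-block:: sh

   $ spack install gperftools@2.3:   # concretizes to 2.4, the preferred version satisfying the range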
|
||||
|
||||
An explicit concretization rule in the preferred section will always
|
||||
take preference over unlisted concretizations. In the above example,
|
||||
xlc isn't listed in the compiler list. Every listed compiler from
|
||||
gcc to pgi will thus be preferred over the xlc compiler.
|
||||
|
||||
The syntax for the ``providers`` section differs slightly from other
concretization rules. A provider lists a value that packages may
``depends_on`` (e.g., mpi) and a list of rules for fulfilling that
|
||||
dependency.
|
||||
|
||||
|
||||
Profiling
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
------------------
|
||||
|
||||
Spack has some limited built-in support for profiling, and can report
|
||||
statistics using standard Python timing tools. To use this feature,
|
||||
@@ -133,7 +196,7 @@ supply ``-p`` to Spack on the command line, before any subcommands.
|
||||
.. _spack-p:
|
||||
|
||||
``spack -p``
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
``spack -p`` output looks like this:
|
||||
|
||||
@@ -80,10 +80,11 @@ with a high level view of Spack's directory structure::
|
||||
|
||||
var/
|
||||
spack/ <- build & stage directories
|
||||
repos/ <- contains package repositories
|
||||
builtin/ <- pkg repository that comes with Spack
|
||||
repo.yaml <- descriptor for the builtin repository
|
||||
packages/ <- directories under here contain packages
|
||||
repos/ <- contains package repositories
|
||||
builtin/ <- pkg repository that comes with Spack
|
||||
repo.yaml <- descriptor for the builtin repository
|
||||
packages/ <- directories under here contain packages
|
||||
cache/ <- saves resources downloaded during installs
|
||||
|
||||
opt/
|
||||
spack/ <- packages are installed here
|
||||
|
||||
@@ -31,14 +31,21 @@ platform, all on the command line.
|
||||
# Specify a compiler (and its version), with %
|
||||
$ spack install mpileaks@1.1.2 %gcc@4.7.3
|
||||
|
||||
# Add special compile-time options with +
|
||||
# Add special compile-time options by name
|
||||
$ spack install mpileaks@1.1.2 %gcc@4.7.3 debug=True
|
||||
|
||||
# Add special boolean compile-time options with +
|
||||
$ spack install mpileaks@1.1.2 %gcc@4.7.3 +debug
|
||||
|
||||
# Cross-compile for a different architecture with =
|
||||
$ spack install mpileaks@1.1.2 =bgqos_0
|
||||
# Add compiler flags using the conventional names
|
||||
$ spack install mpileaks@1.1.2 %gcc@4.7.3 cppflags=\"-O3 -floop-block\"
|
||||
|
||||
Users can specify as many or few options as they care about. Spack
|
||||
will fill in the unspecified values with sensible defaults.
|
||||
# Cross-compile for a different architecture with arch=
|
||||
$ spack install mpileaks@1.1.2 arch=bgqos_0
|
||||
|
||||
Users can specify as many or few options as they care about. Spack
|
||||
will fill in the unspecified values with sensible defaults. The two listed
|
||||
syntaxes for variants are identical when the value is boolean.
|
||||
|
||||
|
||||
Customize dependencies
|
||||
|
||||
@@ -1,7 +1,21 @@
|
||||
Getting Started
|
||||
====================
|
||||
|
||||
Download
|
||||
Prerequisites
|
||||
---------------
|
||||
|
||||
Spack has the following minimum requirements, which must be installed
|
||||
before Spack is run:
|
||||
|
||||
1. Operating System: GNU/Linux or Macintosh
|
||||
2. Python 2.6 or 2.7
|
||||
3. A C/C++ compiler
|
||||
|
||||
These requirements can be easily installed on most modern Linux
|
||||
systems; on Macintosh, XCode is required.
|
||||
|
||||
|
||||
Installation
|
||||
--------------------
|
||||
|
||||
Getting spack is easy. You can clone it from the `github repository
|
||||
@@ -11,10 +25,16 @@ Getting spack is easy. You can clone it from the `github repository
|
||||
|
||||
$ git clone https://github.com/llnl/spack.git
|
||||
|
||||
This will create a directory called ``spack``. We'll assume that the
|
||||
full path to this directory is in the ``SPACK_ROOT`` environment
|
||||
variable. Add ``$SPACK_ROOT/bin`` to your path and you're ready to
|
||||
go:
|
||||
This will create a directory called ``spack``. If you are using Spack
|
||||
for a specific purpose, you might have received different instructions
|
||||
on how to download Spack; if so, please follow those instructions.
|
||||
|
||||
Add Spack to Shell
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
We'll assume that the full path to your downloaded Spack directory is
|
||||
in the ``SPACK_ROOT`` environment variable. Add ``$SPACK_ROOT/bin``
|
||||
to your path and you're ready to go:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
@@ -35,11 +55,44 @@ For a richer experience, use Spack's `shell support
|
||||
|
||||
This automatically adds Spack to your ``PATH``.
|
||||
|
||||
Installation
|
||||
--------------------
|
||||
Clean Environment
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
You don't need to install Spack; it's ready to run as soon as you
|
||||
clone it from git.
|
||||
Many packages' installs can be broken by changing environment
|
||||
variables. For example, a package might pick up the wrong build-time
|
||||
dependencies (most of them not specified) depending on the setting of
|
||||
``PATH``. ``GCC`` seems to be particularly vulnerable to these issues.
|
||||
|
||||
Therefore, it is recommended that Spack users run with a *clean
|
||||
environment*, especially for ``PATH``. Only software that comes with
|
||||
the system, or that you know you wish to use with Spack, should be
|
||||
included. This procedure will avoid many strange build errors that no
|
||||
one knows how to fix.
|
||||
|
||||
|
||||
Although Spack will work as soon as you clone it, it won't necessarily
|
||||
be able to install any packages. That is because Spack relies
|
||||
|
||||
Check Installation
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
With Spack installed, you should be able to run some basic Spack commands. For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack spec netcdf
|
||||
...
|
||||
netcdf@4.4.1%gcc@5.3.0~hdf4+mpi arch=linux-SuSE11-x86_64
|
||||
^curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^hdf5@1.10.0-patch1%gcc@5.3.0+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-SuSE11-x86_64
|
||||
^openmpi@1.10.1%gcc@5.3.0~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-SuSE11-x86_64
|
||||
^m4@1.4.17%gcc@5.3.0+sigsegv arch=linux-SuSE11-x86_64
|
||||
^libsigsegv@2.10%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
|
||||
Optional: Alternate Prefix
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
You may want to run it out of a prefix other than the git repository
|
||||
you cloned. The ``spack bootstrap`` command provides this
|
||||
@@ -53,3 +106,671 @@ This will install a new spack script in ``/my/favorite/prefix/bin``,
|
||||
which you can use just like you would the regular spack script. Each
|
||||
copy of spack installs packages into its own ``$PREFIX/opt``
|
||||
directory.
|
||||
|
||||
|
||||
|
||||
|
||||
.. _compiler-config:
|
||||
|
||||
Compiler configuration
|
||||
-----------------------------------
|
||||
|
||||
Spack has the ability to build packages with multiple compilers and
|
||||
compiler versions. Spack searches for compilers on your machine
|
||||
automatically the first time it is run. It does this by inspecting
|
||||
your path.
|
||||
|
||||
.. _spack-compilers:
|
||||
|
||||
``spack compilers``
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
You can see which compilers spack has found by running ``spack
|
||||
compilers`` or ``spack compiler list``::
|
||||
|
||||
$ spack compilers
|
||||
==> Available compilers
|
||||
-- gcc ---------------------------------------------------------
|
||||
gcc@4.9.0 gcc@4.8.0 gcc@4.7.0 gcc@4.6.2 gcc@4.4.7
|
||||
gcc@4.8.2 gcc@4.7.1 gcc@4.6.3 gcc@4.6.1 gcc@4.1.2
|
||||
-- intel -------------------------------------------------------
|
||||
intel@15.0.0 intel@14.0.0 intel@13.0.0 intel@12.1.0 intel@10.0
|
||||
intel@14.0.3 intel@13.1.1 intel@12.1.5 intel@12.0.4 intel@9.1
|
||||
intel@14.0.2 intel@13.1.0 intel@12.1.3 intel@11.1
|
||||
intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
|
||||
-- clang -------------------------------------------------------
|
||||
clang@3.4 clang@3.3 clang@3.2 clang@3.1
|
||||
-- pgi ---------------------------------------------------------
|
||||
pgi@14.3-0 pgi@13.2-0 pgi@12.1-0 pgi@10.9-0 pgi@8.0-1
|
||||
pgi@13.10-0 pgi@13.1-1 pgi@11.10-0 pgi@10.2-0 pgi@7.1-3
|
||||
pgi@13.6-0 pgi@12.8-0 pgi@11.1-0 pgi@9.0-4 pgi@7.0-6
|
||||
|
||||
Any of these compilers can be used to build Spack packages. More on
|
||||
how this is done is in :ref:`sec-specs`.
|
||||
|
||||
.. _spack-compiler-add:

``spack compiler add``
~~~~~~~~~~~~~~~~~~~~~~~

An alias for ``spack compiler find``.

.. _spack-compiler-find:

``spack compiler find``
~~~~~~~~~~~~~~~~~~~~~~~

If you do not see a compiler in this list, but you want to use it with
Spack, you can simply run ``spack compiler find`` with the path to
where the compiler is installed.  For example::

   $ spack compiler find /usr/local/tools/ic-13.0.079
   ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
       intel@13.0.079

Or you can run ``spack compiler find`` with no arguments to force
auto-detection.  This is useful if you do not know where compilers are
installed, but you know that new compilers have been added to your
``PATH``.  For example, after loading an environment module for a new
compiler, you might do this::

   $ module load gcc-4.9.0
   $ spack compiler find
   ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
       gcc@4.9.0

This loads the environment module for gcc-4.9.0 to add it to
``PATH``, and then it adds the compiler to Spack.

.. _spack-compiler-info:

``spack compiler info``
~~~~~~~~~~~~~~~~~~~~~~~

If you want to see specifics on a particular compiler, you can run
``spack compiler info`` on it::

   $ spack compiler info intel@15
   intel@15.0.0:
           cc  = /usr/local/bin/icc-15.0.090
           cxx = /usr/local/bin/icpc-15.0.090
           f77 = /usr/local/bin/ifort-15.0.090
           fc  = /usr/local/bin/ifort-15.0.090

This shows which C, C++, and Fortran compilers were detected by Spack.
Notice also that we didn't have to be too specific about the
version.  We just said ``intel@15``, and information about the only
matching Intel compiler was displayed.

|
||||

Manual Compiler Configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

If auto-detection fails, you can manually configure a compiler by
editing your ``~/.spack/compilers.yaml`` file.  You can do this by running
``spack config edit compilers``, which will open the file in your ``$EDITOR``.

Each compiler configuration in the file looks like this::

   ...
   compilers:
   - compiler:
       modules: []
       operating_system: OS
       paths:
         cc: /usr/local/bin/icc-15.0.024-beta
         cxx: /usr/local/bin/icpc-15.0.024-beta
         f77: /usr/local/bin/ifort-15.0.024-beta
         fc: /usr/local/bin/ifort-15.0.024-beta
       spec: intel@15.0.0

For compilers, like ``clang``, that do not support Fortran, put
``None`` for ``f77`` and ``fc``::

   clang@3.3svn:
     cc: /usr/bin/clang
     cxx: /usr/bin/clang++
     f77: None
     fc: None

Once you save the file, the configured compilers will show up in the
list displayed by ``spack compilers``.

You can also add compiler flags to manually configured compilers.  The
valid flags are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``,
``ldflags``, and ``ldlibs``.  For example::

   ...
   compilers:
   - compiler:
       ...
       intel@15.0.0:
         cc: /usr/local/bin/icc-15.0.024-beta
         cxx: /usr/local/bin/icpc-15.0.024-beta
         f77: /usr/local/bin/ifort-15.0.024-beta
         fc: /usr/local/bin/ifort-15.0.024-beta
         cppflags: -O3 -fPIC
       ...

These flags will be treated by spack as if they were entered from
the command line each time this compiler is used.  The compiler wrappers
then inject those flags into the compiler command.  Compiler flags
entered from the command line will be discussed in more detail in the
following section.

|
||||
Build Your Own Compiler
~~~~~~~~~~~~~~~~~~~~~~~~~

If you are particular about which compiler/version you use, you might
wish to have Spack build it for you.  For example:

.. code-block:: sh

   spack install gcc@4.9.3

Once that has finished, you will need to add the new compiler to your
``compilers.yaml`` file.  If this is your preferred compiler, future
Spack builds will generally use it.
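
One minimal sketch of how to register it, reusing commands shown
elsewhere in this chapter (the ``gcc@4.9.3`` spec is just the example
from above; substitute whatever compiler you built):

.. code-block:: sh

   # Locate the Spack-built GCC and add it to compilers.yaml.
   spack compiler find `spack location -i gcc@4.9.3`
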

.. note::

   If you are building your own compiler, it can be useful to have a
   Spack instance just for that.  For example, create a new Spack in
   ``~/spack-tools`` and then run ``~/spack-tools/bin/spack install
   gcc@4.9.3``.  Once the compiler is built, don't build anything
   more in that Spack instance; instead, create a new "real" Spack
   instance, configure Spack to use the compiler you've just built,
   and then build your application software in the new Spack
   instance.

   This tip is useful because sometimes you will find yourself
   rebuilding many packages due to Spack updates.  Sometimes, you
   might even delete your entire Spack installation and start fresh.
   If your compiler was built in a separate Spack installation, you
   will never have to rebuild it --- as long as you wish to continue
   using that version of the compiler.

Compilers Requiring Modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Many installed compilers will work regardless of the environment they
are called with.  However, some installed compilers require
``$LD_LIBRARY_PATH`` or other environment variables to be set in order
to run; Intel compilers are known for this.  In such a case, you
should tell Spack which module(s) to load in order to run the chosen
compiler.  Spack will load this module into the environment ONLY when
the compiler is run, and NOT in general for a package's ``install()``
method.  See, for example, this ``compilers.yaml`` file:

.. code-block:: yaml

   compilers:
   - compiler:
       modules: [other/comp/gcc-5.3-sp3]
       operating_system: SuSE11
       paths:
         cc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gcc
         cxx: /usr/local/other/SLES11.3/gcc/5.3.0/bin/g++
         f77: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
         fc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
       spec: gcc@5.3.0

Some compilers require a module to be loaded not just to run, but also
to execute any code built with the compiler, breaking packages that
execute any bits of code they just compiled.  Such compilers should be
taken behind the barn and put out of their misery.  If that is not
possible or practical, the user (and anyone running code built by that
compiler) will need to load the compiler's module into their
environment, and builds will need to use ``spack install --dirty``.

Compiler Verification
~~~~~~~~~~~~~~~~~~~~~~

You can verify that your compilers are configured properly by installing a
simple package with them.  For example:

.. code-block:: sh

   spack install zlib%gcc@5.3.0


System Packages
-----------------

Once compilers are configured, one needs to determine which
pre-installed system packages, if any, to use in builds.  This is
configured in the file ``~/.spack/packages.yaml``.  For example, to use
an OpenMPI installed in ``/opt/local``, one would use:

.. code-block:: yaml

   packages:
      openmpi:
         paths:
            openmpi@1.10.1: /opt/local
         buildable: False

In general, Spack is easier to use and more reliable if it builds all
its own dependencies.  However, there are two packages for which one
commonly needs to use system versions:

MPI
~~~

On supercomputers, sysadmins have already built MPI versions that take
into account the specifics of that computer's hardware.  Unless you
know how they were built and can choose the correct Spack variants,
you are unlikely to get a working MPI from Spack.  Instead, use an
appropriate pre-installed MPI.

If you choose a pre-installed MPI, you should consider using the
pre-installed compiler used to build that MPI; see above on
``compilers.yaml``.
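
For example, if the OpenMPI in ``/opt/local`` was built with the
system's ``gcc@5.3.0``, a ``packages.yaml`` entry along the following
lines ties the two together.  This is only a sketch: the version, path,
and compiler spec are placeholders for your site's actual values.

.. code-block:: yaml

   packages:
      openmpi:
         paths:
            # Pre-installed MPI, keyed by the compiler that built it.
            openmpi@1.10.1%gcc@5.3.0: /opt/local
         buildable: False
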

OpenSSL
~~~~~~~~

The ``openssl`` package underlies much of the security in a modern
OS; an attacker who can modify a system's SSL can easily "pwn" that
computer.  Therefore, any ``openssl`` used on a system should be built
in a "trusted environment" --- for example, that of the OS vendor.

OpenSSL is also updated by the OS vendor from time to time, in
response to security problems discovered in the wider community.  It
is in everyone's best interest to use any newly updated versions as
soon as they come out.  Modern Linux installations have standard
procedures for security updates without user involvement.

Spack running at user level is not a trusted environment, nor do Spack
users generally keep up to date on the latest security holes in SSL.
For these reasons, any Spack-installed OpenSSL should be considered
untrusted.

As long as the system-provided SSL works, it is better to use it.  One
can check whether it works by trying to download from an ``https://``
URL.  For example:

.. code-block:: sh

   curl -O https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz

As long as it works, the recommended way to tell Spack to use the
system-supplied OpenSSL is to add the following to ``packages.yaml``.
Note that the ``@system`` "version" means "I don't care what version
it is, just use what is there."  This is appropriate for OpenSSL,
which has a stable API.

.. code-block:: yaml

   packages:
      # Recommended for security reasons
      # Do not install OpenSSL as non-root user.
      openssl:
         paths:
            openssl@system: /usr
         version: [system]
         buildable: False


Utilities Configuration
-------------------------

Although Spack does not need installation *per se*, it does rely on
other packages to be available on its host system.  If those packages
are out of date or missing, then Spack will not work.  Sometimes, an
appeal to the system's package manager can fix such problems.  If not,
the solution is to have Spack install the required packages, and then
have Spack use them.

For example, if ``curl`` doesn't work, one could use the following steps
to provide Spack a working ``curl``:

.. code-block:: sh

   $ spack install curl
   $ spack load curl

or alternately:

.. code-block:: sh

   $ spack module loads curl >> ~/.bashrc

or if environment modules don't work:

.. code-block:: sh

   $ export PATH=`spack location -i curl`/bin:$PATH


External commands are used by Spack in two places: within core Spack,
and in the package recipes.  The bootstrapping procedure for these two
cases is somewhat different, and is treated separately below.

Core Spack Utilities
~~~~~~~~~~~~~~~~~~~~~~~~~

Core Spack uses the following packages, mainly to download and unpack
source code, and to load generated environment modules: ``curl``,
``env``, ``git``, ``go``, ``hg``, ``svn``, ``tar``, ``unzip``,
``patch``, ``environment-modules``.

As long as the user's environment is set up to successfully run these
programs from outside of Spack, they should work inside of Spack as
well.  They can generally be activated as in the ``curl`` example above;
or some systems might already have an appropriate hand-built
environment module that may be loaded.  Either way works.

A few notes on specific programs in this list:

cURL, git, Mercurial, etc.
```````````````````````````

Spack depends on cURL to download tarballs, the format that most
Spack-installed packages come in.  Your system's cURL should always be
able to download unencrypted ``http://``.  However, the cURL on some
systems has problems with SSL-enabled ``https://`` URLs, due to
outdated / insecure versions of OpenSSL on those systems.  This will
prevent Spack from installing any software requiring ``https://``
until a new cURL has been installed, using the technique above.

.. note::

   ``curl`` depends on ``openssl`` and ``zlib``, both of which are
   downloadable from non-SSL sources.  Unfortunately, this
   Spack-built cURL should be considered untrustworthy for
   ``https://`` sources because it relies on an OpenSSL built in user
   space.  Luckily, Spack verifies checksums of the software it
   installs, and does not rely on a secure SSL implementation.

   If your version of ``curl`` is not trustworthy, then you should
   not use it outside of Spack.  Instead of putting it in your
   ``.bashrc``, you might wish to create a short shell script that
   loads the appropriate module(s) and then launches Spack.

Some packages use source code control systems as their download
method: ``git``, ``hg``, ``svn`` and occasionally ``go``.  If you had
to install a new ``curl``, then chances are the system-supplied
version of these other programs will also not work, because they also
rely on OpenSSL.  Once ``curl`` has been installed, the others should
also be installable.


.. _InstallEnvironmentModules:

Environment Modules
````````````````````

In order to use Spack's generated environment modules, you must have
installed the *Environment Modules* package.  On many Linux
distributions, this can be installed from the vendor's repository.
For example: ``yum install environment-modules``
(Fedora/RHEL/CentOS).  If your Linux distribution does not have
Environment Modules, you can get it with Spack:

1. Consider using system tcl (as long as your system has Tcl version 8.0 or later):

   1. Identify its location using ``which tclsh``
   2. Identify its version using ``echo 'puts $tcl_version;exit 0' | tclsh``
   3. Add to ``~/.spack/packages.yaml`` and modify as appropriate:

      .. code-block:: yaml

         packages:
            tcl:
               paths:
                  tcl@8.5: /usr
               version: [8.5]
               buildable: False

2. Install with:

   .. code-block:: sh

      spack install environment-modules

3. Activate with the following script (or apply the updates to your
   ``.bashrc`` file manually):

   .. code-block:: sh

      TMP=`tempfile`
      echo >$TMP
      MODULE_HOME=`spack location -i environment-modules`
      MODULE_VERSION=`ls -1 $MODULE_HOME/Modules | head -1`
      ${MODULE_HOME}/Modules/${MODULE_VERSION}/bin/add.modules <$TMP
      cp .bashrc $TMP
      echo "MODULE_VERSION=${MODULE_VERSION}" > .bashrc
      cat $TMP >>.bashrc

This adds to your ``.bashrc`` (or similar) file, enabling Environment
Modules when you log in.  Re-load your ``.bashrc`` (or log out and in
again), and then test that the ``module`` command is found with:

.. code-block:: sh

   module avail

Package Utilities
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Spack can also encounter bootstrapping problems inside a package's
``install()`` method.  In this case, Spack will normally be running
inside a *sanitized build environment*.  This includes all of the
package's dependencies, but none of the environment Spack inherited
from the user: if you load a module or modify ``$PATH`` before
launching Spack, it will have no effect.

If this happens, you will likely need to use the ``--dirty`` flag when
running ``spack install``, causing Spack to **not** sanitize the build
environment.  You are now responsible for making sure that environment
does not do strange things to Spack or its installs.
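
For example (the package name below is only a placeholder):

.. code-block:: sh

   # Build with the user's environment passed through, not sanitized.
   spack install --dirty openmpi
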

Another way to get Spack to use its own version of something is to add
it as a dependency of the package that needs it.  For example:

.. code-block:: python

   depends_on('binutils', type='build')

This is considered best practice for some common build dependencies,
such as ``autotools`` (if the ``autoreconf`` command is needed) and
``cmake`` --- ``cmake`` especially, because different packages require
different versions of CMake.

However, adding ``depends_on('binutils')`` to every package is not
considered a best practice because every package written in
C/C++/Fortran would need it.  Loading a recent ``binutils`` into your
environment is preferable here.

binutils
~~~~~~~~~

(See https://groups.google.com/forum/#!topic/spack/i_7l_kEEveI for background.)

Sometimes, strange error messages can happen while building a package.
For example, ``ld`` might crash.  Or one receives a message like:

.. code-block:: text

   ld: final link failed: Nonrepresentable section on output

These problems are often caused by an outdated ``binutils`` on your
system: bootstrap as described above.
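
A minimal sketch of that bootstrap, assuming environment modules are
working (otherwise prepend the install's ``bin`` directory to ``PATH``
as shown earlier for ``curl``):

.. code-block:: sh

   # Build a newer binutils with Spack and put it on PATH for later builds.
   spack install binutils
   spack load binutils
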
Spack on Cray
-----------------------------

Spack differs slightly when used on a Cray system.  The architecture spec
can differentiate between the front-end and back-end processor and operating system.
For example, on Edison at NERSC, the back-end target processor
is "Ivy Bridge", so you can specify to use the back-end this way:

.. code-block:: sh

   spack install zlib target=ivybridge

You can also use the operating system to build against the back-end:

.. code-block:: sh

   spack install zlib os=CNL10

Notice that the name includes both the operating system name and the major
version number concatenated together.

Alternatively, if you want to build something for the front-end,
you can specify the front-end target processor.  The processor for a login node
on Edison is "Sandy Bridge", so we specify it on the command line like so:

.. code-block:: sh

   spack install zlib target=sandybridge

And the front-end operating system is:

.. code-block:: sh

   spack install zlib os=SuSE11


Cray compiler detection
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Spack can detect compilers using two methods.  For the front-end, we treat
everything the same.  The difference lies in back-end compiler detection.
Back-end compiler detection is made via the Tcl ``module avail`` command.
Once it detects the compiler, it writes the appropriate PrgEnv and compiler
module name to ``compilers.yaml`` and sets the paths to each compiler to Cray's
compiler wrapper names (i.e. cc, CC, ftn).  During build time, Spack will load
the correct PrgEnv and compiler module and will call the appropriate wrapper.

The ``compilers.yaml`` config file will also differ.  There is a
modules section that is filled with the compiler's Programming Environment
and module name.  On other systems, this field is empty (``[]``)::

   ...
   - compiler:
       modules:
         - PrgEnv-intel
         - intel/15.0.109
   ...

As mentioned earlier, the compiler paths will look different on a Cray system.
Since most compilers are invoked using cc, CC and ftn, the paths for each
compiler are replaced with their respective Cray compiler wrapper names::

   ...
   paths:
     cc: cc
     cxx: CC
     f77: ftn
     fc: ftn
   ...

These wrapper names are used instead of an explicit path to the compiler
executable.  This allows Spack to call the Cray compiler wrappers during
build time.

For more on compiler configuration, check out :ref:`compiler-config`.

Spack sets the default Cray link type to dynamic, to better match
other platforms.  Individual packages can enable static linking (which is the
default outside of Spack on Cray systems) using the ``-static`` flag.

Setting defaults and using Cray modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

If you want to use default compilers for each PrgEnv and also be able
to load Cray external modules, you will need to set up a ``packages.yaml``.

Here's an example of an external configuration for Cray modules:

.. code-block:: yaml

   packages:
      mpi:
         modules:
            mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10: cray-mpich
            mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10: cray-mpich

This tells Spack that for any package that depends on mpi, it should load the
cray-mpich module into the environment.  You can then use whatever
environment variables, libraries, etc., that are brought into the environment
via module load.

You can set the default compiler that Spack can use for each compiler type.
If you want to use the Cray defaults, then set them under ``all:`` in ``packages.yaml``.
In the compiler field, set the compiler specs in your order of preference.
Whenever you build with that compiler type, Spack will concretize to that version.

Here is an example of a full ``packages.yaml`` used at NERSC:

.. code-block:: yaml

   packages:
      mpi:
         modules:
            mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich
            mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge: cray-mpich
         buildable: False
      netcdf:
         modules:
            netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-netcdf
            netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-netcdf
         buildable: False
      hdf5:
         modules:
            hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-hdf5
            hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-hdf5
         buildable: False
      all:
         compiler: [gcc@5.2.0, intel@16.0.0.109]

Here we tell Spack that whenever we want to build with gcc, it should use version
5.2.0, and whenever we want to build with the Intel compilers, it should use version
16.0.0.109.  We add a spec for each compiler type for each of the Cray modules.  This
ensures that for each compiler on our system we can use that external module.


For more on external packages, check out the section :ref:`sec-external_packages`.

@@ -46,9 +46,11 @@ Table of Contents
|
||||
getting_started
|
||||
basic_usage
|
||||
packaging_guide
|
||||
application_developer_support
|
||||
mirrors
|
||||
site_configuration
|
||||
configuration
|
||||
developer_guide
|
||||
case_studies
|
||||
command_index
|
||||
package_list
|
||||
API Docs <spack>
|
||||
|
||||
@@ -214,3 +214,21 @@ Adding a mirror really adds a line in ``~/.spack/mirrors.yaml``::
|
||||
If you want to change the order in which mirrors are searched for
|
||||
packages, you can edit this file and reorder the sections. Spack will
|
||||
search the topmost mirror first and the bottom-most mirror last.
|
||||
|
||||
.. _caching:
|
||||
|
||||
Local Default Cache
|
||||
----------------------------
|
||||
|
||||
Spack caches resources that are downloaded as part of installs. The cache is
|
||||
a valid spack mirror: it uses the same directory structure and naming scheme
|
||||
as other Spack mirrors (so it can be copied anywhere and referenced with a URL
|
||||
like other mirrors). The mirror is maintained locally (within the Spack
|
||||
installation directory) at :file:`var/spack/cache/`. It is always enabled (and
|
||||
is always searched first when attempting to retrieve files for an installation)
|
||||
but can be cleared with :ref:`purge <spack-purge>`; the cache directory can also
|
||||
be deleted manually without issue.
|
||||
|
||||
Caching includes retrieved tarball archives and source control repositories, but
|
||||
only resources with an associated digest or commit ID (e.g. a revision number
|
||||
for SVN) will be cached.
|
||||
|
||||
@@ -36,10 +36,11 @@ Creating & editing packages
|
||||
``spack create``
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The ``spack create`` command generates a boilerplate package template
|
||||
from a URL. The URL should point to a tarball or other software
|
||||
archive. In most cases, ``spack create`` plus a few modifications is
|
||||
all you need to get a package working.
|
||||
The ``spack create`` command creates a directory with the package name and
|
||||
generates a ``package.py`` file with a boilerplate package template from a URL.
|
||||
The URL should point to a tarball or other software archive. In most cases,
|
||||
``spack create`` plus a few modifications is all you need to get a package
|
||||
working.
|
||||
|
||||
Here's an example:
|
||||
|
||||
@@ -47,12 +48,16 @@ Here's an example:
|
||||
|
||||
$ spack create http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
|
||||
|
||||
Spack examines the tarball URL and tries to figure out the name of the
|
||||
package to be created. It also tries to determine what version strings
|
||||
look like for this package. Using this information, it will try to
|
||||
find *additional* versions by spidering the package's webpage. If it
|
||||
finds multiple versions, Spack prompts you to tell it how many
|
||||
versions you want to download and checksum:
|
||||
Spack examines the tarball URL and tries to figure out the name of the package
|
||||
to be created. Once the name is determined a directory in the appropriate
|
||||
repository is created with that name. Spack prefers, but does not require, that
|
||||
names be lower case so the directory name will be lower case when ``spack
|
||||
create`` generates it. In cases where it is desired to have mixed case or upper
|
||||
case simply rename the directory. Spack also tries to determine what version
|
||||
strings look like for this package. Using this information, it will try to find
|
||||
*additional* versions by spidering the package's webpage. If it finds multiple
|
||||
versions, Spack prompts you to tell it how many versions you want to download
|
||||
and checksum:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
@@ -297,9 +302,10 @@ directories or files (like patches) that it needs to build.
|
||||
Package Names
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Packages are named after the directory containing ``package.py``. So,
|
||||
``libelf``'s ``package.py`` lives in a directory called ``libelf``.
|
||||
The ``package.py`` file defines a class called ``Libelf``, which
|
||||
Packages are named after the directory containing ``package.py``. It is
|
||||
preferred, but not required, that the directory, and thus the package name, are
|
||||
lower case. So, ``libelf``'s ``package.py`` lives in a directory called
|
||||
``libelf``. The ``package.py`` file defines a class called ``Libelf``, which
|
||||
extends Spack's ``Package`` class. for example, here is
|
||||
``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
|
||||
|
||||
@@ -377,6 +383,8 @@ add a line like this in the package class:
|
||||
version('8.2.1', '4136d7b4c04df68b686570afa26988ac')
|
||||
...
|
||||
|
||||
Versions should be listed with the newest version first.
|
||||
|
||||
Version URLs
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
@@ -385,8 +393,22 @@ in the package. For example, Spack is smart enough to download
|
||||
version ``8.2.1.`` of the ``Foo`` package above from
|
||||
``http://example.com/foo-8.2.1.tar.gz``.
|
||||
|
||||
If spack *cannot* extrapolate the URL from the ``url`` field, or if
|
||||
the package doesn't have a ``url`` field, you can add a URL explicitly
|
||||
If spack *cannot* extrapolate the URL from the ``url`` field by
|
||||
default, you can write your own URL generation algorithm in place of
|
||||
the ``url`` declaration. For example:
|
||||
|
||||
.. code-block:: python
|
||||
:linenos:
|
||||
|
||||
class Foo(Package):
|
||||
version('8.2.1', '4136d7b4c04df68b686570afa26988ac')
|
||||
...
|
||||
def url_for_version(self, version):
|
||||
return 'http://example.com/version_%s/foo-%s.tar.gz' \
|
||||
% (version, version)
|
||||
...
|
||||
|
||||
If a URL cannot be derived systematically, you can add an explicit URL
|
||||
for a particular version:
|
||||
|
||||
.. code-block:: python
|
||||
@@ -412,7 +434,7 @@ executables and other custom archive types), you can add
|
||||
.. code-block:: python
|
||||
|
||||
version('8.2.1', '4136d7b4c04df68b686570afa26988ac',
|
||||
url='http://example.com/foo-8.2.1-special-version.tar.gz', 'expand=False')
|
||||
url='http://example.com/foo-8.2.1-special-version.tar.gz', expand=False)
|
||||
|
||||
When ``expand`` is set to ``False``, Spack sets the current working
|
||||
directory to the directory containing the downloaded archive before it
|
||||
@@ -446,14 +468,25 @@ to use based on the hash length.
|
||||
``spack md5``
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you have a single file to checksum, you can use the ``spack md5``
|
||||
command to do it. Here's how you might download an archive and get a
|
||||
checksum for it:
|
||||
If you have one or more files to checksum, you can use the ``spack md5``
|
||||
command to do it:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ curl -O http://example.com/foo-8.2.1.tar.gz
|
||||
$ spack md5 foo-8.2.1.tar.gz
|
||||
$ spack md5 foo-8.2.1.tar.gz foo-8.2.2.tar.gz
|
||||
==> 2 MD5 checksums:
|
||||
4136d7b4c04df68b686570afa26988ac foo-8.2.1.tar.gz
|
||||
1586b70a49dfe05da5fcc29ef239dce0 foo-8.2.2.tar.gz
|
||||
|
||||
``spack md5`` also accepts one or more URLs and automatically downloads
|
||||
the files for you:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack md5 http://example.com/foo-8.2.1.tar.gz
|
||||
==> Trying to fetch from http://example.com/foo-8.2.1.tar.gz
|
||||
######################################################################## 100.0%
|
||||
==> 1 MD5 checksum:
|
||||
4136d7b4c04df68b686570afa26988ac foo-8.2.1.tar.gz
|
||||
|
||||
Doing this for lots of files, or whenever a new package version is
|
||||
@@ -547,7 +580,7 @@ The package author is responsible for coming up with a sensible name
|
||||
for each version to be fetched from a repository. For example, if
|
||||
you're fetching from a tag like ``v1.0``, you might call that ``1.0``.
|
||||
If you're fetching a nameless git commit or an older subversion
|
||||
revision, you might give the commit an intuitive name, like ``dev``
|
||||
revision, you might give the commit an intuitive name, like ``develop``
|
||||
for a development version, or ``some-fancy-new-feature`` if you want
|
||||
to be more specific.
|
||||
|
||||
@@ -557,6 +590,17 @@ branches move forward over time and you aren't guaranteed to get the
|
||||
same thing every time you fetch a particular version. Life isn't
|
||||
always simple, though, so this is not strictly enforced.
|
||||
|
||||
When fetching from the branch corresponding to the development version
(often called ``master``, ``trunk`` or ``dev``), it is recommended to
|
||||
call this version ``develop``. Spack has special treatment for this version so
|
||||
that ``@develop`` will satisfy dependencies like
|
||||
``depends_on(abc, when="@x.y.z:")``. In other words, ``@develop`` is
|
||||
greater than any other version. The rationale is that certain features or
|
||||
options first appear in the development branch. Therefore if a package author
|
||||
wants to keep the package on the bleeding edge and provide support for new
|
||||
features, it is advised to use ``develop`` for such a version which will
|
||||
greatly simplify writing dependencies and version-related conditionals.
|
||||
|
||||
In some future release, Spack may support extrapolating repository
|
||||
versions as it does for tarball URLs, but currently this is not
|
||||
supported.
|
||||
@@ -572,6 +616,7 @@ Git fetching is enabled with the following parameters to ``version``:
|
||||
* ``tag``: name of a tag to fetch.
|
||||
* ``branch``: name of a branch to fetch.
|
||||
* ``commit``: SHA hash (or prefix) of a commit to fetch.
|
||||
* ``submodules``: Also fetch submodules when checking out this repository.
|
||||
|
||||
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
|
||||
|
||||
@@ -582,7 +627,7 @@ Default branch
|
||||
|
||||
class Example(Package):
|
||||
...
|
||||
version('dev', git='https://github.com/example-project/example.git')
|
||||
version('develop', git='https://github.com/example-project/example.git')
|
||||
|
||||
This is not recommended, as the contents of the default branch
|
||||
change over time.
|
||||
@@ -628,6 +673,17 @@ Commits
|
||||
could just use the abbreviated commit hash. It's up to the package
|
||||
author to decide what makes the most sense.
|
||||
|
||||
Submodules
^^^^^^^^^^^
|
||||
|
||||
You can supply ``submodules=True`` to cause Spack to fetch submodules
|
||||
along with the repository at fetch time.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
version('1.0.1', git='https://github.com/example-project/example.git',
|
||||
tag='v1.0.1', submodules=True)
|
||||
|
||||
|
||||
Installing
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
@@ -655,7 +711,7 @@ Default
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
version('hg-head', hg='https://jay.grs.rwth-aachen.de/hg/example')
|
||||
version('develop', hg='https://jay.grs.rwth-aachen.de/hg/example')
|
||||
|
||||
Note that this is not recommended; try to fetch a particular
|
||||
revision instead.
|
||||
@@ -687,7 +743,7 @@ Fetching the head
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
|
||||
version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
|
||||
|
||||
This is not recommended, as the head will move forward over time.
|
||||
|
||||
@@ -697,12 +753,19 @@ Fetching a revision
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
|
||||
version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
|
||||
revision=128)
|
||||
|
||||
Subversion branches are handled as part of the directory structure, so
|
||||
you can check out a branch or tag by changing the ``url``.
|
||||
|
||||
Automatic caching of files fetched during installation
|
||||
------------------------------------------------------
|
||||
|
||||
Spack maintains a cache (described :ref:`here <caching>`) which saves files
|
||||
retrieved during package installations to avoid re-downloading in the case that
|
||||
a package is installed with a different specification (but the same version) or
|
||||
reinstalled on account of a change in the hashing scheme.
|
||||
|
||||
.. _license:
|
||||
|
||||
@@ -776,7 +839,7 @@ Spack will create a global license file located at
|
||||
file using the editor set in ``$EDITOR``, or vi if unset. It will look like
|
||||
this:
|
||||
|
||||
.. code-block::
|
||||
.. code-block:: sh
|
||||
|
||||
# A license is required to use pgi.
|
||||
#
|
||||
@@ -807,7 +870,7 @@ You can add your license directly to this file, or tell FlexNet to use a
|
||||
license stored on a separate license server. Here is an example that
|
||||
points to a license server called licman1:
|
||||
|
||||
.. code-block::
|
||||
.. code-block:: sh
|
||||
|
||||
SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
|
||||
USE_SERVER
|
||||
@@ -1221,11 +1284,13 @@ just as easily provide a version range:
|
||||
|
||||
depends_on("libelf@0.8.2:0.8.4:")
|
||||
|
||||
Or a requirement for a particular variant:
|
||||
Or a requirement for a particular variant or compiler flags:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on("libelf@0.8+debug")
|
||||
depends_on('libelf debug=True')
|
||||
depends_on('libelf cppflags="-fPIC"')
|
||||
|
||||
Both users *and* package authors can use the same spec syntax to refer
|
||||
to different package configurations. Users use the spec syntax on the
|
||||
@@ -1233,6 +1298,31 @@ command line to find installed packages or to install packages with
|
||||
particular constraints, and package authors can use specs to describe
|
||||
relationships between packages.
|
||||
|
||||
Additionally, dependencies may be specified for specific use cases:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on("cmake", type="build")
|
||||
depends_on("libelf", type=("build", "link"))
|
||||
depends_on("python", type="run")
|
||||
|
||||
The dependency types are:
|
||||
|
||||
* **"build"**: made available during the project's build. The package will
|
||||
be added to ``PATH``, the compiler include paths, and ``PYTHONPATH``.
|
||||
Other projects which depend on this one will not have these modified
|
||||
(building project X doesn't need project Y's build dependencies).
|
||||
* **"link"**: the project is linked to by the project. The package will be
|
||||
added to the current package's ``rpath``.
|
||||
* **"run"**: the project is used by the project at runtime. The package will
|
||||
be added to ``PATH`` and ``PYTHONPATH``.
|
||||
|
||||
If not specified, ``type`` is assumed to be ``("build", "link")``. This is the
|
||||
common case for compiled language usage. Also available are the aliases
|
||||
``"alldeps"`` for all dependency types and ``"nolink"`` (``("build", "run")``)
|
||||
for use by dependencies which are not expressed via a linker (e.g., Python or
|
||||
Lua module loading).
|
||||
|
||||
.. _setup-dependent-environment:
|
||||
|
||||
``setup_dependent_environment()``
|
||||
@@ -1337,6 +1427,19 @@ Now, the ``py-numpy`` package can be used as an argument to ``spack
|
||||
activate``. When it is activated, all the files in its prefix will be
|
||||
symbolically linked into the prefix of the python package.
|
||||
|
||||
Many packages produce Python extensions for *some* variants, but not
|
||||
others: they should extend ``python`` only if the appropriate
|
||||
variant(s) are selected. This may be accomplished with conditional
|
||||
``extends()`` declarations:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class FooLib(Package):
|
||||
variant('python', default=True, description= \
|
||||
'Build the Python extension Module')
|
||||
extends('python', when='+python')
|
||||
...
|
||||
|
||||
Sometimes, certain files in one package will conflict with those in
|
||||
another, which means they cannot both be activated (symlinked) at the
|
||||
same time. In this case, you can tell Spack to ignore those files
|
||||
@@ -1623,21 +1726,21 @@ the user runs ``spack install`` and the time the ``install()`` method
|
||||
is called. The concretized version of the spec above might look like
|
||||
this::
|
||||
|
||||
mpileaks@2.3%gcc@4.7.3=linux-ppc64
|
||||
^callpath@1.0%gcc@4.7.3+debug=linux-ppc64
|
||||
^dyninst@8.1.2%gcc@4.7.3=linux-ppc64
|
||||
^libdwarf@20130729%gcc@4.7.3=linux-ppc64
|
||||
^libelf@0.8.11%gcc@4.7.3=linux-ppc64
|
||||
^mpich@3.0.4%gcc@4.7.3=linux-ppc64
|
||||
mpileaks@2.3%gcc@4.7.3 arch=linux-debian7-x86_64
|
||||
^callpath@1.0%gcc@4.7.3+debug arch=linux-debian7-x86_64
|
||||
^dyninst@8.1.2%gcc@4.7.3 arch=linux-debian7-x86_64
|
||||
^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
|
||||
^libelf@0.8.11%gcc@4.7.3 arch=linux-debian7-x86_64
|
||||
^mpich@3.0.4%gcc@4.7.3 arch=linux-debian7-x86_64
|
||||
|
||||
.. graphviz::
|
||||
|
||||
digraph {
|
||||
"mpileaks@2.3\n%gcc@4.7.3\n=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n=linux-ppc64"
|
||||
"mpileaks@2.3\n%gcc@4.7.3\n=linux-ppc64" -> "callpath@1.0\n%gcc@4.7.3+debug\n=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n=linux-ppc64"
|
||||
"callpath@1.0\n%gcc@4.7.3+debug\n=linux-ppc64" -> "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64"
|
||||
"dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64" -> "libdwarf@20130729\n%gcc@4.7.3\n=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n=linux-ppc64"
|
||||
"dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n=linux-ppc64"
|
||||
"mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
|
||||
"mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
|
||||
"callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
|
||||
"dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
|
||||
"dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
|
||||
}
|
||||
|
||||
Here, all versions, compilers, and platforms are filled in, and there
|
||||
@@ -1648,8 +1751,8 @@ point will Spack call the ``install()`` method for your package.
|
||||
Concretization in Spack is based on certain selection policies that
|
||||
tell Spack how to select, e.g., a version, when one is not specified
|
||||
explicitly. Concretization policies are discussed in more detail in
|
||||
:ref:`site-configuration`. Sites using Spack can customize them to
|
||||
match the preferences of their own users.
|
||||
:ref:`configuration`. Sites using Spack can customize them to match
|
||||
the preferences of their own users.
|
||||
|
||||
.. _spack-spec:
|
||||
|
||||
@@ -1666,9 +1769,9 @@ running ``spack spec``. For example:
|
||||
^libdwarf
|
||||
^libelf
|
||||
|
||||
dyninst@8.0.1%gcc@4.7.3=linux-ppc64
|
||||
^libdwarf@20130729%gcc@4.7.3=linux-ppc64
|
||||
^libelf@0.8.13%gcc@4.7.3=linux-ppc64
|
||||
dyninst@8.0.1%gcc@4.7.3 arch=linux-debian7-x86_64
|
||||
^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
|
||||
^libelf@0.8.13%gcc@4.7.3 arch=linux-debian7-x86_64
|
||||
|
||||
This is useful when you want to know exactly what Spack will do when
|
||||
you ask for a particular spec.
|
||||
@@ -1682,60 +1785,8 @@ be concretized on their system. For example, one user may prefer packages
|
||||
built with OpenMPI and the Intel compiler. Another user may prefer
|
||||
packages be built with MVAPICH and GCC.
|
||||
|
||||
Spack can be configured to prefer certain compilers, package
|
||||
versions, depends_on, and variants during concretization.
|
||||
The preferred configuration can be controlled via the
|
||||
``~/.spack/packages.yaml`` file for user configuations, or the
|
||||
``etc/spack/packages.yaml`` site configuration.
|
||||
|
||||
|
||||
Here's an example packages.yaml file that sets preferred packages:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
packages:
|
||||
dyninst:
|
||||
compiler: [gcc@4.9]
|
||||
variants: +debug
|
||||
gperftools:
|
||||
version: [2.2, 2.4, 2.3]
|
||||
all:
|
||||
compiler: [gcc@4.4.7, gcc@4.6:, intel, clang, pgi]
|
||||
providers:
|
||||
mpi: [mvapich, mpich, openmpi]
|
||||
|
||||
|
||||
At a high level, this example is specifying how packages should be
|
||||
concretized. The dyninst package should prefer using gcc 4.9 and
|
||||
be built with debug options. The gperftools package should prefer version
|
||||
2.2 over 2.4. Every package on the system should prefer mvapich for
|
||||
its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9).
|
||||
These options are used to fill in implicit defaults. Any of them can be overwritten
|
||||
on the command line if explicitly requested.
|
||||
|
||||
Each packages.yaml file begins with the string ``packages:`` and
|
||||
package names are specified on the next level. The special string ``all``
|
||||
applies settings to each package. Underneath each package name is
|
||||
one or more components: ``compiler``, ``variants``, ``version``,
|
||||
or ``providers``. Each component has an ordered list of spec
|
||||
``constraints``, with earlier entries in the list being preferred over
|
||||
later entries.
|
||||
|
||||
Sometimes a package installation may have constraints that forbid
|
||||
the first concretization rule, in which case Spack will use the first
|
||||
legal concretization rule. Going back to the example, if a user
|
||||
requests gperftools 2.3 or later, then Spack will install version 2.4
|
||||
as the 2.4 version of gperftools is preferred over 2.3.
|
||||
|
||||
An explicit concretization rule in the preferred section will always
|
||||
take preference over unlisted concretizations. In the above example,
|
||||
xlc isn't listed in the compiler list. Every listed compiler from
|
||||
gcc to pgi will thus be preferred over the xlc compiler.
|
||||
|
||||
The syntax for the ``provider`` section differs slightly from other
|
||||
concretization rules. A provider lists a value that packages may
|
||||
``depend_on`` (e.g, mpi) and a list of rules for fulfilling that
|
||||
dependency.
|
||||
See the `documentation in the config section <concretization-preferences_>`_
|
||||
for more details.
|
||||
|
||||
.. _install-method:
|
||||
|
||||
@@ -1894,7 +1945,7 @@ discover its dependencies.
|
||||
|
||||
If you want to see the environment that a package will build with, or
|
||||
if you want to run commands in that environment to test them out, you
|
||||
can use the :ref:```spack env`` <spack-env>` command, documented
|
||||
can use the :ref:`spack env <spack-env>` command, documented
|
||||
below.
|
||||
|
||||
.. _compiler-wrappers:
|
||||
@@ -1960,6 +2011,12 @@ the command line.
|
||||
``$rpath_flag`` can be overriden on a compiler specific basis in
|
||||
``lib/spack/spack/compilers/$compiler.py``.
|
||||
|
||||
The compiler wrappers also pass the compiler flags specified by the user from
|
||||
the command line (``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``,
|
||||
and/or ``ldlibs``). They do not override the canonical autotools flags with the
|
||||
same names (but in ALL-CAPS) that may be passed into the build by particularly
|
||||
challenging package scripts.
|
||||
|
||||
Compiler flags
|
||||
~~~~~~~~~~~~~~
|
||||
In rare circumstances such as compiling and running small unit tests, a package
|
||||
@@ -1994,6 +2051,19 @@ instead of hard-coding ``join_path(self.spec['mpi'].prefix.bin, 'mpicc')`` for
|
||||
the reasons outlined above.
|
||||
|
||||
|
||||
Blas and Lapack libraries
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
Different packages provide implementation of ``Blas`` and ``Lapack`` routines.
|
||||
The names of the resulting static and/or shared libraries differ from package
|
||||
to package. In order to make ``install()`` method indifferent to the
|
||||
choice of ``Blas`` implementation, each package which provides it
|
||||
sets up ``self.spec.blas_shared_lib`` and ``self.spec.blas_static_lib`` to
|
||||
point to the shared and static ``Blas`` libraries, respectively. The same
|
||||
applies to packages which provide ``Lapack``. Package developers are advised to
|
||||
use these variables, for example ``spec['blas'].blas_shared_lib`` instead of
|
||||
hard-coding ``join_path(spec['blas'].prefix.lib, 'libopenblas.so')``.
|
||||
|
||||
|
||||
Forking ``install()``
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
@@ -2206,12 +2276,12 @@ example:
|
||||
def install(self, prefix):
|
||||
# Do default install
|
||||
|
||||
@when('=chaos_5_x86_64_ib')
|
||||
@when('arch=linux-debian7-x86_64')
|
||||
def install(self, prefix):
|
||||
# This will be executed instead of the default install if
|
||||
# the package's sys_type() is chaos_5_x86_64_ib.
|
||||
|
||||
@when('=bgqos_0")
|
||||
@when('arch=linux-debian7-x86_64")
|
||||
def install(self, prefix):
|
||||
# This will be executed if the package's sys_type is bgqos_0
|
||||
|
||||
@@ -2339,7 +2409,7 @@ build system.
|
||||
|
||||
.. _sanity-checks:
|
||||
|
||||
Sanity checking an intallation
|
||||
Sanity checking an installation
|
||||
--------------------------------
|
||||
|
||||
By default, Spack assumes that a build has failed if nothing is
|
||||
@@ -2555,6 +2625,59 @@ File functions
|
||||
|
||||
.. _package-lifecycle:
|
||||
|
||||
Coding Style Guidelines
|
||||
---------------------------
|
||||
|
||||
The following guidelines are provided, in the interests of making
|
||||
Spack packages work in a consistent manner:
|
||||
|
||||
|
||||
Variant Names
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
Spack packages with variants similar to already-existing Spack
|
||||
packages should use the same name for their variants. Standard
|
||||
variant names are:
|
||||
|
||||
======= ======== ========================
|
||||
Name Default Description
|
||||
======= ======== ========================
|
||||
shared True Build shared libraries
|
||||
static Build static libraries
|
||||
mpi Use MPI
|
||||
python Build Python extension
|
||||
======= ======== ========================
|
||||
|
||||
If specified in this table, the corresponding default should be used
|
||||
when declaring a variant.
|
||||
|
||||
|
||||
Version Lists
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
Spack packages should list supported versions with the newest first.
|
||||
|
||||
Special Versions
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
The following *special* version names may be used when building a package:
|
||||
|
||||
* *@system*: Indicates a hook to the OS-installed version of the
|
||||
package. This is useful, for example, to tell Spack to use the
|
||||
OS-installed version in ``packages.yaml``::
|
||||
|
||||
openssl:
|
||||
paths:
|
||||
openssl@system: /usr
|
||||
buildable: False
|
||||
|
||||
Certain Spack internals look for the *@system* version and do
|
||||
appropriate things in that case.
|
||||
|
||||
* *@local*: Indicates the version was built manually from some source
|
||||
tree of unknown provenance (see ``spack setup``).
|
||||
|
||||
|
||||
Packaging workflow commands
|
||||
---------------------------------
|
||||
|
||||
@@ -2649,11 +2772,16 @@ build process will start from scratch.
|
||||
|
||||
``spack purge``
|
||||
~~~~~~~~~~~~~~~~~
|
||||
Cleans up all of Spack's temporary files. Use this to recover disk
|
||||
space if temporary files from interrupted or failed installs
|
||||
accumulate in the staging area. This is equivalent to running ``spack
|
||||
clean`` for every package you have fetched or staged.
|
||||
Cleans up all of Spack's temporary and cached files. This can be used to
|
||||
recover disk space if temporary files from interrupted or failed installs
|
||||
accumulate in the staging area.
|
||||
|
||||
When called with ``--stage`` or ``--all`` (or without arguments, in which case
|
||||
the default is ``--all``) this removes all staged files; this is equivalent to
|
||||
running ``spack clean`` for every package you have fetched or staged.
|
||||
|
||||
When called with ``--cache`` or ``--all`` this will clear all resources
|
||||
:ref:`cached <caching>` during installs.
|
||||
|
||||
Keeping the stage directory on success
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
@@ -2801,11 +2929,11 @@ build it:
|
||||
$ spack stage libelf
|
||||
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
|
||||
######################################################################## 100.0%
|
||||
==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13.tar.gz
|
||||
==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64.
|
||||
==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13.tar.gz
|
||||
==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64.
|
||||
$ spack cd libelf
|
||||
$ pwd
|
||||
/Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13
|
||||
/Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13
|
||||
|
||||
``spack cd`` here changed the current working directory to the
|
||||
directory containing the expanded ``libelf`` source code. There are a
|
||||
@@ -2870,3 +2998,4 @@ might write:
|
||||
DWARF_PREFIX = $(spack location -i libdwarf)
|
||||
CXXFLAGS += -I$DWARF_PREFIX/include
|
||||
CXXFLAGS += -L$DWARF_PREFIX/lib
|
||||
|
||||
|
||||
576
lib/spack/docs/spack_workflows.rst
Normal file
576
lib/spack/docs/spack_workflows.rst
Normal file
@@ -0,0 +1,576 @@
|
||||
Spack Workflows
|
||||
===============================
|
||||
|
||||
The process of using Spack involves building packages, running
|
||||
binaries from those packages, and developing software that depends on
|
||||
those packages. For example, one might use Spack to build the
|
||||
`netcdf` package, use `spack load` to run the `ncdump` binary, and
|
||||
finally, write a small C program to read/write a particular NetCDF file.
|
||||
|
||||
Spack supports a variety of workflows to suit a variety of situations
and user preferences; there is no single way to do all these things.
|
||||
This chapter demonstrates different workflows that have been
|
||||
developed, pointing out the pros and cons of them.
|
||||
|
||||
|
||||
Definitions
|
||||
############
|
||||
|
||||
First some basic definitions.
|
||||
|
||||
Package, Concrete Spec, Installed Package
|
||||
------------------------------------------
|
||||
|
||||
In Spack, a package is an abstract recipe to build one piece of software.
|
||||
Spack packages may be used to build, in principle, any version of that
|
||||
software with any set of variants. Examples of packages include
|
||||
``curl`` and ``zlib``.
|
||||
|
||||
A package may be *instantiated* to produce a concrete spec; one
|
||||
possible realization of a particular package, out of combinatorially
|
||||
many other realizations. For example, here is a concrete spec
|
||||
instantiated from ``curl``:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
|
||||
Spack's core concretization algorithm generates concrete specs by
|
||||
instantiating packages from its repo, based on a set of "hints",
|
||||
including user input and the ``packages.yaml`` file. This algorithm
|
||||
may be accessed at any time with the ``spack spec`` command. For
|
||||
example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
$ spack spec curl
|
||||
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
|
||||
Every time Spack installs a package, that installation corresponds to
|
||||
a concrete spec. Only a vanishingly small fraction of possible
|
||||
concrete specs will be installed at any one Spack site.
|
||||
|
||||
Consistent Sets
|
||||
----------------
|
||||
|
||||
A set of Spack specs is said to be *consistent* if each package is
|
||||
only instantiated one way within it --- that is, if two specs in the
|
||||
set have the same package, then they must also have the same version,
|
||||
variant, compiler, etc. For example, the following set is consistent:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
|
||||
The following set is not consistent:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
zlib@1.2.7%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
|
||||
The compatibility of a set of installed packages determines what may
|
||||
be done with it. It is always possible to ``spack load`` any set of
|
||||
installed packages, whether or not they are consistent, and run their
|
||||
binaries from the command line. However, a set of installed packages
|
||||
can only be linked together in one binary if it is consistent.
|
||||
|
||||
If the user produces a series of ``spack spec`` or ``spack load``
|
||||
commands, in general there is no guarantee of consistency between
|
||||
them. Spack's concretization procedure guarantees that the results of
|
||||
any *single* ``spack spec`` call will be consistent. Therefore, the
|
||||
best way to ensure a consistent set of specs is to create a Spack
|
||||
package with dependencies, and then instantiate that package. We will
|
||||
use this technique below.
|
||||
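For example, a minimal sketch of such a meta-package might look like the
following; the ``mytools`` name, its dependency list, and the placeholder
checksum are hypothetical and only illustrate the idea:

.. code-block:: python

    import os
    from spack import *

    class Mytools(Package):
        """Hypothetical meta-package pinning a consistent set of tools."""

        homepage = "http://www.example.com"
        url      = "http://www.example.com/mytools-1.0.tar.gz"

        version('1.0', 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')

        # These dependencies are concretized together in a single call,
        # so the resulting specs form one consistent set.
        depends_on('netcdf +mpi')
        depends_on('py-numpy')
        depends_on('mpi')

        def install(self, spec, prefix):
            # Nothing of its own to build; leave a marker file so the
            # install prefix is not empty.
            with open(os.path.join(prefix, 'README'), 'w') as f:
                f.write('meta-package; see its dependencies\n')

Running ``spack spec mytools`` or ``spack install mytools`` then resolves
all of the dependencies in one concretization pass.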
|
||||
|
||||
Building Packages
|
||||
##################
|
||||
|
||||
Suppose you are tasked with installing a set of software packages on a
|
||||
system in order to support one application -- both a core application
|
||||
program, plus software to prepare input and analyze output. The
|
||||
required software might be summed up as a series of ``spack install``
|
||||
commands in a script. If needed, this script can always be run again
|
||||
in the future. For example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
spack install modele-utils
|
||||
spack install emacs
|
||||
spack install ncview
|
||||
spack install nco
|
||||
spack install modele-control
|
||||
spack install py-numpy
|
||||
|
||||
In most cases, this script will not correctly install software
|
||||
according to your specific needs: choices need to be made for
|
||||
variants, versions, and virtual dependencies. It
|
||||
*is* possible to specify these choices by extending specs on the
|
||||
command line; however, the same choices must be specified repeatedly.
|
||||
For example, if you wish to use ``openmpi`` to satisfy the ``mpi``
|
||||
dependency, then ``^openmpi`` will have to appear on *every* ``spack
|
||||
install`` line that uses MPI. It can get repetitive fast.
|
||||
|
||||
Customizing Spack installation options is easier to do in the
|
||||
``~/.spack/packages.yaml`` file. In this file, you can specify
|
||||
preferred versions and variants to use for packages. For example:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
python:
|
||||
version: [3.5.1]
|
||||
modele-utils:
|
||||
version: [cmake]
|
||||
|
||||
everytrace:
|
||||
version: [develop]
|
||||
eigen:
|
||||
variants: ~suitesparse
|
||||
netcdf:
|
||||
variants: +mpi
|
||||
|
||||
all:
|
||||
compiler: [gcc@5.3.0]
|
||||
providers:
|
||||
mpi: [openmpi]
|
||||
blas: [openblas]
|
||||
lapack: [openblas]
|
||||
|
||||
|
||||
This approach will work as long as you are building packages for just
|
||||
one application.
|
||||
|
||||
Multiple Applications
|
||||
-----------------------
|
||||
|
||||
Suppose instead you're building multiple inconsistent applications.
|
||||
For example, users want package A to be built with ``openmpi`` and
|
||||
package B with ``mpich`` --- but still share many other lower-level
|
||||
dependencies. In this case, a single ``packages.yaml`` file will not
|
||||
work. Plans are to implement *per-project* ``packages.yaml`` files.
|
||||
In the meantime, one could write shell scripts to switch
|
||||
``packages.yaml`` between multiple versions as needed, using symlinks.
|
||||
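A minimal sketch of such a switching script follows; the per-project file
names ``packages-A.yaml`` and ``packages-B.yaml`` are hypothetical, and the
script merely repoints the ``packages.yaml`` symlink:

.. code-block:: sh

    #!/bin/sh
    # Usage: ./use-packages.sh A    (or B)
    # Point ~/.spack/packages.yaml at the chosen per-project file.
    set -e
    project="$1"
    cd ~/.spack
    ln -sfn "packages-${project}.yaml" packages.yaml
    echo "packages.yaml -> packages-${project}.yaml"

One would then run, for example, ``./use-packages.sh A`` before building the
packages for application A.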
|
||||
|
||||
Combinatorial Sets
|
||||
--------------------------
|
||||
|
||||
Suppose that you are now tasked with systematically building many
|
||||
incompatible versions of packages. For example, you need to build
|
||||
``petsc`` 9 times for 3 different MPI implementations on 3 different
|
||||
compilers, in order to support user needs. In this case, you will
|
||||
need to either create 9 different ``packages.yaml`` files or, more
|
||||
likely, create 9 different ``spack install`` command lines with the
|
||||
correct options in the spec.
|
||||
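In the latter case the command lines can at least be generated rather than
typed by hand. The sketch below assumes three compilers and three MPI
implementations already known to Spack; the names are only examples:

.. code-block:: sh

    #!/bin/sh
    # Build petsc for every combination of compiler and MPI implementation.
    for compiler in gcc@5.3.0 intel@16.0.3 clang@3.8.0; do
        for mpi in openmpi mpich mvapich2; do
            spack install petsc %"$compiler" ^"$mpi"
        done
    done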
|
||||
|
||||
|
||||
Loading Packages
|
||||
#################
|
||||
|
||||
Once Spack packages have been built, the next step is to use them. As
|
||||
with building packages, there are many ways to use them, depending on
|
||||
the use case.
|
||||
|
||||
Simple Loads
|
||||
--------------
|
||||
|
||||
Suppose that Spack has been used to install a set of command-line
|
||||
programs, which users now wish to use. One can in principle put a
|
||||
number of ``spack load`` commands into ``.bashrc``, for example:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
spack load modele-utils
|
||||
spack load emacs
|
||||
spack load ncview
|
||||
spack load nco
|
||||
spack load modele-control
|
||||
|
||||
Although simple load scripts like this are useful in many cases, they
|
||||
have some drawbacks:
|
||||
|
||||
1. The set of modules loaded by them will in general not be
|
||||
consistent. They are a decent way to load commands to be called
|
||||
from command shells. See below for better ways to assemble a
|
||||
consistent set of packages for building application programs.
|
||||
|
||||
2. The ``spack spec`` and ``spack install`` commands use a
|
||||
sophisticated concretization algorithm that chooses the "best"
|
||||
among several options, taking into account the ``packages.yaml`` file.
|
||||
The ``spack load`` and ``spack module loads`` commands, on the
|
||||
other hand, are not very smart: if the user-supplied spec matches
|
||||
more than one installed package, then ``spack module loads`` will
|
||||
fail. This may change in the future. For now, the workaround is to
|
||||
be more specific on any ``spack module loads`` lines that fail, as in the example below.
|
||||
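For example, if a bare spec matches more than one installed instance, adding
a version, compiler, or variant usually narrows it down to one (the specs
below are illustrative):

.. code-block:: sh

    # May fail if several hdf5 instances are installed:
    spack module loads hdf5

    # More specific; matches a single installed instance:
    spack module loads hdf5@1.10.0-patch1 %gcc@5.3.0 +mpi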
|
||||
|
||||
Cached Simple Loads
|
||||
----------------------
|
||||
|
||||
Another problem with using ``spack load`` is that it is slow; a typical user
|
||||
environment could take several seconds to load, and would not be
|
||||
appropriate to put into ``.bashrc`` directly. It is preferable to use
|
||||
a series of ``spack module loads`` commands to pre-compute which
|
||||
modules to load. These can be put in a script that is run whenever
|
||||
installed Spack packages change. For example:
|
||||
|
||||
.. code-block:: sh
|
||||
#!/bin/sh
|
||||
#
|
||||
# Generate module load commands in ~/env/spackenv
|
||||
|
||||
cat <<EOF | /bin/sh >$HOME/env/spackenv
|
||||
FIND='spack module loads --prefix linux-SuSE11-x86_64/'
|
||||
|
||||
\$FIND modele-utils
|
||||
\$FIND emacs
|
||||
\$FIND ncview
|
||||
\$FIND nco
|
||||
\$FIND modele-control
|
||||
EOF
|
||||
|
||||
The output of this script is written to ``~/env/spackenv``:
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
# binutils@2.25%gcc@5.3.0+gold~krellpatch~libiberty arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/binutils-2.25-gcc-5.3.0-6w5d2t4
|
||||
# python@2.7.12%gcc@5.3.0~tk~ucs4 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/python-2.7.12-gcc-5.3.0-2azoju2
|
||||
# ncview@2.1.7%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/ncview-2.1.7-gcc-5.3.0-uw3knq2
|
||||
# nco@4.5.5%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/nco-4.5.5-gcc-5.3.0-7aqmimu
|
||||
# modele-control@develop%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/modele-control-develop-gcc-5.3.0-7rddsij
|
||||
# zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/zlib-1.2.8-gcc-5.3.0-fe5onbi
|
||||
# curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/curl-7.50.1-gcc-5.3.0-4vlev55
|
||||
# hdf5@1.10.0-patch1%gcc@5.3.0+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/hdf5-1.10.0-patch1-gcc-5.3.0-pwnsr4w
|
||||
# netcdf@4.4.1%gcc@5.3.0~hdf4+mpi arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/netcdf-4.4.1-gcc-5.3.0-rl5canv
|
||||
# netcdf-fortran@4.4.4%gcc@5.3.0 arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/netcdf-fortran-4.4.4-gcc-5.3.0-stdk2xq
|
||||
# modele-utils@cmake%gcc@5.3.0+aux+diags+ic arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/modele-utils-cmake-gcc-5.3.0-idyjul5
|
||||
# everytrace@develop%gcc@5.3.0+fortran+mpi arch=linux-SuSE11-x86_64
|
||||
module load linux-SuSE11-x86_64/everytrace-develop-gcc-5.3.0-p5wmb25
|
||||
|
||||
Users may now put ``source ~/env/spackenv`` into ``.bashrc``.
|
||||
|
||||
.. note::
|
||||
|
||||
Some module systems put a prefix on the names of modules created
|
||||
by Spack. For example, that prefix is ``linux-SuSE11-x86_64/`` in
|
||||
the above case. If a prefix is not needed, you may omit the
|
||||
``--prefix`` flag from ``spack module loads``.
|
||||
|
||||
|
||||
Transitive Dependencies
|
||||
---------------------------
|
||||
|
||||
In the script above, each ``spack module loads`` command generates a
|
||||
*single* ``module load`` line. Transitive dependencies do not usually
|
||||
need to be loaded, only the modules the user needs in ``$PATH``. This is
|
||||
because Spack builds binaries with RPATH. Spack's RPATH policy has
|
||||
some nice features:
|
||||
|
||||
1. Modules for multiple inconsistent applications may be loaded
|
||||
simultaneously. In the above example (Multiple Applications),
|
||||
package A and package B can coexist together in the user's $PATH,
|
||||
even though they use different MPIs.
|
||||
|
||||
2. RPATH eliminates a whole class of strange errors that can happen
|
||||
in non-RPATH binaries when the wrong ``LD_LIBRARY_PATH`` is
|
||||
loaded.
|
||||
|
||||
3. Recursive module systems such as LMod are not necessary.
|
||||
|
||||
4. Modules are not needed at all to execute binaries. If a path to a
|
||||
binary is known, it may be executed. For example, the path for a
|
||||
Spack-built compiler can be given to an IDE without requiring the
|
||||
IDE to load that compiler's module.
|
||||
|
||||
Unfortunately, Spack's RPATH support does not work in all cases. For example:
|
||||
|
||||
1. Software comes in many forms --- not just compiled ELF binaries,
|
||||
but also as interpreted code in Python, R, JVM bytecode, etc.
|
||||
Those systems almost universally use an environment variable
|
||||
analogous to ``LD_LIBRARY_PATH`` to dynamically load libraries.
|
||||
|
||||
2. Although Spack generally builds binaries with RPATH, it does not
|
||||
currently do so for compiled Python extensions (for example,
|
||||
``py-numpy``). Any libraries that these extensions depend on
|
||||
(``openblas`` in this case, for example) must be specified in the
|
||||
``LD_LIBRARY_PATH``.
|
||||
|
||||
3. In some cases, Spack-generated binaries end up without a
|
||||
functional RPATH for no discernible reason.
|
||||
|
||||
In cases where RPATH support doesn't make things "just work," it can
|
||||
be necessary to load a module's dependencies as well as the module
|
||||
itself. This is done by adding the ``--dependencies`` flag to the
|
||||
``spack module loads`` command. For example, the following line,
|
||||
added to the script above, would be used to load Numpy, along with
|
||||
core Python, Setuptools, and a number of other packages:
|
||||
|
||||
.. code-block:: sh
|
||||
\$FIND --dependencies py-numpy
|
||||
|
||||
Extension Packages
|
||||
---------------------
|
||||
|
||||
Extensions (see the :ref:`packaging_extension` section) may be used as an
|
||||
alternative to loading Python packages directly. If extensions are
|
||||
activated, then ``spack load python`` will also load all the
|
||||
extensions activated for the given ``python``. However, Spack
|
||||
extensions have two potential drawbacks:
|
||||
|
||||
1. Activated packages that involve compiled C extensions may still
|
||||
need their dependencies to be loaded manually. For example,
|
||||
``spack load openblas`` might be required to make ``py-numpy``
|
||||
work.
|
||||
|
||||
2. Extensions "break" a core feature of Spack, which is that multiple
|
||||
versions of a package can co-exist side-by-side. For example,
|
||||
suppose you wish to run Python in two different environments built on
|
||||
the same basic Python --- one with ``py-numpy@1.7`` and one with
|
||||
``py-numpy@1.8``. Spack extensions will not support this potential
|
||||
debugging use case.
|
||||
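As an illustration of the activation behavior described above (the packages
shown are examples; per drawback 1, a compiled extension may still need its
own dependencies loaded):

.. code-block:: sh

    # Link the py-numpy extension into its python's prefix,
    # then a single load brings both in:
    spack activate py-numpy
    spack load python

    # Per drawback 1 above, compiled dependencies may still need a load:
    spack load openblas
    python -c 'import numpy; print(numpy.__version__)'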
|
||||
|
||||
|
||||
Filesystem Views
|
||||
-------------------------------
|
||||
|
||||
The above approaches are not the only ways to aggregate and use installed packages.
|
||||
|
||||
.. Maybe this is not the right location for this documentation.
|
||||
|
||||
The Spack installation area allows for many package installation trees
|
||||
to coexist and gives the user choices as to what versions and variants
|
||||
of packages to use. To use them, the user must rely on a way to
|
||||
aggregate a subset of those packages. The section on Environment
|
||||
Modules gives one good way to do that which relies on setting various
|
||||
environment variables. An alternative way to aggregate is through
|
||||
**filesystem views**.
|
||||
|
||||
A filesystem view is a single directory tree which is the union of the
|
||||
directory hierarchies of the individual package installation trees
|
||||
that have been included. The files of the view's installed packages
|
||||
are brought into the view by symbolic or hard links back to their
|
||||
location in the original Spack installation area. As the view is
|
||||
formed, any clashes due to a file having the exact same path in its
|
||||
package installation tree are handled on a first-come-first-served
|
||||
basis and a warning is printed. Packages and their dependencies can
|
||||
be both added and removed. During removal, empty directories will be
|
||||
purged. These operations can be limited to pertain to just the
|
||||
packages listed by the user or to exclude specific dependencies and
|
||||
they allow for software installed outside of Spack to coexist inside
|
||||
the filesystem view tree.
|
||||
|
||||
By its nature, a filesystem view represents a particular choice of one
|
||||
set of packages among all the versions and variants that are available
|
||||
in the Spack installation area. It is thus equivalent to the
|
||||
directory hierarchy that might exist under ``/usr/local``. While this
|
||||
limits a view to including only one version/variant of any package, it
|
||||
provides the benefits of having a simpler and traditional layout which
|
||||
may be used without any particular knowledge that its packages were
|
||||
built by Spack.
|
||||
|
||||
Views can be used for a variety of purposes including:
|
||||
|
||||
- A central installation in a traditional layout, e.g. ``/usr/local``, maintained over time by the sysadmin.
|
||||
- A self-contained installation area which may form the basis of a top-level atomic versioning scheme, e.g. ``/opt/pro`` vs ``/opt/dev``.
|
||||
- Providing an atomic and monolithic binary distribution, e.g. for delivery as a single tarball.
|
||||
- Producing ephemeral testing or development environments.
|
||||
|
||||
Using Filesystem Views
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
A filesystem view is created and packages are linked in by the ``spack
|
||||
view`` command's ``symlink`` and ``hardlink`` sub-commands. The
|
||||
``spack view remove`` command can be used to unlink some or all of the
|
||||
filesystem view.
|
||||
|
||||
The following example creates a filesystem view based
|
||||
on an installed ``cmake`` package and then removes from the view the
|
||||
files in the ``cmake`` package while retaining its dependencies.
|
||||
|
||||
.. code-block:: sh
|
||||
|
||||
|
||||
$ spack view -v symlink myview cmake@3.5.2
|
||||
==> Linking package: "ncurses"
|
||||
==> Linking package: "zlib"
|
||||
==> Linking package: "openssl"
|
||||
==> Linking package: "cmake"
|
||||
|
||||
$ ls myview/
|
||||
bin doc etc include lib share
|
||||
|
||||
$ ls myview/bin/
|
||||
captoinfo clear cpack ctest infotocap openssl tabs toe tset
|
||||
ccmake cmake c_rehash infocmp ncurses6-config reset tic tput
|
||||
|
||||
$ spack view -v -d false rm myview cmake@3.5.2
|
||||
==> Removing package: "cmake"
|
||||
|
||||
$ ls myview/bin/
|
||||
captoinfo c_rehash infotocap openssl tabs toe tset
|
||||
clear infocmp ncurses6-config reset tic tput
|
||||
|
||||
|
||||
Limitations of Filesystem Views
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This section describes some limitations that should be considered in
|
||||
using filesystem views.
|
||||
|
||||
Filesystem views are merely organizational. The binary executable
|
||||
programs, shared libraries and other build products found in a view
|
||||
are mere links into the "real" Spack installation area. If a view is
|
||||
built with symbolic links it requires the Spack-installed package to
|
||||
be kept in place. Building a view with hardlinks removes this
|
||||
requirement but any internal paths (eg, rpath or ``#!`` interpreter
|
||||
specifications) will still require the Spack-installed package files
|
||||
to be in place.
|
||||
|
||||
.. FIXME: reference the relocation work of Hegner and Gartung.
|
||||
|
||||
As described above, when a view is built only a single instance of a
|
||||
file may exist in the unified filesystem tree. If more than one
|
||||
package provides a file at the same path (relative to its own root)
|
||||
then it is the first package added to the view that "wins". A warning
|
||||
is printed and it is up to the user to determine if the conflict
|
||||
matters.
|
||||
|
||||
It is up to the user to assure a consistent view is produced. In
|
||||
particular, if the user excludes packages, limits the following of
|
||||
dependencies, or removes packages, the view may become inconsistent. For
|
||||
example, if two packages require the same sub-tree of dependencies,
|
||||
removing one package (recursively) will remove its dependencies and
|
||||
leave the other package broken.
|
||||
|
||||
|
||||
|
||||
Build System Configuration Support
|
||||
----------------------------------
|
||||
|
||||
Imagine a developer creating a CMake-based (or Autotools) project in a local
|
||||
directory, which depends on libraries A-Z. Once Spack has installed
|
||||
those dependencies, one would like to run ``cmake`` with appropriate
|
||||
command line and environment so CMake can find them. The ``spack
|
||||
setup`` command does this conveniently, producing a CMake
|
||||
configuration that is essentially the same as how Spack *would have*
|
||||
configured the project. This can be demonstrated with a usage
|
||||
example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
cd myproject
|
||||
spack setup myproject@local
|
||||
mkdir build; cd build
|
||||
../spconfig.py ..
|
||||
make
|
||||
make install
|
||||
|
||||
Notes:
|
||||
* Spack must have ``myproject/package.py`` in its repository for
|
||||
this to work.
|
||||
* ``spack setup`` produces the executable script ``spconfig.py`` in
|
||||
the local directory, and also creates the module file for the
|
||||
package. ``spconfig.py`` is normally run from the user's
|
||||
out-of-source build directory.
|
||||
* The version number given to ``spack setup`` is arbitrary, just
|
||||
like ``spack diy``. ``myproject/package.py`` does not need to
|
||||
have any valid downloadable versions listed (typical when a
|
||||
project is new).
|
||||
* ``spconfig.py`` produces a CMake configuration that *does not* use the
|
||||
Spack wrappers. Any resulting binaries *will not* use RPATH,
|
||||
unless the user has enabled it. This is recommended for
|
||||
development purposes, not production.
|
||||
* ``spconfig.py`` is human readable, and can serve as a developer
|
||||
reference of what dependencies are being used.
|
||||
* ``make install`` installs the package into the Spack repository,
|
||||
where it may be used by other Spack packages.
|
||||
* CMake-generated makefiles re-run CMake in some circumstances. Use
|
||||
of ``spconfig.py`` breaks this behavior, requiring the developer
|
||||
to manually re-run ``spconfig.py`` when a ``CMakeLists.txt`` file
|
||||
has changed.
|
||||
|
||||
CMakePackage
|
||||
~~~~~~~~~~~~
|
||||
|
||||
In order to enable ``spack setup`` functionality, the author of
|
||||
``myproject/package.py`` must subclass from ``CMakePackage`` instead
|
||||
of the standard ``Package`` superclass. Because CMake is
|
||||
standardized, the packager does not need to tell Spack how to run
|
||||
``cmake; make; make install``. Instead the packager only needs to
|
||||
create (optional) methods ``configure_args()`` and ``configure_env()``, which
|
||||
provide the arguments (as a list) and extra environment variables (as
|
||||
a dict) to provide to the ``cmake`` command. Usually, these will
|
||||
translate variant flags into CMake definitions. For example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def configure_args(self):
|
||||
spec = self.spec
|
||||
return [
|
||||
'-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'),
|
||||
'-DBUILD_PYTHON=%s' % ('YES' if '+python' in spec else 'NO'),
|
||||
'-DBUILD_GRIDGEN=%s' % ('YES' if '+gridgen' in spec else 'NO'),
|
||||
'-DBUILD_COUPLER=%s' % ('YES' if '+coupler' in spec else 'NO'),
|
||||
'-DUSE_PISM=%s' % ('YES' if '+pism' in spec else 'NO')]
|
||||
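Similarly, ``configure_env()`` can supply extra environment variables for the
``cmake`` invocation. A minimal sketch (the ``+mkl`` variant and ``MKLROOT``
variable are hypothetical):

.. code-block:: python

    def configure_env(self):
        # Extra environment variables for the cmake command.
        env = {}
        if '+mkl' in self.spec:
            env['MKLROOT'] = str(self.spec['mkl'].prefix)
        return env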
|
||||
If needed, a packager may also override methods defined in
|
||||
``StagedPackage`` (see below).
|
||||
|
||||
|
||||
StagedPackage
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
``CMakePackage`` is implemented by subclassing the ``StagedPackage``
|
||||
superclass, which breaks down the standard ``Package.install()``
|
||||
method into several sub-stages: ``setup``, ``configure``, ``build``
|
||||
and ``install``. Details:
|
||||
|
||||
* Instead of implementing the standard ``install()`` method, package
|
||||
authors implement the methods for the sub-stages
|
||||
``install_setup()``, ``install_configure()``,
|
||||
``install_build()``, and ``install_install()``.
|
||||
|
||||
* The ``spack install`` command runs the sub-stages ``configure``,
|
||||
``build`` and ``install`` in order. (The ``setup`` stage is
|
||||
not run by default; see below).
|
||||
* The ``spack setup`` command runs the sub-stages ``setup``
|
||||
and a dummy install (to create the module file).
|
||||
* The sub-stage install methods take no arguments (other than
|
||||
``self``). The arguments ``spec`` and ``prefix`` to the standard
|
||||
``install()`` method may be accessed via ``self.spec`` and
|
||||
``self.prefix``.
|
||||
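For a package with a non-standard build system, a direct ``StagedPackage``
subclass might look roughly like the sketch below; the package name,
checksum, and build commands are hypothetical and only show where each
sub-stage method fits:

.. code-block:: python

    from spack import *

    class Mybuildtool(StagedPackage):
        """Hypothetical package with a home-grown build system."""

        homepage = "http://www.example.com"
        url      = "http://www.example.com/mybuildtool-1.0.tar.gz"

        version('1.0', 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')

        def install_setup(self):
            # Run only by `spack setup`; would write out a build
            # configuration script for out-of-source development.
            pass

        def install_configure(self):
            # Note: no spec/prefix arguments; use self.spec and
            # self.prefix instead.  `configure` is provided by Spack's
            # build environment and runs ./configure in the stage.
            configure('--prefix=%s' % self.prefix)

        def install_build(self):
            make()

        def install_install(self):
            make('install')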
|
||||
GNU Autotools
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
The ``setup`` functionality is currently only available for
|
||||
CMake-based packages. Extending this functionality to GNU
|
||||
Autotools-based packages would be easy (and should be done by a
|
||||
developer who actively uses Autotools). Packages that use
|
||||
non-standard build systems can gain ``setup`` functionality by
|
||||
subclassing ``StagedPackage`` directly.
|
||||
|
||||
105
lib/spack/env/cc
vendored
@@ -55,7 +55,10 @@ parameters=(
|
||||
|
||||
# The compiler input variables are checked for sanity later:
|
||||
# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
|
||||
# Debug flag is optional; set to "TRUE" for debug logging:
|
||||
# The default compiler flags are passed from these variables:
|
||||
# SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FCFLAGS, SPACK_FFLAGS,
|
||||
# SPACK_LDFLAGS, SPACK_LDLIBS
|
||||
# Debug env var is optional; set to true for debug logging:
|
||||
# SPACK_DEBUG
|
||||
# Test command is used to unit test the compiler script.
|
||||
# SPACK_TEST_COMMAND
|
||||
@@ -99,21 +102,25 @@ case "$command" in
|
||||
command="$SPACK_CC"
|
||||
language="C"
|
||||
comp="CC"
|
||||
lang_flags=C
|
||||
;;
|
||||
c++|CC|g++|clang++|icpc|pgc++|xlc++)
|
||||
command="$SPACK_CXX"
|
||||
language="C++"
|
||||
comp="CXX"
|
||||
lang_flags=CXX
|
||||
;;
|
||||
f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
|
||||
ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
|
||||
command="$SPACK_FC"
|
||||
language="Fortran 90"
|
||||
comp="FC"
|
||||
lang_flags=F
|
||||
;;
|
||||
f77|gfortran|ifort|pgfortran|xlf|nagfor)
|
||||
f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
|
||||
command="$SPACK_F77"
|
||||
language="Fortran 77"
|
||||
comp="F77"
|
||||
lang_flags=F
|
||||
;;
|
||||
ld)
|
||||
mode=ld
|
||||
@@ -131,7 +138,7 @@ if [[ -z $mode ]]; then
|
||||
if [[ $arg == -v || $arg == -V || $arg == --version || $arg == -dumpversion ]]; then
|
||||
mode=vcheck
|
||||
break
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
@@ -167,6 +174,28 @@ if [[ -z $command ]]; then
|
||||
die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
|
||||
fi
|
||||
|
||||
#
|
||||
# Filter '.' and Spack environment directories out of PATH so that
|
||||
# this script doesn't just call itself
|
||||
#
|
||||
IFS=':' read -ra env_path <<< "$PATH"
|
||||
IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
|
||||
spack_env_dirs+=("" ".")
|
||||
PATH=""
|
||||
for dir in "${env_path[@]}"; do
|
||||
addpath=true
|
||||
for env_dir in "${spack_env_dirs[@]}"; do
|
||||
if [[ $dir == $env_dir ]]; then
|
||||
addpath=false
|
||||
break
|
||||
fi
|
||||
done
|
||||
if $addpath; then
|
||||
PATH="${PATH:+$PATH:}$dir"
|
||||
fi
|
||||
done
|
||||
export PATH
|
||||
|
||||
if [[ $mode == vcheck ]]; then
|
||||
exec ${command} "$@"
|
||||
fi
|
||||
@@ -188,6 +217,42 @@ fi
|
||||
input_command="$@"
|
||||
args=("$@")
|
||||
|
||||
# Prepend cppflags, cflags, cxxflags, fcflags, fflags, and ldflags
|
||||
|
||||
# Add ldflags
|
||||
case "$mode" in
|
||||
ld|ccld)
|
||||
args=(${SPACK_LDFLAGS[@]} "${args[@]}") ;;
|
||||
esac
|
||||
|
||||
# Add compiler flags.
|
||||
case "$mode" in
|
||||
cc|ccld)
|
||||
# Add c, cxx, fc, and f flags
|
||||
case $lang_flags in
|
||||
C)
|
||||
args=(${SPACK_CFLAGS[@]} "${args[@]}") ;;
|
||||
CXX)
|
||||
args=(${SPACK_CXXFLAGS[@]} "${args[@]}") ;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
# Add cppflags
|
||||
case "$mode" in
|
||||
cpp|as|cc|ccld)
|
||||
args=(${SPACK_CPPFLAGS[@]} "${args[@]}") ;;
|
||||
esac
|
||||
|
||||
case "$mode" in cc|ccld)
|
||||
# Add fortran flags
|
||||
case $lang_flags in
|
||||
F)
|
||||
args=(${SPACK_FFLAGS[@]} "${args[@]}") ;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
# Read spack dependencies from the path environment variable
|
||||
IFS=':' read -ra deps <<< "$SPACK_DEPENDENCIES"
|
||||
for dep in "${deps[@]}"; do
|
||||
@@ -230,6 +295,12 @@ elif [[ $mode == ld ]]; then
|
||||
$add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "${args[@]}")
|
||||
fi
|
||||
|
||||
# Add SPACK_LDLIBS to args
|
||||
case "$mode" in
|
||||
ld|ccld)
|
||||
args=("${args[@]}" ${SPACK_LDLIBS[@]}) ;;
|
||||
esac
|
||||
|
||||
#
|
||||
# Unset pesky environment variables that could affect build sanity.
|
||||
#
|
||||
@@ -237,28 +308,6 @@ unset LD_LIBRARY_PATH
|
||||
unset LD_RUN_PATH
|
||||
unset DYLD_LIBRARY_PATH
|
||||
|
||||
#
|
||||
# Filter '.' and Spack environment directories out of PATH so that
|
||||
# this script doesn't just call itself
|
||||
#
|
||||
IFS=':' read -ra env_path <<< "$PATH"
|
||||
IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
|
||||
spack_env_dirs+=("" ".")
|
||||
PATH=""
|
||||
for dir in "${env_path[@]}"; do
|
||||
addpath=true
|
||||
for env_dir in "${spack_env_dirs[@]}"; do
|
||||
if [[ $dir == $env_dir ]]; then
|
||||
addpath=false
|
||||
break
|
||||
fi
|
||||
done
|
||||
if $addpath; then
|
||||
PATH="${PATH:+$PATH:}$dir"
|
||||
fi
|
||||
done
|
||||
export PATH
|
||||
|
||||
full_command=("$command" "${args[@]}")
|
||||
|
||||
# In test command mode, write out full command for Spack tests.
|
||||
@@ -275,8 +324,8 @@ fi
|
||||
if [[ $SPACK_DEBUG == TRUE ]]; then
|
||||
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
|
||||
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
|
||||
echo "[$mode] $command $input_command" >> $input_log
|
||||
echo "[$mode] ${full_command[@]}" >> $output_log
|
||||
echo "[$mode] $command $input_command" >> "$input_log"
|
||||
echo "[$mode] ${full_command[@]}" >> "$output_log"
|
||||
fi
|
||||
|
||||
exec "${full_command[@]}"
|
||||
|
||||
1
lib/spack/env/cray/CC
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
||||
1
lib/spack/env/cray/cc
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
||||
1
lib/spack/env/cray/ftn
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
||||
1
lib/spack/env/craype/CC
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
||||
1
lib/spack/env/craype/cc
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
||||
1
lib/spack/env/craype/ftn
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
||||
@@ -22,33 +22,34 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
__all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree',
|
||||
'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
|
||||
'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
|
||||
'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
|
||||
'set_executable', 'copy_mode', 'unset_executable_mode',
|
||||
'remove_dead_links', 'remove_linked_tree', 'find_library_path',
|
||||
'fix_darwin_install_name']
|
||||
|
||||
import os
|
||||
import glob
|
||||
import sys
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import errno
|
||||
import getpass
|
||||
from contextlib import contextmanager, closing
|
||||
from tempfile import NamedTemporaryFile
|
||||
from contextlib import contextmanager
|
||||
import subprocess
|
||||
import fileinput
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from spack.util.compression import ALLOWED_ARCHIVE_TYPES
|
||||
|
||||
__all__ = ['set_install_permissions', 'install', 'install_tree',
|
||||
'traverse_tree',
|
||||
'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
|
||||
'force_remove', 'join_path', 'ancestor', 'can_access',
|
||||
'filter_file',
|
||||
'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
|
||||
'set_executable', 'copy_mode', 'unset_executable_mode',
|
||||
'remove_dead_links', 'remove_linked_tree', 'find_library_path',
|
||||
'fix_darwin_install_name', 'to_link_flags', 'to_lib_name']
|
||||
|
||||
|
||||
def filter_file(regex, repl, *filenames, **kwargs):
|
||||
"""Like sed, but uses python regular expressions.
|
||||
|
||||
Filters every line of file through regex and replaces the file
|
||||
Filters every line of each file through regex and replaces the file
|
||||
with a filtered version. Preserves mode of filtered files.
|
||||
|
||||
As with re.sub, ``repl`` can be either a string or a callable.
|
||||
@@ -59,7 +60,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
|
||||
|
||||
Keyword Options:
|
||||
string[=False] If True, treat regex as a plain string.
|
||||
backup[=True] Make a backup files suffixed with ~
|
||||
backup[=True] Make backup file(s) suffixed with ~
|
||||
ignore_absent[=False] Ignore any files that don't exist.
|
||||
"""
|
||||
string = kwargs.get('string', False)
|
||||
@@ -69,6 +70,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
|
||||
# Allow strings to use \1, \2, etc. for replacement, like sed
|
||||
if not callable(repl):
|
||||
unescaped = repl.replace(r'\\', '\\')
|
||||
|
||||
def replace_groups_with_groupid(m):
|
||||
def groupid_to_group(x):
|
||||
return m.group(int(x.group(1)))
|
||||
@@ -79,30 +81,32 @@ def groupid_to_group(x):
|
||||
regex = re.escape(regex)
|
||||
|
||||
for filename in filenames:
|
||||
backup = filename + "~"
|
||||
backup_filename = filename + "~"
|
||||
|
||||
if ignore_absent and not os.path.exists(filename):
|
||||
continue
|
||||
|
||||
shutil.copy(filename, backup)
|
||||
# Create backup file. Don't overwrite an existing backup
|
||||
# file in case this file is being filtered multiple times.
|
||||
if not os.path.exists(backup_filename):
|
||||
shutil.copy(filename, backup_filename)
|
||||
|
||||
try:
|
||||
with closing(open(backup)) as infile:
|
||||
with closing(open(filename, 'w')) as outfile:
|
||||
for line in infile:
|
||||
foo = re.sub(regex, repl, line)
|
||||
outfile.write(foo)
|
||||
for line in fileinput.input(filename, inplace=True):
|
||||
print(re.sub(regex, repl, line.rstrip('\n')))
|
||||
except:
|
||||
# clean up the original file on failure.
|
||||
shutil.move(backup, filename)
|
||||
shutil.move(backup_filename, filename)
|
||||
raise
|
||||
|
||||
finally:
|
||||
if not backup:
|
||||
shutil.rmtree(backup, ignore_errors=True)
|
||||
os.remove(backup_filename)
|
||||
|
||||
|
||||
class FileFilter(object):
|
||||
"""Convenience class for calling filter_file a lot."""
|
||||
|
||||
def __init__(self, *filenames):
|
||||
self.filenames = filenames
|
||||
|
||||
@@ -113,7 +117,7 @@ def filter(self, regex, repl, **kwargs):
|
||||
def change_sed_delimiter(old_delim, new_delim, *filenames):
|
||||
"""Find all sed search/replace commands and change the delimiter.
|
||||
e.g., if the file contains seds that look like 's///', you can
|
||||
call change_sed_delimeter('/', '@', file) to change the
|
||||
call change_sed_delimiter('/', '@', file) to change the
|
||||
delimiter to '@'.
|
||||
|
||||
NOTE that this routine will fail if the delimiter is ' or ".
|
||||
@@ -157,9 +161,12 @@ def set_install_permissions(path):
|
||||
def copy_mode(src, dest):
|
||||
src_mode = os.stat(src).st_mode
|
||||
dest_mode = os.stat(dest).st_mode
|
||||
if src_mode & stat.S_IXUSR: dest_mode |= stat.S_IXUSR
|
||||
if src_mode & stat.S_IXGRP: dest_mode |= stat.S_IXGRP
|
||||
if src_mode & stat.S_IXOTH: dest_mode |= stat.S_IXOTH
|
||||
if src_mode & stat.S_IXUSR:
|
||||
dest_mode |= stat.S_IXUSR
|
||||
if src_mode & stat.S_IXGRP:
|
||||
dest_mode |= stat.S_IXGRP
|
||||
if src_mode & stat.S_IXOTH:
|
||||
dest_mode |= stat.S_IXOTH
|
||||
os.chmod(dest, dest_mode)
|
||||
|
||||
|
||||
@@ -175,7 +182,7 @@ def install(src, dest):
|
||||
"""Manually install a file to a particular location."""
|
||||
tty.debug("Installing %s to %s" % (src, dest))
|
||||
|
||||
# Expand dsst to its eventual full path if it is a directory.
|
||||
# Expand dest to its eventual full path if it is a directory.
|
||||
if os.path.isdir(dest):
|
||||
dest = join_path(dest, os.path.basename(src))
|
||||
|
||||
@@ -185,7 +192,7 @@ def install(src, dest):
|
||||
|
||||
|
||||
def install_tree(src, dest, **kwargs):
|
||||
"""Manually install a file to a particular location."""
|
||||
"""Manually install a directory tree to a particular location."""
|
||||
tty.debug("Installing %s to %s" % (src, dest))
|
||||
shutil.copytree(src, dest, **kwargs)
|
||||
|
||||
@@ -215,7 +222,7 @@ def mkdirp(*paths):
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
elif not os.path.isdir(path):
|
||||
raise OSError(errno.EEXIST, "File alredy exists", path)
|
||||
raise OSError(errno.EEXIST, "File already exists", path)
|
||||
|
||||
|
||||
def force_remove(*paths):
|
||||
@@ -224,9 +231,10 @@ def force_remove(*paths):
|
||||
for path in paths:
|
||||
try:
|
||||
os.remove(path)
|
||||
except OSError, e:
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
@contextmanager
|
||||
def working_dir(dirname, **kwargs):
|
||||
if kwargs.get('create', False):
|
||||
@@ -240,7 +248,7 @@ def working_dir(dirname, **kwargs):
|
||||
|
||||
def touch(path):
|
||||
"""Creates an empty file at the specified path."""
|
||||
with open(path, 'a') as file:
|
||||
with open(path, 'a'):
|
||||
os.utime(path, None)
|
||||
|
||||
|
||||
@@ -253,7 +261,7 @@ def touchp(path):
|
||||
def force_symlink(src, dest):
|
||||
try:
|
||||
os.symlink(src, dest)
|
||||
except OSError as e:
|
||||
except OSError:
|
||||
os.remove(dest)
|
||||
os.symlink(src, dest)
|
||||
|
||||
@@ -275,7 +283,7 @@ def ancestor(dir, n=1):
|
||||
|
||||
def can_access(file_name):
|
||||
"""True if we have read/write access to the file."""
|
||||
return os.access(file_name, os.R_OK|os.W_OK)
|
||||
return os.access(file_name, os.R_OK | os.W_OK)
|
||||
|
||||
|
||||
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
|
||||
@@ -304,7 +312,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
|
||||
|
||||
Optional args:
|
||||
|
||||
order=[pre|post] -- Whether to do pre- or post-order traveral.
|
||||
order=[pre|post] -- Whether to do pre- or post-order traversal.
|
||||
|
||||
ignore=<predicate> -- Predicate indicating which files to ignore.
|
||||
|
||||
@@ -343,13 +351,15 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
|
||||
|
||||
# Treat as a directory
|
||||
if os.path.isdir(source_child) and (
|
||||
follow_links or not os.path.islink(source_child)):
|
||||
follow_links or not os.path.islink(source_child)):
|
||||
|
||||
# When follow_nonexisting isn't set, don't descend into dirs
|
||||
# in source that do not exist in dest
|
||||
if follow_nonexisting or os.path.exists(dest_child):
|
||||
tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
|
||||
for t in tuples: yield t
|
||||
tuples = traverse_tree(
|
||||
source_root, dest_root, rel_child, **kwargs)
|
||||
for t in tuples:
|
||||
yield t
|
||||
|
||||
# Treat as a file.
|
||||
elif not ignore(os.path.join(rel_path, f)):
|
||||
@@ -379,6 +389,7 @@ def remove_dead_links(root):
|
||||
if not os.path.exists(real_path):
|
||||
os.unlink(path)
|
||||
|
||||
|
||||
def remove_linked_tree(path):
|
||||
"""
|
||||
Removes a directory and its contents. If the directory is a
|
||||
@@ -402,28 +413,53 @@ def fix_darwin_install_name(path):
|
||||
Fix install name of dynamic libraries on Darwin to have full path.
|
||||
There are two parts of this task:
|
||||
(i) use install_name('-id',...) to change install name of a single lib;
|
||||
(ii) use install_name('-change',...) to change the cross linking between libs.
|
||||
The function assumes that all libraries are in one folder and currently won't
|
||||
follow subfolders.
|
||||
(ii) use install_name('-change',...) to change the cross linking between
|
||||
libs. The function assumes that all libraries are in one folder and
|
||||
currently won't follow subfolders.
|
||||
|
||||
Args:
|
||||
path: directory in which .dylib files are alocated
|
||||
path: directory in which .dylib files are located
|
||||
|
||||
"""
|
||||
libs = glob.glob(join_path(path,"*.dylib"))
|
||||
libs = glob.glob(join_path(path, "*.dylib"))
|
||||
for lib in libs:
|
||||
# fix install name first:
|
||||
subprocess.Popen(["install_name_tool", "-id",lib,lib], stdout=subprocess.PIPE).communicate()[0]
|
||||
long_deps = subprocess.Popen(["otool", "-L",lib], stdout=subprocess.PIPE).communicate()[0].split('\n')
|
||||
subprocess.Popen(
|
||||
["install_name_tool", "-id", lib, lib],
|
||||
stdout=subprocess.PIPE).communicate()[0]
|
||||
long_deps = subprocess.Popen(
|
||||
["otool", "-L", lib],
|
||||
stdout=subprocess.PIPE).communicate()[0].split('\n')
|
||||
deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
|
||||
# fix all dependencies:
|
||||
for dep in deps:
|
||||
for loc in libs:
|
||||
if dep == os.path.basename(loc):
|
||||
subprocess.Popen(["install_name_tool", "-change",dep,loc,lib], stdout=subprocess.PIPE).communicate()[0]
|
||||
subprocess.Popen(
|
||||
["install_name_tool", "-change", dep, loc, lib],
|
||||
stdout=subprocess.PIPE).communicate()[0]
|
||||
break
|
||||
|
||||
|
||||
def to_lib_name(library):
|
||||
"""Transforms a path to the library /path/to/lib<name>.xyz into <name>
|
||||
"""
|
||||
# Assume libXYZ.suffix
|
||||
return os.path.basename(library)[3:].split(".")[0]
|
||||
|
||||
|
||||
def to_link_flags(library):
|
||||
"""Transforms a path to a <library> into linking flags -L<dir> -l<name>.
|
||||
|
||||
Return:
|
||||
A string of linking flags.
|
||||
"""
|
||||
dir = os.path.dirname(library)
|
||||
name = to_lib_name(library)
|
||||
res = '-L%s -l%s' % (dir, name)
|
||||
return res
|
||||
|
||||
|
||||
def find_library_path(libname, *paths):
|
||||
"""Searches for a file called <libname> in each path.
|
||||
|
||||
|
||||
@@ -24,7 +24,6 @@
|
||||
##############################################################################
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import functools
|
||||
import collections
|
||||
import inspect
|
||||
@@ -39,14 +38,15 @@ def index_by(objects, *funcs):
|
||||
Values are used as keys. For example, suppose you have four
|
||||
objects with attributes that look like this:
|
||||
|
||||
a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
|
||||
b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
|
||||
c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
|
||||
d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
|
||||
a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
|
||||
b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
|
||||
c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
|
||||
d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
|
||||
|
||||
list_of_specs = [a,b,c,d]
|
||||
index1 = index_by(list_of_specs, lambda s: s.arch, lambda s: s.compiler)
|
||||
index2 = index_by(list_of_specs, lambda s: s.compiler)
|
||||
list_of_specs = [a,b,c,d]
|
||||
index1 = index_by(list_of_specs, lambda s: s.arch,
|
||||
lambda s: s.compiler)
|
||||
index2 = index_by(list_of_specs, lambda s: s.compiler)
|
||||
|
||||
``index1'' now has two levels of dicts, with lists at the
|
||||
leaves, like this:
|
||||
@@ -137,7 +137,7 @@ def get_calling_module_name():
|
||||
finally:
|
||||
del stack
|
||||
|
||||
if not '__module__' in caller_locals:
|
||||
if '__module__' not in caller_locals:
|
||||
raise RuntimeError("Must invoke get_calling_module_name() "
|
||||
"from inside a class definition!")
|
||||
|
||||
@@ -173,11 +173,11 @@ def has_method(cls, name):
|
||||
class memoized(object):
|
||||
"""Decorator that caches the results of a function, storing them
|
||||
in an attribute of that function."""
|
||||
|
||||
def __init__(self, func):
|
||||
self.func = func
|
||||
self.cache = {}
|
||||
|
||||
|
||||
def __call__(self, *args):
|
||||
if not isinstance(args, collections.Hashable):
|
||||
# Not hashable, so just call the function.
|
||||
@@ -187,12 +187,10 @@ def __call__(self, *args):
|
||||
self.cache[args] = self.func(*args)
|
||||
return self.cache[args]
|
||||
|
||||
|
||||
def __get__(self, obj, objtype):
|
||||
"""Support instance methods."""
|
||||
return functools.partial(self.__call__, obj)
|
||||
|
||||
|
||||
def clear(self):
|
||||
"""Expunge cache so that self.func will be called again."""
|
||||
self.cache.clear()
|
||||
@@ -237,13 +235,21 @@ def setter(name, value):
|
||||
if not has_method(cls, '_cmp_key'):
|
||||
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
|
||||
|
||||
setter('__eq__', lambda s,o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
|
||||
setter('__lt__', lambda s,o: o is not None and s._cmp_key() < o._cmp_key())
|
||||
setter('__le__', lambda s,o: o is not None and s._cmp_key() <= o._cmp_key())
|
||||
setter('__eq__',
|
||||
lambda s, o:
|
||||
(s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
|
||||
setter('__lt__',
|
||||
lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
|
||||
setter('__le__',
|
||||
lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
|
||||
|
||||
setter('__ne__', lambda s,o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
|
||||
setter('__gt__', lambda s,o: o is None or s._cmp_key() > o._cmp_key())
|
||||
setter('__ge__', lambda s,o: o is None or s._cmp_key() >= o._cmp_key())
|
||||
setter('__ne__',
|
||||
lambda s, o:
|
||||
(s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
|
||||
setter('__gt__',
|
||||
lambda s, o: o is None or s._cmp_key() > o._cmp_key())
|
||||
setter('__ge__',
|
||||
lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
|
||||
|
||||
setter('__hash__', lambda self: hash(self._cmp_key()))
|
||||
|
||||
@@ -254,10 +260,10 @@ def setter(name, value):
|
||||
class HashableMap(dict):
|
||||
"""This is a hashable, comparable dictionary. Hash is performed on
|
||||
a tuple of the values in the dictionary."""
|
||||
|
||||
def _cmp_key(self):
|
||||
return tuple(sorted(self.values()))
|
||||
|
||||
|
||||
def copy(self):
|
||||
"""Type-agnostic clone method. Preserves subclass type."""
|
||||
# Construct a new dict of my type
|
||||
@@ -336,24 +342,39 @@ def match(string):
|
||||
return match
|
||||
|
||||
|
||||
|
||||
def DictWrapper(dictionary):
|
||||
"""Returns a class that wraps a dictionary and enables it to be used
|
||||
like an object."""
|
||||
class wrapper(object):
|
||||
def __getattr__(self, name): return dictionary[name]
|
||||
def __setattr__(self, name, value): dictionary[name] = value
|
||||
def setdefault(self, *args): return dictionary.setdefault(*args)
|
||||
def get(self, *args): return dictionary.get(*args)
|
||||
def keys(self): return dictionary.keys()
|
||||
def values(self): return dictionary.values()
|
||||
def items(self): return dictionary.items()
|
||||
def __iter__(self): return iter(dictionary)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return dictionary[name]
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
dictionary[name] = value
|
||||
|
||||
def setdefault(self, *args):
|
||||
return dictionary.setdefault(*args)
|
||||
|
||||
def get(self, *args):
|
||||
return dictionary.get(*args)
|
||||
|
||||
def keys(self):
|
||||
return dictionary.keys()
|
||||
|
||||
def values(self):
|
||||
return dictionary.values()
|
||||
|
||||
def items(self):
|
||||
return dictionary.items()
|
||||
|
||||
def __iter__(self):
|
||||
return iter(dictionary)
|
||||
|
||||
return wrapper()
|
||||
|
||||
|
||||
class RequiredAttributeError(ValueError):
|
||||
|
||||
def __init__(self, message):
|
||||
super(RequiredAttributeError, self).__init__(message)
|
||||
|
||||
@@ -23,12 +23,13 @@
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
"""LinkTree class for setting up trees of symbolic links."""
|
||||
__all__ = ['LinkTree']
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from llnl.util.filesystem import *
|
||||
|
||||
__all__ = ['LinkTree']
|
||||
|
||||
empty_file_name = '.spack-empty'
|
||||
|
||||
|
||||
@@ -43,13 +44,13 @@ class LinkTree(object):
|
||||
modified.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, source_root):
|
||||
if not os.path.exists(source_root):
|
||||
raise IOError("No such file or directory: '%s'", source_root)
|
||||
|
||||
self._root = source_root
|
||||
|
||||
|
||||
def find_conflict(self, dest_root, **kwargs):
|
||||
"""Returns the first file in dest that conflicts with src"""
|
||||
kwargs['follow_nonexisting'] = False
|
||||
@@ -61,9 +62,9 @@ def find_conflict(self, dest_root, **kwargs):
|
||||
return dest
|
||||
return None
|
||||
|
||||
|
||||
def merge(self, dest_root, **kwargs):
|
||||
"""Link all files in src into dest, creating directories if necessary."""
|
||||
"""Link all files in src into dest, creating directories
|
||||
if necessary."""
|
||||
kwargs['order'] = 'pre'
|
||||
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
|
||||
if os.path.isdir(src):
|
||||
@@ -83,7 +84,6 @@ def merge(self, dest_root, **kwargs):
|
||||
assert(not os.path.exists(dest))
|
||||
os.symlink(src, dest)
|
||||
|
||||
|
||||
def unmerge(self, dest_root, **kwargs):
|
||||
"""Unlink all files in dest that exist in src.
|
||||
|
||||
|
||||
@@ -28,6 +28,9 @@
|
||||
import time
|
||||
import socket
|
||||
|
||||
__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
|
||||
'LockError']
|
||||
|
||||
# Default timeout in seconds, after which locks will raise exceptions.
|
||||
_default_timeout = 60
|
||||
|
||||
@@ -36,13 +39,21 @@
|
||||
|
||||
|
||||
class Lock(object):
|
||||
def __init__(self,file_path):
|
||||
"""This is an implementation of a filesystem lock using Python's lockf.
|
||||
|
||||
In Python, `lockf` actually calls `fcntl`, so this should work with any
|
||||
filesystem implementation that supports locking through the fcntl calls.
|
||||
This includes distributed filesystems like Lustre (when flock is enabled)
|
||||
and recent NFS versions.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path):
|
||||
self._file_path = file_path
|
||||
self._fd = None
|
||||
self._reads = 0
|
||||
self._writes = 0
|
||||
|
||||
|
||||
def _lock(self, op, timeout):
|
||||
"""This takes a lock using POSIX locks (``fnctl.lockf``).
|
||||
|
||||
@@ -63,7 +74,9 @@ def _lock(self, op, timeout):
|
||||
|
||||
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
|
||||
if op == fcntl.LOCK_EX:
|
||||
os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
|
||||
os.write(
|
||||
self._fd,
|
||||
"pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
|
||||
return
|
||||
|
||||
except IOError as error:
|
||||
@@ -75,7 +88,6 @@ def _lock(self, op, timeout):
|
||||
|
||||
raise LockError("Timed out waiting for lock.")
|
||||
|
||||
|
||||
def _unlock(self):
|
||||
"""Releases a lock using POSIX locks (``fcntl.lockf``)
|
||||
|
||||
@@ -83,11 +95,10 @@ def _unlock(self):
|
||||
be masquerading as write locks, but this removes either.
|
||||
|
||||
"""
|
||||
fcntl.lockf(self._fd,fcntl.LOCK_UN)
|
||||
fcntl.lockf(self._fd, fcntl.LOCK_UN)
|
||||
os.close(self._fd)
|
||||
self._fd = None
|
||||
|
||||
|
||||
def acquire_read(self, timeout=_default_timeout):
|
||||
"""Acquires a recursive, shared lock for reading.
|
||||
|
||||
@@ -107,7 +118,6 @@ def acquire_read(self, timeout=_default_timeout):
|
||||
self._reads += 1
|
||||
return False
|
||||
|
||||
|
||||
def acquire_write(self, timeout=_default_timeout):
|
||||
"""Acquires a recursive, exclusive lock for writing.
|
||||
|
||||
@@ -127,7 +137,6 @@ def acquire_write(self, timeout=_default_timeout):
|
||||
self._writes += 1
|
||||
return False
|
||||
|
||||
|
||||
def release_read(self):
|
||||
"""Releases a read lock.
|
||||
|
||||
@@ -148,7 +157,6 @@ def release_read(self):
|
||||
self._reads -= 1
|
||||
return False
|
||||
|
||||
|
||||
def release_write(self):
|
||||
"""Releases a write lock.
|
||||
|
||||
@@ -170,6 +178,70 @@ def release_write(self):
|
||||
return False
|
||||
|
||||
|
||||
class LockTransaction(object):
|
||||
"""Simple nested transaction context manager that uses a file lock.
|
||||
|
||||
This class can trigger actions when the lock is acquired for the
|
||||
first time and released for the last.
|
||||
|
||||
If the acquire_fn returns a value, it is used as the return value for
|
||||
__enter__, allowing it to be passed as the `as` argument of a `with`
|
||||
statement.
|
||||
|
||||
If acquire_fn returns a context manager, *its* `__enter__` function will be
|
||||
called in `__enter__` after acquire_fn, and its `__exit__` function will be
|
||||
called before `release_fn` in `__exit__`, allowing you to nest a context
|
||||
manager to be used along with the lock.
|
||||
|
||||
Timeout for lock is customizable.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, lock, acquire_fn=None, release_fn=None,
|
||||
timeout=_default_timeout):
|
||||
self._lock = lock
|
||||
self._timeout = timeout
|
||||
self._acquire_fn = acquire_fn
|
||||
self._release_fn = release_fn
|
||||
self._as = None
|
||||
|
||||
def __enter__(self):
|
||||
if self._enter() and self._acquire_fn:
|
||||
self._as = self._acquire_fn()
|
||||
if hasattr(self._as, '__enter__'):
|
||||
return self._as.__enter__()
|
||||
else:
|
||||
return self._as
|
||||
|
||||
def __exit__(self, type, value, traceback):
|
||||
suppress = False
|
||||
if self._exit():
|
||||
if self._as and hasattr(self._as, '__exit__'):
|
||||
if self._as.__exit__(type, value, traceback):
|
||||
suppress = True
|
||||
if self._release_fn:
|
||||
if self._release_fn(type, value, traceback):
|
||||
suppress = True
|
||||
return suppress
|
||||
|
||||
|
||||
class ReadTransaction(LockTransaction):
|
||||
|
||||
def _enter(self):
|
||||
return self._lock.acquire_read(self._timeout)
|
||||
|
||||
def _exit(self):
|
||||
return self._lock.release_read()
|
||||
|
||||
|
||||
class WriteTransaction(LockTransaction):
|
||||
|
||||
def _enter(self):
|
||||
return self._lock.acquire_write(self._timeout)
|
||||
|
||||
def _exit(self):
|
||||
return self._lock.release_write()
|
||||
|
||||
|
||||
class LockError(Exception):
|
||||
"""Raised when an attempt to acquire a lock times out."""
|
||||
pass
|
||||
|
||||
@@ -36,6 +36,7 @@
|
||||
_verbose = False
|
||||
indent = " "
|
||||
|
||||
|
||||
def is_verbose():
|
||||
return _verbose
|
||||
|
||||
@@ -64,12 +65,14 @@ def info(message, *args, **kwargs):
|
||||
format = kwargs.get('format', '*b')
|
||||
stream = kwargs.get('stream', sys.stdout)
|
||||
wrap = kwargs.get('wrap', False)
|
||||
break_long_words = kwargs.get('break_long_words', False)
|
||||
|
||||
cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream)
|
||||
for arg in args:
|
||||
if wrap:
|
||||
lines = textwrap.wrap(
|
||||
str(arg), initial_indent=indent, subsequent_indent=indent)
|
||||
str(arg), initial_indent=indent, subsequent_indent=indent,
|
||||
break_long_words=break_long_words)
|
||||
for line in lines:
|
||||
stream.write(line + '\n')
|
||||
else:
|
||||
@@ -146,7 +149,8 @@ def get_yes_or_no(prompt, **kwargs):
|
||||
elif default_value is False:
|
||||
prompt += ' [y/N] '
|
||||
else:
|
||||
raise ValueError("default for get_yes_no() must be True, False, or None.")
|
||||
raise ValueError(
|
||||
"default for get_yes_no() must be True, False, or None.")
|
||||
|
||||
result = None
|
||||
while result is None:
|
||||
@@ -172,8 +176,9 @@ def hline(label=None, **kwargs):
|
||||
char = kwargs.pop('char', '-')
|
||||
max_width = kwargs.pop('max_width', 64)
|
||||
if kwargs:
|
||||
raise TypeError("'%s' is an invalid keyword argument for this function."
|
||||
% next(kwargs.iterkeys()))
|
||||
raise TypeError(
|
||||
"'%s' is an invalid keyword argument for this function."
|
||||
% next(kwargs.iterkeys()))
|
||||
|
||||
rows, cols = terminal_size()
|
||||
if not cols:
|
||||
@@ -198,7 +203,8 @@ def terminal_size():
|
||||
"""Gets the dimensions of the console: (rows, cols)."""
|
||||
def ioctl_GWINSZ(fd):
|
||||
try:
|
||||
rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
|
||||
rc = struct.unpack('hh', fcntl.ioctl(
|
||||
fd, termios.TIOCGWINSZ, '1234'))
|
||||
except:
|
||||
return
|
||||
return rc
|
||||
|
||||
@@ -27,15 +27,14 @@
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
import fcntl
|
||||
import termios
|
||||
import struct
|
||||
from StringIO import StringIO
|
||||
|
||||
from llnl.util.tty import terminal_size
|
||||
from llnl.util.tty.color import clen, cextra
|
||||
|
||||
|
||||
class ColumnConfig:
|
||||
|
||||
def __init__(self, cols):
|
||||
self.cols = cols
|
||||
self.line_length = 0
|
||||
@@ -43,7 +42,8 @@ def __init__(self, cols):
|
||||
self.widths = [0] * cols # does not include ansi colors
|
||||
|
||||
def __repr__(self):
|
||||
attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
|
||||
attrs = [(a, getattr(self, a))
|
||||
for a in dir(self) if not a.startswith("__")]
|
||||
return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
|
||||
|
||||
|
||||
@@ -68,7 +68,7 @@ def config_variable_cols(elts, console_width, padding, cols=0):
|
||||
max_cols = min(len(elts), max_cols)
|
||||
|
||||
# Range of column counts to try. If forced, use the supplied value.
|
||||
col_range = [cols] if cols else xrange(1, max_cols+1)
|
||||
col_range = [cols] if cols else xrange(1, max_cols + 1)
|
||||
|
||||
# Determine the most columns possible for the console width.
|
||||
configs = [ColumnConfig(c) for c in col_range]
|
||||
@@ -106,7 +106,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
|
||||
|
||||
# 'clen' ignores length of ansi color sequences.
|
||||
max_len = max(clen(e) for e in elts) + padding
|
||||
max_clen = max(len(e) for e in elts) + padding
|
||||
if cols == 0:
|
||||
cols = max(1, console_width / max_len)
|
||||
cols = min(len(elts), cols)
|
||||
@@ -130,17 +129,19 @@ def colify(elts, **options):
|
||||
output=<stream> A file object to write to. Default is sys.stdout.
|
||||
indent=<int> Optionally indent all columns by some number of spaces.
|
||||
padding=<int> Spaces between columns. Default is 2.
|
||||
width=<int> Width of the output. Default is 80 if tty is not detected.
|
||||
width=<int> Width of the output. Default is 80 if tty not detected.
|
||||
|
||||
cols=<int> Force number of columns. Default is to size to terminal,
|
||||
or single-column if no tty
|
||||
|
||||
tty=<bool> Whether to attempt to write to a tty. Default is to
|
||||
autodetect a tty. Set to False to force single-column output.
|
||||
autodetect a tty. Set to False to force
|
||||
single-column output.
|
||||
|
||||
method=<string> Method to use to fit columns. Options are variable or uniform.
|
||||
Variable-width columns are tighter, uniform columns are all the
|
||||
same width and fit less data on the screen.
|
||||
method=<string> Method to use to fit columns. Options are variable or
|
||||
uniform. Variable-width columns are tighter, uniform
|
||||
columns are all the same width and fit less data on
|
||||
the screen.
|
||||
"""
|
||||
# Get keyword arguments or set defaults
|
||||
cols = options.pop("cols", 0)
|
||||
@@ -152,8 +153,9 @@ def colify(elts, **options):
|
||||
console_cols = options.pop("width", None)
|
||||
|
||||
if options:
|
||||
raise TypeError("'%s' is an invalid keyword argument for this function."
|
||||
% next(options.iterkeys()))
|
||||
raise TypeError(
|
||||
"'%s' is an invalid keyword argument for this function."
|
||||
% next(options.iterkeys()))
|
||||
|
||||
# elts needs to be an array of strings so we can count the elements
|
||||
elts = [str(elt) for elt in elts]
|
||||
@@ -167,7 +169,8 @@ def colify(elts, **options):
|
||||
r, c = env_size.split('x')
|
||||
console_rows, console_cols = int(r), int(c)
|
||||
tty = True
|
||||
except: pass
|
||||
except:
|
||||
pass
|
||||
|
||||
# Use only one column if not a tty.
|
||||
if not tty:
|
||||
@@ -198,8 +201,13 @@ def colify(elts, **options):
|
||||
for col in xrange(cols):
|
||||
elt = col * rows + row
|
||||
width = config.widths[col] + cextra(elts[elt])
|
||||
fmt = '%%-%ds' % width
|
||||
output.write(fmt % elts[elt])
|
||||
if col < cols - 1:
|
||||
fmt = '%%-%ds' % width
|
||||
output.write(fmt % elts[elt])
|
||||
else:
|
||||
# Don't pad the rightmost column (spaces can wrap on
# small terminals if one line is overlong)
|
||||
output.write(elts[elt])
|
||||
|
||||
output.write("\n")
|
||||
row += 1
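A hedged usage example of the colify() options documented in the docstring above (import path assumed from this file's location):

from llnl.util.tty.colify import colify

elts = ['item%02d' % i for i in range(20)]
colify(elts, tty=False)                    # force single-column output
colify(elts, width=60, padding=2)          # variable-width columns in 60 cols
colify(elts, width=60, method='uniform')   # uniform-width columns instead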
|
||||
@@ -223,6 +231,7 @@ def colify_table(table, **options):
|
||||
raise ValueError("Table is empty in colify_table!")
|
||||
|
||||
columns = len(table[0])
|
||||
|
||||
def transpose():
|
||||
for i in xrange(columns):
|
||||
for row in table:
|
||||
|
||||
@@ -75,25 +75,27 @@
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
class ColorParseError(Exception):
|
||||
"""Raised when a color format fails to parse."""
|
||||
|
||||
def __init__(self, message):
|
||||
super(ColorParseError, self).__init__(message)
|
||||
|
||||
# Text styles for ansi codes
|
||||
styles = {'*' : '1', # bold
|
||||
'_' : '4', # underline
|
||||
None : '0' } # plain
|
||||
styles = {'*': '1', # bold
|
||||
'_': '4', # underline
|
||||
None: '0'} # plain
|
||||
|
||||
# Dim and bright ansi colors
|
||||
colors = {'k' : 30, 'K' : 90, # black
|
||||
'r' : 31, 'R' : 91, # red
|
||||
'g' : 32, 'G' : 92, # green
|
||||
'y' : 33, 'Y' : 93, # yellow
|
||||
'b' : 34, 'B' : 94, # blue
|
||||
'm' : 35, 'M' : 95, # magenta
|
||||
'c' : 36, 'C' : 96, # cyan
|
||||
'w' : 37, 'W' : 97 } # white
|
||||
colors = {'k': 30, 'K': 90, # black
|
||||
'r': 31, 'R': 91, # red
|
||||
'g': 32, 'G': 92, # green
|
||||
'y': 33, 'Y': 93, # yellow
|
||||
'b': 34, 'B': 94, # blue
|
||||
'm': 35, 'M': 95, # magenta
|
||||
'c': 36, 'C': 96, # cyan
|
||||
'w': 37, 'W': 97} # white
|
||||
|
||||
# Regex to be used for color formatting
|
||||
color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)'
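A hedged example of the format strings matched by color_re above, using the helpers defined in this file (colorize, cprint, cescape):

from llnl.util.tty.color import colorize, cprint, cescape

cprint("@*g{==>} build @_r{failed}")          # bold green '==>', underlined red 'failed'
plain = colorize("@*b{hello}", color=False)   # color=False renders without ANSI escapes
safe = cescape("user@host")                   # '@' doubled so it prints literally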
|
||||
@@ -104,6 +106,7 @@ def __init__(self, message):
|
||||
|
||||
|
||||
class match_to_ansi(object):
|
||||
|
||||
def __init__(self, color=True):
|
||||
self.color = color
|
||||
|
||||
@@ -179,12 +182,14 @@ def cprint(string, stream=sys.stdout, color=None):
|
||||
"""Same as cwrite, but writes a trailing newline to the stream."""
|
||||
cwrite(string + "\n", stream, color)
|
||||
|
||||
|
||||
def cescape(string):
|
||||
"""Replace all @ with @@ in the string provided."""
|
||||
return str(string).replace('@', '@@')
|
||||
|
||||
|
||||
class ColorStream(object):
|
||||
|
||||
def __init__(self, stream, color=None):
|
||||
self._stream = stream
|
||||
self._color = color
|
||||
@@ -196,7 +201,7 @@ def write(self, string, **kwargs):
|
||||
color = self._color
|
||||
if self._color is None:
|
||||
if raw:
|
||||
color=True
|
||||
color = True
|
||||
else:
|
||||
color = self._stream.isatty() or _force_color
|
||||
raw_write(colorize(string, color=color))
|
||||
|
||||
@@ -36,6 +36,7 @@
|
||||
# Use this to strip escape sequences
|
||||
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h')
|
||||
|
||||
|
||||
def _strip(line):
|
||||
"""Strip color and control characters from a line."""
|
||||
return _escape.sub('', line)
|
||||
@@ -58,10 +59,10 @@ class keyboard_input(object):
|
||||
When the with block completes, this will restore settings before
|
||||
canonical and echo were disabled.
|
||||
"""
|
||||
|
||||
def __init__(self, stream):
|
||||
self.stream = stream
|
||||
|
||||
|
||||
def __enter__(self):
|
||||
self.old_cfg = None
|
||||
|
||||
@@ -86,10 +87,9 @@ def __enter__(self):
|
||||
# Apply new settings for terminal
|
||||
termios.tcsetattr(fd, termios.TCSADRAIN, self.new_cfg)
|
||||
|
||||
except Exception, e:
|
||||
except Exception:
|
||||
pass # Some OS's do not support termios, so ignore.
|
||||
|
||||
|
||||
def __exit__(self, exc_type, exception, traceback):
|
||||
# If termios was available, restore old settings after the
|
||||
# with block
|
||||
@@ -114,6 +114,7 @@ class log_output(object):
|
||||
Closes the provided stream when done with the block.
|
||||
If echo is True, also prints the output to stdout.
|
||||
"""
|
||||
|
||||
def __init__(self, stream, echo=False, force_color=False, debug=False):
|
||||
self.stream = stream
|
||||
|
||||
@@ -122,7 +123,7 @@ def __init__(self, stream, echo=False, force_color=False, debug=False):
|
||||
self.force_color = force_color
|
||||
self.debug = debug
|
||||
|
||||
# Default is to try file-descriptor reassignment unless the system
|
||||
# out/err streams do not have an associated file descriptor
|
||||
self.directAssignment = False
|
||||
|
||||
@@ -130,7 +131,6 @@ def trace(self, frame, event, arg):
|
||||
"""Jumps to __exit__ on the child process."""
|
||||
raise _SkipWithBlock()
|
||||
|
||||
|
||||
def __enter__(self):
|
||||
"""Redirect output from the with block to a file.
|
||||
|
||||
@@ -154,7 +154,8 @@ def __enter__(self):
|
||||
with self.stream as log_file:
|
||||
with keyboard_input(sys.stdin):
|
||||
while True:
|
||||
rlist, w, x = select.select([read_file, sys.stdin], [], [])
|
||||
rlist, w, x = select.select(
|
||||
[read_file, sys.stdin], [], [])
|
||||
if not rlist:
|
||||
break
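The loop above multiplexes the log pipe and stdin with select(). A minimal standalone sketch of the same pattern (plain Python, not Spack code):

import os
import select
import sys

r, w = os.pipe()
os.write(w, b"some build output\n")
os.close(w)

read_file = os.fdopen(r)
while True:
    rlist, _, _ = select.select([read_file], [], [], 0.1)
    if not rlist:
        break
    line = read_file.readline()
    if not line:
        break                      # EOF: the writer closed its end
    sys.stdout.write(line)         # echo what was written into the pipe
read_file.close()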
|
||||
|
||||
@@ -211,7 +212,6 @@ def __enter__(self):
|
||||
if self.debug:
|
||||
tty._debug = True
|
||||
|
||||
|
||||
def __exit__(self, exc_type, exception, traceback):
|
||||
"""Exits on child, handles skipping the with block on parent."""
|
||||
# Child should just exit here.
|
||||
@@ -235,7 +235,7 @@ def __exit__(self, exc_type, exception, traceback):
|
||||
sys.stderr = self._stderr
|
||||
else:
|
||||
os.dup2(self._stdout, sys.stdout.fileno())
|
||||
os.dup2(self._stderr, sys.stderr.fileno())
|
||||
|
||||
return False
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# flake8: noqa
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
@@ -39,13 +40,26 @@
|
||||
lib_path = join_path(spack_root, "lib", "spack")
|
||||
build_env_path = join_path(lib_path, "env")
|
||||
module_path = join_path(lib_path, "spack")
|
||||
platform_path = join_path(module_path, 'platforms')
|
||||
compilers_path = join_path(module_path, "compilers")
|
||||
operating_system_path = join_path(module_path, 'operating_systems')
|
||||
test_path = join_path(module_path, "test")
|
||||
hooks_path = join_path(module_path, "hooks")
|
||||
var_path = join_path(spack_root, "var", "spack")
|
||||
stage_path = join_path(var_path, "stage")
|
||||
repos_path = join_path(var_path, "repos")
|
||||
share_path = join_path(spack_root, "share", "spack")
|
||||
cache_path = join_path(var_path, "cache")
|
||||
|
||||
# User configuration location
|
||||
user_config_path = os.path.expanduser('~/.spack')
|
||||
|
||||
import spack.fetch_strategy
|
||||
fetch_cache = spack.fetch_strategy.FsCache(cache_path)
|
||||
|
||||
from spack.file_cache import FileCache
|
||||
user_cache_path = join_path(user_config_path, 'cache')
|
||||
user_cache = FileCache(user_cache_path)
|
||||
|
||||
prefix = spack_root
|
||||
opt_path = join_path(prefix, "opt")
|
||||
@@ -105,7 +119,7 @@
|
||||
|
||||
# Version information
|
||||
from spack.version import Version
|
||||
spack_version = Version("0.9")
|
||||
spack_version = Version("0.9.1")
|
||||
|
||||
#
|
||||
# Executables used by Spack
|
||||
@@ -134,7 +148,7 @@
|
||||
_tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp')
|
||||
for path in _tmp_candidates:
|
||||
# don't add a second username if it's already unique by user.
|
||||
if not _tmp_user in path:
|
||||
if _tmp_user not in path:
|
||||
tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
|
||||
else:
|
||||
tmp_dirs.append(join_path(path, 'spack-stage'))
|
||||
@@ -166,13 +180,17 @@
|
||||
# Spack internal code should call 'import spack' and access other
|
||||
# variables (spack.repo, paths, etc.) directly.
|
||||
#
|
||||
# TODO: maybe this should be separated out and should go in build_environment.py?
|
||||
# TODO: it's not clear where all the stuff that needs to be included in packages
|
||||
# should live. This file is overloaded for spack core vs. for packages.
|
||||
# TODO: maybe this should be separated out to build_environment.py?
|
||||
# TODO: it's not clear where all the stuff that needs to be included in
|
||||
# packages should live. This file is overloaded for spack core vs.
|
||||
# for packages.
|
||||
#
|
||||
__all__ = ['Package', 'Version', 'when', 'ver']
|
||||
__all__ = ['Package', 'StagedPackage', 'CMakePackage',
|
||||
'Version', 'when', 'ver', 'alldeps', 'nolink']
|
||||
from spack.package import Package, ExtensionConflictError
|
||||
from spack.package import StagedPackage, CMakePackage
|
||||
from spack.version import Version, ver
|
||||
from spack.spec import DependencySpec, alldeps, nolink
|
||||
from spack.multimethod import when
|
||||
|
||||
import llnl.util.filesystem
|
||||
@@ -188,8 +206,8 @@
|
||||
__all__ += spack.util.executable.__all__
|
||||
|
||||
from spack.package import \
|
||||
install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \
|
||||
InstallError, ExternalPackageError
|
||||
install_dependency_symlinks, flatten_dependencies, \
|
||||
DependencyConflictError, InstallError, ExternalPackageError
|
||||
__all__ += [
|
||||
'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError',
|
||||
'InstallError', 'ExternalPackageError']
|
||||
'install_dependency_symlinks', 'flatten_dependencies',
|
||||
'DependencyConflictError', 'InstallError', 'ExternalPackageError']
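The __all__ exports above are what package files import. A purely hypothetical package sketch built on a few of those names; the version() directive and the install(self, spec, prefix) signature are assumptions about the Package API, not shown in this diff:

from spack import *

class Libfoo(Package):
    """Hypothetical example package."""
    homepage = "http://example.com/libfoo"
    url      = "http://example.com/libfoo-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')  # hypothetical checksum

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix)   # configure/make come from build_environment
        make()
        make('install')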
|
||||
|
||||
@@ -30,14 +30,15 @@
|
||||
from spack.util.executable import Executable, ProcessError
|
||||
from llnl.util.lang import memoized
|
||||
|
||||
|
||||
class ABI(object):
|
||||
"""This class provides methods to test ABI compatibility between specs.
|
||||
The current implementation is rather rough and could be improved."""
|
||||
|
||||
def architecture_compatible(self, parent, child):
|
||||
"""Returns true iff the parent and child specs have ABI compatible architectures."""
|
||||
return not parent.architecture or not child.architecture or parent.architecture == child.architecture
|
||||
|
||||
"""Return true if parent and child have ABI compatible targets."""
|
||||
return not parent.architecture or not child.architecture or \
|
||||
parent.architecture == child.architecture
|
||||
|
||||
@memoized
|
||||
def _gcc_get_libstdcxx_version(self, version):
|
||||
@@ -60,8 +61,9 @@ def _gcc_get_libstdcxx_version(self, version):
|
||||
else:
|
||||
return None
|
||||
try:
|
||||
output = rungcc("--print-file-name=%s" % libname, return_output=True)
|
||||
except ProcessError, e:
|
||||
output = rungcc("--print-file-name=%s" % libname,
|
||||
return_output=True)
|
||||
except ProcessError:
|
||||
return None
|
||||
if not output:
|
||||
return None
|
||||
@@ -70,7 +72,6 @@ def _gcc_get_libstdcxx_version(self, version):
|
||||
return None
|
||||
return os.path.basename(libpath)
|
||||
|
||||
|
||||
@memoized
|
||||
def _gcc_compiler_compare(self, pversion, cversion):
|
||||
"""Returns true iff the gcc version pversion and cversion
|
||||
@@ -81,7 +82,6 @@ def _gcc_compiler_compare(self, pversion, cversion):
|
||||
return False
|
||||
return plib == clib
|
||||
|
||||
|
||||
def _intel_compiler_compare(self, pversion, cversion):
|
||||
"""Returns true iff the intel version pversion and cversion
|
||||
are ABI compatible"""
|
||||
@@ -91,9 +91,8 @@ def _intel_compiler_compare(self, pversion, cversion):
|
||||
return False
|
||||
return pversion.version[:2] == cversion.version[:2]
|
||||
|
||||
|
||||
def compiler_compatible(self, parent, child, **kwargs):
|
||||
"""Returns true iff the compilers for parent and child specs are ABI compatible"""
|
||||
"""Return true if compilers for parent and child are ABI compatible."""
|
||||
if not parent.compiler or not child.compiler:
|
||||
return True
|
||||
|
||||
@@ -108,8 +107,8 @@ def compiler_compatible(self, parent, child, **kwargs):
|
||||
# TODO: into compiler classes?
|
||||
for pversion in parent.compiler.versions:
|
||||
for cversion in child.compiler.versions:
|
||||
# For a few compilers use specialized comparisons. Otherwise
|
||||
# match on version match.
|
||||
# For a few compilers use specialized comparisons.
|
||||
# Otherwise match on version match.
|
||||
if pversion.satisfies(cversion):
|
||||
return True
|
||||
elif (parent.compiler.name == "gcc" and
|
||||
@@ -120,9 +119,8 @@ def compiler_compatible(self, parent, child, **kwargs):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def compatible(self, parent, child, **kwargs):
|
||||
"""Returns true iff a parent and child spec are ABI compatible"""
|
||||
loosematch = kwargs.get('loose', False)
|
||||
return self.architecture_compatible(parent, child) and \
|
||||
self.compiler_compatible(parent, child, loose=loosematch)
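A hedged usage sketch of the class above; parent_spec and child_spec are assumed to be concretized Spec objects, and the import path is assumed from this file's location:

from spack.abi import ABI

abi = ABI()
if abi.compatible(parent_spec, child_spec, loose=True):
    pass  # specs are considered ABI compatible under the loose check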
|
||||
|
||||
@@ -22,68 +22,523 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import os
|
||||
import re
|
||||
import platform
|
||||
"""
|
||||
This module contains all the elements that are required to create an
|
||||
architecture object. These include the target processor, the operating system,
and the architecture platform (i.e. cray, darwin, linux, bgq, etc.) classes.
|
||||
|
||||
from llnl.util.lang import memoized
|
||||
On a multiple architecture machine, the architecture spec field can be set to
|
||||
build a package against any target and operating system that is present on the
|
||||
platform. On Cray platforms or any other architecture that has different front
|
||||
and back end environments, the operating system will determine the method of
compiler detection.
|
||||
|
||||
There are two different types of compiler detection:
|
||||
1. Through the $PATH env variable (front-end detection)
|
||||
2. Through the tcl module system. (back-end detection)
|
||||
|
||||
Depending on which operating system is specified, the compiler will be detected
|
||||
using one of those methods.
|
||||
|
||||
For platforms such as linux and darwin, the operating system is autodetected
|
||||
and the target is set to be x86_64.
|
||||
|
||||
The command line syntax for specifying an architecture is as follows:
|
||||
|
||||
target=<Target name> os=<OperatingSystem name>
|
||||
|
||||
If the user wishes to use the defaults, either target or os can be left out of
|
||||
the command line and Spack will concretize using the default. These defaults
|
||||
are set in the 'platforms/' directory which contains the different subclasses
|
||||
for platforms. If the machine has multiple architectures, the user can
|
||||
also enter frontend (fe) or backend (be). These settings will concretize
|
||||
to their respective front-end and back-end targets and operating systems.
|
||||
Additional platforms can be added by creating a subclass of Platform
|
||||
and adding it inside the platform directory.
|
||||
|
||||
Platform is an abstract class that is extended by subclasses. If the user
|
||||
wants to add a new type of platform (such as cray_xe), they can create a
|
||||
subclass and set all the class attributes such as priority, front_target,
|
||||
back_target, front_os, back_os. Platforms also contain a priority class
|
||||
attribute. A lower number signifies higher priority. These numbers are
|
||||
arbitrarily set and can be changed though often there isn't much need unless a
|
||||
new platform is added and the user wants that to be detected first.
|
||||
|
||||
Targets are created inside the platform subclasses. Most architectures
(like linux and darwin) will have only one target (x86_64) but in the case of
|
||||
Cray machines, there is both a frontend and backend processor. The user can
|
||||
specify which targets are present on the front-end and back-end architecture.
|
||||
|
||||
Depending on the platform, operating systems are either auto-detected or are
set. The user can set the front-end and back-end operating system by the class
attributes front_os and back_os. The operating system, as described earlier,
will be responsible for compiler detection.
|
||||
"""
|
||||
import os
|
||||
import inspect
|
||||
|
||||
from llnl.util.lang import memoized, list_modules, key_ordering
|
||||
from llnl.util.filesystem import join_path
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack
|
||||
import spack.compilers
|
||||
from spack.util.naming import mod_to_class
|
||||
from spack.util.environment import get_path
|
||||
from spack.util.multiproc import parmap
|
||||
import spack.error as serr
|
||||
|
||||
|
||||
class InvalidSysTypeError(serr.SpackError):
|
||||
def __init__(self, sys_type):
|
||||
super(InvalidSysTypeError,
|
||||
self).__init__("Invalid sys_type value for Spack: " + sys_type)
|
||||
class NoPlatformError(serr.SpackError):
|
||||
|
||||
|
||||
class NoSysTypeError(serr.SpackError):
|
||||
def __init__(self):
|
||||
super(NoSysTypeError,
|
||||
self).__init__("Could not determine sys_type for this machine.")
|
||||
super(NoPlatformError, self).__init__(
|
||||
"Could not determine a platform for this machine.")
|
||||
|
||||
|
||||
def get_sys_type_from_spack_globals():
|
||||
"""Return the SYS_TYPE from spack globals, or None if it isn't set."""
|
||||
if not hasattr(spack, "sys_type"):
|
||||
return None
|
||||
elif hasattr(spack.sys_type, "__call__"):
|
||||
return spack.sys_type()
|
||||
@key_ordering
|
||||
class Target(object):
|
||||
""" Target is the processor of the host machine.
|
||||
The host machine may have different front-end and back-end targets,
|
||||
especially if it is a Cray machine. The target will have a name and
|
||||
also the module_name (e.g. craype-compiler). Targets will also
recognize which platform they came from using the set_platform method.
Targets will have compiler finding strategies.
|
||||
"""
|
||||
|
||||
def __init__(self, name, module_name=None):
|
||||
self.name = name # e.g. 'ivybridge' on a Cray back end, plain 'x86_64' elsewhere
|
||||
self.module_name = module_name # craype-ivybridge
|
||||
|
||||
# Sets only the platform name to avoid recursiveness
|
||||
|
||||
def _cmp_key(self):
|
||||
return (self.name, self.module_name)
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
|
||||
@key_ordering
|
||||
class Platform(object):
|
||||
""" Abstract class that each type of Platform will subclass.
|
||||
Will return a instance of it once it
|
||||
is returned
|
||||
"""
|
||||
|
||||
priority = None # Subclass sets number. Controls detection order
|
||||
front_end = None
|
||||
back_end = None
|
||||
default = None # The default back end target (e.g. 'ivybridge' on Cray)
|
||||
|
||||
front_os = None
|
||||
back_os = None
|
||||
default_os = None
|
||||
|
||||
def __init__(self, name):
|
||||
self.targets = {}
|
||||
self.operating_sys = {}
|
||||
self.name = name
|
||||
|
||||
def add_target(self, name, target):
|
||||
"""Used by the platform specific subclass to list available targets.
|
||||
Raises an error if the platform specifies a name
|
||||
that is reserved by spack as an alias.
|
||||
"""
|
||||
if name in ['frontend', 'fe', 'backend', 'be', 'default_target']:
|
||||
raise ValueError(
|
||||
"%s is a spack reserved alias "
|
||||
"and cannot be the name of a target" % name)
|
||||
self.targets[name] = target
|
||||
|
||||
def target(self, name):
|
||||
"""This is a getter method for the target dictionary
|
||||
that handles defaulting based on the values provided by default,
|
||||
front-end, and back-end. This can be overwritten
|
||||
by a subclass for which we want to provide further aliasing options.
|
||||
"""
|
||||
if name == 'default_target':
|
||||
name = self.default
|
||||
elif name == 'frontend' or name == 'fe':
|
||||
name = self.front_end
|
||||
elif name == 'backend' or name == 'be':
|
||||
name = self.back_end
|
||||
|
||||
return self.targets.get(name, None)
|
||||
|
||||
def add_operating_system(self, name, os_class):
|
||||
""" Add the operating_system class object into the
|
||||
platform.operating_sys dictionary
|
||||
"""
|
||||
if name in ['frontend', 'fe', 'backend', 'be', 'default_os']:
|
||||
raise ValueError(
|
||||
"%s is a spack reserved alias "
|
||||
"and cannot be the name of an OS" % name)
|
||||
self.operating_sys[name] = os_class
|
||||
|
||||
def operating_system(self, name):
|
||||
if name == 'default_os':
|
||||
name = self.default_os
|
||||
if name == 'frontend' or name == "fe":
|
||||
name = self.front_os
|
||||
if name == 'backend' or name == 'be':
|
||||
name = self.back_os
|
||||
|
||||
return self.operating_sys.get(name, None)
|
||||
|
||||
@classmethod
|
||||
def setup_platform_environment(self, pkg, env):
|
||||
""" Subclass can override this method if it requires any
|
||||
platform-specific build environment modifications.
|
||||
"""
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def detect(self):
|
||||
""" Subclass is responsible for implementing this method.
|
||||
Returns True if the Platform class detects that
|
||||
it is the current platform
|
||||
and False if it's not.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def _cmp_key(self):
|
||||
t_keys = ''.join(str(t._cmp_key()) for t in
|
||||
sorted(self.targets.values()))
|
||||
o_keys = ''.join(str(o._cmp_key()) for o in
|
||||
sorted(self.operating_sys.values()))
|
||||
return (self.name,
|
||||
self.default,
|
||||
self.front_end,
|
||||
self.back_end,
|
||||
self.default_os,
|
||||
self.front_os,
|
||||
self.back_os,
|
||||
t_keys,
|
||||
o_keys)
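A hedged sketch of the subclassing pattern described in the module docstring; the platform, OS, and target names here are hypothetical, not an actual Spack platform:

class MyCluster(Platform):
    priority   = 50            # lower number = tried earlier during detection
    front_end  = 'x86_64'
    back_end   = 'ivybridge'
    default    = 'ivybridge'
    front_os   = 'frontos1'
    back_os    = 'backos1'
    default_os = 'backos1'

    def __init__(self):
        super(MyCluster, self).__init__('mycluster')
        self.add_target(self.front_end, Target(self.front_end))
        self.add_target(self.back_end,
                        Target(self.back_end, 'craype-ivybridge'))
        self.add_operating_system(self.front_os,
                                  OperatingSystem('frontos', '1'))
        self.add_operating_system(self.back_os,
                                  OperatingSystem('backos', '1'))

    @classmethod
    def detect(cls):
        # A real platform would check uname output or a path like /opt/cray.
        return False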
|
||||
|
||||
|
||||
@key_ordering
|
||||
class OperatingSystem(object):
|
||||
""" Operating System will be like a class similar to platform extended
|
||||
by subclasses for the specifics. Operating System will contain the
|
||||
compiler finding logic. Instead of calling two separate methods to
|
||||
find compilers we call find_compilers method for each operating system
|
||||
"""
|
||||
|
||||
def __init__(self, name, version):
|
||||
self.name = name
|
||||
self.version = version
|
||||
|
||||
def __str__(self):
|
||||
return self.name + self.version
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def _cmp_key(self):
|
||||
return (self.name, self.version)
|
||||
|
||||
def find_compilers(self, *paths):
|
||||
"""
|
||||
Return a list of compilers found in the supplied paths.
|
||||
This invokes the find() method for each Compiler class,
|
||||
and appends the compilers detected to a list.
|
||||
"""
|
||||
if not paths:
|
||||
paths = get_path('PATH')
|
||||
# Make sure path elements exist, and include /bin directories
|
||||
# under prefixes.
|
||||
filtered_path = []
|
||||
for p in paths:
|
||||
# Eliminate symlinks and just take the real directories.
|
||||
p = os.path.realpath(p)
|
||||
if not os.path.isdir(p):
|
||||
continue
|
||||
filtered_path.append(p)
|
||||
|
||||
# Check for a bin directory, add it if it exists
|
||||
bin = join_path(p, 'bin')
|
||||
if os.path.isdir(bin):
|
||||
filtered_path.append(os.path.realpath(bin))
|
||||
|
||||
# Once the paths are cleaned up, do a search for each type of
|
||||
# compiler. We can spawn a bunch of parallel searches to reduce
|
||||
# the overhead of spelunking all these directories.
|
||||
types = spack.compilers.all_compiler_types()
|
||||
compiler_lists = parmap(lambda cmp_cls:
|
||||
self.find_compiler(cmp_cls, *filtered_path),
|
||||
types)
|
||||
|
||||
# ensure all the version calls we made are cached in the parent
|
||||
# process, as well. This speeds up Spack a lot.
|
||||
clist = reduce(lambda x, y: x + y, compiler_lists)
|
||||
return clist
|
||||
|
||||
def find_compiler(self, cmp_cls, *path):
|
||||
"""Try to find the given type of compiler in the user's
|
||||
environment. For each set of compilers found, this returns
|
||||
compiler objects with the cc, cxx, f77, fc paths and the
|
||||
version filled in.
|
||||
|
||||
This will search for compilers with the names in cc_names,
|
||||
cxx_names, etc. and it will group them if they have common
|
||||
prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
|
||||
be grouped with g++-mp-4.7 and gfortran-mp-4.7.
|
||||
"""
|
||||
dicts = parmap(
|
||||
lambda t: cmp_cls._find_matches_in_path(*t),
|
||||
[(cmp_cls.cc_names, cmp_cls.cc_version) + tuple(path),
|
||||
(cmp_cls.cxx_names, cmp_cls.cxx_version) + tuple(path),
|
||||
(cmp_cls.f77_names, cmp_cls.f77_version) + tuple(path),
|
||||
(cmp_cls.fc_names, cmp_cls.fc_version) + tuple(path)])
|
||||
|
||||
all_keys = set()
|
||||
for d in dicts:
|
||||
all_keys.update(d)
|
||||
|
||||
compilers = {}
|
||||
for k in all_keys:
|
||||
ver, pre, suf = k
|
||||
|
||||
# Skip compilers with unknown version.
|
||||
if ver == 'unknown':
|
||||
continue
|
||||
|
||||
paths = tuple(pn[k] if k in pn else None for pn in dicts)
|
||||
spec = spack.spec.CompilerSpec(cmp_cls.name, ver)
|
||||
|
||||
if ver in compilers:
|
||||
prev = compilers[ver]
|
||||
|
||||
# prefer the one with more compilers.
|
||||
prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
|
||||
newcount = len([p for p in paths if p is not None])
|
||||
prevcount = len([p for p in prev_paths if p is not None])
|
||||
|
||||
# Don't add if it's not an improvement over prev compiler.
|
||||
if newcount <= prevcount:
|
||||
continue
|
||||
|
||||
compilers[ver] = cmp_cls(spec, self, paths)
|
||||
|
||||
return list(compilers.values())
|
||||
|
||||
def to_dict(self):
|
||||
d = {}
|
||||
d['name'] = self.name
|
||||
d['version'] = self.version
|
||||
return d
|
||||
|
||||
|
||||
@key_ordering
|
||||
class Arch(object):
|
||||
"""Architecture is now a class to help with setting attributes.
|
||||
|
||||
TODO: refactor so that we don't need this class.
|
||||
"""
|
||||
|
||||
def __init__(self, plat=None, os=None, target=None):
|
||||
self.platform = plat
|
||||
if plat and os:
|
||||
os = self.platform.operating_system(os)
|
||||
self.platform_os = os
|
||||
if plat and target:
|
||||
target = self.platform.target(target)
|
||||
self.target = target
|
||||
|
||||
# Hooks for parser to use when platform is set after target or os
|
||||
self.target_string = None
|
||||
self.os_string = None
|
||||
|
||||
@property
|
||||
def concrete(self):
|
||||
return all((self.platform is not None,
|
||||
isinstance(self.platform, Platform),
|
||||
self.platform_os is not None,
|
||||
isinstance(self.platform_os, OperatingSystem),
|
||||
self.target is not None, isinstance(self.target, Target)))
|
||||
|
||||
def __str__(self):
|
||||
if self.platform or self.platform_os or self.target:
|
||||
if self.platform.name == 'darwin':
|
||||
os_name = self.platform_os.name if self.platform_os else "None"
|
||||
else:
|
||||
os_name = str(self.platform_os)
|
||||
|
||||
return (str(self.platform) + "-" +
|
||||
os_name + "-" + str(self.target))
|
||||
else:
|
||||
return ''
|
||||
|
||||
def __contains__(self, string):
|
||||
return string in str(self)
|
||||
|
||||
# TODO: make this unnecessary: don't include an empty arch on *every* spec.
|
||||
def __nonzero__(self):
|
||||
return (self.platform is not None or
|
||||
self.platform_os is not None or
|
||||
self.target is not None)
|
||||
__bool__ = __nonzero__
|
||||
|
||||
def _cmp_key(self):
|
||||
if isinstance(self.platform, Platform):
|
||||
platform = self.platform.name
|
||||
else:
|
||||
platform = self.platform
|
||||
if isinstance(self.platform_os, OperatingSystem):
|
||||
platform_os = self.platform_os.name
|
||||
else:
|
||||
platform_os = self.platform_os
|
||||
if isinstance(self.target, Target):
|
||||
target = self.target.name
|
||||
else:
|
||||
target = self.target
|
||||
return (platform, platform_os, target)
|
||||
|
||||
def to_dict(self):
|
||||
d = {}
|
||||
d['platform'] = str(self.platform) if self.platform else None
|
||||
d['platform_os'] = str(self.platform_os) if self.platform_os else None
|
||||
d['target'] = str(self.target) if self.target else None
|
||||
|
||||
return d
|
||||
|
||||
|
||||
def _target_from_dict(target_name, plat=None):
|
||||
""" Creates new instance of target and assigns all the attributes of
|
||||
that target from the dictionary
|
||||
"""
|
||||
if not plat:
|
||||
plat = platform()
|
||||
return plat.target(target_name)
|
||||
|
||||
|
||||
def _operating_system_from_dict(os_name, plat=None):
|
||||
""" uses platform's operating system method to grab the constructed
|
||||
operating systems that are valid on the platform.
|
||||
"""
|
||||
if not plat:
|
||||
plat = platform()
|
||||
if isinstance(os_name, dict):
|
||||
name = os_name['name']
|
||||
version = os_name['version']
|
||||
return plat.operating_system(name + version)
|
||||
else:
|
||||
return spack.sys_type
|
||||
return plat.operating_system(os_name)
|
||||
|
||||
|
||||
def get_sys_type_from_environment():
|
||||
"""Return $SYS_TYPE or None if it's not defined."""
|
||||
return os.environ.get('SYS_TYPE')
|
||||
def _platform_from_dict(platform_name):
|
||||
""" Constructs a platform from a dictionary. """
|
||||
platform_list = all_platforms()
|
||||
for p in platform_list:
|
||||
if platform_name.replace("_", "").lower() == p.__name__.lower():
|
||||
return p()
|
||||
|
||||
|
||||
def get_sys_type_from_platform():
|
||||
"""Return the architecture from Python's platform module."""
|
||||
sys_type = platform.system() + '-' + platform.machine()
|
||||
sys_type = re.sub(r'[^\w-]', '_', sys_type)
|
||||
return sys_type.lower()
|
||||
def arch_from_dict(d):
|
||||
""" Uses _platform_from_dict, _operating_system_from_dict, _target_from_dict
|
||||
helper methods to recreate the arch tuple from the dictionary read from
|
||||
a yaml file
|
||||
"""
|
||||
arch = Arch()
|
||||
|
||||
if isinstance(d, basestring):
|
||||
# We have an old spec using a string for the architecture
|
||||
arch.platform = Platform('spack_compatibility')
|
||||
arch.platform_os = OperatingSystem('unknown', '')
|
||||
arch.target = Target(d)
|
||||
|
||||
arch.os_string = None
|
||||
arch.target_string = None
|
||||
else:
|
||||
if d is None:
|
||||
return None
|
||||
platform_name = d['platform']
|
||||
os_name = d['platform_os']
|
||||
target_name = d['target']
|
||||
|
||||
if platform_name:
|
||||
arch.platform = _platform_from_dict(platform_name)
|
||||
else:
|
||||
arch.platform = None
|
||||
if target_name:
|
||||
arch.target = _target_from_dict(target_name, arch.platform)
|
||||
else:
|
||||
arch.target = None
|
||||
if os_name:
|
||||
arch.platform_os = _operating_system_from_dict(os_name,
|
||||
arch.platform)
|
||||
else:
|
||||
arch.platform_os = None
|
||||
|
||||
arch.os_string = None
|
||||
arch.target_string = None
|
||||
|
||||
return arch
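A hedged sketch of the dictionary round trip this function supports (used when specs are read back from YAML):

plat = platform()                                   # detected Platform instance
arch = Arch(plat, 'default_os', 'default_target')   # aliases resolved to real objects

d = arch.to_dict()          # {'platform': ..., 'platform_os': ..., 'target': ...}
arch2 = arch_from_dict(d)   # reconstructs an equivalent Arch from that dict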
|
||||
|
||||
|
||||
@memoized
|
||||
def all_platforms():
|
||||
classes = []
|
||||
mod_path = spack.platform_path
|
||||
parent_module = "spack.platforms"
|
||||
|
||||
for name in list_modules(mod_path):
|
||||
mod_name = '%s.%s' % (parent_module, name)
|
||||
class_name = mod_to_class(name)
|
||||
mod = __import__(mod_name, fromlist=[class_name])
|
||||
if not hasattr(mod, class_name):
|
||||
tty.die('No class %s defined in %s' % (class_name, mod_name))
|
||||
cls = getattr(mod, class_name)
|
||||
if not inspect.isclass(cls):
|
||||
tty.die('%s.%s is not a class' % (mod_name, class_name))
|
||||
|
||||
classes.append(cls)
|
||||
|
||||
return classes
|
||||
|
||||
|
||||
@memoized
|
||||
def platform():
|
||||
"""Detects the platform for this machine.
|
||||
|
||||
Gather a list of all available subclasses of platforms.
|
||||
Sorts the list according to their priority. Priority is an arbitrarily
set number; a lower number means the platform is tried first. Detects
the platform either using uname or a file path (/opt/cray...).
|
||||
"""
|
||||
# Try to create a Platform object using the config file FIRST
|
||||
platform_list = all_platforms()
|
||||
platform_list.sort(key=lambda a: a.priority)
|
||||
|
||||
for platform_cls in platform_list:
|
||||
if platform_cls.detect():
|
||||
return platform_cls()
|
||||
|
||||
|
||||
@memoized
|
||||
def sys_type():
|
||||
"""Returns a SysType for the current machine."""
|
||||
methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment,
|
||||
get_sys_type_from_platform]
|
||||
"""Print out the "default" platform-os-target tuple for this machine.
|
||||
|
||||
# search for a method that doesn't return None
|
||||
sys_type = None
|
||||
for method in methods:
|
||||
sys_type = method()
|
||||
if sys_type:
|
||||
break
|
||||
On machines with only one target OS/target, prints out the
|
||||
platform-os-target for the frontend. For machines with a frontend
|
||||
and a backend, prints the default backend.
|
||||
|
||||
# Couldn't determine the sys_type for this machine.
|
||||
if sys_type is None:
|
||||
return "unknown_arch"
|
||||
TODO: replace with use of more explicit methods to get *all* the
|
||||
backends, as client code should really be aware of cross-compiled
|
||||
architectures.
|
||||
|
||||
if not isinstance(sys_type, basestring):
|
||||
raise InvalidSysTypeError(sys_type)
|
||||
|
||||
return sys_type
|
||||
"""
|
||||
arch = Arch(platform(), 'default_os', 'default_target')
|
||||
return str(arch)
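A hedged example of the resulting default string; the exact value depends on the machine (the one shown is illustrative only):

print(sys_type())   # e.g. 'linux-SuSE11-x86_64' on a generic linux host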
|
||||
|
||||
@@ -51,15 +51,16 @@
|
||||
Skimming this module is a nice way to get acquainted with the types of
|
||||
calls you can make from within the install() function.
|
||||
"""
|
||||
import multiprocessing
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import sys
|
||||
import shutil
|
||||
import multiprocessing
|
||||
import platform
|
||||
|
||||
import spack
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import *
|
||||
|
||||
import spack
|
||||
from spack.environment import EnvironmentModifications, validate
|
||||
from spack.util.environment import *
|
||||
from spack.util.executable import Executable, which
|
||||
@@ -74,20 +75,19 @@
|
||||
# set_build_environment_variables and used to pass parameters to
|
||||
# Spack's compiler wrappers.
|
||||
#
|
||||
SPACK_ENV_PATH = 'SPACK_ENV_PATH'
|
||||
SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
|
||||
SPACK_PREFIX = 'SPACK_PREFIX'
|
||||
SPACK_INSTALL = 'SPACK_INSTALL'
|
||||
SPACK_DEBUG = 'SPACK_DEBUG'
|
||||
SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
|
||||
SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
|
||||
|
||||
|
||||
# Platform-specific library suffix.
|
||||
dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
|
||||
|
||||
|
||||
|
||||
class MakeExecutable(Executable):
|
||||
"""Special callable executable object for make so the user can
|
||||
specify parallel or not on a per-invocation basis. Using
|
||||
@@ -98,6 +98,7 @@ class MakeExecutable(Executable):
|
||||
Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
|
||||
everything.
|
||||
"""
|
||||
|
||||
def __init__(self, name, jobs):
|
||||
super(MakeExecutable, self).__init__(name)
|
||||
self.jobs = jobs
|
||||
@@ -113,30 +114,95 @@ def __call__(self, *args, **kwargs):
|
||||
return super(MakeExecutable, self).__call__(*args, **kwargs)
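A hedged usage sketch of MakeExecutable; the parallel keyword and the -j behavior are taken from the class docstring above, not from code shown in this hunk:

make = MakeExecutable('make', jobs=8)
make()                           # parallel build (adds a -j style flag per the docstring)
make('install', parallel=False)  # serial invocation for the install step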
|
||||
|
||||
|
||||
def load_module(mod):
|
||||
"""Takes a module name and removes modules until it is possible to
|
||||
load that module. It then loads the provided module. Depends on the
|
||||
modulecmd implementation of modules used in cray and lmod.
|
||||
"""
|
||||
# Create an executable of the module command that will output python code
|
||||
modulecmd = which('modulecmd')
|
||||
modulecmd.add_default_arg('python')
|
||||
|
||||
# Read the module and remove any conflicting modules
|
||||
# We do this without checking that they are already installed
|
||||
# for ease of programming because unloading a module that is not
|
||||
# loaded does nothing.
|
||||
text = modulecmd('show', mod, output=str, error=str).split()
|
||||
for i, word in enumerate(text):
|
||||
if word == 'conflict':
|
||||
exec(compile(modulecmd('unload', text[i + 1], output=str,
|
||||
error=str), '<string>', 'exec'))
|
||||
# Load the module now that there are no conflicts
|
||||
load = modulecmd('load', mod, output=str, error=str)
|
||||
exec(compile(load, '<string>', 'exec'))
|
||||
|
||||
|
||||
def get_path_from_module(mod):
|
||||
"""Inspects a TCL module for entries that indicate the absolute path
|
||||
at which the library supported by said module can be found.
|
||||
"""
|
||||
# Create a modulecmd executable
|
||||
modulecmd = which('modulecmd')
|
||||
modulecmd.add_default_arg('python')
|
||||
|
||||
# Read the module
|
||||
text = modulecmd('show', mod, output=str, error=str).split('\n')
|
||||
# If it lists its package directory, return that
|
||||
for line in text:
|
||||
if line.find(mod.upper() + '_DIR') >= 0:
|
||||
words = line.split()
|
||||
return words[2]
|
||||
|
||||
# If it lists a -rpath instruction, use that
|
||||
for line in text:
|
||||
rpath = line.find('-rpath/')
|
||||
if rpath >= 0:
|
||||
return line[rpath + 6:line.find('/lib')]
|
||||
|
||||
# If it lists a -L instruction, use that
|
||||
for line in text:
|
||||
L = line.find('-L/')
|
||||
if L >= 0:
|
||||
return line[L + 2:line.find('/lib')]
|
||||
|
||||
# If it sets the LD_LIBRARY_PATH or CRAY_LD_LIBRARY_PATH, use that
|
||||
for line in text:
|
||||
if line.find('LD_LIBRARY_PATH') >= 0:
|
||||
words = line.split()
|
||||
path = words[2]
|
||||
return path[:path.find('/lib')]
|
||||
# Unable to find module path
|
||||
return None
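A hedged, standalone illustration of the path heuristics above on a made-up 'module show' line (the module content is invented for the example):

line = 'prepend-path  LD_LIBRARY_PATH  /opt/example/foo/1.0/lib'
words = line.split()
path = words[2]
print(path[:path.find('/lib')])   # -> /opt/example/foo/1.0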
|
||||
|
||||
|
||||
def set_compiler_environment_variables(pkg, env):
|
||||
assert pkg.spec.concrete
|
||||
assert(pkg.spec.concrete)
|
||||
compiler = pkg.compiler
|
||||
flags = pkg.spec.compiler_flags
|
||||
|
||||
# Set compiler variables used by CMake and autotools
|
||||
assert all(key in pkg.compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
|
||||
assert all(key in compiler.link_paths for key in (
|
||||
'cc', 'cxx', 'f77', 'fc'))
|
||||
|
||||
# Populate an object with the list of environment modifications
|
||||
# and return it
|
||||
# TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
|
||||
# TODO : add additional kwargs for better diagnostics, like requestor,
|
||||
# ttyout, ttyerr, etc.
|
||||
link_dir = spack.build_env_path
|
||||
env.set('CC', join_path(link_dir, pkg.compiler.link_paths['cc']))
|
||||
env.set('CXX', join_path(link_dir, pkg.compiler.link_paths['cxx']))
|
||||
env.set('F77', join_path(link_dir, pkg.compiler.link_paths['f77']))
|
||||
env.set('FC', join_path(link_dir, pkg.compiler.link_paths['fc']))
|
||||
|
||||
# Set SPACK compiler variables so that our wrapper knows what to call
|
||||
compiler = pkg.compiler
|
||||
if compiler.cc:
|
||||
env.set('SPACK_CC', compiler.cc)
|
||||
env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
|
||||
if compiler.cxx:
|
||||
env.set('SPACK_CXX', compiler.cxx)
|
||||
env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
|
||||
if compiler.f77:
|
||||
env.set('SPACK_F77', compiler.f77)
|
||||
env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
|
||||
if compiler.fc:
|
||||
env.set('SPACK_FC', compiler.fc)
|
||||
env.set('FC', join_path(link_dir, compiler.link_paths['fc']))
|
||||
|
||||
# Set SPACK compiler rpath flags so that our wrapper knows what to use
|
||||
env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg)
|
||||
@@ -144,13 +210,26 @@ def set_compiler_environment_variables(pkg, env):
|
||||
env.set('SPACK_F77_RPATH_ARG', compiler.f77_rpath_arg)
|
||||
env.set('SPACK_FC_RPATH_ARG', compiler.fc_rpath_arg)
|
||||
|
||||
# Add every valid compiler flag to the environment, prefixed with "SPACK_"
|
||||
for flag in spack.spec.FlagMap.valid_compiler_flags():
|
||||
# Concreteness guarantees key safety here
|
||||
if flags[flag] != []:
|
||||
env.set('SPACK_' + flag.upper(), ' '.join(f for f in flags[flag]))
|
||||
|
||||
env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
|
||||
|
||||
for mod in compiler.modules:
|
||||
load_module(mod)
|
||||
|
||||
return env
|
||||
|
||||
|
||||
def set_build_environment_variables(pkg, env):
|
||||
def set_build_environment_variables(pkg, env, dirty=False):
|
||||
"""
|
||||
This ensures a clean install environment when we build packages
|
||||
This ensures a clean install environment when we build packages.
|
||||
|
||||
Arguments:
|
||||
dirty -- skip unsetting the user's environment settings.
|
||||
"""
|
||||
# Add spack build environment path with compiler wrappers first in
|
||||
# the path. We add both spack.env_path, which includes default
|
||||
@@ -163,7 +242,8 @@ def set_build_environment_variables(pkg, env):
|
||||
# handled by putting one in the <build_env_path>/case-insensitive
|
||||
# directory. Add that to the path too.
|
||||
env_paths = []
|
||||
for item in [spack.build_env_path, join_path(spack.build_env_path, pkg.compiler.name)]:
|
||||
compiler_specific = join_path(spack.build_env_path, pkg.compiler.name)
|
||||
for item in [spack.build_env_path, compiler_specific]:
|
||||
env_paths.append(item)
|
||||
ci = join_path(item, 'case-insensitive')
|
||||
if os.path.isdir(ci):
|
||||
@@ -174,9 +254,11 @@ def set_build_environment_variables(pkg, env):
|
||||
env.set_path(SPACK_ENV_PATH, env_paths)
|
||||
|
||||
# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
|
||||
dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
|
||||
dep_prefixes = [d.prefix
|
||||
for d in pkg.spec.traverse(root=False, deptype='build')]
|
||||
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
|
||||
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Add dependencies to CMAKE_PREFIX_PATH
|
||||
# Add dependencies to CMAKE_PREFIX_PATH
|
||||
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
|
||||
|
||||
# Install prefix
|
||||
env.set(SPACK_PREFIX, pkg.prefix)
|
||||
@@ -184,15 +266,30 @@ def set_build_environment_variables(pkg, env):
|
||||
# Install root prefix
|
||||
env.set(SPACK_INSTALL, spack.install_path)
|
||||
|
||||
# Remove these vars from the environment during build because they
|
||||
# can affect how some packages find libraries. We want to make
|
||||
# sure that builds never pull in unintended external dependencies.
|
||||
env.unset('LD_LIBRARY_PATH')
|
||||
env.unset('LD_RUN_PATH')
|
||||
env.unset('DYLD_LIBRARY_PATH')
|
||||
# Stuff in here sanitizes the build environment to eliminate
|
||||
# anything the user has set that may interfere.
|
||||
if not dirty:
|
||||
# Remove these vars from the environment during build because they
|
||||
# can affect how some packages find libraries. We want to make
|
||||
# sure that builds never pull in unintended external dependencies.
|
||||
env.unset('LD_LIBRARY_PATH')
|
||||
env.unset('LIBRARY_PATH')
|
||||
env.unset('CPATH')
|
||||
env.unset('LD_RUN_PATH')
|
||||
env.unset('DYLD_LIBRARY_PATH')
|
||||
|
||||
# Remove any macports installs from the PATH. The macports ld can
|
||||
# cause conflicts with the built-in linker on el capitan. Solves
|
||||
# assembler issues, e.g.:
|
||||
# suffix or operands invalid for `movq'"
|
||||
path = get_path('PATH')
|
||||
for p in path:
|
||||
if '/macports/' in p:
|
||||
env.remove_path('PATH', p)
|
||||
|
||||
# Add bin directories from dependencies to the PATH for the build.
|
||||
bin_dirs = reversed(filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
|
||||
bin_dirs = reversed(
|
||||
filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
|
||||
for item in bin_dirs:
|
||||
env.prepend_path('PATH', item)
|
||||
|
||||
@@ -203,13 +300,14 @@ def set_build_environment_variables(pkg, env):
|
||||
env.set(SPACK_DEBUG_LOG_DIR, spack.spack_working_dir)
|
||||
|
||||
# Add any pkgconfig directories to PKG_CONFIG_PATH
|
||||
pkg_config_dirs = []
|
||||
for p in dep_prefixes:
|
||||
for maybe in ('lib', 'lib64', 'share'):
|
||||
pcdir = join_path(p, maybe, 'pkgconfig')
|
||||
for pre in dep_prefixes:
|
||||
for directory in ('lib', 'lib64', 'share'):
|
||||
pcdir = join_path(pre, directory, 'pkgconfig')
|
||||
if os.path.isdir(pcdir):
|
||||
pkg_config_dirs.append(pcdir)
|
||||
env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
|
||||
env.prepend_path('PKG_CONFIG_PATH', pcdir)
|
||||
|
||||
if pkg.spec.architecture.target.module_name:
|
||||
load_module(pkg.spec.architecture.target.module_name)
|
||||
|
||||
return env
|
||||
|
||||
@@ -218,7 +316,7 @@ def set_module_variables_for_package(pkg, module):
|
||||
"""Populate the module scope of install() with some useful functions.
|
||||
This makes things easier for package writers.
|
||||
"""
|
||||
# number of jobs spack will to build with.
|
||||
# number of jobs spack will build with.
|
||||
jobs = multiprocessing.cpu_count()
|
||||
if not pkg.parallel:
|
||||
jobs = 1
|
||||
@@ -229,8 +327,9 @@ def set_module_variables_for_package(pkg, module):
|
||||
m.make_jobs = jobs
|
||||
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
m.make = MakeExecutable('make', jobs)
|
||||
m.gmake = MakeExecutable('gmake', jobs)
|
||||
m.scons = MakeExecutable('scons', jobs)
|
||||
|
||||
# easy shortcut to os.environ
|
||||
m.env = os.environ
|
||||
@@ -239,11 +338,8 @@ def set_module_variables_for_package(pkg, module):
|
||||
# Don't use which for this; we want to find it in the current dir.
|
||||
m.configure = Executable('./configure')
|
||||
|
||||
# TODO: shouldn't really use "which" here. Consider adding notion
|
||||
# TODO: of build dependencies, as opposed to link dependencies.
|
||||
# TODO: Currently, everything is a link dependency, but tools like
|
||||
# TODO: this shouldn't be.
|
||||
m.cmake = Executable('cmake')
|
||||
m.ctest = Executable('ctest')
|
||||
|
||||
# standard CMake arguments
|
||||
m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix,
|
||||
@@ -253,33 +349,34 @@ def set_module_variables_for_package(pkg, module):
|
||||
|
||||
# Set up CMake rpath
|
||||
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
|
||||
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
|
||||
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' %
|
||||
":".join(get_rpaths(pkg)))
|
||||
|
||||
# Put spack compiler paths in module scope.
|
||||
link_dir = spack.build_env_path
|
||||
m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
|
||||
m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx'])
|
||||
m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77'])
|
||||
m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc'])
|
||||
|
||||
# Emulate some shell commands for convenience
|
||||
m.pwd = os.getcwd
|
||||
m.cd = os.chdir
|
||||
m.mkdir = os.mkdir
|
||||
m.makedirs = os.makedirs
|
||||
m.remove = os.remove
|
||||
m.removedirs = os.removedirs
|
||||
m.symlink = os.symlink
|
||||
|
||||
m.mkdirp = mkdirp
|
||||
m.install = install
|
||||
m.install_tree = install_tree
|
||||
m.rmtree = shutil.rmtree
|
||||
m.move = shutil.move
|
||||
|
||||
# Useful directories within the prefix are encapsulated in
|
||||
# a Prefix object.
|
||||
m.prefix = pkg.prefix
|
||||
|
||||
# Platform-specific library suffix.
|
||||
m.dso_suffix = dso_suffix
|
||||
@@ -288,30 +385,45 @@ def set_module_variables_for_package(pkg, module):
|
||||
def get_rpaths(pkg):
|
||||
"""Get a list of all the rpaths for a package."""
|
||||
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
|
||||
rpaths.extend(d.prefix.lib for d in pkg.spec.dependencies.values()
|
||||
deps = pkg.spec.dependencies(deptype='link')
|
||||
rpaths.extend(d.prefix.lib for d in deps
|
||||
if os.path.isdir(d.prefix.lib))
|
||||
rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
|
||||
rpaths.extend(d.prefix.lib64 for d in deps
|
||||
if os.path.isdir(d.prefix.lib64))
|
||||
# Second module is our compiler mod name. We use that to get rpaths from
|
||||
# module show output.
|
||||
if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
|
||||
rpaths.append(get_path_from_module(pkg.compiler.modules[1]))
|
||||
return rpaths
|
||||
|
||||
|
||||
def parent_class_modules(cls):
|
||||
"""Get list of super class modules that are all descend from spack.Package"""
|
||||
"""
|
||||
Get the list of superclass modules that all descend from spack.Package
|
||||
"""
|
||||
if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls):
|
||||
return []
|
||||
result = []
|
||||
module = sys.modules.get(cls.__module__)
|
||||
if module:
|
||||
result = [ module ]
|
||||
result = [module]
|
||||
for c in cls.__bases__:
|
||||
result.extend(parent_class_modules(c))
|
||||
return result
|
||||
|
||||
|
||||
def setup_package(pkg):
|
||||
def load_external_modules(pkg):
|
||||
""" traverse the spec list and find any specs that have external modules.
|
||||
"""
|
||||
for dep in list(pkg.spec.traverse()):
|
||||
if dep.external_module:
|
||||
load_module(dep.external_module)
|
||||
|
||||
|
||||
def setup_package(pkg, dirty=False):
|
||||
"""Execute all environment setup routines."""
|
||||
spack_env = EnvironmentModifications()
|
||||
run_env = EnvironmentModifications()
|
||||
|
||||
# Before proceeding, ensure that specs and packages are consistent
|
||||
#
|
||||
@@ -327,14 +439,16 @@ def setup_package(pkg):
|
||||
# throwaway environment, but it is kind of dirty.
|
||||
#
|
||||
# TODO: Think about how to avoid this fix and do something cleaner.
|
||||
for s in pkg.spec.traverse(): s.package.spec = s
|
||||
for s in pkg.spec.traverse():
|
||||
s.package.spec = s
|
||||
|
||||
set_compiler_environment_variables(pkg, spack_env)
|
||||
set_build_environment_variables(pkg, spack_env)
|
||||
|
||||
set_build_environment_variables(pkg, spack_env, dirty)
|
||||
pkg.spec.architecture.platform.setup_platform_environment(pkg, spack_env)
|
||||
load_external_modules(pkg)
|
||||
# traverse in postorder so package can use vars from its dependencies
|
||||
spec = pkg.spec
|
||||
for dspec in pkg.spec.traverse(order='post', root=False):
|
||||
for dspec in pkg.spec.traverse(order='post', root=False, deptype='build'):
|
||||
# If a user makes their own package repo, e.g.
|
||||
# spack.repos.mystuff.libelf.Libelf, and they inherit from
|
||||
# an existing class like spack.repos.original.libelf.Libelf,
|
||||
@@ -359,7 +473,7 @@ def setup_package(pkg):
|
||||
spack_env.apply_modifications()
|
||||
|
||||
|
||||
def fork(pkg, function):
|
||||
def fork(pkg, function, dirty=False):
|
||||
"""Fork a child process to do part of a spack build.
|
||||
|
||||
Arguments:
|
||||
@@ -367,6 +481,7 @@ def fork(pkg, function):
|
||||
pkg -- pkg whose environment we should set up the
|
||||
forked process for.
|
||||
function -- arg-less function to run in the child process.
|
||||
dirty -- If True, do NOT clean the environment before building.
|
||||
|
||||
Usage:
|
||||
def child_fun():
|
||||
@@ -390,7 +505,7 @@ def child_fun():
|
||||
|
||||
if pid == 0:
|
||||
# Give the child process the package's build environment.
|
||||
setup_package(pkg)
|
||||
setup_package(pkg, dirty=dirty)
|
||||
|
||||
try:
|
||||
# call the forked function.
|
||||
@@ -415,7 +530,9 @@ def child_fun():
|
||||
# message. Just make the parent exit with an error code.
|
||||
pid, returncode = os.waitpid(pid, 0)
|
||||
if returncode != 0:
|
||||
raise InstallError("Installation process had nonzero exit code.".format(str(returncode)))
|
||||
message = "Installation process had nonzero exit code : {code}"
|
||||
strcode = str(returncode)
|
||||
raise InstallError(message.format(code=strcode))
|
||||
|
||||
|
||||
class InstallError(spack.error.SpackError):
|
||||
|
||||
@@ -27,16 +27,18 @@
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import attr_setdefault
|
||||
|
||||
import spack
|
||||
import spack.spec
|
||||
import spack.config
|
||||
import spack.spec
|
||||
from llnl.util.lang import *
|
||||
from llnl.util.tty.colify import *
|
||||
from llnl.util.tty.color import *
|
||||
|
||||
#
|
||||
# Settings for commands that modify configuration
|
||||
#
|
||||
# Commands that modify configuration by default modify the *highest*
# priority scope.
|
||||
default_modify_scope = spack.config.highest_precedence_scope().name
|
||||
# Commands that list configuration list *all* scopes by default.
|
||||
default_list_scope = None
|
||||
@@ -48,7 +50,7 @@
|
||||
ignore_files = r'^\.|^__init__.py$|^#'
|
||||
|
||||
SETUP_PARSER = "setup_parser"
|
||||
DESCRIPTION = "description"
|
||||
DESCRIPTION = "description"
|
||||
|
||||
command_path = os.path.join(spack.lib_path, "spack", "cmd")
|
||||
|
||||
@@ -71,7 +73,7 @@ def get_module(name):
|
||||
module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION],
|
||||
level=0)
|
||||
|
||||
attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
|
||||
attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
|
||||
attr_setdefault(module, DESCRIPTION, "")
|
||||
|
||||
fn_name = get_cmd_function_name(name)
|
||||
@@ -101,17 +103,17 @@ def parse_specs(args, **kwargs):
|
||||
specs = spack.spec.parse(args)
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize() # implies normalize
|
||||
spec.concretize() # implies normalize
|
||||
elif normalize:
|
||||
spec.normalize()
|
||||
|
||||
return specs
|
||||
|
||||
except spack.parse.ParseError, e:
except spack.parse.ParseError as e:
tty.error(e.message, e.string, e.pos * " " + "^")
sys.exit(1)

except spack.spec.SpecError, e:
except spack.spec.SpecError as e:
tty.error(e.message)
sys.exit(1)

@@ -127,7 +129,7 @@ def elide_list(line_list, max_num=10):
|
||||
[1, 2, 3, '...', 6]
|
||||
"""
|
||||
if len(line_list) > max_num:
|
||||
return line_list[:max_num-1] + ['...'] + line_list[-1:]
|
||||
return line_list[:max_num - 1] + ['...'] + line_list[-1:]
|
||||
else:
|
||||
return line_list
|
||||
|
||||
@@ -138,10 +140,104 @@ def disambiguate_spec(spec):
|
||||
tty.die("Spec '%s' matches no installed packages." % spec)
|
||||
|
||||
elif len(matching_specs) > 1:
|
||||
args = ["%s matches multiple packages." % spec,
|
||||
"Matching packages:"]
|
||||
args = ["%s matches multiple packages." % spec,
|
||||
"Matching packages:"]
|
||||
args += [" " + str(s) for s in matching_specs]
|
||||
args += ["Use a more specific spec."]
|
||||
tty.die(*args)
|
||||
|
||||
return matching_specs[0]
|
||||
|
||||
|
||||
def ask_for_confirmation(message):
while True:
tty.msg(message + '[y/n]')
choice = raw_input().lower()
if choice == 'y':
break
elif choice == 'n':
raise SystemExit('Operation aborted')
tty.warn('Please reply either "y" or "n"')


def gray_hash(spec, length):
return colorize('@K{%s}' % spec.dag_hash(length))


def display_specs(specs, **kwargs):
|
||||
mode = kwargs.get('mode', 'short')
|
||||
hashes = kwargs.get('long', False)
|
||||
namespace = kwargs.get('namespace', False)
|
||||
flags = kwargs.get('show_flags', False)
|
||||
variants = kwargs.get('variants', False)
|
||||
|
||||
hlen = 7
|
||||
if kwargs.get('very_long', False):
|
||||
hashes = True
|
||||
hlen = None
|
||||
|
||||
nfmt = '.' if namespace else '_'
|
||||
ffmt = '$%+' if flags else ''
|
||||
vfmt = '$+' if variants else ''
|
||||
format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
|
||||
|
||||
# Make a dict with specs keyed by architecture and compiler.
|
||||
index = index_by(specs, ('architecture', 'compiler'))
|
||||
|
||||
# Traverse the index and print out each package
|
||||
for i, (architecture, compiler) in enumerate(sorted(index)):
|
||||
if i > 0:
|
||||
print
|
||||
|
||||
header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
|
||||
architecture, spack.spec.compiler_color,
|
||||
compiler)
|
||||
tty.hline(colorize(header), char='-')
|
||||
|
||||
specs = index[(architecture, compiler)]
|
||||
specs.sort()
|
||||
|
||||
abbreviated = [s.format(format_string, color=True) for s in specs]
|
||||
if mode == 'paths':
|
||||
# Print one spec per line along with prefix path
|
||||
width = max(len(s) for s in abbreviated)
|
||||
width += 2
|
||||
format = " %%-%ds%%s" % width
|
||||
|
||||
for abbrv, spec in zip(abbreviated, specs):
|
||||
if hashes:
|
||||
print(gray_hash(spec, hlen), )
|
||||
print(format % (abbrv, spec.prefix))
|
||||
|
||||
elif mode == 'deps':
|
||||
for spec in specs:
|
||||
print(spec.tree(
|
||||
format=format_string,
|
||||
color=True,
|
||||
indent=4,
|
||||
prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
|
||||
|
||||
elif mode == 'short':
|
||||
# Print columns of output if not printing flags
|
||||
if not flags:
|
||||
|
||||
def fmt(s):
|
||||
string = ""
|
||||
if hashes:
|
||||
string += gray_hash(s, hlen) + ' '
|
||||
string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
|
||||
|
||||
return string
|
||||
|
||||
colify(fmt(s) for s in specs)
|
||||
# Print one entry per line if including flags
|
||||
else:
|
||||
for spec in specs:
|
||||
# Print the hash if necessary
|
||||
hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
|
||||
print(hsh + spec.format(format_string, color=True) + '\n')
|
||||
|
||||
else:
|
||||
raise ValueError(
|
||||
"Invalid mode for display_specs: %s. Must be one of (paths,"
|
||||
"deps, short)." % mode)
|
||||
|
||||
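display_specs() above groups its specs with index_by(specs, ('architecture', 'compiler')) before printing each group under a colored header. The helper below is a hedged, simplified stand-in for llnl.util.lang.index_by, just to show the grouping idea; FakeSpec and the sample values are made up:

    from collections import defaultdict, namedtuple

    def index_by(objects, keys):
        # Group objects by a tuple of attribute values, as display_specs() uses it.
        index = defaultdict(list)
        for obj in objects:
            index[tuple(getattr(obj, key) for key in keys)].append(obj)
        return dict(index)

    FakeSpec = namedtuple('FakeSpec', ['name', 'architecture', 'compiler'])
    specs = [FakeSpec('zlib', 'linux-x86_64', 'gcc@4.9'),
             FakeSpec('hdf5', 'linux-x86_64', 'gcc@4.9'),
             FakeSpec('mpich', 'linux-ppc64', 'clang@3.8')]
    index = index_by(specs, ('architecture', 'compiler'))
    for (arch, compiler) in sorted(index):
        names = ', '.join(s.name for s in index[(arch, compiler)])
        print('%s / %s: %s' % (arch, compiler, names))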
@@ -29,12 +29,14 @@
|
||||
|
||||
description = "Activate a package extension."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-f', '--force', action='store_true',
|
||||
help="Activate without first activating dependencies.")
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="spec of package extension to activate.")
|
||||
|
||||
|
||||
def activate(parser, args):
|
||||
|
||||
@@ -22,14 +22,10 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import spack
|
||||
import spack.architecture as architecture
|
||||
|
||||
description = "Print the architecture for this machine"
|
||||
|
||||
|
||||
def arch(parser, args):
|
||||
configured_sys_type = architecture.get_sys_type_from_spack_globals()
|
||||
if not configured_sys_type:
|
||||
configured_sys_type = "autodetect"
|
||||
print "Configured sys_type: %s" % configured_sys_type
|
||||
print "Autodetected default sys_type: %s" % architecture.sys_type()
|
||||
print architecture.sys_type()
|
||||
|
||||
@@ -23,7 +23,6 @@
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import os
|
||||
from subprocess import check_call
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import join_path, mkdirp
|
||||
@@ -31,26 +30,49 @@
|
||||
import spack
|
||||
from spack.util.executable import which
|
||||
|
||||
_SPACK_UPSTREAM = 'https://github.com/llnl/spack'
|
||||
|
||||
description = "Create a new installation of spack in another prefix"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('prefix', help="names of prefix where we should install spack")
|
||||
subparser.add_argument(
|
||||
'-r', '--remote', action='store', dest='remote',
|
||||
help="name of the remote to bootstrap from", default='origin')
|
||||
subparser.add_argument(
|
||||
'prefix',
|
||||
help="names of prefix where we should install spack")
|
||||
|
||||
|
||||
def get_origin_url():
|
||||
def get_origin_info(remote):
|
||||
git_dir = join_path(spack.prefix, '.git')
|
||||
git = which('git', required=True)
|
||||
origin_url = git(
|
||||
'--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url',
|
||||
output=str)
|
||||
return origin_url.strip()
|
||||
try:
|
||||
branch = git('symbolic-ref', '--short', 'HEAD', output=str)
|
||||
except ProcessError:
|
||||
branch = 'develop'
|
||||
tty.warn('No branch found; using default branch: %s' % branch)
|
||||
if remote == 'origin' and \
|
||||
branch not in ('master', 'develop'):
|
||||
branch = 'develop'
|
||||
tty.warn('Unknown branch found; using default branch: %s' % branch)
|
||||
try:
|
||||
origin_url = git(
|
||||
'--git-dir=%s' % git_dir,
|
||||
'config', '--get', 'remote.%s.url' % remote,
|
||||
output=str)
|
||||
except ProcessError:
|
||||
origin_url = _SPACK_UPSTREAM
|
||||
tty.warn('No git repository found; '
|
||||
'using default upstream URL: %s' % origin_url)
|
||||
return (origin_url.strip(), branch.strip())
|
||||
|
||||
|
||||
def bootstrap(parser, args):
|
||||
origin_url = get_origin_url()
|
||||
origin_url, branch = get_origin_info(args.remote)
|
||||
prefix = args.prefix
|
||||
|
||||
tty.msg("Fetching spack from origin: %s" % origin_url)
|
||||
tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))
|
||||
|
||||
if os.path.isfile(prefix):
|
||||
tty.die("There is already a file at %s" % prefix)
|
||||
@@ -62,7 +84,8 @@ def bootstrap(parser, args):
|
||||
|
||||
files_in_the_way = os.listdir(prefix)
|
||||
if files_in_the_way:
|
||||
tty.die("There are already files there! Delete these files before boostrapping spack.",
|
||||
tty.die("There are already files there! "
|
||||
"Delete these files before boostrapping spack.",
|
||||
*files_in_the_way)
|
||||
|
||||
tty.msg("Installing:",
|
||||
@@ -73,8 +96,10 @@ def bootstrap(parser, args):
|
||||
git = which('git', required=True)
|
||||
git('init', '--shared', '-q')
|
||||
git('remote', 'add', 'origin', origin_url)
|
||||
git('fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q')
|
||||
git('reset', '--hard', 'origin/master', '-q')
|
||||
git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
|
||||
'-n', '-q')
|
||||
git('reset', '--hard', 'origin/%s' % branch, '-q')
|
||||
git('checkout', '-B', branch, 'origin/%s' % branch, '-q')
|
||||
|
||||
tty.msg("Successfully created a new spack in %s" % prefix,
|
||||
"Run %s/bin/spack to use this installation." % prefix)
|
||||
|
||||
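get_origin_info() above asks git for the current branch and the remote URL, falling back to 'develop' and the upstream URL when either lookup fails. A self-contained sketch of those two lookups using subprocess follows; the helper name and fallbacks mirror the hunk, but this is an illustration, not the Spack code:

    import subprocess

    DEFAULT_UPSTREAM = 'https://github.com/llnl/spack'  # mirrors _SPACK_UPSTREAM above

    def get_origin_info(git_dir, remote='origin'):
        # Best-effort lookup of (url, branch) for an existing clone.
        def git(*args):
            return subprocess.check_output(
                ['git', '--git-dir=%s' % git_dir] + list(args),
                universal_newlines=True).strip()

        try:
            branch = git('symbolic-ref', '--short', 'HEAD')
        except subprocess.CalledProcessError:
            branch = 'develop'      # detached HEAD or no repository: fall back
        try:
            url = git('config', '--get', 'remote.%s.url' % remote)
        except subprocess.CalledProcessError:
            url = DEFAULT_UPSTREAM  # no such remote: fall back to upstream
        return url, branch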
@@ -25,7 +25,8 @@
|
||||
import spack.cmd.location
|
||||
import spack.modules
|
||||
|
||||
description="cd to spack directories in the shell."
|
||||
description = "cd to spack directories in the shell."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
"""This is for decoration -- spack cd is used through spack's
|
||||
|
||||
@@ -42,7 +42,8 @@ def setup_parser(subparser):
|
||||
'--keep-stage', action='store_true', dest='keep_stage',
|
||||
help="Don't clean up staging area when command completes.")
|
||||
subparser.add_argument(
|
||||
'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
|
||||
'versions', nargs=argparse.REMAINDER,
|
||||
help='Versions to generate checksums for')
|
||||
|
||||
|
||||
def get_checksums(versions, urls, **kwargs):
|
||||
@@ -59,10 +60,10 @@ def get_checksums(versions, urls, **kwargs):
|
||||
with Stage(url, keep=keep_stage) as stage:
|
||||
stage.fetch()
|
||||
if i == 0 and first_stage_function:
|
||||
first_stage_function(stage)
|
||||
first_stage_function(stage, url)
|
||||
|
||||
hashes.append((version,
|
||||
spack.util.crypto.checksum(hashlib.md5, stage.archive_file)))
|
||||
hashes.append((version, spack.util.crypto.checksum(
|
||||
hashlib.md5, stage.archive_file)))
|
||||
i += 1
|
||||
except FailedDownloadError as e:
|
||||
tty.msg("Failed to fetch %s" % url)
|
||||
@@ -79,12 +80,12 @@ def checksum(parser, args):
|
||||
# If the user asked for specific versions, use those.
|
||||
if args.versions:
|
||||
versions = {}
|
||||
for v in args.versions:
|
||||
v = ver(v)
|
||||
if not isinstance(v, Version):
|
||||
for version in args.versions:
|
||||
version = ver(version)
|
||||
if not isinstance(version, Version):
|
||||
tty.die("Cannot generate checksums for version lists or " +
|
||||
"version ranges. Use unambiguous versions.")
|
||||
versions[v] = pkg.url_for_version(v)
|
||||
versions[version] = pkg.url_for_version(version)
|
||||
else:
|
||||
versions = pkg.fetch_remote_versions()
|
||||
if not versions:
|
||||
@@ -111,5 +112,7 @@ def checksum(parser, args):
|
||||
if not version_hashes:
|
||||
tty.die("Could not fetch any versions for %s" % pkg.name)
|
||||
|
||||
version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes]
|
||||
version_lines = [
|
||||
" version('%s', '%s')" % (v, h) for v, h in version_hashes
|
||||
]
|
||||
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)
|
||||
|
||||
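get_checksums() above hashes each staged archive with spack.util.crypto.checksum(hashlib.md5, stage.archive_file) and turns the results into version() lines. A hedged sketch of such a checksum helper is below; the block size and names are assumptions, not Spack's implementation:

    import hashlib

    def checksum(hash_factory, filename, block_size=2 ** 20):
        # Hash a file in blocks so large tarballs do not need to fit in memory.
        hasher = hash_factory()
        with open(filename, 'rb') as f:
            while True:
                block = f.read(block_size)
                if not block:
                    break
                hasher.update(block)
        return hasher.hexdigest()

    # e.g. building one of the version() lines shown above:
    # "    version('1.2.8', '%s')" % checksum(hashlib.md5, 'zlib-1.2.8.tar.gz')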
@@ -31,6 +31,7 @@
|
||||
|
||||
description = "Remove build stage and source tarball for packages."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('packages', nargs=argparse.REMAINDER,
|
||||
help="specs of packages to clean")
|
||||
|
||||
24
lib/spack/spack/cmd/common/__init__.py
Normal file
@@ -0,0 +1,24 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
96
lib/spack/spack/cmd/common/arguments.py
Normal file
@@ -0,0 +1,96 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
|
||||
import argparse
|
||||
|
||||
import spack.modules
|
||||
from spack.util.pattern import Bunch
|
||||
__all__ = ['add_common_arguments']
|
||||
|
||||
_arguments = {}
|
||||
|
||||
|
||||
def add_common_arguments(parser, list_of_arguments):
|
||||
for argument in list_of_arguments:
|
||||
if argument not in _arguments:
|
||||
message = 'Trying to add non existing argument "{0}" to a command'
|
||||
raise KeyError(message.format(argument))
|
||||
x = _arguments[argument]
|
||||
parser.add_argument(*x.flags, **x.kwargs)
|
||||
|
||||
|
||||
class ConstraintAction(argparse.Action):
|
||||
"""Constructs a list of specs based on a constraint given on the command line
|
||||
|
||||
An instance of this class is supposed to be used as an argument action
|
||||
in a parser. It will read a constraint and will attach a list of matching
|
||||
specs to the namespace
|
||||
"""
|
||||
qualifiers = {}
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
# Query specs from command line
|
||||
d = self.qualifiers.get(namespace.subparser_name, {})
|
||||
specs = [s for s in spack.installed_db.query(**d)]
|
||||
values = ' '.join(values)
|
||||
if values:
|
||||
specs = [x for x in specs if x.satisfies(values, strict=True)]
|
||||
namespace.specs = specs
|
||||
|
||||
parms = Bunch(
|
||||
flags=('constraint',),
|
||||
kwargs={
|
||||
'nargs': '*',
|
||||
'help': 'Constraint to select a subset of installed packages',
|
||||
'action': ConstraintAction
|
||||
})
|
||||
_arguments['constraint'] = parms
|
||||
|
||||
parms = Bunch(
|
||||
flags=('-m', '--module-type'),
|
||||
kwargs={
|
||||
'help': 'Type of module files',
|
||||
'default': 'tcl',
|
||||
'choices': spack.modules.module_types
|
||||
})
|
||||
_arguments['module_type'] = parms
|
||||
|
||||
parms = Bunch(
|
||||
flags=('-y', '--yes-to-all'),
|
||||
kwargs={
|
||||
'action': 'store_true',
|
||||
'dest': 'yes_to_all',
|
||||
'help': 'Assume "yes" is the answer to every confirmation request.'
|
||||
})
|
||||
_arguments['yes_to_all'] = parms
|
||||
|
||||
parms = Bunch(
|
||||
flags=('-r', '--dependencies'),
|
||||
kwargs={
|
||||
'action': 'store_true',
|
||||
'dest': 'recurse_dependencies',
|
||||
'help': 'Recursively traverse spec dependencies'
|
||||
})
|
||||
_arguments['recurse_dependencies'] = parms
|
||||
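The new arguments.py registers shared options in _arguments as Bunch(flags, kwargs) entries, and ConstraintAction shows the argparse.Action pattern: post-process the raw values and attach the result to the parsed namespace. A toy, self-contained example of that Action pattern (UpperCaseAction is invented purely for illustration):

    import argparse

    class UpperCaseAction(argparse.Action):
        # Same shape as ConstraintAction above: compute something from the raw
        # values and store it on the namespace instead of the values themselves.
        def __call__(self, parser, namespace, values, option_string=None):
            setattr(namespace, self.dest, [v.upper() for v in values])

    parser = argparse.ArgumentParser()
    parser.add_argument('names', nargs='*', action=UpperCaseAction)
    print(parser.parse_args(['zlib', 'hdf5']).names)  # ['ZLIB', 'HDF5']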
@@ -37,6 +37,7 @@
|
||||
|
||||
description = "Manage compilers"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
sp = subparser.add_subparsers(
|
||||
metavar='SUBCOMMAND', dest='compiler_command')
|
||||
@@ -44,43 +45,58 @@ def setup_parser(subparser):
|
||||
scopes = spack.config.config_scopes
|
||||
|
||||
# Find
|
||||
find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.')
|
||||
find_parser = sp.add_parser(
|
||||
'find', aliases=['add'],
|
||||
help='Search the system for compilers to add to Spack configuration.')
|
||||
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
|
||||
find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
find_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
|
||||
# Remove
|
||||
remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.')
|
||||
remove_parser = sp.add_parser(
|
||||
'remove', aliases=['rm'], help='Remove compiler by spec.')
|
||||
remove_parser.add_argument(
|
||||
'-a', '--all', action='store_true', help='Remove ALL compilers that match spec.')
|
||||
'-a', '--all', action='store_true',
|
||||
help='Remove ALL compilers that match spec.')
|
||||
remove_parser.add_argument('compiler_spec')
|
||||
remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
remove_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
|
||||
# List
|
||||
list_parser = sp.add_parser('list', help='list available compilers')
|
||||
list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
|
||||
help="Configuration scope to read from.")
|
||||
list_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
|
||||
help="Configuration scope to read from.")
|
||||
|
||||
# Info
|
||||
info_parser = sp.add_parser('info', help='Show compiler paths.')
|
||||
info_parser.add_argument('compiler_spec')
|
||||
info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
|
||||
help="Configuration scope to read from.")
|
||||
info_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
|
||||
help="Configuration scope to read from.")
|
||||
|
||||
|
||||
def compiler_find(args):
|
||||
"""Search either $PATH or a list of paths for compilers and add them
|
||||
to Spack's configuration."""
|
||||
"""Search either $PATH or a list of paths OR MODULES for compilers and
|
||||
add them to Spack's configuration.
|
||||
|
||||
"""
|
||||
paths = args.add_paths
|
||||
if not paths:
|
||||
paths = get_path('PATH')
|
||||
|
||||
compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
|
||||
if c.spec not in spack.compilers.all_compilers(scope=args.scope)]
|
||||
|
||||
# Don't initialize compilers config via compilers.get_compiler_config.
|
||||
# Just let compiler_find do the
|
||||
# entire process and return an empty config from all_compilers
|
||||
# Default for any other process is init_config=True
|
||||
compilers = [c for c in spack.compilers.find_compilers(*paths)
|
||||
if c.spec not in spack.compilers.all_compilers(
|
||||
scope=args.scope, init_config=False)]
|
||||
if compilers:
|
||||
spack.compilers.add_compilers_to_config(compilers, scope=args.scope)
|
||||
spack.compilers.add_compilers_to_config(compilers, scope=args.scope,
|
||||
init_config=False)
|
||||
n = len(compilers)
|
||||
s = 's' if n > 1 else ''
|
||||
filename = spack.config.get_config_filename(args.scope, 'compilers')
|
||||
@@ -93,17 +109,17 @@ def compiler_find(args):
|
||||
def compiler_remove(args):
|
||||
cspec = CompilerSpec(args.compiler_spec)
|
||||
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
|
||||
|
||||
if not compilers:
|
||||
tty.die("No compilers match spec %s" % cspec)
|
||||
elif not args.all and len(compilers) > 1:
|
||||
tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
|
||||
colify(reversed(sorted([c.spec for c in compilers])), indent=4)
|
||||
tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.")
|
||||
tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
|
||||
sys.exit(1)
|
||||
|
||||
for compiler in compilers:
|
||||
spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
|
||||
spack.compilers.remove_compiler_from_config(
|
||||
compiler.spec, scope=args.scope)
|
||||
tty.msg("Removed compiler %s" % compiler.spec)
|
||||
|
||||
|
||||
@@ -121,13 +137,16 @@ def compiler_info(args):
|
||||
print "\tcxx = %s" % c.cxx
|
||||
print "\tf77 = %s" % c.f77
|
||||
print "\tfc = %s" % c.fc
|
||||
print "\tmodules = %s" % c.modules
|
||||
print "\toperating system = %s" % c.operating_system
|
||||
|
||||
|
||||
def compiler_list(args):
|
||||
tty.msg("Available compilers")
|
||||
index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name')
|
||||
for i, (name, compilers) in enumerate(index.items()):
|
||||
if i >= 1: print
|
||||
if i >= 1:
|
||||
print
|
||||
|
||||
cname = "%s{%s}" % (spack.spec.compiler_color, name)
|
||||
tty.hline(colorize(cname), char='-')
|
||||
@@ -135,10 +154,10 @@ def compiler_list(args):
|
||||
|
||||
|
||||
def compiler(parser, args):
|
||||
action = { 'add' : compiler_find,
|
||||
'find' : compiler_find,
|
||||
'remove' : compiler_remove,
|
||||
'rm' : compiler_remove,
|
||||
'info' : compiler_info,
|
||||
'list' : compiler_list }
|
||||
action = {'add': compiler_find,
|
||||
'find': compiler_find,
|
||||
'remove': compiler_remove,
|
||||
'rm': compiler_remove,
|
||||
'info': compiler_info,
|
||||
'list': compiler_list}
|
||||
action[args.compiler_command](args)
|
||||
|
||||
@@ -22,18 +22,16 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.colify import colify
|
||||
from llnl.util.lang import index_by
|
||||
|
||||
import spack
|
||||
from spack.cmd.compiler import compiler_list
|
||||
|
||||
description = "List available compilers. Same as 'spack compiler list'."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('--scope', choices=spack.config.config_scopes,
|
||||
help="Configuration scope to read/modify.")
|
||||
|
||||
|
||||
def compilers(parser, args):
|
||||
compiler_list(args)
|
||||
|
||||
@@ -22,15 +22,11 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.config
|
||||
|
||||
description = "Get and set configuration options."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
# User can only choose one
|
||||
scope_group = subparser.add_mutually_exclusive_group()
|
||||
@@ -64,6 +60,6 @@ def config_edit(args):
|
||||
|
||||
|
||||
def config(parser, args):
|
||||
action = { 'get' : config_get,
|
||||
'edit' : config_edit }
|
||||
action = {'get': config_get,
|
||||
'edit': config_edit}
|
||||
action[args.config_command](args)
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
_copyright = """\
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
@@ -23,10 +22,8 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
"""
|
||||
import string
|
||||
import os
|
||||
import hashlib
|
||||
import re
|
||||
|
||||
from ordereddict_backport import OrderedDict
|
||||
@@ -41,16 +38,37 @@
|
||||
from spack.spec import Spec
|
||||
from spack.util.naming import *
|
||||
from spack.repository import Repo, RepoError
|
||||
import spack.util.crypto as crypto
|
||||
|
||||
from spack.util.executable import which
|
||||
from spack.stage import Stage
|
||||
|
||||
|
||||
description = "Create a new package file from an archive URL"
|
||||
|
||||
package_template = string.Template(
|
||||
_copyright + """
|
||||
package_template = string.Template("""\
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
#
|
||||
# This is a template package file for Spack. We've put "FIXME"
|
||||
# next to all the things you'll want to change. Once you've handled
|
||||
@@ -68,26 +86,107 @@
|
||||
#
|
||||
from spack import *
|
||||
|
||||
|
||||
class ${class_name}(Package):
|
||||
""\"FIXME: put a proper description of your package here.""\"
|
||||
# FIXME: add a proper url for your package's homepage here.
|
||||
""\"FIXME: Put a proper description of your package here.""\"
|
||||
|
||||
# FIXME: Add a proper url for your package's homepage here.
|
||||
homepage = "http://www.example.com"
|
||||
url = "${url}"
|
||||
|
||||
${versions}
|
||||
|
||||
# FIXME: Add dependencies if this package requires them.
|
||||
# depends_on("foo")
|
||||
${dependencies}
|
||||
|
||||
def install(self, spec, prefix):
|
||||
# FIXME: Modify the configure line to suit your build system here.
|
||||
${configure}
|
||||
|
||||
# FIXME: Add logic to build and install here
|
||||
make()
|
||||
make("install")
|
||||
${install}
|
||||
""")
|
||||
|
||||
# Build dependencies and extensions
|
||||
dependencies_dict = {
|
||||
'autotools': """\
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('foo')""",
|
||||
|
||||
'cmake': """\
|
||||
# FIXME: Add additional dependencies if required.
|
||||
depends_on('cmake', type='build')""",
|
||||
|
||||
'scons': """\
|
||||
# FIXME: Add additional dependencies if required.
|
||||
depends_on('scons', type='build')""",
|
||||
|
||||
'python': """\
|
||||
extends('python')
|
||||
|
||||
# FIXME: Add additional dependencies if required.
|
||||
# depends_on('py-setuptools', type='build')
|
||||
# depends_on('py-foo', type=nolink)""",
|
||||
|
||||
'R': """\
|
||||
extends('R')
|
||||
|
||||
# FIXME: Add additional dependencies if required.
|
||||
# depends_on('r-foo', type=nolink)""",
|
||||
|
||||
'octave': """\
|
||||
extends('octave')
|
||||
|
||||
# FIXME: Add additional dependencies if required.
|
||||
# depends_on('octave-foo', type=nolink)""",
|
||||
|
||||
'unknown': """\
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('foo')"""
|
||||
}
|
||||
|
||||
# Default installation instructions
|
||||
install_dict = {
|
||||
'autotools': """\
|
||||
# FIXME: Modify the configure line to suit your build system here.
|
||||
configure('--prefix={0}'.format(prefix))
|
||||
|
||||
# FIXME: Add logic to build and install here.
|
||||
make()
|
||||
make('install')""",
|
||||
|
||||
'cmake': """\
|
||||
with working_dir('spack-build', create=True):
|
||||
# FIXME: Modify the cmake line to suit your build system here.
|
||||
cmake('..', *std_cmake_args)
|
||||
|
||||
# FIXME: Add logic to build and install here.
|
||||
make()
|
||||
make('install')""",
|
||||
|
||||
'scons': """\
|
||||
# FIXME: Add logic to build and install here.
|
||||
scons('prefix={0}'.format(prefix))
|
||||
scons('install')""",
|
||||
|
||||
'python': """\
|
||||
# FIXME: Add logic to build and install here.
|
||||
setup_py('install', '--prefix={0}'.format(prefix))""",
|
||||
|
||||
'R': """\
|
||||
# FIXME: Add logic to build and install here.
|
||||
R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
|
||||
self.stage.source_path)""",
|
||||
|
||||
'octave': """\
|
||||
# FIXME: Add logic to build and install here.
|
||||
octave('--quiet', '--norc',
|
||||
'--built-in-docstrings-file=/dev/null',
|
||||
'--texi-macros-file=/dev/null',
|
||||
'--eval', 'pkg prefix {0}; pkg install {1}'.format(
|
||||
prefix, self.stage.archive_file))""",
|
||||
|
||||
'unknown': """\
|
||||
# FIXME: Unknown build system
|
||||
make()
|
||||
make('install')"""
|
||||
}
|
||||
|
||||
|
||||
def make_version_calls(ver_hash_tuples):
|
||||
"""Adds a version() call to the package for each version found."""
|
||||
@@ -118,41 +217,53 @@ def setup_parser(subparser):
|
||||
setup_parser.subparser = subparser
|
||||
|
||||
|
||||
class ConfigureGuesser(object):
|
||||
def __call__(self, stage):
|
||||
"""Try to guess the type of build system used by the project, and return
|
||||
an appropriate configure line.
|
||||
"""
|
||||
autotools = "configure('--prefix=%s' % prefix)"
|
||||
cmake = "cmake('.', *std_cmake_args)"
|
||||
python = "python('setup.py', 'install', '--prefix=%s' % prefix)"
|
||||
r = "R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)"
|
||||
class BuildSystemGuesser(object):
|
||||
|
||||
config_lines = ((r'/configure$', 'autotools', autotools),
|
||||
(r'/CMakeLists.txt$', 'cmake', cmake),
|
||||
(r'/setup.py$', 'python', python),
|
||||
(r'/NAMESPACE$', 'r', r))
|
||||
def __call__(self, stage, url):
|
||||
"""Try to guess the type of build system used by a project based on
|
||||
the contents of its archive or the URL it was downloaded from."""
|
||||
|
||||
# Peek inside the tarball.
|
||||
tar = which('tar')
|
||||
output = tar(
|
||||
"--exclude=*/*/*", "-tf", stage.archive_file, output=str)
|
||||
lines = output.split("\n")
|
||||
# Most octave extensions are hosted on Octave-Forge:
|
||||
# http://octave.sourceforge.net/index.html
|
||||
# They all have the same base URL.
|
||||
if 'downloads.sourceforge.net/octave/' in url:
|
||||
self.build_system = 'octave'
|
||||
return
|
||||
|
||||
# Set the configure line to the one that matched.
|
||||
for pattern, bs, cl in config_lines:
|
||||
if any(re.search(pattern, l) for l in lines):
|
||||
config_line = cl
|
||||
build_system = bs
|
||||
break
|
||||
# A list of clues that give us an idea of the build system a package
|
||||
# uses. If the regular expression matches a file contained in the
|
||||
# archive, the corresponding build system is assumed.
|
||||
clues = [
|
||||
(r'/configure$', 'autotools'),
|
||||
(r'/CMakeLists.txt$', 'cmake'),
|
||||
(r'/SConstruct$', 'scons'),
|
||||
(r'/setup.py$', 'python'),
|
||||
(r'/NAMESPACE$', 'R')
|
||||
]
|
||||
|
||||
# Peek inside the compressed file.
|
||||
if stage.archive_file.endswith('.zip'):
|
||||
try:
|
||||
unzip = which('unzip')
|
||||
output = unzip('-l', stage.archive_file, output=str)
|
||||
except:
|
||||
output = ''
|
||||
else:
|
||||
# None matched -- just put both, with cmake commented out
|
||||
config_line = "# FIXME: Spack couldn't guess one, so here are some options:\n"
|
||||
config_line += " # " + autotools + "\n"
|
||||
config_line += " # " + cmake
|
||||
build_system = 'unknown'
|
||||
try:
|
||||
tar = which('tar')
|
||||
output = tar('--exclude=*/*/*', '-tf',
|
||||
stage.archive_file, output=str)
|
||||
except:
|
||||
output = ''
|
||||
lines = output.split('\n')
|
||||
|
||||
# Determine the build system based on the files contained
|
||||
# in the archive.
|
||||
build_system = 'unknown'
|
||||
for pattern, bs in clues:
|
||||
if any(re.search(pattern, l) for l in lines):
|
||||
build_system = bs
|
||||
|
||||
self.configure = config_line
|
||||
self.build_system = build_system
|
||||
|
||||
|
||||
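BuildSystemGuesser above decides the build system from the download URL and from which files appear in the archive listing, using its clues table. The sketch below restates that logic as a standalone function; it inspects the tarball with the tarfile module instead of shelling out to tar, so it is an approximation of the behaviour rather than the code from the hunk:

    import re
    import tarfile

    CLUES = [                      # same clue table as above
        (r'/configure$', 'autotools'),
        (r'/CMakeLists.txt$', 'cmake'),
        (r'/SConstruct$', 'scons'),
        (r'/setup.py$', 'python'),
        (r'/NAMESPACE$', 'R'),
    ]

    def guess_build_system(archive_file, url=''):
        # Octave-Forge packages share a base URL, so the URL decides first.
        if 'downloads.sourceforge.net/octave/' in url:
            return 'octave'
        try:
            with tarfile.open(archive_file) as tar:
                names = tar.getnames()
        except (tarfile.TarError, OSError):
            names = []
        for pattern, build_system in CLUES:
            if any(re.search(pattern, name) for name in names):
                return build_system
        return 'unknown'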
@@ -168,7 +279,7 @@ def guess_name_and_version(url, args):
|
||||
else:
|
||||
try:
|
||||
name = spack.url.parse_name(url, version)
|
||||
except spack.url.UndetectableNameError, e:
|
||||
except spack.url.UndetectableNameError:
|
||||
# Use a user-supplied name if one is present
|
||||
tty.die("Couldn't guess a name for this package. Try running:", "",
|
||||
"spack create --name <name> <url>")
|
||||
@@ -182,7 +293,8 @@ def guess_name_and_version(url, args):
|
||||
def find_repository(spec, args):
|
||||
# figure out namespace for spec
|
||||
if spec.namespace and args.namespace and spec.namespace != args.namespace:
|
||||
tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace, args.namespace))
|
||||
tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace,
|
||||
args.namespace))
|
||||
|
||||
if not spec.namespace and args.namespace:
|
||||
spec.namespace = args.namespace
|
||||
@@ -193,8 +305,8 @@ def find_repository(spec, args):
|
||||
try:
|
||||
repo = Repo(repo_path)
|
||||
if spec.namespace and spec.namespace != repo.namespace:
|
||||
tty.die("Can't create package with namespace %s in repo with namespace %s"
|
||||
% (spec.namespace, repo.namespace))
|
||||
tty.die("Can't create package with namespace %s in repo with "
|
||||
"namespace %s" % (spec.namespace, repo.namespace))
|
||||
except RepoError as e:
|
||||
tty.die(str(e))
|
||||
else:
|
||||
@@ -214,11 +326,7 @@ def find_repository(spec, args):
|
||||
|
||||
def fetch_tarballs(url, name, version):
|
||||
"""Try to find versions of the supplied archive by scraping the web.
|
||||
|
||||
Prompts the user to select how many to download if many are found.
|
||||
|
||||
|
||||
"""
|
||||
Prompts the user to select how many to download if many are found."""
|
||||
versions = spack.util.web.find_versions_of_archive(url)
|
||||
rkeys = sorted(versions.keys(), reverse=True)
|
||||
versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))
|
||||
@@ -226,11 +334,11 @@ def fetch_tarballs(url, name, version):
|
||||
archives_to_fetch = 1
|
||||
if not versions:
|
||||
# If the fetch failed for some reason, revert to what the user provided
|
||||
versions = { version : url }
|
||||
versions = {version: url}
|
||||
elif len(versions) > 1:
|
||||
tty.msg("Found %s versions of %s:" % (len(versions), name),
|
||||
*spack.cmd.elide_list(
|
||||
["%-10s%s" % (v,u) for v, u in versions.iteritems()]))
|
||||
["%-10s%s" % (v, u) for v, u in versions.iteritems()]))
|
||||
print
|
||||
archives_to_fetch = tty.get_number(
|
||||
"Include how many checksums in the package file?",
|
||||
@@ -253,7 +361,7 @@ def create(parser, args):
|
||||
# Figure out a name and repo for the package.
|
||||
name, version = guess_name_and_version(url, args)
|
||||
spec = Spec(name)
|
||||
name = spec.name # factors out namespace, if any
|
||||
name = spec.name.lower() # factors out namespace, if any
|
||||
repo = find_repository(spec, args)
|
||||
|
||||
tty.msg("This looks like a URL for %s version %s" % (name, version))
|
||||
@@ -262,8 +370,8 @@ def create(parser, args):
|
||||
# Fetch tarballs (prompting user if necessary)
|
||||
versions, urls = fetch_tarballs(url, name, version)
|
||||
|
||||
# Try to guess what configure system is used.
|
||||
guesser = ConfigureGuesser()
|
||||
# Try to guess what build system is used.
|
||||
guesser = BuildSystemGuesser()
|
||||
ver_hash_tuples = spack.cmd.checksum.get_checksums(
|
||||
versions, urls,
|
||||
first_stage_function=guesser,
|
||||
@@ -272,13 +380,13 @@ def create(parser, args):
|
||||
if not ver_hash_tuples:
|
||||
tty.die("Could not fetch any tarballs for %s" % name)
|
||||
|
||||
# Prepend 'py-' to python package names, by convention.
|
||||
# Add prefix to package name if it is an extension.
|
||||
if guesser.build_system == 'python':
|
||||
name = 'py-%s' % name
|
||||
|
||||
# Prepend 'r-' to R package names, by convention.
|
||||
if guesser.build_system == 'r':
|
||||
name = 'r-%s' % name
|
||||
name = 'py-{0}'.format(name)
|
||||
if guesser.build_system == 'R':
|
||||
name = 'r-{0}'.format(name)
|
||||
if guesser.build_system == 'octave':
|
||||
name = 'octave-{0}'.format(name)
|
||||
|
||||
# Create a directory for the new package.
|
||||
pkg_path = repo.filename_for_package_name(name)
|
||||
@@ -292,10 +400,11 @@ def create(parser, args):
|
||||
pkg_file.write(
|
||||
package_template.substitute(
|
||||
name=name,
|
||||
configure=guesser.configure,
|
||||
class_name=mod_to_class(name),
|
||||
url=url,
|
||||
versions=make_version_calls(ver_hash_tuples)))
|
||||
versions=make_version_calls(ver_hash_tuples),
|
||||
dependencies=dependencies_dict[guesser.build_system],
|
||||
install=install_dict[guesser.build_system]))
|
||||
|
||||
# If everything checks out, go ahead and edit.
|
||||
spack.editor(pkg_path)
|
||||
|
||||
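create() above fills package_template with string.Template.substitute(), pulling the dependencies and install bodies out of dependencies_dict and install_dict by the guessed build system. A tiny self-contained illustration of that substitution follows; the class name, URL and checksum are dummies:

    import string

    template = string.Template("""\
    class ${class_name}(Package):
        homepage = "http://www.example.com"
        url      = "${url}"

    ${versions}

    ${dependencies}

        def install(self, spec, prefix):
    ${install}
    """)

    print(template.substitute(
        class_name='Zlib',
        url='http://www.example.com/zlib-1.2.8.tar.gz',
        versions="    version('1.2.8', '<md5 checksum here>')",
        dependencies="    # FIXME: Add dependencies if required.",
        install="        configure('--prefix={0}'.format(prefix))\n"
                "        make()\n"
                "        make('install')"))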
@@ -31,6 +31,7 @@
|
||||
|
||||
description = "Deactivate a package extension."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-f', '--force', action='store_true',
|
||||
@@ -40,7 +41,8 @@ def setup_parser(subparser):
|
||||
help="Deactivate all extensions of an extendable package, or "
|
||||
"deactivate an extension AND its dependencies.")
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.")
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="spec of package extension to deactivate.")
|
||||
|
||||
|
||||
def deactivate(parser, args):
|
||||
@@ -65,7 +67,8 @@ def deactivate(parser, args):
|
||||
if not args.force and not spec.package.activated:
|
||||
tty.die("%s is not activated." % pkg.spec.short_spec)
|
||||
|
||||
tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
|
||||
tty.msg("Deactivating %s and all dependencies." %
|
||||
pkg.spec.short_spec)
|
||||
|
||||
topo_order = topological_sort(spec)
|
||||
index = spec.index()
|
||||
@@ -79,7 +82,9 @@ def deactivate(parser, args):
|
||||
epkg.do_deactivate(force=args.force)
|
||||
|
||||
else:
|
||||
tty.die("spack deactivate --all requires an extendable package or an extension.")
|
||||
tty.die(
|
||||
"spack deactivate --all requires an extendable package "
|
||||
"or an extension.")
|
||||
|
||||
else:
|
||||
if not pkg.is_extension:
|
||||
|
||||
@@ -31,9 +31,11 @@
|
||||
|
||||
description = "Show installed packages that depend on another."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER, help="specs to list dependencies of.")
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="specs to list dependencies of.")
|
||||
|
||||
|
||||
def dependents(parser, args):
|
||||
@@ -42,5 +44,6 @@ def dependents(parser, args):
|
||||
tty.die("spack dependents takes only one spec.")
|
||||
|
||||
fmt = '$_$@$%@$+$=$#'
|
||||
deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents]
|
||||
deps = [d.format(fmt, color=True)
|
||||
for d in specs[0].package.installed_dependents]
|
||||
tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)
|
||||
|
||||
@@ -35,6 +35,7 @@
|
||||
|
||||
description = "Do-It-Yourself: build from an existing source directory."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
|
||||
@@ -50,7 +51,7 @@ def setup_parser(subparser):
|
||||
help="Do not display verbose build output while installing.")
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="specs to use for install. Must contain package AND verison.")
|
||||
help="specs to use for install. Must contain package AND version.")
|
||||
|
||||
|
||||
def diy(self, args):
|
||||
@@ -76,14 +77,17 @@ def diy(self, args):
|
||||
return
|
||||
|
||||
if not spec.versions.concrete:
|
||||
tty.die("spack diy spec must have a single, concrete version. Did you forget a package version number?")
|
||||
tty.die(
|
||||
"spack diy spec must have a single, concrete version. "
|
||||
"Did you forget a package version number?")
|
||||
|
||||
spec.concretize()
|
||||
package = spack.repo.get(spec)
|
||||
|
||||
if package.installed:
|
||||
tty.error("Already installed in %s" % package.prefix)
|
||||
tty.msg("Uninstall or try adding a version suffix for this DIY build.")
|
||||
tty.msg("Uninstall or try adding a version suffix for this "
|
||||
"DIY build.")
|
||||
sys.exit(1)
|
||||
|
||||
# Forces the build to run out of the current directory.
|
||||
|
||||
@@ -25,6 +25,7 @@
|
||||
|
||||
description = "Run pydoc from within spack."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('entity', help="Run pydoc help on entity")
|
||||
|
||||
|
||||
@@ -68,7 +68,7 @@ def edit_package(name, repo_path, namespace, force=False):
|
||||
if os.path.exists(path):
|
||||
if not os.path.isfile(path):
|
||||
tty.die("Something's wrong. '%s' is not a file!" % path)
|
||||
if not os.access(path, os.R_OK|os.W_OK):
|
||||
if not os.access(path, os.R_OK | os.W_OK):
|
||||
tty.die("Insufficient permissions on '%s'!" % path)
|
||||
elif not force:
|
||||
tty.die("No package '%s'. Use spack create, or supply -f/--force "
|
||||
@@ -93,19 +93,23 @@ def setup_parser(subparser):
|
||||
# Various filetypes you can edit directly from the cmd line.
|
||||
excl_args.add_argument(
|
||||
'-c', '--command', dest='path', action='store_const',
|
||||
const=spack.cmd.command_path, help="Edit the command with the supplied name.")
|
||||
const=spack.cmd.command_path,
|
||||
help="Edit the command with the supplied name.")
|
||||
excl_args.add_argument(
|
||||
'-t', '--test', dest='path', action='store_const',
|
||||
const=spack.test_path, help="Edit the test with the supplied name.")
|
||||
excl_args.add_argument(
|
||||
'-m', '--module', dest='path', action='store_const',
|
||||
const=spack.module_path, help="Edit the main spack module with the supplied name.")
|
||||
const=spack.module_path,
|
||||
help="Edit the main spack module with the supplied name.")
|
||||
|
||||
# Options for editing packages
|
||||
excl_args.add_argument(
|
||||
'-r', '--repo', default=None, help="Path to repo to edit package in.")
|
||||
'-r', '--repo', default=None,
|
||||
help="Path to repo to edit package in.")
|
||||
excl_args.add_argument(
|
||||
'-N', '--namespace', default=None, help="Namespace of package to edit.")
|
||||
'-N', '--namespace', default=None,
|
||||
help="Namespace of package to edit.")
|
||||
|
||||
subparser.add_argument(
|
||||
'name', nargs='?', default=None, help="name of package to edit")
|
||||
|
||||
@@ -28,11 +28,13 @@
|
||||
import spack.cmd
|
||||
import spack.build_environment as build_env
|
||||
|
||||
description = "Run a command with the environment for a particular spec's install."
|
||||
description = "Run a command with the install environment for a spec."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.")
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="specs of package environment to emulate.")
|
||||
|
||||
|
||||
def env(parser, args):
|
||||
@@ -47,7 +49,7 @@ def env(parser, args):
|
||||
if sep in args.spec:
|
||||
s = args.spec.index(sep)
|
||||
spec = args.spec[:s]
|
||||
cmd = args.spec[s+1:]
|
||||
cmd = args.spec[s + 1:]
|
||||
else:
|
||||
spec = args.spec[0]
|
||||
cmd = args.spec[1:]
|
||||
|
||||
@@ -22,7 +22,6 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
import llnl.util.tty as tty
|
||||
@@ -34,6 +33,7 @@
|
||||
|
||||
description = "List extensions for package."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
format_group = subparser.add_mutually_exclusive_group()
|
||||
format_group.add_argument(
|
||||
@@ -47,7 +47,8 @@ def setup_parser(subparser):
|
||||
help='Show full dependency DAG of extensions')
|
||||
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for')
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help='Spec of package to list extensions for')
|
||||
|
||||
|
||||
def extensions(parser, args):
|
||||
@@ -85,7 +86,8 @@ def extensions(parser, args):
|
||||
#
|
||||
# List specs of installed extensions.
|
||||
#
|
||||
installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)]
|
||||
installed = [
|
||||
s.spec for s in spack.installed_db.installed_extensions_for(spec)]
|
||||
print
|
||||
if not installed:
|
||||
tty.msg("None installed.")
|
||||
@@ -102,4 +104,5 @@ def extensions(parser, args):
|
||||
tty.msg("None activated.")
|
||||
return
|
||||
tty.msg("%d currently activated:" % len(activated))
|
||||
spack.cmd.find.display_specs(activated.values(), mode=args.mode, long=args.long)
|
||||
spack.cmd.find.display_specs(
|
||||
activated.values(), mode=args.mode, long=args.long)
|
||||
|
||||
@@ -29,16 +29,21 @@
|
||||
|
||||
description = "Fetch archives for packages"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||
help="Do not check packages against checksum")
|
||||
subparser.add_argument(
|
||||
'-m', '--missing', action='store_true', help="Also fetch all missing dependencies")
|
||||
'-m', '--missing', action='store_true',
|
||||
help="Also fetch all missing dependencies")
|
||||
subparser.add_argument(
|
||||
'-D', '--dependencies', action='store_true', help="Also fetch all dependencies")
|
||||
'-D', '--dependencies', action='store_true',
|
||||
help="Also fetch all dependencies")
|
||||
subparser.add_argument(
|
||||
'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
|
||||
'packages', nargs=argparse.REMAINDER,
|
||||
help="specs of packages to fetch")
|
||||
|
||||
|
||||
def fetch(parser, args):
|
||||
if not args.packages:
|
||||
@@ -50,8 +55,7 @@ def fetch(parser, args):
|
||||
specs = spack.cmd.parse_specs(args.packages, concretize=True)
|
||||
for spec in specs:
|
||||
if args.missing or args.dependencies:
|
||||
to_fetch = set()
|
||||
for s in spec.traverse():
|
||||
for s in spec.traverse(deptype_query=spack.alldeps):
|
||||
package = spack.repo.get(s)
|
||||
if args.missing and package.installed:
|
||||
continue
|
||||
|
||||
@@ -22,144 +22,93 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import sys
|
||||
import collections
|
||||
import itertools
|
||||
import argparse
|
||||
from StringIO import StringIO
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.colify import *
|
||||
from llnl.util.tty.color import *
|
||||
from llnl.util.lang import *
|
||||
|
||||
import spack
|
||||
import spack.spec
|
||||
from llnl.util.lang import *
|
||||
from llnl.util.tty.colify import *
|
||||
from llnl.util.tty.color import *
|
||||
from spack.cmd import display_specs
|
||||
|
||||
description = "Find installed spack packages"
|
||||
|
||||
description ="Find installed spack packages"
|
||||
|
||||
def setup_parser(subparser):
|
||||
format_group = subparser.add_mutually_exclusive_group()
|
||||
format_group.add_argument('-s', '--short',
|
||||
action='store_const',
|
||||
dest='mode',
|
||||
const='short',
|
||||
help='Show only specs (default)')
|
||||
format_group.add_argument('-p', '--paths',
|
||||
action='store_const',
|
||||
dest='mode',
|
||||
const='paths',
|
||||
help='Show paths to package install directories')
|
||||
format_group.add_argument(
|
||||
'-s', '--short', action='store_const', dest='mode', const='short',
|
||||
help='Show only specs (default)')
|
||||
format_group.add_argument(
|
||||
'-p', '--paths', action='store_const', dest='mode', const='paths',
|
||||
help='Show paths to package install directories')
|
||||
format_group.add_argument(
|
||||
'-d', '--deps', action='store_const', dest='mode', const='deps',
|
||||
'-d', '--deps',
|
||||
action='store_const',
|
||||
dest='mode',
|
||||
const='deps',
|
||||
help='Show full dependency DAG of installed packages')
|
||||
|
||||
subparser.add_argument(
|
||||
'-l', '--long', action='store_true',
|
||||
help='Show dependency hashes as well as versions.')
|
||||
subparser.add_argument(
|
||||
'-L', '--very-long', action='store_true',
|
||||
help='Show dependency hashes as well as versions.')
|
||||
subparser.add_argument('-l', '--long',
|
||||
action='store_true',
|
||||
dest='long',
|
||||
help='Show dependency hashes as well as versions.')
|
||||
subparser.add_argument('-L', '--very-long',
|
||||
action='store_true',
|
||||
dest='very_long',
|
||||
help='Show dependency hashes as well as versions.')
|
||||
subparser.add_argument('-f', '--show-flags',
|
||||
action='store_true',
|
||||
dest='show_flags',
|
||||
help='Show spec compiler flags.')
|
||||
|
||||
subparser.add_argument(
|
||||
'-e', '--explicit', action='store_true',
|
||||
'-e', '--explicit',
|
||||
action='store_true',
|
||||
help='Show only specs that were installed explicitly')
|
||||
subparser.add_argument(
|
||||
'-E', '--implicit', action='store_true',
|
||||
'-E', '--implicit',
|
||||
action='store_true',
|
||||
help='Show only specs that were installed as dependencies')
|
||||
subparser.add_argument(
|
||||
'-u', '--unknown', action='store_true',
|
||||
'-u', '--unknown',
|
||||
action='store_true',
|
||||
dest='unknown',
|
||||
help='Show only specs Spack does not have a package for.')
|
||||
subparser.add_argument(
|
||||
'-m', '--missing', action='store_true',
|
||||
'-m', '--missing',
|
||||
action='store_true',
|
||||
dest='missing',
|
||||
help='Show missing dependencies as well as installed specs.')
|
||||
subparser.add_argument(
|
||||
'-M', '--only-missing', action='store_true',
|
||||
help='Show only missing dependencies.')
|
||||
subparser.add_argument(
|
||||
'-N', '--namespace', action='store_true',
|
||||
help='Show fully qualified package names.')
|
||||
'-v', '--variants',
|
||||
action='store_true',
|
||||
dest='variants',
|
||||
help='Show variants in output (can be long)')
|
||||
subparser.add_argument('-M', '--only-missing',
|
||||
action='store_true',
|
||||
dest='only_missing',
|
||||
help='Show only missing dependencies.')
|
||||
subparser.add_argument('-N', '--namespace',
|
||||
action='store_true',
|
||||
help='Show fully qualified package names.')
|
||||
|
||||
subparser.add_argument(
|
||||
'query_specs', nargs=argparse.REMAINDER,
|
||||
help='optional specs to filter results')
|
||||
subparser.add_argument('query_specs',
|
||||
nargs=argparse.REMAINDER,
|
||||
help='optional specs to filter results')
|
||||
|
||||
|
||||
def gray_hash(spec, length):
|
||||
return colorize('@K{%s}' % spec.dag_hash(length))
|
||||
|
||||
|
||||
def display_specs(specs, **kwargs):
|
||||
mode = kwargs.get('mode', 'short')
|
||||
hashes = kwargs.get('long', False)
|
||||
namespace = kwargs.get('namespace', False)
|
||||
|
||||
hlen = 7
|
||||
if kwargs.get('very_long', False):
|
||||
hashes = True
|
||||
hlen = None
|
||||
|
||||
# Make a dict with specs keyed by architecture and compiler.
|
||||
index = index_by(specs, ('architecture', 'compiler'))
|
||||
|
||||
# Traverse the index and print out each package
|
||||
for i, (architecture, compiler) in enumerate(sorted(index)):
|
||||
if i > 0: print
|
||||
|
||||
header = "%s{%s} / %s{%s}" % (
|
||||
spack.spec.architecture_color, architecture,
|
||||
spack.spec.compiler_color, compiler)
|
||||
tty.hline(colorize(header), char='-')
|
||||
|
||||
specs = index[(architecture,compiler)]
|
||||
specs.sort()
|
||||
|
||||
nfmt = '.' if namespace else '_'
|
||||
abbreviated = [s.format('$%s$@$+' % nfmt, color=True) for s in specs]
|
||||
if mode == 'paths':
|
||||
# Print one spec per line along with prefix path
|
||||
width = max(len(s) for s in abbreviated)
|
||||
width += 2
|
||||
format = " %%-%ds%%s" % width
|
||||
|
||||
for abbrv, spec in zip(abbreviated, specs):
|
||||
if hashes:
|
||||
print gray_hash(spec, hlen),
|
||||
print format % (abbrv, spec.prefix)
|
||||
|
||||
elif mode == 'deps':
|
||||
for spec in specs:
|
||||
print spec.tree(
|
||||
format='$%s$@$+' % nfmt,
|
||||
color=True,
|
||||
indent=4,
|
||||
prefix=(lambda s: gray_hash(s, hlen)) if hashes else None)
|
||||
|
||||
elif mode == 'short':
|
||||
def fmt(s):
|
||||
string = ""
|
||||
if hashes:
|
||||
string += gray_hash(s, hlen) + ' '
|
||||
string += s.format('$-%s$@$+' % nfmt, color=True)
|
||||
|
||||
return string
|
||||
colify(fmt(s) for s in specs)
|
||||
|
||||
else:
|
||||
raise ValueError(
|
||||
"Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode)


def find(parser, args):
    # Filter out specs that don't exist.
    query_specs = spack.cmd.parse_specs(args.query_specs)
    query_specs, nonexisting = partition_list(
        query_specs, lambda s: spack.repo.exists(s.name))

    if nonexisting:
        msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')
        msg += ", ".join(s.name for s in nonexisting)
        tty.msg(msg)

    if not query_specs:
        return


def query_arguments(args):
    # Check arguments
    if args.explicit and args.implicit:
        tty.error('You can\'t pass -E and -e options simultaneously.')
        raise SystemExit(1)

    # Set up query arguments.
    installed, known = True, any
@@ -169,20 +118,37 @@ def find(parser, args):
        installed = any
    if args.unknown:
        known = False

    explicit = None
    explicit = any
    if args.explicit:
        explicit = False
    if args.implicit:
        explicit = True
    if args.implicit:
        explicit = False
    q_args = {'installed': installed, 'known': known, "explicit": explicit}
    return q_args

    q_args = { 'installed' : installed, 'known' : known, "explicit" : explicit }


def find(parser, args):
    # Filter out specs that don't exist.
    query_specs = spack.cmd.parse_specs(args.query_specs)
    query_specs, nonexisting = partition_list(
        query_specs, lambda s: spack.repo.exists(s.name) or not s.name)

    if nonexisting:
        msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')
        msg += ", ".join(s.name for s in nonexisting)
        tty.msg(msg)

    if not query_specs:
        return

    q_args = query_arguments(args)

    # Get all the specs the user asked for
    if not query_specs:
        specs = set(spack.installed_db.query(**q_args))
    else:
        results = [set(spack.installed_db.query(qs, **q_args)) for qs in query_specs]
        results = [set(spack.installed_db.query(qs, **q_args))
                   for qs in query_specs]
        specs = set.union(*results)

    if not args.mode:
@@ -190,7 +156,10 @@ def find(parser, args):

    if sys.stdout.isatty():
        tty.msg("%d installed packages." % len(specs))
    display_specs(specs, mode=args.mode,
    display_specs(specs,
                  mode=args.mode,
                  long=args.long,
                  very_long=args.very_long,
                  namespace=args.namespace)
                  show_flags=args.show_flags,
                  namespace=args.namespace,
                  variants=args.variants)

@@ -30,6 +30,7 @@

description = "Generate graphs of package dependency relationships."


def setup_parser(subparser):
    setup_parser.parser = subparser

@@ -42,10 +43,12 @@ def setup_parser(subparser):
        help="Generate graph in dot format and print to stdout.")

    subparser.add_argument(
        '--concretize', action='store_true', help="Concretize specs before graphing.")
        '--concretize', action='store_true',
        help="Concretize specs before graphing.")

    subparser.add_argument(
        'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.")
        'specs', nargs=argparse.REMAINDER,
        help="specs of packages to graph.")


def graph(parser, args):
@@ -56,11 +59,11 @@ def graph(parser, args):
        setup_parser.parser.print_help()
        return 1

    if args.dot:  # Dot graph only if asked for.
    if args.dot:  # Dot graph only if asked for.
        graph_dot(*specs)

    elif specs:  # ascii is default: user doesn't need to provide it explicitly
    elif specs:  # ascii is default: user doesn't need to provide it explicitly
        graph_ascii(specs[0], debug=spack.debug)
        for spec in specs[1:]:
            print  # extra line bt/w independent graphs
            print  # extra line bt/w independent graphs
            graph_ascii(spec, debug=spack.debug)

@@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys

description = "Get help on spack and its commands"


def setup_parser(subparser):
    subparser.add_argument('help_command', nargs='?', default=None,
                           help='command to get help on')


def help(parser, args):
    if args.help_command:
        parser.parse_args([args.help_command, '-h'])

@@ -29,9 +29,11 @@

description = "Get detailed information on a particular package"


def padder(str_list, extra=0):
    """Return a function to pad elements of a list."""
    length = max(len(str(s)) for s in str_list) + extra

    def pad(string):
        string = str(string)
        padding = max(0, length - len(string))
@@ -40,7 +42,8 @@ def pad(string):


def setup_parser(subparser):
    subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.")
    subparser.add_argument(
        'name', metavar="PACKAGE", help="Name of package to get info for.")


def print_text_info(pkg):
@@ -81,12 +84,14 @@ def print_text_info(pkg):

        print " " + fmt % (name, default, desc)

    print
    print "Dependencies:"
    if pkg.dependencies:
        colify(pkg.dependencies, indent=4)
    else:
        print " None"
    for deptype in ('build', 'link', 'run'):
        print
        print "%s Dependencies:" % deptype.capitalize()
        deps = sorted(pkg.dependencies_of_type(deptype))
        if deps:
            colify(deps, indent=4)
        else:
            print " None"

    print
    print "Virtual packages: "

@@ -31,6 +31,7 @@

description = "Build and install packages"


def setup_parser(subparser):
    subparser.add_argument(
        '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@@ -52,9 +53,16 @@ def setup_parser(subparser):
        help="Display verbose build output while installing.")
    subparser.add_argument(
        '--fake', action='store_true', dest='fake',
        help="Fake install. Just remove the prefix and touch a fake file in it.")
        help="Fake install. Just remove prefix and create a fake file.")
    subparser.add_argument(
        'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
        '--dirty', action='store_true', dest='dirty',
        help="Install a package *without* cleaning the environment.")
    subparser.add_argument(
        'packages', nargs=argparse.REMAINDER,
        help="specs of packages to install")
    subparser.add_argument(
        '--run-tests', action='store_true', dest='run_tests',
        help="Run tests during installation of a package.")


def install(parser, args):
@@ -77,6 +85,8 @@ def install(parser, args):
        keep_stage=args.keep_stage,
        ignore_deps=args.ignore_deps,
        make_jobs=args.jobs,
        run_tests=args.run_tests,
        verbose=args.verbose,
        fake=args.fake,
        dirty=args.dirty,
        explicit=True)

@@ -29,36 +29,62 @@

import spack
import fnmatch
import re

description = "List available spack packages"

description ="List available spack packages"

def setup_parser(subparser):
    subparser.add_argument(
        'filter', nargs=argparse.REMAINDER,
        help='Optional glob patterns to filter results.')
    subparser.add_argument(
        '-i', '--insensitive', action='store_true', default=False,
        help='Filtering will be case insensitive.')
        '-s', '--sensitive', action='store_true', default=False,
        help='Use case-sensitive filtering. Default is case insensitive, '
        'unless the query contains a capital letter.')
    subparser.add_argument(
        '-d', '--search-description', action='store_true', default=False,
        help='Filtering will also search the description for a match.')


def list(parser, args):
    # Start with all package names.
    pkgs = spack.repo.all_package_names()
    pkgs = set(spack.repo.all_package_names())

    # filter if a filter arg was provided
    if args.filter:
        def match(p, f):
            if args.insensitive:
                p = p.lower()
                f = f.lower()
            return fnmatch.fnmatchcase(p, f)
        pkgs = [p for p in pkgs if any(match(p, f) for f in args.filter)]
        res = []
        for f in args.filter:
            if '*' not in f and '?' not in f:
                r = fnmatch.translate('*' + f + '*')
            else:
                r = fnmatch.translate(f)

            re_flags = re.I
            if any(l.isupper() for l in f) or args.sensitive:
                re_flags = 0
            rc = re.compile(r, flags=re_flags)
            res.append(rc)

        if args.search_description:
            def match(p, f):
                if f.match(p):
                    return True

                pkg = spack.repo.get(p)
                if pkg.__doc__:
                    return f.match(pkg.__doc__)
                return False
        else:
            def match(p, f):
                return f.match(p)
        pkgs = [p for p in pkgs if any(match(p, f) for f in res)]

    # sort before displaying.
    sorted_packages = sorted(pkgs, key=lambda s:s.lower())
    sorted_packages = sorted(pkgs, key=lambda s: s.lower())

    # Print all the package names in columns
    indent=0
    indent = 0
    if sys.stdout.isatty():
        tty.msg("%d packages." % len(sorted_packages))
    colify(sorted_packages, indent=indent)
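
Editorial note: a self-contained sketch of the smart-case glob filtering assembled above; the helper name below is hypothetical and only illustrates the behavior.

# Bare terms become substring globs; matching is case-insensitive unless
# the query contains an uppercase letter or -s/--sensitive is passed.
import fnmatch
import re

def compile_filter(term, sensitive=False):
    pattern = term if ('*' in term or '?' in term) else '*' + term + '*'
    flags = 0 if (any(c.isupper() for c in term) or sensitive) else re.I
    return re.compile(fnmatch.translate(pattern), flags=flags)

assert compile_filter('mpi').match('openmpi')          # insensitive substring match
assert compile_filter('MPI').match('openmpi') is None  # a capital forces exact case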

@@ -25,13 +25,16 @@
import argparse
import spack.modules

description ="Add package to environment using modules."
description = "Add package to environment using modules."


def setup_parser(subparser):
    """Parser is only constructed so that this prints a nice help
       message with -h. """
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER, help='Spec of package to load with modules.')
        'spec', nargs=argparse.REMAINDER,
        help="Spec of package to load with modules. "
        "(If -, read specs from STDIN)")


def load(parser, args):

@@ -22,8 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
import argparse

import llnl.util.tty as tty
@@ -32,16 +30,19 @@
import spack
import spack.cmd

description="Print out locations of various directories used by Spack"
description = "Print out locations of various directories used by Spack"


def setup_parser(subparser):
    global directories
    directories = subparser.add_mutually_exclusive_group()

    directories.add_argument(
        '-m', '--module-dir', action='store_true', help="Spack python module directory.")
        '-m', '--module-dir', action='store_true',
        help="Spack python module directory.")
    directories.add_argument(
        '-r', '--spack-root', action='store_true', help="Spack installation root.")
        '-r', '--spack-root', action='store_true',
        help="Spack installation root.")

    directories.add_argument(
        '-i', '--install-dir', action='store_true',
@@ -53,15 +54,19 @@ def setup_parser(subparser):
        '-P', '--packages', action='store_true',
        help="Top-level packages directory for Spack.")
    directories.add_argument(
        '-s', '--stage-dir', action='store_true', help="Stage directory for a spec.")
        '-s', '--stage-dir', action='store_true',
        help="Stage directory for a spec.")
    directories.add_argument(
        '-S', '--stages', action='store_true', help="Top level Stage directory.")
        '-S', '--stages', action='store_true',
        help="Top level Stage directory.")
    directories.add_argument(
        '-b', '--build-dir', action='store_true',
        help="Checked out or expanded source directory for a spec (requires it to be staged first).")
        help="Checked out or expanded source directory for a spec "
        "(requires it to be staged first).")

    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.")
        'spec', nargs=argparse.REMAINDER,
        help="spec of package to fetch directory for.")


def location(parser, args):
@@ -104,9 +109,9 @@ def location(parser, args):
        if args.stage_dir:
            print pkg.stage.path

        else:  # args.build_dir is the default.
        else:  # args.build_dir is the default.
            if not pkg.stage.source_path:
                tty.die("Build directory does not exist yet. Run this to create it:",
                tty.die("Build directory does not exist yet. "
                        "Run this to create it:",
                        "spack stage " + " ".join(args.spec))
            print pkg.stage.source_path

@@ -36,7 +36,7 @@

def setup_parser(subparser):
    setup_parser.parser = subparser
    subparser.add_argument('files', nargs=argparse.REMAINDER,
                           help="Files to checksum.")
                           help="Files/urls to checksum.")


def compute_md5_checksum(url):
@@ -67,6 +67,7 @@ def md5(parser, args):
            tty.warn("%s" % e)

    # Dump the MD5s at last without interleaving them with downloads
    tty.msg("%d MD5 checksums:" % len(results))
    checksum = 'checksum' if len(results) == 1 else 'checksums'
    tty.msg("%d MD5 %s:" % (len(results), checksum))
    for checksum, url in results:
        print "%s %s" % (checksum, url)

@@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
from datetime import datetime

import argparse
@@ -40,6 +39,7 @@

description = "Manage mirrors."


def setup_parser(subparser):
    subparser.add_argument(
        '-n', '--no-checksum', action='store_true', dest='no_checksum',
@@ -61,8 +61,9 @@ def setup_parser(subparser):
        '-D', '--dependencies', action='store_true',
        help="Also fetch all dependencies")
    create_parser.add_argument(
        '-o', '--one-version-per-spec', action='store_const', const=1, default=0,
        help="Only fetch one 'preferred' version per spec, not all known versions.")
        '-o', '--one-version-per-spec', action='store_const',
        const=1, default=0,
        help="Only fetch one 'preferred' version per spec, not all known.")

    scopes = spack.config.config_scopes

@@ -70,7 +71,7 @@ def setup_parser(subparser):
    add_parser = sp.add_parser('add', help=mirror_add.__doc__)
    add_parser.add_argument('name', help="Mnemonic name for mirror.")
    add_parser.add_argument(
        'url', help="URL of mirror directory created by 'spack mirror create'.")
        'url', help="URL of mirror directory from 'spack mirror create'.")
    add_parser.add_argument(
        '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
        help="Configuration scope to modify.")
@@ -107,7 +108,7 @@ def mirror_add(args):
            tty.die("Mirror with url %s already exists." % url)
        # should only be one item per mirror dict.

    items = [(n,u) for n,u in mirrors.items()]
    items = [(n, u) for n, u in mirrors.items()]
    items.insert(0, (args.name, url))
    mirrors = syaml_dict(items)
    spack.config.update_config('mirrors', mirrors, scope=args.scope)
@@ -121,7 +122,7 @@ def mirror_remove(args):
    if not mirrors:
        mirrors = syaml_dict()

    if not name in mirrors:
    if name not in mirrors:
        tty.die("No mirror with name %s" % name)

    old_value = mirrors.pop(name)
@@ -152,7 +153,7 @@ def _read_specs_from_file(filename):
            s.package
            specs.append(s)
        except SpackError, e:
            tty.die("Parse error in %s, line %d:" % (args.file, i+1),
            tty.die("Parse error in %s, line %d:" % (args.file, i + 1),
                    ">>> " + string, str(e))
    return specs

@@ -179,7 +180,7 @@ def mirror_create(args):
        new_specs = set()
        for spec in specs:
            spec.concretize()
            for s in spec.traverse():
            for s in spec.traverse(deptype_query=spack.alldeps):
                new_specs.add(s)
        specs = list(new_specs)

@@ -214,10 +215,10 @@ def mirror_create(args):


def mirror(parser, args):
    action = { 'create' : mirror_create,
               'add' : mirror_add,
               'remove' : mirror_remove,
               'rm' : mirror_remove,
               'list' : mirror_list }
    action = {'create': mirror_create,
              'add': mirror_add,
              'remove': mirror_remove,
              'rm': mirror_remove,
              'list': mirror_list}

    action[args.mirror_command](args)

@@ -22,83 +22,241 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from __future__ import print_function

import collections
import os
import shutil
import sys

import llnl.util.tty as tty
import spack.cmd
from llnl.util.filesystem import mkdirp
import spack.cmd.common.arguments as arguments
import llnl.util.filesystem as filesystem
from spack.modules import module_types
from spack.util.string import *

description = "Manipulate modules and dotkits."
description = "Manipulate module files"

# Dictionary that will be populated with the list of sub-commands
# Each sub-command must be callable and accept 3 arguments :
#   - mtype : the type of the module file
#   - specs : the list of specs to be processed
#   - args : namespace containing the parsed command line arguments
callbacks = {}


def subcommand(subparser_name):
    """Registers a function in the callbacks dictionary"""
    def decorator(callback):
        callbacks[subparser_name] = callback
        return callback
    return decorator
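
Editorial note: a standalone sketch of the registration-and-dispatch pattern described in the comments above; the names below mirror the code but are illustrative only, not Spack APIs.

# Each sub-command registers itself under its subparser name, and the
# driver later dispatches on that name with (mtype, specs, args).
callbacks = {}

def subcommand(name):
    def decorator(callback):
        callbacks[name] = callback
        return callback
    return decorator

@subcommand('refresh')
def refresh(mtype, specs, args):
    return 'refresh %s module files for %d specs' % (mtype, len(specs))

print(callbacks['refresh']('tcl', ['libelf', 'zlib'], None))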


def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')

    sp.add_parser('refresh', help='Regenerate all module files.')
    # spack module refresh
    refresh_parser = sp.add_parser('refresh', help='Regenerate module files')
    refresh_parser.add_argument(
        '--delete-tree',
        help='Delete the module file tree before refresh',
        action='store_true'
    )
    arguments.add_common_arguments(
        refresh_parser, ['constraint', 'module_type', 'yes_to_all']
    )

    find_parser = sp.add_parser('find', help='Find module files for packages.')
    find_parser.add_argument('module_type',
                             help="Type of module to find file for. [" +
                             '|'.join(module_types) + "]")
    find_parser.add_argument('spec',
                             nargs='+',
                             help='spec to find a module file for.')
    # spack module find
    find_parser = sp.add_parser('find', help='Find module files for packages')
    arguments.add_common_arguments(find_parser, ['constraint', 'module_type'])

    # spack module rm
    rm_parser = sp.add_parser('rm', help='Remove module files')
    arguments.add_common_arguments(
        rm_parser, ['constraint', 'module_type', 'yes_to_all']
    )

    # spack module loads
    loads_parser = sp.add_parser(
        'loads',
        help='Prompt the list of modules associated with a constraint'
    )
    loads_parser.add_argument(
        '--input-only', action='store_false', dest='shell',
        help='Generate input for module command (instead of a shell script)'
    )
    loads_parser.add_argument(
        '-p', '--prefix', dest='prefix', default='',
        help='Prepend to module names when issuing module load commands'
    )
    arguments.add_common_arguments(
        loads_parser, ['constraint', 'module_type', 'recurse_dependencies']
    )


def module_find(mtype, spec_array):
    """Look at all installed packages and see if the spec provided
       matches any.  If it does, check whether there is a module file
       of type <mtype> there, and print out the name that the user
       should type to use that package's module.
class MultipleMatches(Exception):
    pass


class NoMatch(Exception):
    pass


@subcommand('loads')
def loads(mtype, specs, args):
    """Prompt the list of modules associated with a list of specs"""
    # Get a comprehensive list of specs
    if args.recurse_dependencies:
        specs_from_user_constraint = specs[:]
        specs = []
        # FIXME : during module file creation nodes seem to be visited
        # FIXME : multiple times even if cover='nodes' is given. This
        # FIXME : work around permits to get a unique list of spec anyhow.
        # FIXME : (same problem as in spack/modules.py)
        seen = set()
        seen_add = seen.add
        for spec in specs_from_user_constraint:
            specs.extend(
                [item for item in spec.traverse(order='post', cover='nodes')
                 if not (item in seen or seen_add(item))]
            )

    module_cls = module_types[mtype]
    modules = [(spec, module_cls(spec).use_name)
               for spec in specs if os.path.exists(module_cls(spec).file_name)]

    module_commands = {
        'tcl': 'module load ',
        'dotkit': 'dotkit use '
    }

    d = {
        'command': '' if not args.shell else module_commands[mtype],
        'prefix': args.prefix
    }

    prompt_template = '{comment}{command}{prefix}{name}'
    for spec, mod in modules:
        d['comment'] = '' if not args.shell else '# {0}\n'.format(
            spec.format())
        d['name'] = mod
        print(prompt_template.format(**d))
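
Editorial note: a standalone rendering of the prompt template above; the module name is hypothetical and only illustrates the '# spec' comment plus load line that shell mode emits.

prompt_template = '{comment}{command}{prefix}{name}'
print(prompt_template.format(comment='# zlib@1.2.8\n',
                             command='module load ',
                             prefix='',
                             name='zlib-1.2.8-gcc-4.9.3-abcdef'))
# Prints:
# # zlib@1.2.8
# module load zlib-1.2.8-gcc-4.9.3-abcdef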


@subcommand('find')
def find(mtype, specs, args):
    """
    Look at all installed packages and see if the spec provided
    matches any.  If it does, check whether there is a module file
    of type <mtype> there, and print out the name that the user
    should type to use that package's module.
    """
    if mtype not in module_types:
        tty.die("Invalid module type: '%s'.  Options are %s" %
                (mtype, comma_or(module_types)))

    specs = spack.cmd.parse_specs(spec_array)
    if len(specs) > 1:
        tty.die("You can only pass one spec.")
    spec = specs[0]

    specs = spack.installed_db.query(spec)
    if len(specs) == 0:
        tty.die("No installed packages match spec %s" % spec)
        raise NoMatch()

    if len(specs) > 1:
        tty.error("Multiple matches for spec %s.  Choose one:" % spec)
        for s in specs:
            sys.stderr.write(s.tree(color=True))
        sys.exit(1)
        raise MultipleMatches()

    mt = module_types[mtype]
    mod = mt(specs[0])
    spec = specs.pop()
    mod = module_types[mtype](spec)
    if not os.path.isfile(mod.file_name):
        tty.die("No %s module is installed for %s" % (mtype, spec))

    print(mod.use_name)


def module_refresh():
    """Regenerate all module files for installed packages known to
       spack (some packages may no longer exist)."""
    specs = [s for s in spack.installed_db.query(installed=True, known=True)]
@subcommand('rm')
def rm(mtype, specs, args):
    """Deletes module files associated with items in specs"""
    module_cls = module_types[mtype]
    specs_with_modules = [
        spec for spec in specs if os.path.exists(module_cls(spec).file_name)]
    modules = [module_cls(spec) for spec in specs_with_modules]

    for name, cls in module_types.items():
        tty.msg("Regenerating %s module files." % name)
        if os.path.isdir(cls.path):
            shutil.rmtree(cls.path, ignore_errors=False)
        mkdirp(cls.path)
        for spec in specs:
            cls(spec).write()
    if not modules:
        tty.msg('No module file matches your query')
        raise SystemExit(1)

    # Ask for confirmation
    if not args.yes_to_all:
        tty.msg(
            'You are about to remove {0} module files the following specs:\n'
            .format(mtype))
        spack.cmd.display_specs(specs_with_modules, long=True)
        print('')
        spack.cmd.ask_for_confirmation('Do you want to proceed ? ')

    # Remove the module files
    for s in modules:
        s.remove()


@subcommand('refresh')
def refresh(mtype, specs, args):
    """Regenerate module files for item in specs"""
    # Prompt a message to the user about what is going to change
    if not specs:
        tty.msg('No package matches your query')
        return

    if not args.yes_to_all:
        tty.msg(
            'You are about to regenerate {name} module files for:\n'
            .format(name=mtype))
        spack.cmd.display_specs(specs, long=True)
        print('')
        spack.cmd.ask_for_confirmation('Do you want to proceed ? ')

    cls = module_types[mtype]

    # Detect name clashes
    writers = [cls(spec) for spec in specs]
    file2writer = collections.defaultdict(list)
    for item in writers:
        file2writer[item.file_name].append(item)

    if len(file2writer) != len(writers):
        message = 'Name clashes detected in module files:\n'
        for filename, writer_list in file2writer.items():
            if len(writer_list) > 1:
                message += '\nfile : {0}\n'.format(filename)
                for x in writer_list:
                    message += 'spec : {0}\n'.format(x.spec.format(color=True))
        tty.error(message)
        tty.error('Operation aborted')
        raise SystemExit(1)

    # Proceed regenerating module files
    tty.msg('Regenerating {name} module files'.format(name=mtype))
    if os.path.isdir(cls.path) and args.delete_tree:
        shutil.rmtree(cls.path, ignore_errors=False)
    filesystem.mkdirp(cls.path)
    for x in writers:
        x.write(overwrite=True)
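
Editorial note: a standalone sketch of the clash check above; if two writers map to the same file name, the grouped dict has fewer keys than there are writers. The paths and spec strings below are hypothetical.

import collections

writers = [('openmpi@1.10.2', '/modules/openmpi-1.10.2'),
           ('openmpi@1.10.2+debug', '/modules/openmpi-1.10.2')]  # same target file

file2writer = collections.defaultdict(list)
for spec, filename in writers:
    file2writer[filename].append(spec)

if len(file2writer) != len(writers):
    for filename, clashing in file2writer.items():
        if len(clashing) > 1:
            print('clash in %s: %s' % (filename, ', '.join(clashing)))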


def module(parser, args):
    if args.module_command == 'refresh':
        module_refresh()
    # Qualifiers to be used when querying the db for specs
    constraint_qualifiers = {
        'refresh': {
            'installed': True,
            'known': True
        },
    }
    arguments.ConstraintAction.qualifiers.update(constraint_qualifiers)

    elif args.module_command == 'find':
        module_find(args.module_type, args.spec)
    module_type = args.module_type
    constraint = args.constraint
    try:
        callbacks[args.subparser_name](module_type, args.specs, args)
    except MultipleMatches:
        message = ('the constraint \'{query}\' matches multiple packages, '
                   'and this is not allowed in this context')
        tty.error(message.format(query=constraint))
        for s in args.specs:
            sys.stderr.write(s.format(color=True) + '\n')
        raise SystemExit(1)
    except NoMatch:
        message = ('the constraint \'{query}\' match no package, '
                   'and this is not allowed in this context')
        tty.die(message.format(query=constraint))

@@ -22,10 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
import cgi
from StringIO import StringIO
import llnl.util.tty as tty
from llnl.util.tty.colify import *
import spack

@@ -34,21 +32,22 @@

def github_url(pkg):
    """Link to a package file on github."""
    return ("https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" %
            pkg.name)
    url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py"
    return (url % pkg.name)


def rst_table(elts):
    """Print out a RST-style table."""
    cols = StringIO()
    ncol, widths = colify(elts, output=cols, tty=True)
    header = " ".join("=" * (w-1) for w in widths)
    header = " ".join("=" * (w - 1) for w in widths)
    return "%s\n%s%s" % (header, cols.getvalue(), header)


def print_rst_package_list():
    """Print out information on all packages in restructured text."""
    pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower())
    pkgs = sorted(spack.repo.all_packages(), key=lambda s: s.name.lower())
    pkg_names = [p.name for p in pkgs]

    print ".. _package-list:"
    print
@@ -62,7 +61,7 @@ def print_rst_package_list():

    print "Spack currently has %d mainline packages:" % len(pkgs)
    print
    print rst_table("`%s`_" % p.name for p in pkgs)
    print rst_table("`%s`_" % p for p in pkg_names)
    print
    print "-----"

@@ -79,12 +78,17 @@ def print_rst_package_list():
        print
        if pkg.versions:
            print "Versions:"
            print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
        if pkg.dependencies:
            print "Dependencies"
            print " " + ", ".join("`%s`_" % d if d != "mpi" else d
                                  for d in pkg.dependencies)
            print
            print " " + ", ".join(str(v) for v in
                                  reversed(sorted(pkg.versions)))

        for deptype in spack.alldeps:
            deps = pkg.dependencies_of_type(deptype)
            if deps:
                print "%s Dependencies" % deptype.capitalize()
                print " " + ", ".join("%s_" % d if d in pkg_names
                                      else d for d in deps)
                print

        print "Description:"
        print pkg.format_doc(indent=2)
        print

@@ -29,14 +29,16 @@
import spack


description="Patch expanded archive sources in preparation for install"
description = "Patch expanded archive sources in preparation for install"


def setup_parser(subparser):
    subparser.add_argument(
        '-n', '--no-checksum', action='store_true', dest='no_checksum',
        help="Do not check downloaded packages against checksum")
    subparser.add_argument(
        'packages', nargs=argparse.REMAINDER, help="specs of packages to stage")
        'packages', nargs=argparse.REMAINDER,
        help="specs of packages to stage")


def patch(parser, args):

@@ -33,6 +33,7 @@

description = "Query packages associated with particular git revisions."


def setup_parser(subparser):
    sp = subparser.add_subparsers(
        metavar='SUBCOMMAND', dest='pkg_command')
@@ -46,22 +47,28 @@ def setup_parser(subparser):
                             help="Revision to list packages for.")

    diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__)
    diff_parser.add_argument('rev1', nargs='?', default='HEAD^',
                             help="Revision to compare against.")
    diff_parser.add_argument('rev2', nargs='?', default='HEAD',
                             help="Revision to compare to rev1 (default is HEAD).")
    diff_parser.add_argument(
        'rev1', nargs='?', default='HEAD^',
        help="Revision to compare against.")
    diff_parser.add_argument(
        'rev2', nargs='?', default='HEAD',
        help="Revision to compare to rev1 (default is HEAD).")

    add_parser = sp.add_parser('added', help=pkg_added.__doc__)
    add_parser.add_argument('rev1', nargs='?', default='HEAD^',
                            help="Revision to compare against.")
    add_parser.add_argument('rev2', nargs='?', default='HEAD',
                            help="Revision to compare to rev1 (default is HEAD).")
    add_parser.add_argument(
        'rev1', nargs='?', default='HEAD^',
        help="Revision to compare against.")
    add_parser.add_argument(
        'rev2', nargs='?', default='HEAD',
        help="Revision to compare to rev1 (default is HEAD).")

    rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__)
    rm_parser.add_argument('rev1', nargs='?', default='HEAD^',
                           help="Revision to compare against.")
    rm_parser.add_argument('rev2', nargs='?', default='HEAD',
                           help="Revision to compare to rev1 (default is HEAD).")
    rm_parser.add_argument(
        'rev1', nargs='?', default='HEAD^',
        help="Revision to compare against.")
    rm_parser.add_argument(
        'rev2', nargs='?', default='HEAD',
        help="Revision to compare to rev1 (default is HEAD).")


def get_git():
@@ -88,7 +95,8 @@ def pkg_add(args):
    for pkg_name in args.packages:
        filename = spack.repo.filename_for_package_name(pkg_name)
        if not os.path.isfile(filename):
            tty.die("No such package: %s.  Path does not exist:" % pkg_name, filename)
            tty.die("No such package: %s.  Path does not exist:" %
                    pkg_name, filename)

        git = get_git()
        git('-C', spack.packages_path, 'add', filename)
@@ -112,7 +120,8 @@ def pkg_diff(args):
    if u1:
        print "%s:" % args.rev1
        colify(sorted(u1), indent=4)
        if u1: print
        if u1:
            print

    if u2:
        print "%s:" % args.rev2
@@ -122,19 +131,21 @@

def pkg_removed(args):
    """Show packages removed since a commit."""
    u1, u2 = diff_packages(args.rev1, args.rev2)
    if u1: colify(sorted(u1))
    if u1:
        colify(sorted(u1))


def pkg_added(args):
    """Show packages added since a commit."""
    u1, u2 = diff_packages(args.rev1, args.rev2)
    if u2: colify(sorted(u2))
    if u2:
        colify(sorted(u2))


def pkg(parser, args):
    action = { 'add' : pkg_add,
               'diff' : pkg_diff,
               'list' : pkg_list,
               'removed' : pkg_removed,
               'added' : pkg_added }
    action = {'add': pkg_add,
              'diff': pkg_diff,
              'list': pkg_list,
              'removed': pkg_removed,
              'added': pkg_added}
    action[args.pkg_command](args)

@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import argparse

from llnl.util.tty.colify import colify
@@ -30,11 +29,13 @@
import spack
import spack.cmd

description ="List packages that provide a particular virtual package"
description = "List packages that provide a particular virtual package"


def setup_parser(subparser):
    subparser.add_argument('vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
                           help='Find packages that provide this virtual package')
    subparser.add_argument(
        'vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
        help='Find packages that provide this virtual package')


def providers(parser, args):

@@ -22,9 +22,37 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
import spack.stage as stage

description = "Remove all temporary build files and downloaded archives"
description = "Remove temporary build files and/or downloaded archives"


def setup_parser(subparser):
    subparser.add_argument(
        '-s', '--stage', action='store_true', default=True,
        help="Remove all temporary build stages (default).")
    subparser.add_argument(
        '-d', '--downloads', action='store_true',
        help="Remove cached downloads.")
    subparser.add_argument(
        '-u', '--user-cache', action='store_true',
        help="Remove caches in user home directory. Includes virtual indices.")
    subparser.add_argument(
        '-a', '--all', action='store_true',
        help="Remove all of the above.")


def purge(parser, args):
    stage.purge()
    # Special case: no flags.
    if not any((args.stage, args.downloads, args.user_cache, args.all)):
        stage.purge()
        return

    # handle other flags with fall through.
    if args.stage or args.all:
        stage.purge()
    if args.downloads or args.all:
        spack.fetch_cache.destroy()
    if args.user_cache or args.all:
        spack.user_cache.destroy()

@@ -30,18 +30,22 @@

import spack


def setup_parser(subparser):
    subparser.add_argument(
        '-c', dest='python_command', help='Command to execute.')
    subparser.add_argument(
        'python_args', nargs=argparse.REMAINDER, help="File to run plus arguments.")
        'python_args', nargs=argparse.REMAINDER,
        help="File to run plus arguments.")


description = "Launch an interpreter as spack would launch a command"


def python(parser, args):
    # Fake a main python shell by setting __name__ to __main__.
    console = code.InteractiveConsole({'__name__' : '__main__',
                                       'spack' : spack})
    console = code.InteractiveConsole({'__name__': '__main__',
                                       'spack': spack})

    if "PYTHONSTARTUP" in os.environ:
        startup_file = os.environ["PYTHONSTARTUP"]

@@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import spack

description = "Rebuild Spack's package database."


def reindex(parser, args):
    spack.installed_db.reindex(spack.install_layout)

@@ -23,20 +23,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import re
import shutil

from external import argparse
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp

import spack.spec
import spack.config
from spack.util.environment import get_path
from spack.repository import *

description = "Manage package source repositories."


def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
    scopes = spack.config.config_scopes
@@ -57,13 +53,15 @@ def setup_parser(subparser):

    # Add
    add_parser = sp.add_parser('add', help=repo_add.__doc__)
    add_parser.add_argument('path', help="Path to a Spack package repository directory.")
    add_parser.add_argument(
        'path', help="Path to a Spack package repository directory.")
    add_parser.add_argument(
        '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
        help="Configuration scope to modify.")

    # Remove
    remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm'])
    remove_parser = sp.add_parser(
        'remove', help=repo_remove.__doc__, aliases=['rm'])
    remove_parser.add_argument(
        'path_or_namespace',
        help="Path or namespace of a Spack package repository.")
@@ -100,7 +98,8 @@ def repo_add(args):

    # If that succeeds, finally add it to the configuration.
    repos = spack.config.get_config('repos', args.scope)
    if not repos: repos = []
    if not repos:
        repos = []

    if repo.root in repos or path in repos:
        tty.die("Repository is already registered with Spack: %s" % path)
@@ -135,7 +134,7 @@ def repo_remove(args):
            tty.msg("Removed repository %s with namespace '%s'."
                    % (repo.root, repo.namespace))
            return
        except RepoError as e:
        except RepoError:
            continue

    tty.die("No repository with path or namespace: %s"
@@ -149,7 +148,7 @@ def repo_list(args):
    for r in roots:
        try:
            repos.append(Repo(r))
        except RepoError as e:
        except RepoError:
            continue

    msg = "%d package repositor" % len(repos)
@@ -166,9 +165,9 @@ def repo_list(args):


def repo(parser, args):
    action = { 'create' : repo_create,
               'list' : repo_list,
               'add' : repo_add,
               'remove' : repo_remove,
               'rm' : repo_remove}
    action = {'create': repo_create,
              'list': repo_list,
              'add': repo_add,
              'remove': repo_remove,
              'rm': repo_remove}
    action[args.repo_command](args)

@@ -31,6 +31,7 @@

description = "Revert checked out package source code."


def setup_parser(subparser):
    subparser.add_argument('packages', nargs=argparse.REMAINDER,
                           help="specs of packages to restage")

94  lib/spack/spack/cmd/setup.py  Normal file
@@ -0,0 +1,94 @@
##############################################################################
# Copyright (c) 2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Elizabeth Fischer
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import os
import argparse

import llnl.util.tty as tty

import spack
import spack.cmd
from spack.cmd.edit import edit_package
from spack.stage import DIYStage

description = "Create a configuration script and module, but don't build."


def setup_parser(subparser):
    subparser.add_argument(
        '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
        help="Do not try to install dependencies of requested packages.")
    subparser.add_argument(
        '-v', '--verbose', action='store_true', dest='verbose',
        help="Display verbose build output while installing.")
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER,
        help="specs to use for install. Must contain package AND version.")


def setup(self, args):
    if not args.spec:
        tty.die("spack setup requires a package spec argument.")

    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) > 1:
        tty.die("spack setup only takes one spec.")

    # Take a write lock before checking for existence.
    with spack.installed_db.write_transaction():
        spec = specs[0]
        if not spack.repo.exists(spec.name):
            tty.warn("No such package: %s" % spec.name)
            create = tty.get_yes_or_no("Create this package?", default=False)
            if not create:
                tty.msg("Exiting without creating.")
                sys.exit(1)
            else:
                tty.msg("Running 'spack edit -f %s'" % spec.name)
                edit_package(spec.name, spack.repo.first_repo(), None, True)
                return

        if not spec.versions.concrete:
            tty.die(
                "spack setup spec must have a single, concrete version. "
                "Did you forget a package version number?")

        spec.concretize()
        package = spack.repo.get(spec)

        # It's OK if the package is already installed.

        # Forces the build to run out of the current directory.
        package.stage = DIYStage(os.getcwd())

        # TODO: make this an argument, not a global.
        spack.do_checksum = False

        package.do_install(
            keep_prefix=True,  # Don't remove install directory
            ignore_deps=args.ignore_deps,
            verbose=args.verbose,
            keep_stage=True,   # don't remove source dir for SETUP.
            install_phases=set(['setup', 'provenance']))

@@ -25,23 +25,22 @@
import argparse
import spack.cmd

import llnl.util.tty as tty

import spack
import spack.url as url

description = "print out abstract and concrete versions of a spec."


def setup_parser(subparser):
    subparser.add_argument('-i', '--ids', action='store_true',
                           help="show numerical ids for dependencies.")
    subparser.add_argument('specs', nargs=argparse.REMAINDER, help="specs of packages")
    subparser.add_argument(
        'specs', nargs=argparse.REMAINDER, help="specs of packages")


def spec(parser, args):
    kwargs = { 'ids' : args.ids,
               'indent' : 2,
               'color' : True }
    kwargs = {'ids': args.ids,
              'indent': 2,
              'color': True}

    for spec in spack.cmd.parse_specs(args.specs):
        print "Input spec"

@@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import argparse

import llnl.util.tty as tty
import spack
import spack.cmd

description="Expand downloaded archive in preparation for install"
description = "Expand downloaded archive in preparation for install"


def setup_parser(subparser):
    subparser.add_argument(

@@ -36,25 +36,25 @@
from spack.build_environment import InstallError
from spack.fetch_strategy import FetchError

description = "Run package installation as a unit test, output formatted results."
description = "Run package install as a unit test, output formatted results."


def setup_parser(subparser):
    subparser.add_argument('-j',
                           '--jobs',
                           action='store',
                           type=int,
                           help="Explicitly set number of make jobs. Default is #cpus.")
    subparser.add_argument(
        '-j', '--jobs', action='store', type=int,
        help="Explicitly set number of make jobs. Default is #cpus.")

    subparser.add_argument('-n',
                           '--no-checksum',
                           action='store_true',
                           dest='no_checksum',
                           help="Do not check packages against checksum")
    subparser.add_argument(
        '-n', '--no-checksum', action='store_true', dest='no_checksum',
        help="Do not check packages against checksum")

    subparser.add_argument('-o', '--output', action='store', help="test output goes in this file")
    subparser.add_argument(
        '-o', '--output', action='store',
        help="test output goes in this file")

    subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install")
    subparser.add_argument(
        'package', nargs=argparse.REMAINDER,
        help="spec of package to install")


class TestResult(object):
@@ -65,6 +65,7 @@ class TestResult(object):


class TestSuite(object):

    def __init__(self, filename):
        self.filename = filename
        self.root = ET.Element('testsuite')
@@ -75,14 +76,17 @@ def __enter__(self):

    def append(self, item):
        if not isinstance(item, TestCase):
            raise TypeError('only TestCase instances may be appended to a TestSuite instance')
            raise TypeError(
                'only TestCase instances may be appended to TestSuite')
        self.tests.append(item)  # Append the item to the list of tests

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Prepare the header for the entire test suite
        number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests)
        number_of_errors = sum(
            x.result_type == TestResult.ERRORED for x in self.tests)
        self.root.set('errors', str(number_of_errors))
        number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests)
        number_of_failures = sum(
            x.result_type == TestResult.FAILED for x in self.tests)
        self.root.set('failures', str(number_of_failures))
        self.root.set('tests', str(len(self.tests)))

@@ -112,7 +116,8 @@ def __init__(self, classname, name, time=None):
        self.element.set('time', str(time))
        self.result_type = None

    def set_result(self, result_type, message=None, error_type=None, text=None):
    def set_result(self, result_type,
                   message=None, error_type=None, text=None):
        self.result_type = result_type
        result = TestCase.results[self.result_type]
        if result is not None and result is not TestResult.PASSED:
@@ -133,7 +138,12 @@ def fetch_log(path):


def failed_dependencies(spec):
    return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
    def get_deps(deptype):
        return set(item for item in spec.dependencies(deptype)
                   if not spack.repo.get(item).installed)
    link_deps = get_deps('link')
    run_deps = get_deps('run')
    return link_deps.union(run_deps)
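
Editorial note: a hedged, standalone sketch of the dependency check above; only link- and run-type dependencies that are not installed count as failures. The sets below are hypothetical stand-ins for spack.repo lookups.

installed = {'zlib', 'hwloc'}
deps_by_type = {'build': {'cmake'}, 'link': {'zlib', 'openssl'}, 'run': {'python'}}

def get_deps(deptype):
    return set(d for d in deps_by_type[deptype] if d not in installed)

failed = get_deps('link').union(get_deps('run'))
print(sorted(failed))  # ['openssl', 'python'] -- build-only 'cmake' is ignored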


def get_top_spec_or_die(args):
@@ -150,13 +160,19 @@ def install_single_spec(spec, number_of_jobs):
    # If it is already installed, skip the test
    if spack.repo.get(spec).installed:
        testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
        testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed')
        testcase.set_result(
            TestResult.SKIPPED,
            message='Skipped [already installed]',
            error_type='already_installed')
        return testcase

    # If it relies on dependencies that did not install, skip
    if failed_dependencies(spec):
        testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
        testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed')
        testcase.set_result(
            TestResult.SKIPPED,
            message='Skipped [failed dependencies]',
            error_type='dep_failed')
        return testcase

    # Otherwise try to install the spec
@@ -172,26 +188,30 @@ def install_single_spec(spec, number_of_jobs):
        testcase = TestCase(package.name, package.spec.short_spec, duration)
        testcase.set_result(TestResult.PASSED)
    except InstallError:
        # An InstallError is considered a failure (the recipe didn't work correctly)
        # An InstallError is considered a failure (the recipe didn't work
        # correctly)
        duration = time.time() - start_time
        # Try to get the log
        lines = fetch_log(package.build_log_path)
        text = '\n'.join(lines)
        testcase = TestCase(package.name, package.spec.short_spec, duration)
        testcase.set_result(TestResult.FAILED, message='Installation failure', text=text)
        testcase.set_result(TestResult.FAILED,
                            message='Installation failure', text=text)

    except FetchError:
        # A FetchError is considered an error (we didn't even start building)
        duration = time.time() - start_time
        testcase = TestCase(package.name, package.spec.short_spec, duration)
        testcase.set_result(TestResult.ERRORED, message='Unable to fetch package')
        testcase.set_result(TestResult.ERRORED,
                            message='Unable to fetch package')

    return testcase


def get_filename(args, top_spec):
    if not args.output:
        fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash())
        fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(
            x=top_spec, hash=top_spec.dag_hash())
        output_directory = join_path(os.getcwd(), 'test-output')
        if not os.path.exists(output_directory):
            os.mkdir(output_directory)

@@ -23,33 +23,55 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from pprint import pprint

from llnl.util.filesystem import join_path, mkdirp
from llnl.util.tty.colify import colify
from llnl.util.lang import list_modules

import spack
import spack.test
from spack.fetch_strategy import FetchError

description = "Run unit tests"

description ="Run unit tests"

def setup_parser(subparser):
    subparser.add_argument(
        'names', nargs='*', help="Names of tests to run.")
    subparser.add_argument(
        '-l', '--list', action='store_true', dest='list', help="Show available tests")
        '-l', '--list', action='store_true', dest='list',
        help="Show available tests")
    subparser.add_argument(
        '--createXmlOutput', action='store_true', dest='createXmlOutput',
        '--createXmlOutput', action='store_true', dest='createXmlOutput',
        help="Create JUnit XML from test results")
    subparser.add_argument(
        '--xmlOutputDir', dest='xmlOutputDir',
        '--xmlOutputDir', dest='xmlOutputDir',
        help="Nose creates XML files in this directory")
    subparser.add_argument(
        '-v', '--verbose', action='store_true', dest='verbose',
        help="verbose output")


class MockCache(object):

    def store(self, copyCmd, relativeDst):
        pass

    def fetcher(self, targetPath, digest):
        return MockCacheFetcher()


class MockCacheFetcher(object):

    def set_stage(self, stage):
        pass

    def fetch(self):
        raise FetchError("Mock cache always fails for tests")

    def __str__(self):
        return "[mock fetcher]"


def test(parser, args):
    if args.list:
        print "Available tests:"
@@ -63,7 +85,8 @@ def test(parser, args):
        outputDir = join_path(os.getcwd(), "test-output")
    else:
        outputDir = os.path.abspath(args.xmlOutputDir)

    if not os.path.exists(outputDir):
        mkdirp(outputDir)
    spack.fetch_cache = MockCache()
    spack.test.run(args.names, outputDir, args.verbose)

@@ -30,7 +30,6 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.repository
|
||||
from spack.cmd.find import display_specs
|
||||
|
||||
description = "Remove an installed package"
|
||||
|
||||
@@ -39,51 +38,54 @@
|
||||
b) use spack uninstall -a to uninstall ALL matching specs.
|
||||
"""
|
||||
|
||||
|
||||
def ask_for_confirmation(message):
|
||||
while True:
|
||||
tty.msg(message + '[y/n]')
|
||||
choice = raw_input().lower()
|
||||
if choice == 'y':
|
||||
break
|
||||
elif choice == 'n':
|
||||
raise SystemExit('Operation aborted')
|
||||
tty.warn('Please reply either "y" or "n"')
|
||||
# Arguments for display_specs when we find ambiguity
|
||||
display_args = {
|
||||
'long': True,
|
||||
'show_flags': True,
|
||||
'variants': True
|
||||
}
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-f', '--force', action='store_true', dest='force',
|
||||
help="Remove regardless of whether other packages depend on this one.")
|
||||
|
||||
subparser.add_argument(
|
||||
'-a', '--all', action='store_true', dest='all',
|
||||
help="USE CAREFULLY. Remove ALL installed packages that match each " +
|
||||
"supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
|
||||
"libelf are uninstalled. This is both useful and dangerous, like rm -r.")
|
||||
help="USE CAREFULLY. Remove ALL installed packages that match each "
|
||||
"supplied spec. i.e., if you say uninstall libelf, ALL versions "
|
||||
"of libelf are uninstalled. This is both useful and dangerous, "
|
||||
"like rm -r.")
|
||||
|
||||
subparser.add_argument(
|
||||
'-d', '--dependents', action='store_true', dest='dependents',
|
||||
help='Also uninstall any packages that depend on the ones given via command line.'
|
||||
)
|
||||
help='Also uninstall any packages that depend on the ones given '
|
||||
'via command line.')
|
||||
|
||||
subparser.add_argument(
|
||||
'-y', '--yes-to-all', action='store_true', dest='yes_to_all',
|
||||
help='Assume "yes" is the answer to every confirmation asked to the user.'
|
||||
help='Assume "yes" is the answer to every confirmation requested')
|
||||
|
||||
)
|
||||
subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
|
||||
subparser.add_argument(
|
||||
'packages',
|
||||
nargs=argparse.REMAINDER,
|
||||
help="specs of packages to uninstall")
|
||||
|
||||
|
||||
def concretize_specs(specs, allow_multiple_matches=False, force=False):
|
||||
"""
|
||||
Returns a list of specs matching the not necessarily concretized specs given from cli
|
||||
"""Returns a list of specs matching the non necessarily
|
||||
concretized specs given from cli
|
||||
|
||||
Args:
|
||||
specs: list of specs to be matched against installed packages
|
||||
allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted)
|
||||
allow_multiple_matches : if True multiple matches are admitted
|
||||
|
||||
Return:
|
||||
list of specs
|
||||
"""
|
||||
specs_from_cli = [] # List of specs that match expressions given via command line
|
||||
# List of specs that match expressions given via command line
|
||||
specs_from_cli = []
|
||||
has_errors = False
|
||||
for spec in specs:
|
||||
matching = spack.installed_db.query(spec)
|
||||
@@ -92,7 +94,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
|
||||
if not allow_multiple_matches and len(matching) > 1:
|
||||
tty.error("%s matches multiple packages:" % spec)
|
||||
print()
|
||||
display_specs(matching, long=True)
|
||||
spack.cmd.display_specs(matching, **display_args)
|
||||
print()
|
||||
has_errors = True
|
||||
|
||||
@@ -109,8 +111,8 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
|
||||
|
||||
|
||||
def installed_dependents(specs):
|
||||
"""
|
||||
Returns a dictionary that maps a spec to a list of its installed dependents
|
||||
"""Returns a dictionary that maps a spec with a list of its
|
||||
installed dependents
|
||||
|
||||
Args:
|
||||
specs: list of specs to be checked for dependents
|
||||
@@ -140,7 +142,7 @@ def do_uninstall(specs, force):
|
||||
try:
|
||||
# should work if package is known to spack
|
||||
packages.append(item.package)
|
||||
except spack.repository.UnknownPackageError as e:
|
||||
except spack.repository.UnknownPackageError:
|
||||
# The package.py file has gone away -- but still
|
||||
# want to uninstall.
|
||||
spack.Package(item).do_uninstall(force=True)
|
||||
@@ -162,17 +164,20 @@ def uninstall(parser, args):
|
||||
with spack.installed_db.write_transaction():
|
||||
specs = spack.cmd.parse_specs(args.packages)
|
||||
# Gets the list of installed specs that match the ones given via cli
|
||||
uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of '-a' is given in the cli
|
||||
dependent_list = installed_dependents(uninstall_list) # takes care of '-d'
|
||||
# takes care of '-a' is given in the cli
|
||||
uninstall_list = concretize_specs(specs, args.all, args.force)
|
||||
dependent_list = installed_dependents(
|
||||
uninstall_list) # takes care of '-d'
|
||||
|
||||
# Process dependent_list and update uninstall_list
|
||||
has_error = False
|
||||
if dependent_list and not args.dependents and not args.force:
|
||||
for spec, lst in dependent_list.items():
|
||||
tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
|
||||
tty.error("Will not uninstall %s" %
|
||||
spec.format("$_$@$%@$#", color=True))
|
||||
print('')
|
||||
print("The following packages depend on it:")
|
||||
display_specs(lst, long=True)
|
||||
spack.cmd.display_specs(lst, **display_args)
|
||||
print('')
|
||||
has_error = True
|
||||
elif args.dependents:
|
||||
@@ -181,14 +186,15 @@ def uninstall(parser, args):
|
||||
uninstall_list = list(set(uninstall_list))
|
||||
|
||||
if has_error:
|
||||
tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')
|
||||
tty.die('You can use spack uninstall --dependents '
|
||||
'to uninstall these dependencies as well')
|
||||
|
||||
if not args.yes_to_all:
|
||||
tty.msg("The following packages will be uninstalled : ")
|
||||
print('')
|
||||
display_specs(uninstall_list, long=True)
|
||||
spack.cmd.display_specs(uninstall_list, **display_args)
|
||||
print('')
|
||||
ask_for_confirmation('Do you want to proceed ? ')
|
||||
spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
|
||||
|
||||
# Uninstall everything on the list
|
||||
do_uninstall(uninstall_list, args.force)
|
||||
|
||||
@@ -25,13 +25,15 @@
|
||||
import argparse
|
||||
import spack.modules
|
||||
|
||||
description ="Remove package from environment using module."
|
||||
description = "Remove package from environment using module."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
"""Parser is only constructed so that this prints a nice help
|
||||
message with -h. """
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER, help='Spec of package to unload with modules.')
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help='Spec of package to unload with modules.')
|
||||
|
||||
|
||||
def unload(parser, args):
|
||||
|
||||
@@ -25,13 +25,15 @@
|
||||
import argparse
|
||||
import spack.modules
|
||||
|
||||
description ="Remove package from environment using dotkit."
|
||||
description = "Remove package from environment using dotkit."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
"""Parser is only constructed so that this prints a nice help
|
||||
message with -h. """
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.')
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help='Spec of package to unuse with dotkit.')
|
||||
|
||||
|
||||
def unuse(parser, args):
|
||||
|
||||
@@ -22,28 +22,28 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack
|
||||
import spack.url
|
||||
from spack.util.web import find_versions_of_archive
|
||||
|
||||
description = "Show parsing of a URL, optionally spider web for other versions."
|
||||
description = "Show parsing of a URL, optionally spider web for versions."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('url', help="url of a package archive")
|
||||
subparser.add_argument(
|
||||
'-s', '--spider', action='store_true', help="Spider the source page for versions.")
|
||||
'-s', '--spider', action='store_true',
|
||||
help="Spider the source page for versions.")
|
||||
|
||||
|
||||
def print_name_and_version(url):
|
||||
name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url)
|
||||
underlines = [" "] * max(ns+nl, vs+vl)
|
||||
for i in range(ns, ns+nl):
|
||||
underlines = [" "] * max(ns + nl, vs + vl)
|
||||
for i in range(ns, ns + nl):
|
||||
underlines[i] = '-'
|
||||
for i in range(vs, vs+vl):
|
||||
for i in range(vs, vs + vl):
|
||||
underlines[i] = '~'
|
||||
|
||||
print " %s" % url
|
||||
|
||||
@@ -22,12 +22,12 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import sys
|
||||
import spack
|
||||
import spack.url
|
||||
|
||||
description = "Inspect urls used by packages in spack."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-c', '--color', action='store_true',
|
||||
@@ -53,6 +53,7 @@ def urls(parser, args):
|
||||
|
||||
for url in sorted(urls):
|
||||
if args.color or args.extrapolation:
|
||||
print spack.url.color_url(url, subs=args.extrapolation, errors=True)
|
||||
print spack.url.color_url(
|
||||
url, subs=args.extrapolation, errors=True)
|
||||
else:
|
||||
print url
|
||||
|
||||
@@ -25,13 +25,15 @@
|
||||
import argparse
|
||||
import spack.modules
|
||||
|
||||
description ="Add package to environment using dotkit."
|
||||
description = "Add package to environment using dotkit."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
"""Parser is only constructed so that this prints a nice help
|
||||
message with -h. """
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.')
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help='Spec of package to use with dotkit.')
|
||||
|
||||
|
||||
def use(parser, args):
|
||||
|
||||
@@ -22,15 +22,16 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import os
|
||||
from llnl.util.tty.colify import colify
|
||||
import llnl.util.tty as tty
|
||||
import spack
|
||||
|
||||
description ="List available versions of a package"
|
||||
description = "List available versions of a package"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for')
|
||||
subparser.add_argument('package', metavar='PACKAGE',
|
||||
help='Package to list versions for')
|
||||
|
||||
|
||||
def versions(parser, args):
|
||||
|
||||
lib/spack/spack/cmd/view.py (new file, 295 lines)
@@ -0,0 +1,295 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License (as published by
|
||||
# the Free Software Foundation) version 2.1 dated February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with this program; if not, write to the Free Software Foundation,
|
||||
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
'''Produce a "view" of a Spack DAG.
|
||||
|
||||
A "view" is file hierarchy representing the union of a number of
|
||||
Spack-installed package file hierarchies. The union is formed from:
|
||||
|
||||
- specs resolved from the package names given by the user (the seeds)
|
||||
|
||||
- all dependencies of the seeds unless user specifies `--no-dependencies`
|
||||
|
||||
- less any specs with names matching the regular expressions given by
|
||||
`--exclude`
|
||||
|
||||
The `view` can be built and torn down via a number of methods (the "actions"):
|
||||
|
||||
- symlink :: a file system view which is a directory hierarchy that is
|
||||
the union of the hierarchies of the installed packages in the DAG
|
||||
where installed files are referenced via symlinks.
|
||||
|
||||
- hardlink :: like the symlink view but hardlinks are used.
|
||||
|
||||
- statlink :: a view producing a status report of a symlink or
|
||||
hardlink view.
|
||||
|
||||
The file system view concept is inspired by Nix, implemented by
|
||||
brett.viren@gmail.com ca 2016.
|
||||
|
||||
'''
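# Example invocations (illustrative only; options and actions follow the
# parser defined below, but the paths, specs and output are made up and will
# depend on the installation):
#
#   spack view symlink /path/to/view python          # link python + dependencies
#   spack view -d false hardlink /path/to/view hdf5  # seed spec only, hard links
#   spack view -e '^boost' add /path/to/view mypkg   # skip specs matching regex
#   spack view check /path/to/view python            # report view status
#   spack view rm /path/to/view python               # unlink, prune empty dirs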
|
||||
# Implementation notes:
|
||||
#
|
||||
# This is implemented as a visitor pattern on the set of package specs.
|
||||
#
|
||||
# The command line ACTION maps to a visitor_*() function which takes
|
||||
# the set of package specs and any args which may be specific to the
|
||||
# ACTION.
|
||||
#
|
||||
# To add a new view:
|
||||
# 1. add a new cmd line args sub parser ACTION
|
||||
# 2. add any action-specific options/arguments, most likely a list of specs.
|
||||
# 3. add a visitor_MYACTION() function
|
||||
# 4. add any visitor_MYALIAS assignments to match any command line aliases
|
||||
|
||||
import os
|
||||
import re
|
||||
import spack
|
||||
import spack.cmd
|
||||
import llnl.util.tty as tty
|
||||
|
||||
description = "Produce a single-rooted directory view of a spec."
|
||||
|
||||
|
||||
def setup_parser(sp):
|
||||
setup_parser.parser = sp
|
||||
|
||||
sp.add_argument(
|
||||
'-v', '--verbose', action='store_true', default=False,
|
||||
help="Display verbose output.")
|
||||
sp.add_argument(
|
||||
'-e', '--exclude', action='append', default=[],
|
||||
help="Exclude packages with names matching the given regex pattern.")
|
||||
sp.add_argument(
|
||||
'-d', '--dependencies', choices=['true', 'false', 'yes', 'no'],
|
||||
default='true',
|
||||
help="Follow dependencies.")
|
||||
|
||||
ssp = sp.add_subparsers(metavar='ACTION', dest='action')
|
||||
|
||||
specs_opts = dict(metavar='spec', nargs='+',
|
||||
help="Seed specs of the packages to view.")
|
||||
|
||||
# The action parameterizes the command but in keeping with Spack
|
||||
# patterns we make it a subcommand.
|
||||
file_system_view_actions = [
|
||||
ssp.add_parser(
|
||||
'symlink', aliases=['add', 'soft'],
|
||||
help='Add package files to a filesystem view via symbolic links.'),
|
||||
ssp.add_parser(
|
||||
'hardlink', aliases=['hard'],
|
||||
help='Add package files to a filesystem view via hard links.'),
|
||||
ssp.add_parser(
|
||||
'remove', aliases=['rm'],
|
||||
help='Remove packages from a filesystem view.'),
|
||||
ssp.add_parser(
|
||||
'statlink', aliases=['status', 'check'],
|
||||
help='Check status of packages in a filesystem view.')
|
||||
]
|
||||
# All these options and arguments are common to every action.
|
||||
for act in file_system_view_actions:
|
||||
act.add_argument('path', nargs=1,
|
||||
help="Path to file system view directory.")
|
||||
act.add_argument('specs', **specs_opts)
|
||||
|
||||
return
|
||||
|
||||
|
||||
def assuredir(path):
|
||||
'Assure path exists as a directory'
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
|
||||
|
||||
def relative_to(prefix, path):
|
||||
'Return end of `path` relative to `prefix`'
|
||||
assert 0 == path.find(prefix)
|
||||
reldir = path[len(prefix):]
|
||||
if reldir.startswith('/'):
|
||||
reldir = reldir[1:]
|
||||
return reldir
|
||||
|
||||
|
||||
def transform_path(spec, path, prefix=None):
|
||||
'Return the relative path corresponding to the given path in spec.prefix'
|
||||
if os.path.isabs(path):
|
||||
path = relative_to(spec.prefix, path)
|
||||
subdirs = path.split(os.path.sep)
|
||||
if subdirs[0] == '.spack':
|
||||
lst = ['.spack', spec.name] + subdirs[1:]
|
||||
path = os.path.join(*lst)
|
||||
if prefix:
|
||||
path = os.path.join(prefix, path)
|
||||
return path
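# Worked example (doctest-style illustration; the install prefix and file
# names here are hypothetical):
#
#   >>> spec.name, spec.prefix
#   ('libelf', '/spack/opt/gcc-4.9.2/libelf-0.8.13-abcdef')
#   >>> transform_path(spec, spec.prefix + '/include/libelf.h', '/views/tools')
#   '/views/tools/include/libelf.h'
#   >>> transform_path(spec, '.spack/spec.yaml', '/views/tools')
#   '/views/tools/.spack/libelf/spec.yaml'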
|
||||
|
||||
|
||||
def purge_empty_directories(path):
|
||||
'''Ascend up from the leaves accessible from `path`
|
||||
and remove empty directories.'''
|
||||
for dirpath, subdirs, files in os.walk(path, topdown=False):
|
||||
for sd in subdirs:
|
||||
sdp = os.path.join(dirpath, sd)
|
||||
try:
|
||||
os.rmdir(sdp)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
def filter_exclude(specs, exclude):
|
||||
'Filter specs given sequence of exclude regex'
|
||||
to_exclude = [re.compile(e) for e in exclude]
|
||||
|
||||
def exclude(spec):
|
||||
for e in to_exclude:
|
||||
if e.match(spec.name):
|
||||
return True
|
||||
return False
|
||||
return [s for s in specs if not exclude(s)]
|
||||
|
||||
|
||||
def flatten(seeds, descend=True):
|
||||
'Normalize and flatten seed specs and descend the hierarchy'
|
||||
flat = set()
|
||||
for spec in seeds:
|
||||
if not descend:
|
||||
flat.add(spec)
|
||||
continue
|
||||
flat.update(spec.normalized().traverse())
|
||||
return flat
|
||||
|
||||
|
||||
def check_one(spec, path, verbose=False):
|
||||
'Check status of view in path against spec'
|
||||
dotspack = os.path.join(path, '.spack', spec.name)
|
||||
if os.path.exists(os.path.join(dotspack)):
|
||||
tty.info('Package in view: "%s"' % spec.name)
|
||||
return
|
||||
tty.info('Package not in view: "%s"' % spec.name)
|
||||
return
|
||||
|
||||
|
||||
def remove_one(spec, path, verbose=False):
|
||||
'Remove any files found in `spec` from `path` and purge empty directories.'
|
||||
|
||||
if not os.path.exists(path):
|
||||
return # done, short circuit
|
||||
|
||||
dotspack = transform_path(spec, '.spack', path)
|
||||
if not os.path.exists(dotspack):
|
||||
if verbose:
|
||||
tty.info('Skipping nonexistent package: "%s"' % spec.name)
|
||||
return
|
||||
|
||||
if verbose:
|
||||
tty.info('Removing package: "%s"' % spec.name)
|
||||
for dirpath, dirnames, filenames in os.walk(spec.prefix):
|
||||
if not filenames:
|
||||
continue
|
||||
targdir = transform_path(spec, dirpath, path)
|
||||
for fname in filenames:
|
||||
dst = os.path.join(targdir, fname)
|
||||
if not os.path.exists(dst):
|
||||
continue
|
||||
os.unlink(dst)
|
||||
|
||||
|
||||
def link_one(spec, path, link=os.symlink, verbose=False):
|
||||
'Link all files in `spec` into directory `path`.'
|
||||
|
||||
dotspack = transform_path(spec, '.spack', path)
|
||||
if os.path.exists(dotspack):
|
||||
tty.warn('Skipping existing package: "%s"' % spec.name)
|
||||
return
|
||||
|
||||
if verbose:
|
||||
tty.info('Linking package: "%s"' % spec.name)
|
||||
for dirpath, dirnames, filenames in os.walk(spec.prefix):
|
||||
if not filenames:
|
||||
continue # avoid explicitly making empty dirs
|
||||
|
||||
targdir = transform_path(spec, dirpath, path)
|
||||
assuredir(targdir)
|
||||
|
||||
for fname in filenames:
|
||||
src = os.path.join(dirpath, fname)
|
||||
dst = os.path.join(targdir, fname)
|
||||
if os.path.exists(dst):
|
||||
if '.spack' in dst.split(os.path.sep):
|
||||
continue # silence these
|
||||
tty.warn("Skipping existing file: %s" % dst)
|
||||
continue
|
||||
link(src, dst)
|
||||
|
||||
|
||||
def visitor_symlink(specs, args):
|
||||
'Symlink all files found in specs'
|
||||
path = args.path[0]
|
||||
assuredir(path)
|
||||
for spec in specs:
|
||||
link_one(spec, path, verbose=args.verbose)
|
||||
visitor_add = visitor_symlink
|
||||
visitor_soft = visitor_symlink
|
||||
|
||||
|
||||
def visitor_hardlink(specs, args):
|
||||
'Hardlink all files found in specs'
|
||||
path = args.path[0]
|
||||
assuredir(path)
|
||||
for spec in specs:
|
||||
link_one(spec, path, os.link, verbose=args.verbose)
|
||||
visitor_hard = visitor_hardlink
|
||||
|
||||
|
||||
def visitor_remove(specs, args):
|
||||
'Remove all files and directories found in specs from args.path'
|
||||
path = args.path[0]
|
||||
for spec in specs:
|
||||
remove_one(spec, path, verbose=args.verbose)
|
||||
purge_empty_directories(path)
|
||||
visitor_rm = visitor_remove
|
||||
|
||||
|
||||
def visitor_statlink(specs, args):
|
||||
'Give status of view in args.path relative to specs'
|
||||
path = args.path[0]
|
||||
for spec in specs:
|
||||
check_one(spec, path, verbose=args.verbose)
|
||||
visitor_status = visitor_statlink
|
||||
visitor_check = visitor_statlink
|
||||
|
||||
|
||||
def view(parser, args):
|
||||
'Produce a view of a set of packages.'
|
||||
|
||||
# Process common args
|
||||
seeds = [spack.cmd.disambiguate_spec(s) for s in args.specs]
|
||||
specs = flatten(seeds, args.dependencies.lower() in ['yes', 'true'])
|
||||
specs = filter_exclude(specs, args.exclude)
|
||||
|
||||
# Execute the visitation.
|
||||
try:
|
||||
visitor = globals()['visitor_' + args.action]
|
||||
except KeyError:
|
||||
tty.error('Unknown action: "%s"' % args.action)
|
||||
visitor(specs, args)
|
||||
@@ -25,21 +25,20 @@
|
||||
import os
|
||||
import re
|
||||
import itertools
|
||||
from datetime import datetime
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
from llnl.util.filesystem import join_path
|
||||
|
||||
import spack.error
|
||||
import spack.spec
|
||||
import spack.architecture
|
||||
from spack.util.multiproc import parmap
|
||||
from spack.util.executable import *
|
||||
from spack.util.environment import get_path
|
||||
from spack.version import Version
|
||||
|
||||
__all__ = ['Compiler', 'get_compiler_version']
|
||||
|
||||
|
||||
def _verify_executables(*paths):
|
||||
for path in paths:
|
||||
if not os.path.isfile(path) and os.access(path, os.X_OK):
|
||||
@@ -48,8 +47,9 @@ def _verify_executables(*paths):
|
||||
|
||||
_version_cache = {}
|
||||
|
||||
|
||||
def get_compiler_version(compiler_path, version_arg, regex='(.*)'):
|
||||
if not compiler_path in _version_cache:
|
||||
if compiler_path not in _version_cache:
|
||||
compiler = Executable(compiler_path)
|
||||
output = compiler(version_arg, output=str, error=str)
|
||||
|
||||
@@ -107,22 +107,41 @@ def f77_rpath_arg(self):
|
||||
@property
|
||||
def fc_rpath_arg(self):
|
||||
return '-Wl,-rpath,'
|
||||
# Cray PrgEnv name that can be used to load this compiler
|
||||
PrgEnv = None
|
||||
# Name of module used to switch versions of this compiler
|
||||
PrgEnv_compiler = None
|
||||
|
||||
|
||||
def __init__(self, cspec, cc, cxx, f77, fc):
|
||||
def __init__(self, cspec, operating_system,
|
||||
paths, modules=[], alias=None, **kwargs):
|
||||
def check(exe):
|
||||
if exe is None:
|
||||
return None
|
||||
_verify_executables(exe)
|
||||
return exe
|
||||
|
||||
self.cc = check(cc)
|
||||
self.cxx = check(cxx)
|
||||
self.f77 = check(f77)
|
||||
self.fc = check(fc)
|
||||
self.cc = check(paths[0])
|
||||
self.cxx = check(paths[1])
|
||||
if len(paths) > 2:
|
||||
self.f77 = check(paths[2])
|
||||
if len(paths) == 3:
|
||||
self.fc = self.f77
|
||||
else:
|
||||
self.fc = check(paths[3])
|
||||
|
||||
# Unfortunately have to make sure these params are accepted
|
||||
# in the same order they are returned by sorted(flags)
|
||||
# in compilers/__init__.py
|
||||
self.flags = {}
|
||||
for flag in spack.spec.FlagMap.valid_compiler_flags():
|
||||
value = kwargs.get(flag, None)
|
||||
if value is not None:
|
||||
self.flags[flag] = value.split()
|
||||
|
||||
self.operating_system = operating_system
|
||||
self.spec = cspec
|
||||
|
||||
self.modules = modules
|
||||
self.alias = alias
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
@@ -133,31 +152,30 @@ def version(self):
|
||||
@property
|
||||
def openmp_flag(self):
|
||||
# If it is not overridden, assume it is not supported and warn the user
|
||||
tty.die("The compiler you have chosen does not currently support OpenMP.",
|
||||
"If you think it should, please edit the compiler subclass and",
|
||||
"submit a pull request or issue.")
|
||||
|
||||
tty.die(
|
||||
"The compiler you have chosen does not currently support OpenMP.",
|
||||
"If you think it should, please edit the compiler subclass and",
|
||||
"submit a pull request or issue.")
|
||||
|
||||
# This property should be overridden in the compiler subclass if
|
||||
# C++11 is supported by that compiler
|
||||
@property
|
||||
def cxx11_flag(self):
|
||||
# If it is not overridden, assume it is not supported and warn the user
|
||||
tty.die("The compiler you have chosen does not currently support C++11.",
|
||||
"If you think it should, please edit the compiler subclass and",
|
||||
"submit a pull request or issue.")
|
||||
|
||||
tty.die(
|
||||
"The compiler you have chosen does not currently support C++11.",
|
||||
"If you think it should, please edit the compiler subclass and",
|
||||
"submit a pull request or issue.")
|
||||
|
||||
# This property should be overridden in the compiler subclass if
|
||||
# C++14 is supported by that compiler
|
||||
@property
|
||||
def cxx14_flag(self):
|
||||
# If it is not overridden, assume it is not supported and warn the user
|
||||
tty.die("The compiler you have chosen does not currently support C++14.",
|
||||
"If you think it should, please edit the compiler subclass and",
|
||||
"submit a pull request or issue.")
|
||||
|
||||
|
||||
tty.die(
|
||||
"The compiler you have chosen does not currently support C++14.",
|
||||
"If you think it should, please edit the compiler subclass and",
|
||||
"submit a pull request or issue.")
|
||||
|
||||
#
|
||||
# Compiler classes have methods for querying the version of
|
||||
@@ -166,7 +184,6 @@ def cxx14_flag(self):
|
||||
# Compiler *instances* are just data objects, and can only be
|
||||
# constructed from an actual set of executables.
|
||||
#
|
||||
|
||||
@classmethod
|
||||
def default_version(cls, cc):
|
||||
"""Override just this to override all compiler version functions."""
|
||||
@@ -188,7 +205,6 @@ def f77_version(cls, f77):
|
||||
def fc_version(cls, fc):
|
||||
return cls.default_version(fc)
|
||||
|
||||
|
||||
@classmethod
|
||||
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
|
||||
"""Finds compilers in the paths supplied.
|
||||
@@ -234,91 +250,46 @@ def check(key):
|
||||
version = detect_version(full_path)
|
||||
return (version, prefix, suffix, full_path)
|
||||
except ProcessError, e:
|
||||
tty.debug("Couldn't get version for compiler %s" % full_path, e)
|
||||
tty.debug(
|
||||
"Couldn't get version for compiler %s" % full_path, e)
|
||||
return None
|
||||
except Exception, e:
|
||||
# Catching "Exception" here is fine because it just
|
||||
# means something went wrong running a candidate executable.
|
||||
tty.debug("Error while executing candidate compiler %s" % full_path,
|
||||
"%s: %s" %(e.__class__.__name__, e))
|
||||
tty.debug("Error while executing candidate compiler %s"
|
||||
% full_path,
|
||||
"%s: %s" % (e.__class__.__name__, e))
|
||||
return None
|
||||
|
||||
successful = [key for key in parmap(check, checks) if key is not None]
|
||||
successful = [k for k in parmap(check, checks) if k is not None]
|
||||
|
||||
# The 'successful' list is ordered like the input paths.
|
||||
# Reverse it here so that the dict creation (last insert wins)
|
||||
# does not spoil the intended precedence.
|
||||
successful.reverse()
|
||||
return dict(((v, p, s), path) for v, p, s, path in successful)
|
||||
|
||||
@classmethod
|
||||
def find(cls, *path):
|
||||
"""Try to find this type of compiler in the user's
|
||||
environment. For each set of compilers found, this returns
|
||||
compiler objects with the cc, cxx, f77, fc paths and the
|
||||
version filled in.
|
||||
|
||||
This will search for compilers with the names in cc_names,
|
||||
cxx_names, etc. and it will group them if they have common
|
||||
prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
|
||||
be grouped with g++-mp-4.7 and gfortran-mp-4.7.
|
||||
"""
|
||||
dicts = parmap(
|
||||
lambda t: cls._find_matches_in_path(*t),
|
||||
[(cls.cc_names, cls.cc_version) + tuple(path),
|
||||
(cls.cxx_names, cls.cxx_version) + tuple(path),
|
||||
(cls.f77_names, cls.f77_version) + tuple(path),
|
||||
(cls.fc_names, cls.fc_version) + tuple(path)])
|
||||
|
||||
all_keys = set()
|
||||
for d in dicts:
|
||||
all_keys.update(d)
|
||||
|
||||
compilers = {}
|
||||
for k in all_keys:
|
||||
ver, pre, suf = k
|
||||
|
||||
# Skip compilers with unknown version.
|
||||
if ver == 'unknown':
|
||||
continue
|
||||
|
||||
paths = tuple(pn[k] if k in pn else None for pn in dicts)
|
||||
spec = spack.spec.CompilerSpec(cls.name, ver)
|
||||
|
||||
if ver in compilers:
|
||||
prev = compilers[ver]
|
||||
|
||||
# prefer the one with more compilers.
|
||||
prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
|
||||
newcount = len([p for p in paths if p is not None])
|
||||
prevcount = len([p for p in prev_paths if p is not None])
|
||||
|
||||
# Don't add if it's not an improvement over prev compiler.
|
||||
if newcount <= prevcount:
|
||||
continue
|
||||
|
||||
compilers[ver] = cls(spec, *paths)
|
||||
|
||||
return list(compilers.values())
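# Illustration of the grouping described above (all values hypothetical):
# _find_matches_in_path() keys each detected executable by a
# (version, prefix, suffix) tuple, so related tools line up into one toolchain.
#
#   key = ('4.7', '', '-mp-4.7')
#   dicts = [{key: '/opt/local/bin/gcc-mp-4.7'},       # cc candidates
#            {key: '/opt/local/bin/g++-mp-4.7'},       # cxx candidates
#            {key: '/opt/local/bin/gfortran-mp-4.7'},  # f77 candidates
#            {key: '/opt/local/bin/gfortran-mp-4.7'}]  # fc candidates
#   paths = tuple(d.get(key) for d in dicts)
#   # -> one gcc@4.7 Compiler instance built from these four paths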
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
"""Return a string representation of the compiler toolchain."""
|
||||
return self.__str__()
|
||||
|
||||
|
||||
def __str__(self):
|
||||
"""Return a string representation of the compiler toolchain."""
|
||||
return "%s(%s)" % (
|
||||
self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc))))
|
||||
self.name, '\n '.join((str(s) for s in (
|
||||
self.cc, self.cxx, self.f77, self.fc, self.modules,
|
||||
str(self.operating_system)))))
|
||||
|
||||
|
||||
class CompilerAccessError(spack.error.SpackError):
|
||||
|
||||
def __init__(self, path):
|
||||
super(CompilerAccessError, self).__init__(
|
||||
"'%s' is not a valid compiler." % path)
|
||||
|
||||
|
||||
class InvalidCompilerError(spack.error.SpackError):
|
||||
|
||||
def __init__(self):
|
||||
super(InvalidCompilerError, self).__init__(
|
||||
"Compiler has no executables.")
|
||||
|
||||
@@ -26,10 +26,9 @@
|
||||
system and configuring Spack to use multiple compilers.
|
||||
"""
|
||||
import imp
|
||||
import os
|
||||
import platform
|
||||
|
||||
from llnl.util.lang import memoized, list_modules
|
||||
from llnl.util.lang import list_modules
|
||||
from llnl.util.filesystem import join_path
|
||||
|
||||
import spack
|
||||
@@ -38,14 +37,12 @@
|
||||
import spack.config
|
||||
import spack.architecture
|
||||
|
||||
from spack.util.multiproc import parmap
|
||||
from spack.compiler import Compiler
|
||||
from spack.util.executable import which
|
||||
from spack.util.naming import mod_to_class
|
||||
from spack.util.environment import get_path
|
||||
|
||||
_imported_compilers_module = 'spack.compilers'
|
||||
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
|
||||
_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
|
||||
_other_instance_vars = ['modules', 'operating_system']
|
||||
_cache_config_file = []
|
||||
|
||||
# TODO: customize order in config file
|
||||
if platform.system() == 'Darwin':
|
||||
@@ -64,107 +61,111 @@ def converter(cspec_like, *args, **kwargs):
|
||||
|
||||
def _to_dict(compiler):
|
||||
"""Return a dict version of compiler suitable to insert in YAML."""
|
||||
return {
|
||||
str(compiler.spec) : dict(
|
||||
(attr, getattr(compiler, attr, None))
|
||||
for attr in _required_instance_vars)
|
||||
}
|
||||
d = {}
|
||||
d['spec'] = str(compiler.spec)
|
||||
d['paths'] = dict((attr, getattr(compiler, attr, None))
|
||||
for attr in _path_instance_vars)
|
||||
d['operating_system'] = str(compiler.operating_system)
|
||||
d['modules'] = compiler.modules if compiler.modules else []
|
||||
|
||||
if compiler.alias:
|
||||
d['alias'] = compiler.alias
|
||||
|
||||
return {'compiler': d}
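# Example of the dictionary this produces for one compiler (all values are
# hypothetical; 'paths' always carries the cc/cxx/f77/fc entries):
#
#   {'compiler': {
#       'spec': 'gcc@4.9.2',
#       'paths': {'cc': '/usr/bin/gcc-4.9',
#                 'cxx': '/usr/bin/g++-4.9',
#                 'f77': '/usr/bin/gfortran-4.9',
#                 'fc': '/usr/bin/gfortran-4.9'},
#       'operating_system': 'ubuntu14',
#       'modules': []}}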
|
||||
|
||||
|
||||
def get_compiler_config(arch=None, scope=None):
|
||||
def get_compiler_config(scope=None, init_config=True):
|
||||
"""Return the compiler configuration for the specified architecture.
|
||||
"""
|
||||
# Check whether we're on a front-end (native) architecture.
|
||||
my_arch = spack.architecture.sys_type()
|
||||
if arch is None:
|
||||
arch = my_arch
|
||||
|
||||
def init_compiler_config():
|
||||
"""Compiler search used when Spack has no compilers."""
|
||||
config[arch] = {}
|
||||
compilers = find_compilers(*get_path('PATH'))
|
||||
compilers = find_compilers()
|
||||
compilers_dict = []
|
||||
for compiler in compilers:
|
||||
config[arch].update(_to_dict(compiler))
|
||||
spack.config.update_config('compilers', config, scope=scope)
|
||||
compilers_dict.append(_to_dict(compiler))
|
||||
spack.config.update_config('compilers', compilers_dict, scope=scope)
|
||||
|
||||
config = spack.config.get_config('compilers', scope=scope)
|
||||
|
||||
# Update the configuration if there are currently no compilers
|
||||
# configured. Avoid updating automatically if there ARE site
|
||||
# compilers configured but no user ones.
|
||||
if arch == my_arch and arch not in config:
|
||||
if not config and init_config:
|
||||
if scope is None:
|
||||
# We know no compilers were configured in any scope.
|
||||
init_compiler_config()
|
||||
config = spack.config.get_config('compilers', scope=scope)
|
||||
elif scope == 'user':
|
||||
# Check the site config and update the user config if
|
||||
# nothing is configured at the site level.
|
||||
site_config = spack.config.get_config('compilers', scope='site')
|
||||
if not site_config:
|
||||
init_compiler_config()
|
||||
|
||||
return config[arch] if arch in config else {}
|
||||
config = spack.config.get_config('compilers', scope=scope)
|
||||
return config
|
||||
elif config:
|
||||
return config
|
||||
else:
|
||||
return [] # Return empty list which we will later append to.
|
||||
|
||||
|
||||
def add_compilers_to_config(compilers, arch=None, scope=None):
|
||||
def add_compilers_to_config(compilers, scope=None, init_config=True):
|
||||
"""Add compilers to the config for the specified architecture.
|
||||
|
||||
Arguments:
|
||||
- compilers: a list of Compiler objects.
|
||||
- arch: arch to add compilers for.
|
||||
- scope: configuration scope to modify.
|
||||
"""
|
||||
if arch is None:
|
||||
arch = spack.architecture.sys_type()
|
||||
|
||||
compiler_config = get_compiler_config(arch, scope)
|
||||
compiler_config = get_compiler_config(scope, init_config)
|
||||
for compiler in compilers:
|
||||
compiler_config[str(compiler.spec)] = dict(
|
||||
(c, getattr(compiler, c, "None"))
|
||||
for c in _required_instance_vars)
|
||||
|
||||
update = { arch : compiler_config }
|
||||
spack.config.update_config('compilers', update, scope)
|
||||
compiler_config.append(_to_dict(compiler))
|
||||
global _cache_config_file
|
||||
_cache_config_file = compiler_config
|
||||
spack.config.update_config('compilers', compiler_config, scope)
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def remove_compiler_from_config(compiler_spec, arch=None, scope=None):
|
||||
def remove_compiler_from_config(compiler_spec, scope=None):
|
||||
"""Remove compilers from the config, by spec.
|
||||
|
||||
Arguments:
|
||||
- compiler_specs: a list of CompilerSpec objects.
|
||||
- arch: arch to add compilers for.
|
||||
- scope: configuration scope to modify.
|
||||
"""
|
||||
if arch is None:
|
||||
arch = spack.architecture.sys_type()
|
||||
# Need a better way for this
|
||||
global _cache_config_file
|
||||
|
||||
compiler_config = get_compiler_config(arch, scope)
|
||||
del compiler_config[str(compiler_spec)]
|
||||
update = { arch : compiler_config }
|
||||
compiler_config = get_compiler_config(scope)
|
||||
config_length = len(compiler_config)
|
||||
|
||||
spack.config.update_config('compilers', update, scope)
|
||||
filtered_compiler_config = [
|
||||
comp for comp in compiler_config
|
||||
if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
|
||||
|
||||
# Update the cache for changes
|
||||
_cache_config_file = filtered_compiler_config
|
||||
if len(filtered_compiler_config) == config_length: # No items removed
|
||||
CompilerSpecInsufficientlySpecificError(compiler_spec)
|
||||
spack.config.update_config('compilers', filtered_compiler_config, scope)
|
||||
|
||||
|
||||
def all_compilers_config(arch=None, scope=None):
|
||||
def all_compilers_config(scope=None, init_config=True):
|
||||
"""Return a set of specs for all the compiler versions currently
|
||||
available to build with. These are instances of CompilerSpec.
|
||||
"""
|
||||
# Get compilers for this architecture.
|
||||
arch_config = get_compiler_config(arch, scope)
|
||||
|
||||
# Merge 'all' compilers with arch-specific ones.
|
||||
# Arch-specific compilers have higher precedence.
|
||||
merged_config = get_compiler_config('all', scope=scope)
|
||||
merged_config = spack.config._merge_yaml(merged_config, arch_config)
|
||||
|
||||
return merged_config
|
||||
# Create a cache of the config file so we don't load all the time.
|
||||
global _cache_config_file
|
||||
if not _cache_config_file:
|
||||
_cache_config_file = get_compiler_config(scope, init_config)
|
||||
return _cache_config_file
|
||||
else:
|
||||
return _cache_config_file
|
||||
|
||||
|
||||
def all_compilers(arch=None, scope=None):
|
||||
def all_compilers(scope=None, init_config=True):
|
||||
# Return compiler specs from the merged config.
|
||||
return [spack.spec.CompilerSpec(s)
|
||||
for s in all_compilers_config(arch, scope)]
|
||||
return [spack.spec.CompilerSpec(s['compiler']['spec'])
|
||||
for s in all_compilers_config(scope, init_config)]
|
||||
|
||||
|
||||
def default_compiler():
|
||||
@@ -179,36 +180,18 @@ def default_compiler():
|
||||
return sorted(versions)[-1]
|
||||
|
||||
|
||||
def find_compilers(*path):
|
||||
def find_compilers(*paths):
|
||||
"""Return a list of compilers found in the suppied paths.
|
||||
This invokes the find() method for each Compiler class,
|
||||
and appends the compilers detected to a list.
|
||||
This invokes the find_compilers() method for each operating
|
||||
system associated with the host platform, and appends
|
||||
the compilers detected to a list.
|
||||
"""
|
||||
# Make sure path elements exist, and include /bin directories
|
||||
# under prefixes.
|
||||
filtered_path = []
|
||||
for p in path:
|
||||
# Eliminate symlinks and just take the real directories.
|
||||
p = os.path.realpath(p)
|
||||
if not os.path.isdir(p):
|
||||
continue
|
||||
filtered_path.append(p)
|
||||
|
||||
# Check for a bin directory, add it if it exists
|
||||
bin = join_path(p, 'bin')
|
||||
if os.path.isdir(bin):
|
||||
filtered_path.append(os.path.realpath(bin))
|
||||
|
||||
# Once the paths are cleaned up, do a search for each type of
|
||||
# compiler. We can spawn a bunch of parallel searches to reduce
|
||||
# the overhead of spelunking all these directories.
|
||||
types = all_compiler_types()
|
||||
compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)
|
||||
|
||||
# ensure all the version calls we made are cached in the parent
|
||||
# process, as well. This speeds up Spack a lot.
|
||||
clist = reduce(lambda x,y: x+y, compiler_lists)
|
||||
return clist
|
||||
# Find compilers for each operating system class
|
||||
oss = all_os_classes()
|
||||
compiler_lists = []
|
||||
for o in oss:
|
||||
compiler_lists.extend(o.find_compilers(*paths))
|
||||
return compiler_lists
|
||||
|
||||
|
||||
def supported_compilers():
|
||||
@@ -227,47 +210,85 @@ def supported(compiler_spec):
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def find(compiler_spec, arch=None, scope=None):
|
||||
def find(compiler_spec, scope=None):
|
||||
"""Return specs of available compilers that match the supplied
|
||||
compiler spec. Return an list if nothing found."""
|
||||
return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)]
|
||||
return [c for c in all_compilers(scope) if c.satisfies(compiler_spec)]
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def compilers_for_spec(compiler_spec, arch=None, scope=None):
|
||||
def compilers_for_spec(compiler_spec, scope=None, **kwargs):
|
||||
"""This gets all compilers that satisfy the supplied CompilerSpec.
|
||||
Returns an empty list if none are found.
|
||||
"""
|
||||
config = all_compilers_config(arch, scope)
|
||||
platform = kwargs.get("platform", None)
|
||||
config = all_compilers_config(scope)
|
||||
|
||||
def get_compiler(cspec):
|
||||
items = config[str(cspec)]
|
||||
def get_compilers(cspec):
|
||||
compilers = []
|
||||
|
||||
if not all(n in items for n in _required_instance_vars):
|
||||
raise InvalidCompilerConfigurationError(cspec)
|
||||
for items in config:
|
||||
if items['compiler']['spec'] != str(cspec):
|
||||
continue
|
||||
items = items['compiler']
|
||||
|
||||
cls = class_for_compiler_name(cspec.name)
|
||||
compiler_paths = []
|
||||
for c in _required_instance_vars:
|
||||
compiler_path = items[c]
|
||||
if compiler_path != "None":
|
||||
compiler_paths.append(compiler_path)
|
||||
if not ('paths' in items and
|
||||
all(n in items['paths'] for n in _path_instance_vars)):
|
||||
raise InvalidCompilerConfigurationError(cspec)
|
||||
|
||||
cls = class_for_compiler_name(cspec.name)
|
||||
|
||||
compiler_paths = []
|
||||
for c in _path_instance_vars:
|
||||
compiler_path = items['paths'][c]
|
||||
if compiler_path != "None":
|
||||
compiler_paths.append(compiler_path)
|
||||
else:
|
||||
compiler_paths.append(None)
|
||||
|
||||
mods = items.get('modules')
|
||||
if mods == 'None':
|
||||
mods = []
|
||||
|
||||
if 'operating_system' in items:
|
||||
os = spack.architecture._operating_system_from_dict(
|
||||
items['operating_system'], platform)
|
||||
else:
|
||||
compiler_paths.append(None)
|
||||
os = None
|
||||
|
||||
return cls(cspec, *compiler_paths)
|
||||
alias = items['alias'] if 'alias' in items else None
|
||||
|
||||
matches = find(compiler_spec, arch, scope)
|
||||
return [get_compiler(cspec) for cspec in matches]
|
||||
flags = {}
|
||||
for f in spack.spec.FlagMap.valid_compiler_flags():
|
||||
if f in items:
|
||||
flags[f] = items[f]
|
||||
|
||||
compilers.append(
|
||||
cls(cspec, os, compiler_paths, mods, alias, **flags))
|
||||
|
||||
return compilers
|
||||
|
||||
matches = set(find(compiler_spec, scope))
|
||||
compilers = []
|
||||
for cspec in matches:
|
||||
compilers.extend(get_compilers(cspec))
|
||||
return compilers
|
||||
|
||||
|
||||
@_auto_compiler_spec
|
||||
def compiler_for_spec(compiler_spec):
|
||||
def compiler_for_spec(compiler_spec, arch):
|
||||
"""Get the compiler that satisfies compiler_spec. compiler_spec must
|
||||
be concrete."""
|
||||
operating_system = arch.platform_os
|
||||
assert(compiler_spec.concrete)
|
||||
compilers = compilers_for_spec(compiler_spec)
|
||||
assert(len(compilers) == 1)
|
||||
|
||||
compilers = [
|
||||
c for c in compilers_for_spec(compiler_spec, platform=arch.platform)
|
||||
if c.operating_system == operating_system]
|
||||
if len(compilers) < 1:
|
||||
raise NoCompilerForSpecError(compiler_spec, operating_system)
|
||||
if len(compilers) > 1:
|
||||
raise CompilerSpecInsufficientlySpecificError(compiler_spec)
|
||||
return compilers[0]
|
||||
|
||||
|
||||
@@ -285,18 +306,47 @@ def class_for_compiler_name(compiler_name):
|
||||
return cls
|
||||
|
||||
|
||||
def all_os_classes():
|
||||
"""
|
||||
Return the list of classes for all operating systems available on
|
||||
this platform
|
||||
"""
|
||||
classes = []
|
||||
|
||||
platform = spack.architecture.platform()
|
||||
for os_class in platform.operating_sys.values():
|
||||
classes.append(os_class)
|
||||
|
||||
return classes
|
||||
|
||||
|
||||
def all_compiler_types():
|
||||
return [class_for_compiler_name(c) for c in supported_compilers()]
|
||||
|
||||
|
||||
class InvalidCompilerConfigurationError(spack.error.SpackError):
|
||||
|
||||
def __init__(self, compiler_spec):
|
||||
super(InvalidCompilerConfigurationError, self).__init__(
|
||||
"Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
|
||||
"Compiler configuration must contain entries for all compilers: %s"
|
||||
% _required_instance_vars)
|
||||
% _path_instance_vars)
|
||||
|
||||
|
||||
class NoCompilersError(spack.error.SpackError):
|
||||
def __init__(self):
|
||||
super(NoCompilersError, self).__init__("Spack could not find any compilers!")
|
||||
super(NoCompilersError, self).__init__(
|
||||
"Spack could not find any compilers!")
|
||||
|
||||
|
||||
class NoCompilerForSpecError(spack.error.SpackError):
|
||||
def __init__(self, compiler_spec, target):
|
||||
super(NoCompilerForSpecError, self).__init__(
|
||||
"No compilers for operating system %s satisfy spec %s"
|
||||
% (target, compiler_spec))
|
||||
|
||||
|
||||
class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
|
||||
def __init__(self, compiler_spec):
|
||||
super(CompilerSpecInsufficientlySpecificError, self).__init__(
|
||||
"Multiple compilers satisfy spec %s" % compiler_spec)
|
||||
|
||||
lib/spack/spack/compilers/cce.py (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack.compiler import *
|
||||
|
||||
|
||||
class Cce(Compiler):
|
||||
"""Cray compiler environment compiler."""
|
||||
# Subclasses use possible names of C compiler
|
||||
cc_names = ['cc']
|
||||
|
||||
# Subclasses use possible names of C++ compiler
|
||||
cxx_names = ['CC']
|
||||
|
||||
# Subclasses use possible names of Fortran 77 compiler
|
||||
f77_names = ['ftn']
|
||||
|
||||
# Subclasses use possible names of Fortran 90 compiler
|
||||
fc_names = ['ftn']
|
||||
|
||||
# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
|
||||
suffixes = [r'-mp-\d\.\d']
|
||||
|
||||
PrgEnv = 'PrgEnv-cray'
|
||||
PrgEnv_compiler = 'cce'
|
||||
|
||||
link_paths = {'cc': 'cc',
|
||||
'cxx': 'c++',
|
||||
'f77': 'f77',
|
||||
'fc': 'fc'}
|
||||
|
||||
@classmethod
|
||||
def default_version(cls, comp):
|
||||
return get_compiler_version(comp, '-V', r'[Vv]ersion.*(\d+(\.\d+)+)')
|
||||
@@ -29,6 +29,7 @@
|
||||
import llnl.util.tty as tty
|
||||
from spack.version import ver
|
||||
|
||||
|
||||
class Clang(Compiler):
|
||||
# Subclasses use possible names of C compiler
|
||||
cc_names = ['clang']
|
||||
@@ -43,11 +44,12 @@ class Clang(Compiler):
|
||||
fc_names = []
|
||||
|
||||
# Named wrapper links within spack.build_env_path
|
||||
link_paths = { 'cc' : 'clang/clang',
|
||||
'cxx' : 'clang/clang++',
|
||||
# Use default wrappers for fortran, in case provided in compilers.yaml
|
||||
'f77' : 'f77',
|
||||
'fc' : 'f90' }
|
||||
link_paths = {'cc': 'clang/clang',
|
||||
'cxx': 'clang/clang++',
|
||||
# Use default wrappers for fortran, in case provided in
|
||||
# compilers.yaml
|
||||
'f77': 'f77',
|
||||
'fc': 'f90'}
|
||||
|
||||
@property
|
||||
def is_apple(self):
|
||||
@@ -73,7 +75,7 @@ def cxx11_flag(self):
|
||||
return "-std=c++11"
|
||||
|
||||
@classmethod
|
||||
def default_version(self, comp):
|
||||
def default_version(cls, comp):
|
||||
"""The '--version' option works for clang compilers.
|
||||
On most platforms, output looks like this::
|
||||
|
||||
@@ -99,7 +101,7 @@ def default_version(self, comp):
|
||||
ver = match.group(1) + '-apple'
|
||||
else:
|
||||
# Normal clang compiler versions are left as-is
|
||||
match = re.search(r'^clang version ([^ )]+)', output)
|
||||
match = re.search(r'clang version ([^ )]+)', output)
|
||||
if match:
|
||||
ver = match.group(1)
|
||||
|
||||
|
||||
@@ -26,6 +26,7 @@
|
||||
from spack.compiler import *
|
||||
from spack.version import ver
|
||||
|
||||
|
||||
class Gcc(Compiler):
|
||||
# Subclasses use possible names of C compiler
|
||||
cc_names = ['gcc']
|
||||
@@ -44,10 +45,13 @@ class Gcc(Compiler):
|
||||
suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d']
|
||||
|
||||
# Named wrapper links within spack.build_env_path
|
||||
link_paths = {'cc' : 'gcc/gcc',
|
||||
'cxx' : 'gcc/g++',
|
||||
'f77' : 'gcc/gfortran',
|
||||
'fc' : 'gcc/gfortran' }
|
||||
link_paths = {'cc': 'gcc/gcc',
|
||||
'cxx': 'gcc/g++',
|
||||
'f77': 'gcc/gfortran',
|
||||
'fc': 'gcc/gfortran'}
|
||||
|
||||
PrgEnv = 'PrgEnv-gnu'
|
||||
PrgEnv_compiler = 'gcc'
|
||||
|
||||
@property
|
||||
def openmp_flag(self):
|
||||
@@ -76,7 +80,6 @@ def fc_version(cls, fc):
|
||||
# older gfortran versions don't have simple dumpversion output.
|
||||
r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)')
|
||||
|
||||
|
||||
@classmethod
|
||||
def f77_version(cls, f77):
|
||||
return cls.fc_version(f77)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.