Compare commits: bugfix/env...bugfix/tra
889 commits
.github/ISSUE_TEMPLATE/build_error.yml (8 changes)

@@ -9,7 +9,7 @@ body:
         Thanks for taking the time to report this build failure. To proceed with the report please:
         1. Title the issue `Installation issue: <name-of-the-package>`.
         2. Provide the information required below.

         We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!
   - type: textarea
     id: reproduce
@@ -29,7 +29,9 @@ body:
       description: |
         Please post the error message from spack inside the `<details>` tag below:
       value: |
-        <details><summary>Error message</summary><pre>
+        <details><summary>Error message</summary>
+
+        <pre>
         ...
         </pre></details>
     validations:
@@ -53,7 +55,7 @@ body:
         Please upload the following files:
         * **`spack-build-out.txt`**
         * **`spack-build-env.txt`**

         They should be present in the stage directory of the failing build. Also upload any `config.log` or similar file if one exists.
   - type: markdown
     attributes:
.github/ISSUE_TEMPLATE/test_error.yml (4 changes)

@@ -21,7 +21,9 @@ body:
       description: |
         Please post the error message from spack inside the `<details>` tag below:
       value: |
-        <details><summary>Error message</summary><pre>
+        <details><summary>Error message</summary>
+
+        <pre>
         ...
         </pre></details>
     validations:
.github/workflows/audit.yaml (8 changes)

@@ -19,13 +19,13 @@ jobs:
  package-audits:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: ${{inputs.python_version}}
      - name: Install Python packages
        run: |
-          pip install --upgrade pip six setuptools pytest codecov coverage[toml]
+          pip install --upgrade pip setuptools pytest coverage[toml]
      - name: Package audits (with coverage)
        if: ${{ inputs.with_coverage == 'true' }}
        run: |
@@ -38,7 +38,7 @@ jobs:
        run: |
          . share/spack/setup-env.sh
          $(which spack) audit packages
-      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
        if: ${{ inputs.with_coverage == 'true' }}
        with:
          flags: unittests,linux,audits
.github/workflows/bootstrap.yml (22 changes)

@@ -24,7 +24,7 @@ jobs:
            make patch unzip which xz python3 python3-devel tree \
            cmake bison bison-devel libstdc++-static
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
            make patch unzip xz-utils python3 python3-dev tree \
            cmake bison
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
            make patch unzip xz-utils python3 python3-dev tree
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
            make patch unzip which xz python3 python3-devel tree \
            cmake bison
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
      - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
        run: |
          brew install cmake bison@2.7 tree
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
        run: |
          brew install tree
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
      - name: Bootstrap clingo
        run: |
          set -ex
@@ -204,7 +204,7 @@ jobs:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
      - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
            bzip2 curl file g++ gcc patchelf gfortran git gzip \
            make patch unzip xz-utils python3 python3-dev tree
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
            make patch unzip xz-utils python3 python3-dev tree \
            gawk
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
.github/workflows/build-containers.yml (10 changes)

@@ -45,12 +45,18 @@ jobs:
          [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
          [ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
          [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
-          [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
+          [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
+          [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
+          [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
+          [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
+          [rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
+          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
+          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
    name: Build ${{ matrix.dockerfile[0] }}
    if: github.repository == 'spack/spack'
    steps:
      - name: Checkout
-        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2

      - name: Set Container Tag Normal (Nightly)
        run: |
.github/workflows/ci.yaml (2 changes)

@@ -35,7 +35,7 @@ jobs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
.github/workflows/unit_tests.yaml (37 changes)

@@ -47,10 +47,10 @@ jobs:
            on_develop: false

    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -62,7 +62,7 @@ jobs:
            cmake bison libbison-dev kcov
      - name: Install Python packages
        run: |
-          pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist pytest-cov
+          pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
      - name: Setup git configuration
        run: |
@@ -87,17 +87,17 @@ jobs:
          UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
  # Test shell integration
  shell:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -107,7 +107,7 @@ jobs:
          sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
      - name: Install Python packages
        run: |
-          pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-xdist
+          pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
@@ -118,7 +118,7 @@ jobs:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: shelltests,linux
@@ -133,10 +133,11 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
+          git config --global --add safe.directory /__w/spack/spack
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd spack-test
@@ -151,10 +152,10 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -163,7 +164,7 @@ jobs:
          sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
      - name: Install Python packages
        run: |
-          pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
+          pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
@@ -175,7 +176,7 @@ jobs:
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
        with:
          flags: unittests,linux,clingo
  # Run unit tests on MacOS
@@ -185,16 +186,16 @@ jobs:
      matrix:
        python-version: ["3.10"]
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
        run: |
-          pip install --upgrade pip six setuptools
-          pip install --upgrade pytest codecov coverage[toml] pytest-xdist pytest-cov
+          pip install --upgrade pip setuptools
+          pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
      - name: Setup Homebrew packages
        run: |
          brew install dash fish gcc gnupg2 kcov
@@ -210,6 +211,6 @@ jobs:
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: unittests,macos
.github/workflows/valid-style.yml (13 changes)

@@ -18,8 +18,8 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
@@ -35,16 +35,16 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
      - name: Install Python packages
        run: |
-          python3 -m pip install --upgrade pip six setuptools types-six black==23.1.0 mypy isort clingo flake8
+          python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
@@ -68,10 +68,11 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
+          git config --global --add safe.directory /__w/spack/spack
          git fetch --unshallow
          . .github/workflows/setup_git.sh
          useradd spack-test
.github/workflows/windows_python.yml (97 changes)

@@ -15,15 +15,15 @@ jobs:
  unit-tests:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
-          python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
+          python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
      - name: Create local develop
        run: |
          ./.github/workflows/setup_git.ps1
@@ -33,21 +33,21 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: unittests,windows
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
-          python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
+          python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
      - name: Create local develop
        run: |
          ./.github/workflows/setup_git.ps1
@@ -57,99 +57,24 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-      - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
+      - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
        with:
          flags: unittests,windows
  build-abseil:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
+      - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
-          python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
+          python -m pip install --upgrade pip pywin32 setuptools coverage
      - name: Build Test
        run: |
          spack compiler find
          spack external find cmake
          spack external find ninja
          spack -d install abseil-cpp
-  # TODO: johnwparent - reduce the size of the installer operations
-  # make-installer:
-  #   runs-on: windows-latest
-  #   steps:
-  #   - name: Disable Windows Symlinks
-  #     run: |
-  #       git config --global core.symlinks false
-  #     shell:
-  #       powershell
-  #   - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
-  #     with:
-  #       fetch-depth: 0
-  #   - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
-  #     with:
-  #       python-version: 3.9
-  #   - name: Install Python packages
-  #     run: |
-  #       python -m pip install --upgrade pip six pywin32 setuptools
-  #   - name: Add Light and Candle to Path
-  #     run: |
-  #       $env:WIX >> $GITHUB_PATH
-  #   - name: Run Installer
-  #     run: |
-  #       ./share/spack/qa/setup_spack_installer.ps1
-  #       spack make-installer -s . -g SILENT pkg
-  #       echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-  #     env:
-  #       ProgressPreference: SilentlyContinue
-  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-  #     with:
-  #       name: Windows Spack Installer Bundle
-  #       path: ${{ env.installer_root }}\pkg\Spack.exe
-  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-  #     with:
-  #       name: Windows Spack Installer
-  #       path: ${{ env.installer_root}}\pkg\Spack.msi
-  # execute-installer:
-  #   needs: make-installer
-  #   runs-on: windows-latest
-  #   defaults:
-  #     run:
-  #       shell: pwsh
-  #   steps:
-  #   - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
-  #     with:
-  #       python-version: 3.9
-  #   - name: Install Python packages
-  #     run: |
-  #       python -m pip install --upgrade pip six pywin32 setuptools
-  #   - name: Setup installer directory
-  #     run: |
-  #       mkdir -p spack_installer
-  #       echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-  #   - uses: actions/download-artifact@v3
-  #     with:
-  #       name: Windows Spack Installer Bundle
-  #       path: ${{ env.spack_installer }}
-  #   - name: Execute Bundled Installer
-  #     run: |
-  #       $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
-  #       $handle = $proc.Handle # cache proc.Handle
-  #       $proc.WaitForExit();
-  #       $LASTEXITCODE
-  #     env:
-  #       ProgressPreference: SilentlyContinue
-  #   - uses: actions/download-artifact@v3
-  #     with:
-  #       name: Windows Spack Installer
-  #       path: ${{ env.spack_installer }}
-  #   - name: Execute MSI
-  #     run: |
-  #       $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
-  #       $handle = $proc.Handle # cache proc.Handle
-  #       $proc.WaitForExit();
-  #       $LASTEXITCODE
CHANGELOG.md (218 changes)

@@ -1,3 +1,221 @@
# v0.20.0 (2023-05-21)

`v0.20.0` is a major feature release.

## Features in this release

1. **`requires()` directive and enhanced package requirements**

   We've added some more enhancements to requirements in Spack (#36286).

   There is a new `requires()` directive for packages. `requires()` is the opposite of
   `conflicts()`. You can use it to impose constraints on this package when certain
   conditions are met:

   ```python
   requires(
       "%apple-clang",
       when="platform=darwin",
       msg="This package builds only with clang on macOS"
   )
   ```

   More on this in [the docs](
   https://spack.rtfd.io/en/latest/packaging_guide.html#conflicts-and-requirements).

   You can also now add a `when:` clause to `requires:` in your `packages.yaml`
   configuration or in an environment:

   ```yaml
   packages:
     openmpi:
       require:
       - any_of: ["%gcc"]
         when: "@:4.1.4"
         message: "Only OpenMPI 4.1.5 and up can build with fancy compilers"
   ```

   More details can be found [here](
   https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements)

2. **Exact versions**

   Spack did not previously have a way to distinguish a version if it was a prefix of
   some other version. For example, `@3.2` would match `3.2`, `3.2.1`, `3.2.2`, etc. You
   can now match *exactly* `3.2` with `@=3.2`. This is useful, for example, if you need
   to patch *only* the `3.2` version of a package. The new syntax is described in [the docs](
   https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier).

   Generally, when writing packages, you should prefer to use ranges like `@3.2` over
   the specific versions, as this allows the concretizer more leeway when selecting
   versions of dependencies. More details and recommendations are in the [packaging guide](
   https://spack.readthedocs.io/en/latest/packaging_guide.html#ranges-versus-specific-versions).

   See #36273 for full details on the version refactor.
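
   As a quick sketch of the difference (`zlib` here is just an example
   package), the two syntaxes diverge only when suffixed versions exist:

   ```sh
   spack spec zlib@=1.2.13   # exactly version 1.2.13
   spack spec zlib@1.2.13    # 1.2.13 plus any 1.2.13.x suffix version
   ```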
3. **New testing interface**

   Writing package tests is now much simpler with a new [test interface](
   https://spack.readthedocs.io/en/latest/packaging_guide.html#stand-alone-tests).

   Writing a test is now as easy as adding a method that starts with `test_`:

   ```python
   class MyPackage(Package):
       ...

       def test_always_fails(self):
           """use assert to always fail"""
           assert False

       def test_example(self):
           """run installed example"""
           example = which(self.prefix.bin.example)
           example()
   ```

   You can use Python's native `assert` statement to implement your checks -- no more
   need to fiddle with `run_test` or other test framework methods. Spack will
   introspect the class and run `test_*` methods when you run `spack test`.
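
   For instance, once a package defining such methods is installed, its
   stand-alone tests can be run like this (`mypackage` is a placeholder name):

   ```console
   $ spack test run mypackage
   ```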
4. **More stable concretization**

   * Now, `spack concretize` will *only* concretize the new portions of the environment
     and will not change existing parts of an environment unless you specify `--force`.
     This has always been true for `unify:false`, but not for `unify:true` and
     `unify:when_possible` environments. Now it is true for all of them (#37438, #37681);
     a short sketch follows below.

   * The concretizer has a new `--reuse-deps` argument that *only* reuses dependencies.
     That is, it will always treat the *roots* of your environment as it would with
     `--fresh`. This allows you to upgrade just the roots of your environment while
     keeping everything else stable (#30990).
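
   A minimal sketch of the new default behavior (`myenv` is a placeholder
   environment name):

   ```console
   $ spack -e myenv concretize           # concretizes only newly added specs
   $ spack -e myenv concretize --force   # re-concretizes the whole environment
   ```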
5. **Weekly develop snapshot releases**

   Since last year, we have maintained a buildcache of `develop` at
   https://binaries.spack.io/develop, but the cache can grow to contain so many builds
   as to be unwieldy. When we get a stable `develop` build, we snapshot the release and
   add a corresponding tag to the Spack repository. So, you can use a stack from a specific
   day. There are now tags in the spack repository like:

   * `develop-2023-05-14`
   * `develop-2023-05-18`

   that correspond to build caches like:

   * https://binaries.spack.io/develop-2023-05-14/e4s
   * https://binaries.spack.io/develop-2023-05-18/e4s

   We plan to store these snapshot releases weekly.

6. **Specs in buildcaches can be referenced by hash.**

   * Previously, you could run `spack buildcache list` and see the hashes in
     buildcaches, but referring to them by hash would fail.
   * You can now run commands like `spack spec` and `spack install` and refer to
     buildcache hashes directly, e.g. `spack install /abc123` (#35042)

7. **New package and buildcache index websites**

   Our public websites for searching packages have been completely revamped and updated.
   You can check them out here:

   * *Package Index*: https://packages.spack.io
   * *Buildcache Index*: https://cache.spack.io

   Both are searchable and more interactive than before. Currently major releases are
   shown; UI for browsing `develop` snapshots is coming soon.

8. **Default CMake and Meson build types are now Release**

   Spack has historically defaulted to building with optimization and debugging, but
   packages like `llvm` can be enormous with debug turned on. Our default build type for
   all Spack packages is now `Release` (#36679, #37436). This has a number of benefits:

   * much smaller binaries;
   * higher default optimization level; and
   * defining `NDEBUG` disables assertions, which may lead to further speedups.

   You can still get the old behavior back through requirements and package preferences,
   as sketched below.
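
   For example, one way to restore debug-friendly builds is a package preference
   along these lines in `packages.yaml` (a sketch; other `build_type` values and
   per-package requirements work too):

   ```yaml
   packages:
     all:
       variants: build_type=RelWithDebInfo
   ```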
## Other new commands and directives

* `spack checksum` can automatically add new versions to a package (#24532)
* New command: `spack pkg grep` to easily search package files (#34388)
* New `maintainers` directive (#35083)
* Add `spack buildcache push` (alias to `buildcache create`) (#34861)
* Allow using `-j` to control the parallelism of concretization (#37608)
* Add `--exclude` option to `spack external find` (#35013)

## Other new features of note

* editing: add higher-precedence `SPACK_EDITOR` environment variable
* Many YAML formatting improvements from updating `ruamel.yaml` to the latest version
  supporting Python 3.6 (#31091, #24885, #37008)
* Requirements and preferences should not define (non-git) versions (#37687, #37747)
* Environments now store spack version/commit in `spack.lock` (#32801)
* User can specify the name of the `packages` subdirectory in repositories (#36643)
* Add container images supporting RHEL alternatives (#36713)
* Make `version(...)` kwargs explicit (#36998)

## Notable refactors

* buildcache create: reproducible tarballs (#35623)
* Bootstrap most of Spack dependencies using environments (#34029)
* Split `satisfies(..., strict=True/False)` into two functions (#35681)
* spack install: simplify behavior when inside environments (#35206)

## Binary cache and stack updates

* Major simplification of CI boilerplate in stacks (#34272, #36045)
* Many improvements to our CI pipeline's reliability

## Removals, Deprecations, and disablements

* Module file generation is disabled by default; you'll need to enable it to use it (#37258)
* Support for Python 2 was deprecated in `v0.19.0` and has been removed. `v0.20.0` only
  supports Python 3.6 and higher.
* Deprecated target names are no longer recognized by Spack. Use generic names instead:
  * `graviton` is now `cortex_a72`
  * `graviton2` is now `neoverse_n1`
  * `graviton3` is now `neoverse_v1`
* `blacklist` and `whitelist` in module configuration were deprecated in `v0.19.0` and are
  removed in this release. Use `exclude` and `include` instead.
* The `ignore=` parameter of the `extends()` directive has been removed. It was not used by
  any builtin packages and is no longer needed to avoid conflicts in environment views (#35588).
* Support for the old YAML buildcache format has been removed. It was deprecated in `v0.19.0` (#34347).
* `spack find --bootstrap` has been removed. It was deprecated in `v0.19.0`. Use
  `spack --bootstrap find` instead (#33964).
* `spack bootstrap trust` and `spack bootstrap untrust` are now removed, having been
  deprecated in `v0.19.0`. Use `spack bootstrap enable` and `spack bootstrap disable`.
* The `--mirror-name`, `--mirror-url`, and `--directory` options to buildcache and
  mirror commands were deprecated in `v0.19.0` and have now been removed. They have been
  replaced by positional arguments (#37457).
* Deprecate `env:` as top level environment key (#37424)
* Deprecate `buildcache create --rel` and `buildcache install --allow-root` (#37285)
* Support for very old perl-like spec format strings (e.g., `$_$@$%@+$+$=`) has been
  removed (#37425). This was deprecated in `v0.15` (#10556).

## Notable Bugfixes

* bugfix: don't fetch package metadata for unknown concrete specs (#36990)
* Improve package source code context display on error (#37655)
* Relax environment manifest filename requirements and lockfile identification criteria (#37413)
* `installer.py`: drop build edges of installed packages by default (#36707)
* Bugfix: package requirements with git commits (#35057, #36347)
* Package requirements: allow single specs in requirement lists (#36258)
* conditional variant values: allow boolean (#33939)
* spack uninstall: follow run/link edges on --dependents (#34058)

## Spack community stats

* 7,179 total packages, 499 new since `v0.19.0`
  * 329 new Python packages
  * 31 new R packages
* 336 people contributed to this release
  * 317 committers to packages
  * 62 committers to core


# v0.19.1 (2023-02-07)

### Spack Bugfixes
@@ -23,8 +23,20 @@ packages:
    providers:
      elf: [libelf]
      fuse: [macfuse]
+     gl: [apple-gl]
+     glu: [apple-glu]
      unwind: [apple-libunwind]
      uuid: [apple-libuuid]
+ apple-gl:
+   buildable: false
+   externals:
+   - spec: apple-gl@4.1.0
+     prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
+ apple-glu:
+   buildable: false
+   externals:
+   - spec: apple-glu@1.3.0
+     prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
  apple-libunwind:
    buildable: false
    externals:
@@ -40,9 +40,8 @@ modules:
    roots:
      tcl: $spack/share/spack/modules
      lmod: $spack/share/spack/lmod
-   # What type of modules to use
-   enable:
-     - tcl
+   # What type of modules to use ("tcl" and/or "lmod")
+   enable: []

    tcl:
      all:
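With this default, no module files are generated out of the box. A user who
wants the previous behavior back could re-enable tcl modules in their own
`modules.yaml` along these lines (a sketch against the v0.20 module-set schema):

```yaml
modules:
  default:
    enable: [tcl]
```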
@@ -20,7 +20,7 @@ packages:
    awk: [gawk]
    blas: [openblas, amdblis]
    D: [ldc]
-   daal: [intel-daal]
+   daal: [intel-oneapi-daal]
    elf: [elfutils]
    fftw-api: [fftw, amdfftw]
    flame: [libflame, amdlibflame]
@@ -30,7 +30,7 @@ packages:
    golang: [go, gcc]
    go-or-gccgo-bootstrap: [go-bootstrap, gcc]
    iconv: [libiconv]
-   ipp: [intel-ipp]
+   ipp: [intel-oneapi-ipp]
    java: [openjdk, jdk, ibm-java]
    jpeg: [libjpeg-turbo, libjpeg]
    lapack: [openblas, amdlibflame]
@@ -40,7 +40,7 @@ packages:
    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
    luajit: [lua-luajit-openresty, lua-luajit]
    mariadb-client: [mariadb-c-client, mariadb]
-   mkl: [intel-mkl]
+   mkl: [intel-oneapi-mkl]
    mpe: [mpe2]
    mpi: [openmpi, mpich]
    mysql-client: [mysql, mariadb-c-client]
@@ -1103,16 +1103,31 @@ Below are more details about the specifiers that you can add to specs.
Version specifier
^^^^^^^^^^^^^^^^^

-A version specifier comes somewhere after a package name and starts
-with ``@``. It can be a single version, e.g. ``@1.0``, ``@3``, or
-``@1.2a7``. Or, it can be a range of versions, such as ``@1.0:1.5``
-(all versions between ``1.0`` and ``1.5``, inclusive). Version ranges
-can be open, e.g. ``:3`` means any version up to and including ``3``.
-This would include ``3.4`` and ``3.4.2``. ``4.2:`` means any version
-above and including ``4.2``. Finally, a version specifier can be a
-set of arbitrary versions, such as ``@1.0,1.5,1.7`` (``1.0``, ``1.5``,
-or ``1.7``). When you supply such a specifier to ``spack install``,
-it constrains the set of versions that Spack will install.
+A version specifier ``pkg@<specifier>`` comes after a package name
+and starts with ``@``. It can be something abstract that matches
+multiple known versions, or a specific version. During concretization,
+Spack will pick the optimal version within the spec's constraints
+according to policies set for the particular Spack installation.
+
+The version specifier can be *a specific version*, such as ``@=1.0.0`` or
+``@=1.2a7``. Or, it can be *a range of versions*, such as ``@1.0:1.5``.
+Version ranges are inclusive, so this example includes both ``1.0``
+and any ``1.5.x`` version. Version ranges can be unbounded, e.g. ``@:3``
+means any version up to and including ``3``. This would include ``3.4``
+and ``3.4.2``. Similarly, ``@4.2:`` means any version above and including
+``4.2``. As a short-hand, ``@3`` is equivalent to the range ``@3:3`` and
+includes any version with major version ``3``.
+
+Notice that you can distinguish between the specific version ``@=3.2`` and
+the range ``@3.2``. This is useful for packages that follow a versioning
+scheme that omits the zero patch version number: ``3.2``, ``3.2.1``,
+``3.2.2``, etc. In general it is preferable to use the range syntax
+``@3.2``, since ranges also match versions with one-off suffixes, such as
+``3.2-custom``.
+
+A version specifier can also be a list of ranges and specific versions,
+separated by commas. For example, ``@1.0:1.5,=1.7.1`` matches any version
+in the range ``1.0:1.5`` and the specific version ``1.7.1``.
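As a quick sketch of these forms on the command line (``foo`` is a
hypothetical package):

.. code-block:: sh

   spack spec foo@=3.2            # exactly version 3.2
   spack spec foo@3.2             # the 3.2 range: 3.2, 3.2.1, 3.2.2, ...
   spack spec foo@1.0:1.5,=1.7.1  # the 1.0:1.5 range, or exactly 1.7.1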
For packages with a ``git`` attribute, ``git`` references
may be specified instead of a numerical version i.e. branches, tags
@@ -1121,36 +1136,35 @@ reference provided. Acceptable syntaxes for this are:

.. code-block:: sh

   # branches and tags
   foo@git.develop # use the develop branch
   foo@git.0.19 # use the 0.19 tag

   # commit hashes
   foo@abcdef1234abcdef1234abcdef1234abcdef1234 # 40 character hashes are automatically treated as git commits
   foo@git.abcdef1234abcdef1234abcdef1234abcdef1234

-Spack versions from git reference either have an associated version supplied by the user,
-or infer a relationship to known versions from the structure of the git repository. If an
-associated version is supplied by the user, Spack treats the git version as equivalent to that
-version for all version comparisons in the package logic (e.g. ``depends_on('foo', when='@1.5')``).
-
-The associated version can be assigned with ``[git ref]=[version]`` syntax, with the caveat that the specified version is known to Spack from either the package definition, or in the configuration preferences (i.e. ``packages.yaml``).
+Spack always needs to associate a Spack version with the git reference,
+which is used for version comparison. This Spack version is heuristically
+taken from the closest valid git tag among ancestors of the git ref.
+
+Once a Spack version is associated with a git ref, it is always printed with
+the git ref. For example, if the commit ``@git.abcdefg`` is tagged
+``0.19``, then the spec will be shown as ``@git.abcdefg=0.19``.
+
+If the git ref is not exactly a tag, then the distance to the nearest tag
+is also part of the resolved version. ``@git.abcdefg=0.19.git.8`` means
+that the commit is 8 commits away from the ``0.19`` tag.
+
+In cases where Spack cannot resolve a sensible version from a git ref,
+users can specify the Spack version to use for the git ref. This is done
+by appending ``=`` and the Spack version to the git ref. For example:

.. code-block:: sh

   foo@git.my_ref=3.2 # use the my_ref tag or branch, but treat it as version 3.2 for version comparisons
   foo@git.abcdef1234abcdef1234abcdef1234abcdef1234=develop # use the given commit, but treat it as develop for version comparisons

-If an associated version is not supplied then the tags in the git repo are used to determine
-the most recent previous version known to Spack. Details about how versions are compared
-and how Spack determines if one version is less than another are discussed in the developer guide.
-
-If the version spec is not provided, then Spack will choose one
-according to policies set for the particular spack installation. If
-the spec is ambiguous, i.e. it could match multiple versions, Spack
-will choose a version within the spec's constraints according to
-policies set for the particular Spack installation.
+Details about how versions are compared and how Spack determines if
+one version is less than another are discussed in the developer guide.
@@ -36,7 +36,7 @@ Build caches are created via:

.. code-block:: console

   $ spack buildcache create <path/url/mirror name> <spec>
   $ spack buildcache push <path/url/mirror name> <spec>

This command takes the locally installed spec and its dependencies, and
creates tarballs of their install prefixes. It also generates metadata files,
@@ -48,7 +48,7 @@ Here is an example where a build cache is created in a local directory named

.. code-block:: console

   $ spack buildcache create --allow-root ./spack-cache ninja
   $ spack buildcache push --allow-root ./spack-cache ninja
   ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache

Note that ``ninja`` must be installed locally for this to work.
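
If ``ninja`` is not installed yet, a minimal way to get there is simply:

.. code-block:: console

   $ spack install ninja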
@@ -177,7 +177,7 @@ need to be adjusted for better re-locatability.
--------------------

^^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack buildcache create``
``spack buildcache push``
^^^^^^^^^^^^^^^^^^^^^^^^^^^

Create tarball of installed Spack package and all dependencies.

@@ -325,42 +325,99 @@ on the command line, because it can specify constraints on packages
is not possible to specify constraints on dependencies while also keeping
those dependencies optional.

The package requirements configuration is specified in ``packages.yaml``
keyed by package name:
^^^^^^^^^^^^^^^^^^^
Requirements syntax
^^^^^^^^^^^^^^^^^^^

The package requirements configuration is specified in ``packages.yaml``,
keyed by package name and expressed using the Spec syntax. In the simplest
case you can specify attributes that you always want the package to have
by providing a single spec string to ``require``:

.. code-block:: yaml

   packages:
     libfabric:
       require: "@1.13.2"

In the above example, ``libfabric`` will always build with version 1.13.2. If you
need to compose multiple configuration scopes, ``require`` accepts a list of
strings:

.. code-block:: yaml

   packages:
     libfabric:
       require:
       - "@1.13.2"
       - "%gcc"

In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC
as a compiler.

For more complex use cases, ``require`` also accepts a list of objects. These objects
must have either an ``any_of`` or a ``one_of`` field, containing a list of spec strings,
and they can optionally have a ``when`` and a ``message`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["~cuda", "%gcc"]
       - any_of: ["@4.1.5", "%gcc"]
         message: "in this example only 4.1.5 can build with other compilers"

``any_of`` is a list of specs. One of those specs must be satisfied
and it is also allowed for the concretized spec to match more than one.
In the above example, that means you could build ``openmpi@4.1.5%gcc``,
``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but
not ``openmpi@3.9%clang``.

If a custom message is provided, and the requirement is not satisfiable,
Spack will print the custom error message:

.. code-block:: console

   $ spack spec openmpi@3.9%clang
   ==> Error: in this example only 4.1.5 can build with other compilers

We could express a similar requirement using the ``when`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["%gcc"]
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC.
For readability, Spack also allows a ``spec`` key accepting a string when there is only a single
constraint:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - spec: "%gcc"
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

This code snippet and the one before it are semantically equivalent.

Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final
concretized spec must match one and only one of them:

.. code-block:: yaml

   packages:
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]

Requirements are expressed using Spec syntax (the same as what is provided
to ``spack install``). In the simplest case, you can specify attributes
that you always want the package to have by providing a single spec to
``require``; in the above example, ``libfabric`` will always build
with version 1.13.2.

You can provide a more-relaxed constraint and allow the concretizer to
choose between a set of options using ``any_of`` or ``one_of``:

* ``any_of`` is a list of specs. One of those specs must be satisfied
  and it is also allowed for the concretized spec to match more than one.
  In the above example, that means you could build ``openmpi+cuda%gcc``,
  ``openmpi~cuda%clang`` or ``openmpi~cuda%gcc`` (in the last case,
  note that both specs in the ``any_of`` for ``openmpi`` are
  satisfied).
* ``one_of`` is also a list of specs, and the final concretized spec
  must match exactly one of them. In the above example, that means
  you could build ``mpich+cuda`` or ``mpich+rocm`` but not
  ``mpich+cuda+rocm`` (note the current package definition for
  ``mpich`` already includes a conflict, so this is redundant but
  still demonstrates the concept).
In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``.

.. note::

@@ -368,6 +425,13 @@ choose between a set of options using ``any_of`` or ``one_of``:
   preference: items that appear earlier in the list are preferred
   (note that these preferences can be ignored in favor of others).

.. note::

   When using a conditional requirement, Spack is allowed to actively avoid the triggering
   condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in
   the optimization criteria. To check the current optimization criteria and their
   priorities you can run ``spack solve zlib``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting default requirements
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -215,8 +215,9 @@ def setup(sphinx):
    ("py:class", "spack.repo._PrependFileLoader"),
    ("py:class", "spack.build_systems._checks.BaseBuilder"),
    # Spack classes that intersphinx is unable to resolve
    ("py:class", "spack.version.VersionBase"),
    ("py:class", "spack.version.StandardVersion"),
    ("py:class", "spack.spec.DependencySpec"),
    ("py:class", "spack.install_test.Pb"),
]

# The reST default role (used for this markup: `text`) to use for all documents.

@@ -20,8 +20,9 @@ case you want to skip directly to specific docs:
* :ref:`packages.yaml <build-settings>`
* :ref:`repos.yaml <repositories>`

You can also add any of these as inline configuration in ``spack.yaml``
in an :ref:`environment <environment-configuration>`.
You can also add any of these as inline configuration in the YAML
manifest file (``spack.yaml``) describing an :ref:`environment
<environment-configuration>`.

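As a brief sketch of what that looks like (the settings shown are just one
possibility; ``build_jobs`` is an ordinary ``config.yaml`` option), inline
configuration sits under the manifest's top-level ``spack:`` key:

.. code-block:: yaml

   spack:
     specs: []
     config:
       build_jobs: 8
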
-----------
YAML Format

@@ -143,6 +143,26 @@ The OS that are currently supported are summarized in the table below:
   * - Amazon Linux 2
     - ``amazonlinux:2``
     - ``spack/amazon-linux``
   * - AlmaLinux 8
     - ``almalinux:8``
     - ``spack/almalinux8``
   * - AlmaLinux 9
     - ``almalinux:9``
     - ``spack/almalinux9``
   * - Rocky Linux 8
     - ``rockylinux:8``
     - ``spack/rockylinux8``
   * - Rocky Linux 9
     - ``rockylinux:9``
     - ``spack/rockylinux9``
   * - Fedora Linux 37
     - ``fedora:37``
     - ``spack/fedora37``
   * - Fedora Linux 38
     - ``fedora:38``
     - ``spack/fedora38``

All the images are tagged with the corresponding release of Spack:

@@ -616,7 +636,7 @@ to customize the generation of container recipes:
     - No
   * - ``os_packages:command``
     - Tool used to manage system packages
     - ``apt``, ``yum``
     - ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
     - Only with custom base images
   * - ``os_packages:update``
     - Whether or not to update the list of available packages

@@ -94,9 +94,9 @@ an Environment, the ``.spack-env`` directory also contains:
* ``logs/``: A directory containing the build logs for the packages
  in this Environment.

Spack Environments can also be created from either a ``spack.yaml``
manifest or a ``spack.lock`` lockfile. To create an Environment from a
``spack.yaml`` manifest:
Spack Environments can also be created from either a manifest file
(usually but not necessarily named ``spack.yaml``) or a lockfile.
To create an Environment from a manifest:

.. code-block:: console

@@ -174,7 +174,7 @@ Anonymous specs can be created in place using the command:

   $ spack env create -d .

In this case Spack simply creates a spack.yaml file in the requested
In this case Spack simply creates a ``spack.yaml`` file in the requested
directory.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -347,7 +347,7 @@ the Environment and then install the concretized specs.
(see :ref:`build-jobs`). To speed up environment builds further, independent
packages can be installed in parallel by launching more Spack instances. For
example, the following will build at most four packages in parallel using
three background jobs:

.. code-block:: console

@@ -395,7 +395,7 @@ version (and other constraints) passed as the spec argument to the

For packages with ``git`` attributes, git branches, tags, and commits can
also be used as valid concrete versions (see :ref:`version-specifier`).
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
the ``main`` branch of the package, and ``spack install`` will install from
that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by reinstalling the environment,
@@ -589,10 +589,11 @@ user support groups providing a large software stack for their HPC center.

.. admonition:: Re-concretization of user specs

   When using *unified* concretization (when possible), the entire set of specs will be
   re-concretized after any addition of new user specs, to ensure that
   the environment remains consistent / minimal. When instead unified concretization is
   disabled, only the new specs will be concretized after any addition.
   The ``spack concretize`` command without additional arguments will *not* change any
   previously concretized specs. This may prevent it from finding a solution when using
   ``unify: true``, and it may prevent it from finding a minimal solution when using
   ``unify: when_possible``. You can force Spack to ignore the existing concrete environment
   with ``spack concretize -f``.

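   For instance, sketching both modes with the flags described above:

   .. code-block:: console

      $ spack concretize      # leaves previously concretized specs untouched
      $ spack concretize -f   # discards them and re-concretizes the whole environment
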
^^^^^^^^^^^^^
Spec Matrices
@@ -1121,19 +1122,19 @@ index once every package is pushed. Note how this target uses the generated

   SPACK ?= spack
   BUILDCACHE_DIR = $(CURDIR)/tarballs

   .PHONY: all

   all: push

   include env.mk

   example/push/%: example/install/%
   	@mkdir -p $(dir $@)
   	$(info About to push $(SPEC) to a buildcache)
   	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
   	@touch $@

   push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
   	$(info Updating the buildcache index)
   	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)

@@ -41,12 +41,9 @@ A build matrix showing which packages are working on which systems is shown belo

.. code-block:: console

   yum update -y
   yum install -y epel-release
   yum update -y
   yum --enablerepo epel groupinstall -y "Development Tools"
   yum --enablerepo epel install -y curl findutils gcc-c++ gcc gcc-gfortran git gnupg2 hostname iproute redhat-lsb-core make patch python3 python3-pip python3-setuptools unzip
   python3 -m pip install boto3
   dnf install epel-release
   dnf group install "Development Tools"
   dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3

.. tab-item:: macOS Brew

@@ -368,7 +365,8 @@ Manual compiler configuration

If auto-detection fails, you can manually configure a compiler by
editing your ``~/.spack/<platform>/compilers.yaml`` file. You can do this by running
``spack config edit compilers``, which will open the file in your ``$EDITOR``.
``spack config edit compilers``, which will open the file in
:ref:`your favorite editor <controlling-the-editor>`.

Each compiler configuration in the file looks like this:

@@ -163,7 +163,7 @@ your site.
Mirror environment
^^^^^^^^^^^^^^^^^^

To create a mirror of all packages required by a concerte environment, activate the environment and call ``spack mirror create -a``.
To create a mirror of all packages required by a concrete environment, activate the environment and call ``spack mirror create -a``.
This is especially useful to create a mirror of an environment concretized on another machine.

.. code-block:: console

File diff suppressed because it is too large
@@ -32,11 +32,16 @@ A package repository is a directory structured like this::
   ...

The top-level ``repo.yaml`` file contains configuration metadata for the
repository, and the ``packages`` directory contains subdirectories for
each package in the repository. Each package directory contains a
``package.py`` file and any patches or other files needed to build the
repository. The packages subdirectory, typically ``packages``, contains
subdirectories for each package in the repository. Each package directory
contains a ``package.py`` file and any patches or other files needed to build the
package.

The ``repo.yaml`` file may also contain a ``subdirectory`` key,
which can modify the name of the subdirectory used for packages. As seen above,
the default value is ``packages``. An empty string (``subdirectory: ''``) requires
a flattened repo structure in which the package names are top-level subdirectories.

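For instance, a flattened repository could declare itself like this (a sketch;
the namespace here is arbitrary):

.. code-block:: yaml

   repo:
     namespace: myrepo
     subdirectory: ''
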
Package repositories allow you to:

1. Maintain your own packages separately from Spack;
@@ -373,6 +378,24 @@ You can supply a custom namespace with a second argument, e.g.:

   repo:
     namespace: 'llnl.comp'

You can also create repositories with custom structure with the ``-d/--subdirectory``
argument, e.g.:

.. code-block:: console

   $ spack repo create -d applications myrepo apps
   ==> Created repo with namespace 'apps'.
   ==> To register it with Spack, run this command:
       spack repo add ~/myrepo

   $ ls myrepo
   applications/  repo.yaml

   $ cat myrepo/repo.yaml
   repo:
     namespace: apps
     subdirectory: applications

^^^^^^^^^^^^^^^^^^
``spack repo add``
^^^^^^^^^^^^^^^^^^

@@ -10,3 +10,4 @@ python-levenshtein
# https://stackoverflow.com/questions/67542699
docutils <0.17
pygments <2.13
urllib3 <2

19
lib/spack/env/cc
vendored
@@ -434,8 +434,6 @@ wl_expect_rpath=no
xlinker_expect_rpath=no

parse_Wl() {
    # drop -Wl
    shift
    while [ $# -ne 0 ]; do
        if [ "$wl_expect_rpath" = yes ]; then
            if system_dir "$1"; then
@@ -448,7 +446,9 @@ parse_Wl() {
            case "$1" in
                -rpath=*)
                    arg="${1#-rpath=}"
                    if system_dir "$arg"; then
                    if [ -z "$arg" ]; then
                        shift; continue
                    elif system_dir "$arg"; then
                        append system_rpath_dirs_list "$arg"
                    else
                        append rpath_dirs_list "$arg"
@@ -456,7 +456,9 @@ parse_Wl() {
                    ;;
                --rpath=*)
                    arg="${1#--rpath=}"
                    if system_dir "$arg"; then
                    if [ -z "$arg" ]; then
                        shift; continue
                    elif system_dir "$arg"; then
                        append system_rpath_dirs_list "$arg"
                    else
                        append rpath_dirs_list "$arg"
@@ -467,6 +469,11 @@ parse_Wl() {
                    ;;
                "$dtags_to_strip")
                    ;;
                -Wl)
                    # Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
                    # it.
                    return 1
                    ;;
                *)
                    append other_args_list "-Wl,$1"
                    ;;
@@ -576,7 +583,9 @@ while [ $# -ne 0 ]; do
            ;;
        -Wl,*)
            IFS=,
            parse_Wl $1
            if ! parse_Wl ${1#-Wl,}; then
                append other_args_list "$1"
            fi
            unset IFS
            ;;
        -Xlinker)

7
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.0-dev (commit f3667f95030c6573842fb5f6df0d647285597509)
* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)

astunparse
----------------
@@ -101,10 +101,7 @@
* Usage: Used for config files. Ruamel is based on PyYAML but is more
  actively maintained and has more features, including round-tripping
  comments read from config files.
* Version: 0.11.15 (last version supporting Python 2.6)
* Note: This package has been slightly modified to improve Python 2.6
  compatibility -- some ``{}`` format strings were replaced, and the
  import for ``OrderedDict`` was tweaked.
* Version: 0.17.21

six
---

1
lib/spack/external/_vendoring/ruamel.pyi
vendored
Normal file
@@ -0,0 +1 @@
from ruamel import *
@@ -1,21 +1,21 @@

The MIT License (MIT)

Copyright (c) 2014-2022 Anthon van der Neut, Ruamel bvba

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
57
lib/spack/external/_vendoring/ruamel/yaml/__init__.py
vendored
Normal file
@@ -0,0 +1,57 @@
# coding: utf-8

if False:  # MYPY
    from typing import Dict, Any  # NOQA

_package_data = dict(
    full_package_name='ruamel.yaml',
    version_info=(0, 17, 21),
    __version__='0.17.21',
    version_timestamp='2022-02-12 09:49:22',
    author='Anthon van der Neut',
    author_email='a.van.der.neut@ruamel.eu',
    description='ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order',  # NOQA
    entry_points=None,
    since=2014,
    extras_require={
        ':platform_python_implementation=="CPython" and python_version<"3.11"': ['ruamel.yaml.clib>=0.2.6'],  # NOQA
        'jinja2': ['ruamel.yaml.jinja2>=0.2'],
        'docs': ['ryd'],
    },
    classifiers=[
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Text Processing :: Markup',
        'Typing :: Typed',
    ],
    keywords='yaml 1.2 parser round-trip preserve quotes order config',
    read_the_docs='yaml',
    supported=[(3, 5)],  # minimum
    tox=dict(
        env='*f',  # f for 3.5
        fl8excl='_test/lib',
    ),
    # universal=True,
    python_requires='>=3',
    rtfd='yaml',
)  # type: Dict[Any, Any]


version_info = _package_data['version_info']
__version__ = _package_data['__version__']

try:
    from .cyaml import *  # NOQA

    __with_libyaml__ = True
except (ImportError, ValueError):  # for Jython
    __with_libyaml__ = False

from ruamel.yaml.main import *  # NOQA
20
lib/spack/external/_vendoring/ruamel/yaml/anchor.py
vendored
Normal file
@@ -0,0 +1,20 @@
# coding: utf-8
if False:  # MYPY
    from typing import Any, Dict, Optional, List, Union, Optional, Iterator  # NOQA

anchor_attrib = '_yaml_anchor'


class Anchor:
    __slots__ = 'value', 'always_dump'
    attrib = anchor_attrib

    def __init__(self):
        # type: () -> None
        self.value = None
        self.always_dump = False

    def __repr__(self):
        # type: () -> Any
        ad = ', (always dump)' if self.always_dump else ""
        return 'Anchor({!r}{})'.format(self.value, ad)
1267
lib/spack/external/_vendoring/ruamel/yaml/comments.py
vendored
Normal file
File diff suppressed because it is too large
268
lib/spack/external/_vendoring/ruamel/yaml/compat.py
vendored
Normal file
@@ -0,0 +1,268 @@
|
||||
# coding: utf-8
|
||||
|
||||
# partially from package six by Benjamin Peterson
|
||||
|
||||
import sys
|
||||
import os
|
||||
import io
|
||||
import traceback
|
||||
from abc import abstractmethod
|
||||
import collections.abc
|
||||
|
||||
|
||||
# fmt: off
|
||||
if False: # MYPY
|
||||
from typing import Any, Dict, Optional, List, Union, BinaryIO, IO, Text, Tuple # NOQA
|
||||
from typing import Optional # NOQA
|
||||
# fmt: on
|
||||
|
||||
_DEFAULT_YAML_VERSION = (1, 2)
|
||||
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
from ordereddict import OrderedDict # type: ignore
|
||||
|
||||
# to get the right name import ... as ordereddict doesn't do that
|
||||
|
||||
|
||||
class ordereddict(OrderedDict): # type: ignore
|
||||
if not hasattr(OrderedDict, 'insert'):
|
||||
|
||||
def insert(self, pos, key, value):
|
||||
# type: (int, Any, Any) -> None
|
||||
if pos >= len(self):
|
||||
self[key] = value
|
||||
return
|
||||
od = ordereddict()
|
||||
od.update(self)
|
||||
for k in od:
|
||||
del self[k]
|
||||
for index, old_key in enumerate(od):
|
||||
if pos == index:
|
||||
self[key] = value
|
||||
self[old_key] = od[old_key]
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
|
||||
# replace with f-strings when 3.5 support is dropped
|
||||
# ft = '42'
|
||||
# assert _F('abc {ft!r}', ft=ft) == 'abc %r' % ft
|
||||
# 'abc %r' % ft -> _F('abc {ft!r}' -> f'abc {ft!r}'
|
||||
def _F(s, *superfluous, **kw):
|
||||
# type: (Any, Any, Any) -> Any
|
||||
if superfluous:
|
||||
raise TypeError
|
||||
return s.format(**kw)
|
||||
|
||||
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
|
||||
if False: # MYPY
|
||||
# StreamType = Union[BinaryIO, IO[str], IO[unicode], StringIO]
|
||||
# StreamType = Union[BinaryIO, IO[str], StringIO] # type: ignore
|
||||
StreamType = Any
|
||||
|
||||
StreamTextType = StreamType # Union[Text, StreamType]
|
||||
VersionType = Union[List[int], str, Tuple[int, int]]
|
||||
|
||||
builtins_module = 'builtins'
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
# type: (Any, Any) -> Any
|
||||
"""Create a base class with a metaclass."""
|
||||
return meta('NewBase', bases, {})
|
||||
|
||||
|
||||
DBG_TOKEN = 1
|
||||
DBG_EVENT = 2
|
||||
DBG_NODE = 4
|
||||
|
||||
|
||||
_debug = None # type: Optional[int]
|
||||
if 'RUAMELDEBUG' in os.environ:
|
||||
_debugx = os.environ.get('RUAMELDEBUG')
|
||||
if _debugx is None:
|
||||
_debug = 0
|
||||
else:
|
||||
_debug = int(_debugx)
|
||||
|
||||
|
||||
if bool(_debug):
|
||||
|
||||
class ObjectCounter:
|
||||
def __init__(self):
|
||||
# type: () -> None
|
||||
self.map = {} # type: Dict[Any, Any]
|
||||
|
||||
def __call__(self, k):
|
||||
# type: (Any) -> None
|
||||
self.map[k] = self.map.get(k, 0) + 1
|
||||
|
||||
def dump(self):
|
||||
# type: () -> None
|
||||
for k in sorted(self.map):
|
||||
sys.stdout.write('{} -> {}'.format(k, self.map[k]))
|
||||
|
||||
object_counter = ObjectCounter()
|
||||
|
||||
|
||||
# used from yaml util when testing
|
||||
def dbg(val=None):
|
||||
# type: (Any) -> Any
|
||||
global _debug
|
||||
if _debug is None:
|
||||
# set to true or false
|
||||
_debugx = os.environ.get('YAMLDEBUG')
|
||||
if _debugx is None:
|
||||
_debug = 0
|
||||
else:
|
||||
_debug = int(_debugx)
|
||||
if val is None:
|
||||
return _debug
|
||||
return _debug & val
|
||||
|
||||
|
||||
class Nprint:
|
||||
def __init__(self, file_name=None):
|
||||
# type: (Any) -> None
|
||||
self._max_print = None # type: Any
|
||||
self._count = None # type: Any
|
||||
self._file_name = file_name
|
||||
|
||||
def __call__(self, *args, **kw):
|
||||
# type: (Any, Any) -> None
|
||||
if not bool(_debug):
|
||||
return
|
||||
out = sys.stdout if self._file_name is None else open(self._file_name, 'a')
|
||||
dbgprint = print # to fool checking for print statements by dv utility
|
||||
kw1 = kw.copy()
|
||||
kw1['file'] = out
|
||||
dbgprint(*args, **kw1)
|
||||
out.flush()
|
||||
if self._max_print is not None:
|
||||
if self._count is None:
|
||||
self._count = self._max_print
|
||||
self._count -= 1
|
||||
if self._count == 0:
|
||||
dbgprint('forced exit\n')
|
||||
traceback.print_stack()
|
||||
out.flush()
|
||||
sys.exit(0)
|
||||
if self._file_name:
|
||||
out.close()
|
||||
|
||||
def set_max_print(self, i):
|
||||
# type: (int) -> None
|
||||
self._max_print = i
|
||||
self._count = None
|
||||
|
||||
def fp(self, mode='a'):
|
||||
# type: (str) -> Any
|
||||
out = sys.stdout if self._file_name is None else open(self._file_name, mode)
|
||||
return out
|
||||
|
||||
|
||||
nprint = Nprint()
|
||||
nprintf = Nprint('/var/tmp/ruamel.yaml.log')
|
||||
|
||||
# char checkers following production rules
|
||||
|
||||
|
||||
def check_namespace_char(ch):
|
||||
# type: (Any) -> bool
|
||||
if '\x21' <= ch <= '\x7E': # ! to ~
|
||||
return True
|
||||
if '\xA0' <= ch <= '\uD7FF':
|
||||
return True
|
||||
if ('\uE000' <= ch <= '\uFFFD') and ch != '\uFEFF': # excl. byte order mark
|
||||
return True
|
||||
if '\U00010000' <= ch <= '\U0010FFFF':
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def check_anchorname_char(ch):
|
||||
# type: (Any) -> bool
|
||||
if ch in ',[]{}':
|
||||
return False
|
||||
return check_namespace_char(ch)
|
||||
|
||||
|
||||
def version_tnf(t1, t2=None):
|
||||
# type: (Any, Any) -> Any
|
||||
"""
|
||||
return True if ruamel.yaml version_info < t1, None if t2 is specified and bigger else False
|
||||
"""
|
||||
from ruamel.yaml import version_info # NOQA
|
||||
|
||||
if version_info < t1:
|
||||
return True
|
||||
if t2 is not None and version_info < t2:
|
||||
return None
|
||||
return False
|
||||
|
||||
|
||||
class MutableSliceableSequence(collections.abc.MutableSequence): # type: ignore
|
||||
__slots__ = ()
|
||||
|
||||
def __getitem__(self, index):
|
||||
# type: (Any) -> Any
|
||||
if not isinstance(index, slice):
|
||||
return self.__getsingleitem__(index)
|
||||
return type(self)([self[i] for i in range(*index.indices(len(self)))]) # type: ignore
|
||||
|
||||
def __setitem__(self, index, value):
|
||||
# type: (Any, Any) -> None
|
||||
if not isinstance(index, slice):
|
||||
return self.__setsingleitem__(index, value)
|
||||
assert iter(value)
|
||||
# nprint(index.start, index.stop, index.step, index.indices(len(self)))
|
||||
if index.step is None:
|
||||
del self[index.start : index.stop]
|
||||
for elem in reversed(value):
|
||||
self.insert(0 if index.start is None else index.start, elem)
|
||||
else:
|
||||
range_parms = index.indices(len(self))
|
||||
nr_assigned_items = (range_parms[1] - range_parms[0] - 1) // range_parms[2] + 1
|
||||
# need to test before changing, in case TypeError is caught
|
||||
if nr_assigned_items < len(value):
|
||||
raise TypeError(
|
||||
'too many elements in value {} < {}'.format(nr_assigned_items, len(value))
|
||||
)
|
||||
elif nr_assigned_items > len(value):
|
||||
raise TypeError(
|
||||
'not enough elements in value {} > {}'.format(
|
||||
nr_assigned_items, len(value)
|
||||
)
|
||||
)
|
||||
for idx, i in enumerate(range(*range_parms)):
|
||||
self[i] = value[idx]
|
||||
|
||||
def __delitem__(self, index):
|
||||
# type: (Any) -> None
|
||||
if not isinstance(index, slice):
|
||||
return self.__delsingleitem__(index)
|
||||
# nprint(index.start, index.stop, index.step, index.indices(len(self)))
|
||||
for i in reversed(range(*index.indices(len(self)))):
|
||||
del self[i]
|
||||
|
||||
@abstractmethod
|
||||
def __getsingleitem__(self, index):
|
||||
# type: (Any) -> Any
|
||||
raise IndexError
|
||||
|
||||
@abstractmethod
|
||||
def __setsingleitem__(self, index, value):
|
||||
# type: (Any, Any) -> None
|
||||
raise IndexError
|
||||
|
||||
@abstractmethod
|
||||
def __delsingleitem__(self, index):
|
||||
# type: (Any) -> None
|
||||
raise IndexError
|
||||
243
lib/spack/external/_vendoring/ruamel/yaml/composer.py
vendored
Normal file
@@ -0,0 +1,243 @@
|
||||
# coding: utf-8
|
||||
|
||||
import warnings
|
||||
|
||||
from ruamel.yaml.error import MarkedYAMLError, ReusedAnchorWarning
|
||||
from ruamel.yaml.compat import _F, nprint, nprintf # NOQA
|
||||
|
||||
from ruamel.yaml.events import (
|
||||
StreamStartEvent,
|
||||
StreamEndEvent,
|
||||
MappingStartEvent,
|
||||
MappingEndEvent,
|
||||
SequenceStartEvent,
|
||||
SequenceEndEvent,
|
||||
AliasEvent,
|
||||
ScalarEvent,
|
||||
)
|
||||
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode
|
||||
|
||||
if False: # MYPY
|
||||
from typing import Any, Dict, Optional, List # NOQA
|
||||
|
||||
__all__ = ['Composer', 'ComposerError']
|
||||
|
||||
|
||||
class ComposerError(MarkedYAMLError):
|
||||
pass
|
||||
|
||||
|
||||
class Composer:
|
||||
def __init__(self, loader=None):
|
||||
# type: (Any) -> None
|
||||
self.loader = loader
|
||||
if self.loader is not None and getattr(self.loader, '_composer', None) is None:
|
||||
self.loader._composer = self
|
||||
self.anchors = {} # type: Dict[Any, Any]
|
||||
|
||||
@property
|
||||
def parser(self):
|
||||
# type: () -> Any
|
||||
if hasattr(self.loader, 'typ'):
|
||||
self.loader.parser
|
||||
return self.loader._parser
|
||||
|
||||
@property
|
||||
def resolver(self):
|
||||
# type: () -> Any
|
||||
# assert self.loader._resolver is not None
|
||||
if hasattr(self.loader, 'typ'):
|
||||
self.loader.resolver
|
||||
return self.loader._resolver
|
||||
|
||||
def check_node(self):
|
||||
# type: () -> Any
|
||||
# Drop the STREAM-START event.
|
||||
if self.parser.check_event(StreamStartEvent):
|
||||
self.parser.get_event()
|
||||
|
||||
# If there are more documents available?
|
||||
return not self.parser.check_event(StreamEndEvent)
|
||||
|
||||
def get_node(self):
|
||||
# type: () -> Any
|
||||
# Get the root node of the next document.
|
||||
if not self.parser.check_event(StreamEndEvent):
|
||||
return self.compose_document()
|
||||
|
||||
def get_single_node(self):
|
||||
# type: () -> Any
|
||||
# Drop the STREAM-START event.
|
||||
self.parser.get_event()
|
||||
|
||||
# Compose a document if the stream is not empty.
|
||||
document = None # type: Any
|
||||
if not self.parser.check_event(StreamEndEvent):
|
||||
document = self.compose_document()
|
||||
|
||||
# Ensure that the stream contains no more documents.
|
||||
if not self.parser.check_event(StreamEndEvent):
|
||||
event = self.parser.get_event()
|
||||
raise ComposerError(
|
||||
'expected a single document in the stream',
|
||||
document.start_mark,
|
||||
'but found another document',
|
||||
event.start_mark,
|
||||
)
|
||||
|
||||
# Drop the STREAM-END event.
|
||||
self.parser.get_event()
|
||||
|
||||
return document
|
||||
|
||||
def compose_document(self):
|
||||
# type: (Any) -> Any
|
||||
# Drop the DOCUMENT-START event.
|
||||
self.parser.get_event()
|
||||
|
||||
# Compose the root node.
|
||||
node = self.compose_node(None, None)
|
||||
|
||||
# Drop the DOCUMENT-END event.
|
||||
self.parser.get_event()
|
||||
|
||||
self.anchors = {}
|
||||
return node
|
||||
|
||||
def return_alias(self, a):
|
||||
# type: (Any) -> Any
|
||||
return a
|
||||
|
||||
def compose_node(self, parent, index):
|
||||
# type: (Any, Any) -> Any
|
||||
if self.parser.check_event(AliasEvent):
|
||||
event = self.parser.get_event()
|
||||
alias = event.anchor
|
||||
if alias not in self.anchors:
|
||||
raise ComposerError(
|
||||
None,
|
||||
None,
|
||||
_F('found undefined alias {alias!r}', alias=alias),
|
||||
event.start_mark,
|
||||
)
|
||||
return self.return_alias(self.anchors[alias])
|
||||
event = self.parser.peek_event()
|
||||
anchor = event.anchor
|
||||
if anchor is not None: # have an anchor
|
||||
if anchor in self.anchors:
|
||||
# raise ComposerError(
|
||||
# "found duplicate anchor %r; first occurrence"
|
||||
# % (anchor), self.anchors[anchor].start_mark,
|
||||
# "second occurrence", event.start_mark)
|
||||
ws = (
|
||||
'\nfound duplicate anchor {!r}\nfirst occurrence {}\nsecond occurrence '
|
||||
'{}'.format((anchor), self.anchors[anchor].start_mark, event.start_mark)
|
||||
)
|
||||
warnings.warn(ws, ReusedAnchorWarning)
|
||||
self.resolver.descend_resolver(parent, index)
|
||||
if self.parser.check_event(ScalarEvent):
|
||||
node = self.compose_scalar_node(anchor)
|
||||
elif self.parser.check_event(SequenceStartEvent):
|
||||
node = self.compose_sequence_node(anchor)
|
||||
elif self.parser.check_event(MappingStartEvent):
|
||||
node = self.compose_mapping_node(anchor)
|
||||
self.resolver.ascend_resolver()
|
||||
return node
|
||||
|
||||
def compose_scalar_node(self, anchor):
|
||||
# type: (Any) -> Any
|
||||
event = self.parser.get_event()
|
||||
tag = event.tag
|
||||
if tag is None or tag == '!':
|
||||
tag = self.resolver.resolve(ScalarNode, event.value, event.implicit)
|
||||
node = ScalarNode(
|
||||
tag,
|
||||
event.value,
|
||||
event.start_mark,
|
||||
event.end_mark,
|
||||
style=event.style,
|
||||
comment=event.comment,
|
||||
anchor=anchor,
|
||||
)
|
||||
if anchor is not None:
|
||||
self.anchors[anchor] = node
|
||||
return node
|
||||
|
||||
def compose_sequence_node(self, anchor):
|
||||
# type: (Any) -> Any
|
||||
start_event = self.parser.get_event()
|
||||
tag = start_event.tag
|
||||
if tag is None or tag == '!':
|
||||
tag = self.resolver.resolve(SequenceNode, None, start_event.implicit)
|
||||
node = SequenceNode(
|
||||
tag,
|
||||
[],
|
||||
start_event.start_mark,
|
||||
None,
|
||||
flow_style=start_event.flow_style,
|
||||
comment=start_event.comment,
|
||||
anchor=anchor,
|
||||
)
|
||||
if anchor is not None:
|
||||
self.anchors[anchor] = node
|
||||
index = 0
|
||||
while not self.parser.check_event(SequenceEndEvent):
|
||||
node.value.append(self.compose_node(node, index))
|
||||
index += 1
|
||||
end_event = self.parser.get_event()
|
||||
if node.flow_style is True and end_event.comment is not None:
|
||||
if node.comment is not None:
|
||||
nprint(
|
||||
'Warning: unexpected end_event commment in sequence '
|
||||
'node {}'.format(node.flow_style)
|
||||
)
|
||||
node.comment = end_event.comment
|
||||
node.end_mark = end_event.end_mark
|
||||
self.check_end_doc_comment(end_event, node)
|
||||
return node
|
||||
|
||||
def compose_mapping_node(self, anchor):
|
||||
# type: (Any) -> Any
|
||||
start_event = self.parser.get_event()
|
||||
tag = start_event.tag
|
||||
if tag is None or tag == '!':
|
||||
tag = self.resolver.resolve(MappingNode, None, start_event.implicit)
|
||||
node = MappingNode(
|
||||
tag,
|
||||
[],
|
||||
start_event.start_mark,
|
||||
None,
|
||||
flow_style=start_event.flow_style,
|
||||
comment=start_event.comment,
|
||||
anchor=anchor,
|
||||
)
|
||||
if anchor is not None:
|
||||
self.anchors[anchor] = node
|
||||
while not self.parser.check_event(MappingEndEvent):
|
||||
# key_event = self.parser.peek_event()
|
||||
item_key = self.compose_node(node, None)
|
||||
# if item_key in node.value:
|
||||
# raise ComposerError("while composing a mapping",
|
||||
# start_event.start_mark,
|
||||
# "found duplicate key", key_event.start_mark)
|
||||
item_value = self.compose_node(node, item_key)
|
||||
# node.value[item_key] = item_value
|
||||
node.value.append((item_key, item_value))
|
||||
end_event = self.parser.get_event()
|
||||
if node.flow_style is True and end_event.comment is not None:
|
||||
node.comment = end_event.comment
|
||||
node.end_mark = end_event.end_mark
|
||||
self.check_end_doc_comment(end_event, node)
|
||||
return node
|
||||
|
||||
def check_end_doc_comment(self, end_event, node):
|
||||
# type: (Any, Any) -> None
|
||||
if end_event.comment and end_event.comment[1]:
|
||||
# pre comments on an end_event, no following to move to
|
||||
if node.comment is None:
|
||||
node.comment = [None, None]
|
||||
assert not isinstance(node, ScalarEvent)
|
||||
# this is a post comment on a mapping node, add as third element
|
||||
# in the list
|
||||
node.comment.append(end_event.comment[1])
|
||||
end_event.comment[1] = None
|
||||
14
lib/spack/external/_vendoring/ruamel/yaml/configobjwalker.py
vendored
Normal file
@@ -0,0 +1,14 @@
# coding: utf-8

import warnings

from ruamel.yaml.util import configobj_walker as new_configobj_walker

if False:  # MYPY
    from typing import Any  # NOQA


def configobj_walker(cfg):
    # type: (Any) -> Any
    warnings.warn('configobj_walker has moved to ruamel.yaml.util, please update your code')
    return new_configobj_walker(cfg)
1845
lib/spack/external/_vendoring/ruamel/yaml/constructor.py
vendored
Normal file
File diff suppressed because it is too large
183
lib/spack/external/_vendoring/ruamel/yaml/cyaml.py
vendored
Normal file
@@ -0,0 +1,183 @@
|
||||
# coding: utf-8
|
||||
|
||||
from _ruamel_yaml import CParser, CEmitter # type: ignore
|
||||
|
||||
from ruamel.yaml.constructor import Constructor, BaseConstructor, SafeConstructor
|
||||
from ruamel.yaml.representer import Representer, SafeRepresenter, BaseRepresenter
|
||||
from ruamel.yaml.resolver import Resolver, BaseResolver
|
||||
|
||||
if False: # MYPY
|
||||
from typing import Any, Union, Optional # NOQA
|
||||
from ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA
|
||||
|
||||
__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper', 'CDumper']
|
||||
|
||||
|
||||
# this includes some hacks to solve the usage of resolver by lower level
|
||||
# parts of the parser
|
||||
|
||||
|
||||
class CBaseLoader(CParser, BaseConstructor, BaseResolver): # type: ignore
|
||||
def __init__(self, stream, version=None, preserve_quotes=None):
|
||||
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
|
||||
CParser.__init__(self, stream)
|
||||
self._parser = self._composer = self
|
||||
BaseConstructor.__init__(self, loader=self)
|
||||
BaseResolver.__init__(self, loadumper=self)
|
||||
# self.descend_resolver = self._resolver.descend_resolver
|
||||
# self.ascend_resolver = self._resolver.ascend_resolver
|
||||
# self.resolve = self._resolver.resolve
|
||||
|
||||
|
||||
class CSafeLoader(CParser, SafeConstructor, Resolver): # type: ignore
|
||||
def __init__(self, stream, version=None, preserve_quotes=None):
|
||||
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
|
||||
CParser.__init__(self, stream)
|
||||
self._parser = self._composer = self
|
||||
SafeConstructor.__init__(self, loader=self)
|
||||
Resolver.__init__(self, loadumper=self)
|
||||
# self.descend_resolver = self._resolver.descend_resolver
|
||||
# self.ascend_resolver = self._resolver.ascend_resolver
|
||||
# self.resolve = self._resolver.resolve
|
||||
|
||||
|
||||
class CLoader(CParser, Constructor, Resolver): # type: ignore
|
||||
def __init__(self, stream, version=None, preserve_quotes=None):
|
||||
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
|
||||
CParser.__init__(self, stream)
|
||||
self._parser = self._composer = self
|
||||
Constructor.__init__(self, loader=self)
|
||||
Resolver.__init__(self, loadumper=self)
|
||||
# self.descend_resolver = self._resolver.descend_resolver
|
||||
# self.ascend_resolver = self._resolver.ascend_resolver
|
||||
# self.resolve = self._resolver.resolve
|
||||
|
||||
|
||||
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): # type: ignore
|
||||
def __init__(
|
||||
self,
|
||||
stream,
|
||||
default_style=None,
|
||||
default_flow_style=None,
|
||||
canonical=None,
|
||||
indent=None,
|
||||
width=None,
|
||||
allow_unicode=None,
|
||||
line_break=None,
|
||||
encoding=None,
|
||||
explicit_start=None,
|
||||
explicit_end=None,
|
||||
version=None,
|
||||
tags=None,
|
||||
block_seq_indent=None,
|
||||
top_level_colon_align=None,
|
||||
prefix_colon=None,
|
||||
):
|
||||
# type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
|
||||
CEmitter.__init__(
|
||||
self,
|
||||
stream,
|
||||
canonical=canonical,
|
||||
indent=indent,
|
||||
width=width,
|
||||
encoding=encoding,
|
||||
allow_unicode=allow_unicode,
|
||||
line_break=line_break,
|
||||
explicit_start=explicit_start,
|
||||
explicit_end=explicit_end,
|
||||
version=version,
|
||||
tags=tags,
|
||||
)
|
||||
self._emitter = self._serializer = self._representer = self
|
||||
BaseRepresenter.__init__(
|
||||
self,
|
||||
default_style=default_style,
|
||||
default_flow_style=default_flow_style,
|
||||
dumper=self,
|
||||
)
|
||||
BaseResolver.__init__(self, loadumper=self)
|
||||
|
||||
|
||||
class CSafeDumper(CEmitter, SafeRepresenter, Resolver): # type: ignore
|
||||
def __init__(
|
||||
self,
|
||||
stream,
|
||||
default_style=None,
|
||||
default_flow_style=None,
|
||||
canonical=None,
|
||||
indent=None,
|
||||
width=None,
|
||||
allow_unicode=None,
|
||||
line_break=None,
|
||||
encoding=None,
|
||||
explicit_start=None,
|
||||
explicit_end=None,
|
||||
version=None,
|
||||
tags=None,
|
||||
block_seq_indent=None,
|
||||
top_level_colon_align=None,
|
||||
prefix_colon=None,
|
||||
):
|
||||
# type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
|
||||
self._emitter = self._serializer = self._representer = self
|
||||
CEmitter.__init__(
|
||||
self,
|
||||
stream,
|
||||
canonical=canonical,
|
||||
indent=indent,
|
||||
width=width,
|
||||
encoding=encoding,
|
||||
allow_unicode=allow_unicode,
|
||||
line_break=line_break,
|
||||
explicit_start=explicit_start,
|
||||
explicit_end=explicit_end,
|
||||
version=version,
|
||||
tags=tags,
|
||||
)
|
||||
self._emitter = self._serializer = self._representer = self
|
||||
SafeRepresenter.__init__(
|
||||
self, default_style=default_style, default_flow_style=default_flow_style
|
||||
)
|
||||
Resolver.__init__(self)
|
||||
|
||||
|
||||
class CDumper(CEmitter, Representer, Resolver): # type: ignore
|
||||
def __init__(
|
||||
self,
|
||||
stream,
|
||||
default_style=None,
|
||||
default_flow_style=None,
|
||||
canonical=None,
|
||||
indent=None,
|
||||
width=None,
|
||||
allow_unicode=None,
|
||||
line_break=None,
|
||||
encoding=None,
|
||||
explicit_start=None,
|
||||
explicit_end=None,
|
||||
version=None,
|
||||
tags=None,
|
||||
block_seq_indent=None,
|
||||
top_level_colon_align=None,
|
||||
prefix_colon=None,
|
||||
):
|
||||
# type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
|
||||
CEmitter.__init__(
|
||||
self,
|
||||
stream,
|
||||
canonical=canonical,
|
||||
indent=indent,
|
||||
width=width,
|
||||
encoding=encoding,
|
||||
allow_unicode=allow_unicode,
|
||||
line_break=line_break,
|
||||
explicit_start=explicit_start,
|
||||
explicit_end=explicit_end,
|
||||
version=version,
|
||||
tags=tags,
|
||||
)
|
||||
self._emitter = self._serializer = self._representer = self
|
||||
Representer.__init__(
|
||||
self, default_style=default_style, default_flow_style=default_flow_style
|
||||
)
|
||||
Resolver.__init__(self)
|
||||
219
lib/spack/external/_vendoring/ruamel/yaml/dumper.py
vendored
Normal file
@@ -0,0 +1,219 @@
|
||||
# coding: utf-8
|
||||
|
||||
from ruamel.yaml.emitter import Emitter
|
||||
from ruamel.yaml.serializer import Serializer
|
||||
from ruamel.yaml.representer import (
|
||||
Representer,
|
||||
SafeRepresenter,
|
||||
BaseRepresenter,
|
||||
RoundTripRepresenter,
|
||||
)
|
||||
from ruamel.yaml.resolver import Resolver, BaseResolver, VersionedResolver
|
||||
|
||||
if False: # MYPY
|
||||
from typing import Any, Dict, List, Union, Optional # NOQA
|
||||
from ruamel.yaml.compat import StreamType, VersionType # NOQA
|
||||
|
||||
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']
|
||||
|
||||
|
||||
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
|
||||
def __init__(
|
||||
self,
|
||||
stream,
|
||||
default_style=None,
|
||||
default_flow_style=None,
|
||||
canonical=None,
|
||||
indent=None,
|
||||
width=None,
|
||||
allow_unicode=None,
|
||||
line_break=None,
|
||||
encoding=None,
|
||||
explicit_start=None,
|
||||
explicit_end=None,
|
||||
version=None,
|
||||
tags=None,
|
||||
block_seq_indent=None,
|
||||
top_level_colon_align=None,
|
||||
prefix_colon=None,
|
||||
):
|
||||
# type: (Any, StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
|
||||
Emitter.__init__(
|
||||
self,
|
||||
stream,
|
||||
canonical=canonical,
|
||||
indent=indent,
|
||||
width=width,
|
||||
allow_unicode=allow_unicode,
|
||||
line_break=line_break,
|
||||
block_seq_indent=block_seq_indent,
|
||||
dumper=self,
|
||||
)
|
||||
Serializer.__init__(
|
||||
self,
|
||||
encoding=encoding,
|
||||
explicit_start=explicit_start,
|
||||
explicit_end=explicit_end,
|
||||
version=version,
|
||||
tags=tags,
|
||||
dumper=self,
|
||||
)
|
||||
BaseRepresenter.__init__(
|
||||
self,
|
||||
default_style=default_style,
|
||||
default_flow_style=default_flow_style,
|
||||
dumper=self,
|
||||
)
|
||||
BaseResolver.__init__(self, loadumper=self)
|
||||
|
||||
|
||||
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
|
||||
def __init__(
|
||||
self,
|
||||
stream,
|
||||
default_style=None,
|
||||
default_flow_style=None,
|
||||
canonical=None,
|
||||
indent=None,
|
||||
width=None,
|
||||
allow_unicode=None,
|
||||
line_break=None,
|
||||
encoding=None,
|
||||
explicit_start=None,
|
||||
explicit_end=None,
|
||||
version=None,
|
||||
tags=None,
|
||||
block_seq_indent=None,
|
||||
top_level_colon_align=None,
|
||||
prefix_colon=None,
|
||||
):
|
||||
# type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
|
||||
Emitter.__init__(
|
||||
self,
|
||||
stream,
|
||||
canonical=canonical,
|
||||
indent=indent,
|
||||
width=width,
|
||||
allow_unicode=allow_unicode,
|
||||
line_break=line_break,
|
||||
block_seq_indent=block_seq_indent,
|
||||
dumper=self,
|
||||
)
|
||||
Serializer.__init__(
|
||||
self,
|
||||
encoding=encoding,
|
||||
explicit_start=explicit_start,
|
||||
explicit_end=explicit_end,
|
||||
version=version,
|
||||
tags=tags,
|
||||
dumper=self,
|
||||
)
|
||||
SafeRepresenter.__init__(
|
||||
self,
|
||||
default_style=default_style,
|
||||
default_flow_style=default_flow_style,
|
||||
dumper=self,
|
||||
)
|
||||
Resolver.__init__(self, loadumper=self)
|
||||
|
||||
|
||||
class Dumper(Emitter, Serializer, Representer, Resolver):
|
||||
def __init__(
|
||||
self,
|
||||
stream,
|
||||
default_style=None,
|
||||
default_flow_style=None,
|
||||
canonical=None,
|
||||
indent=None,
|
||||
width=None,
|
||||
allow_unicode=None,
|
||||
line_break=None,
|
||||
encoding=None,
|
||||
explicit_start=None,
|
||||
explicit_end=None,
|
||||
version=None,
|
||||
tags=None,
|
||||
block_seq_indent=None,
|
||||
top_level_colon_align=None,
|
||||
prefix_colon=None,
|
||||
):
|
||||
# type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
|
||||
Emitter.__init__(
|
||||
self,
|
||||
stream,
|
||||
canonical=canonical,
|
||||
indent=indent,
|
||||
width=width,
|
||||
allow_unicode=allow_unicode,
|
||||
line_break=line_break,
|
||||
block_seq_indent=block_seq_indent,
|
||||
dumper=self,
|
||||
)
|
||||
Serializer.__init__(
|
||||
self,
|
||||
encoding=encoding,
|
||||
explicit_start=explicit_start,
|
||||
explicit_end=explicit_end,
|
||||
version=version,
|
||||
tags=tags,
|
||||
dumper=self,
|
||||
)
|
||||
Representer.__init__(
|
||||
self,
|
||||
default_style=default_style,
|
||||
default_flow_style=default_flow_style,
|
||||
dumper=self,
|
||||
)
|
||||
Resolver.__init__(self, loadumper=self)
|
||||
|
||||
|
||||
class RoundTripDumper(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
|
||||
def __init__(
|
||||
self,
|
||||
stream,
|
||||
default_style=None,
|
||||
default_flow_style=None,
|
||||
canonical=None,
|
||||
indent=None,
|
||||
width=None,
|
||||
allow_unicode=None,
|
||||
line_break=None,
|
||||
encoding=None,
|
||||
explicit_start=None,
|
||||
explicit_end=None,
|
||||
version=None,
|
||||
tags=None,
|
||||
block_seq_indent=None,
|
||||
top_level_colon_align=None,
|
||||
prefix_colon=None,
|
||||
):
|
||||
# type: (StreamType, Any, Optional[bool], Optional[int], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
|
||||
Emitter.__init__(
|
||||
self,
|
||||
stream,
|
||||
canonical=canonical,
|
||||
indent=indent,
|
||||
width=width,
|
||||
allow_unicode=allow_unicode,
|
||||
line_break=line_break,
|
||||
block_seq_indent=block_seq_indent,
|
||||
top_level_colon_align=top_level_colon_align,
|
||||
prefix_colon=prefix_colon,
|
||||
dumper=self,
|
||||
)
|
||||
Serializer.__init__(
|
||||
self,
|
||||
encoding=encoding,
|
||||
explicit_start=explicit_start,
|
||||
explicit_end=explicit_end,
|
||||
version=version,
|
||||
tags=tags,
|
||||
dumper=self,
|
||||
)
|
||||
RoundTripRepresenter.__init__(
|
||||
self,
|
||||
default_style=default_style,
|
||||
default_flow_style=default_flow_style,
|
||||
dumper=self,
|
||||
)
|
||||
VersionedResolver.__init__(self, loader=self)
|
||||
1772
lib/spack/external/_vendoring/ruamel/yaml/emitter.py
vendored
Normal file
File diff suppressed because it is too large
332
lib/spack/external/_vendoring/ruamel/yaml/error.py
vendored
Normal file
332
lib/spack/external/_vendoring/ruamel/yaml/error.py
vendored
Normal file
@@ -0,0 +1,332 @@
# coding: utf-8

import warnings
import textwrap

from ruamel.yaml.compat import _F

if False:  # MYPY
    from typing import Any, Dict, Optional, List, Text  # NOQA


__all__ = [
    'FileMark',
    'StringMark',
    'CommentMark',
    'YAMLError',
    'MarkedYAMLError',
    'ReusedAnchorWarning',
    'UnsafeLoaderWarning',
    'MarkedYAMLWarning',
    'MarkedYAMLFutureWarning',
]


class StreamMark:
    __slots__ = 'name', 'index', 'line', 'column'

    def __init__(self, name, index, line, column):
        # type: (Any, int, int, int) -> None
        self.name = name
        self.index = index
        self.line = line
        self.column = column

    def __str__(self):
        # type: () -> Any
        where = _F(
            '  in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
            sname=self.name,
            sline1=self.line + 1,
            scolumn1=self.column + 1,
        )
        return where

    def __eq__(self, other):
        # type: (Any) -> bool
        if self.line != other.line or self.column != other.column:
            return False
        if self.name != other.name or self.index != other.index:
            return False
        return True

    def __ne__(self, other):
        # type: (Any) -> bool
        return not self.__eq__(other)


class FileMark(StreamMark):
    __slots__ = ()


class StringMark(StreamMark):
    __slots__ = 'name', 'index', 'line', 'column', 'buffer', 'pointer'

    def __init__(self, name, index, line, column, buffer, pointer):
        # type: (Any, int, int, int, Any, Any) -> None
        StreamMark.__init__(self, name, index, line, column)
        self.buffer = buffer
        self.pointer = pointer

    def get_snippet(self, indent=4, max_length=75):
        # type: (int, int) -> Any
        if self.buffer is None:  # always False
            return None
        head = ""
        start = self.pointer
        while start > 0 and self.buffer[start - 1] not in '\0\r\n\x85\u2028\u2029':
            start -= 1
            if self.pointer - start > max_length / 2 - 1:
                head = ' ... '
                start += 5
                break
        tail = ""
        end = self.pointer
        while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029':
            end += 1
            if end - self.pointer > max_length / 2 - 1:
                tail = ' ... '
                end -= 5
                break
        snippet = self.buffer[start:end]
        caret = '^'
        caret = '^ (line: {})'.format(self.line + 1)
        return (
            ' ' * indent
            + head
            + snippet
            + tail
            + '\n'
            + ' ' * (indent + self.pointer - start + len(head))
            + caret
        )

    def __str__(self):
        # type: () -> Any
        snippet = self.get_snippet()
        where = _F(
            '  in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
            sname=self.name,
            sline1=self.line + 1,
            scolumn1=self.column + 1,
        )
        if snippet is not None:
            where += ':\n' + snippet
        return where

    def __repr__(self):
        # type: () -> Any
        snippet = self.get_snippet()
        where = _F(
            '  in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
            sname=self.name,
            sline1=self.line + 1,
            scolumn1=self.column + 1,
        )
        if snippet is not None:
            where += ':\n' + snippet
        return where


class CommentMark:
    __slots__ = ('column',)

    def __init__(self, column):
        # type: (Any) -> None
        self.column = column


class YAMLError(Exception):
    pass


class MarkedYAMLError(YAMLError):
    def __init__(
        self,
        context=None,
        context_mark=None,
        problem=None,
        problem_mark=None,
        note=None,
        warn=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note
        # warn is ignored

    def __str__(self):
        # type: () -> Any
        lines = []  # type: List[str]
        if self.context is not None:
            lines.append(self.context)
        if self.context_mark is not None and (
            self.problem is None
            or self.problem_mark is None
            or self.context_mark.name != self.problem_mark.name
            or self.context_mark.line != self.problem_mark.line
            or self.context_mark.column != self.problem_mark.column
        ):
            lines.append(str(self.context_mark))
        if self.problem is not None:
            lines.append(self.problem)
        if self.problem_mark is not None:
            lines.append(str(self.problem_mark))
        if self.note is not None and self.note:
            note = textwrap.dedent(self.note)
            lines.append(note)
        return '\n'.join(lines)


class YAMLStreamError(Exception):
    pass


class YAMLWarning(Warning):
    pass


class MarkedYAMLWarning(YAMLWarning):
    def __init__(
        self,
        context=None,
        context_mark=None,
        problem=None,
        problem_mark=None,
        note=None,
        warn=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note
        self.warn = warn

    def __str__(self):
        # type: () -> Any
        lines = []  # type: List[str]
        if self.context is not None:
            lines.append(self.context)
        if self.context_mark is not None and (
            self.problem is None
            or self.problem_mark is None
            or self.context_mark.name != self.problem_mark.name
            or self.context_mark.line != self.problem_mark.line
            or self.context_mark.column != self.problem_mark.column
        ):
            lines.append(str(self.context_mark))
        if self.problem is not None:
            lines.append(self.problem)
        if self.problem_mark is not None:
            lines.append(str(self.problem_mark))
        if self.note is not None and self.note:
            note = textwrap.dedent(self.note)
            lines.append(note)
        if self.warn is not None and self.warn:
            warn = textwrap.dedent(self.warn)
            lines.append(warn)
        return '\n'.join(lines)


class ReusedAnchorWarning(YAMLWarning):
    pass


class UnsafeLoaderWarning(YAMLWarning):
    text = """
The default 'Loader' for 'load(stream)' without further arguments can be unsafe.
Use 'load(stream, Loader=ruamel.yaml.Loader)' explicitly if that is OK.
Alternatively include the following in your code:

  import warnings
  warnings.simplefilter('ignore', ruamel.yaml.error.UnsafeLoaderWarning)

In most other cases you should consider using 'safe_load(stream)'"""
    pass


warnings.simplefilter('once', UnsafeLoaderWarning)


class MantissaNoDotYAML1_1Warning(YAMLWarning):
    def __init__(self, node, flt_str):
        # type: (Any, Any) -> None
        self.node = node
        self.flt = flt_str

    def __str__(self):
        # type: () -> Any
        line = self.node.start_mark.line
        col = self.node.start_mark.column
        return """
In YAML 1.1 floating point values should have a dot ('.') in their mantissa.
See the Floating-Point Language-Independent Type for YAML™ Version 1.1 specification
( http://yaml.org/type/float.html ). This dot is not required for JSON nor for YAML 1.2

Correct your float: "{}" on line: {}, column: {}

or alternatively include the following in your code:

  import warnings
  warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning)

""".format(
            self.flt, line, col
        )


warnings.simplefilter('once', MantissaNoDotYAML1_1Warning)


class YAMLFutureWarning(Warning):
    pass


class MarkedYAMLFutureWarning(YAMLFutureWarning):
    def __init__(
        self,
        context=None,
        context_mark=None,
        problem=None,
        problem_mark=None,
        note=None,
        warn=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note
        self.warn = warn

    def __str__(self):
        # type: () -> Any
        lines = []  # type: List[str]
        if self.context is not None:
            lines.append(self.context)
        if self.context_mark is not None and (
            self.problem is None
            or self.problem_mark is None
            or self.context_mark.name != self.problem_mark.name
            or self.context_mark.line != self.problem_mark.line
            or self.context_mark.column != self.problem_mark.column
        ):
            lines.append(str(self.context_mark))
        if self.problem is not None:
            lines.append(self.problem)
        if self.problem_mark is not None:
            lines.append(str(self.problem_mark))
        if self.note is not None and self.note:
            note = textwrap.dedent(self.note)
            lines.append(note)
        if self.warn is not None and self.warn:
            warn = textwrap.dedent(self.warn)
            lines.append(warn)
        return '\n'.join(lines)
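These mark classes produce the two-line source locations in ruamel.yaml error messages; get_snippet() extracts the offending line and draws a caret under the column. A small sketch, assuming the vendored module is importable as ruamel.yaml.error:

    from ruamel.yaml.error import StringMark

    buf = 'key: [1, 2\n'
    # name, index, line, column, buffer, pointer: point at offset 10
    mark = StringMark('<unicode string>', 10, 0, 10, buf, 10)
    print(str(mark))
    # prints something like:
    #   in "<unicode string>", line 1, column 11:
    #       key: [1, 2
    #                 ^ (line: 1)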
196 lib/spack/external/_vendoring/ruamel/yaml/events.py vendored Normal file
@@ -0,0 +1,196 @@
# coding: utf-8

from ruamel.yaml.compat import _F

# Abstract classes.

if False:  # MYPY
    from typing import Any, Dict, Optional, List  # NOQA

SHOW_LINES = False


def CommentCheck():
    # type: () -> None
    pass


class Event:
    __slots__ = 'start_mark', 'end_mark', 'comment'

    def __init__(self, start_mark=None, end_mark=None, comment=CommentCheck):
        # type: (Any, Any, Any) -> None
        self.start_mark = start_mark
        self.end_mark = end_mark
        # assert comment is not CommentCheck
        if comment is CommentCheck:
            comment = None
        self.comment = comment

    def __repr__(self):
        # type: () -> Any
        if True:
            arguments = []
            if hasattr(self, 'value'):
                # if you use repr(getattr(self, 'value')) then flake8 complains about
                # abuse of getattr with a constant. When you change to self.value
                # then mypy throws an error
                arguments.append(repr(self.value))  # type: ignore
            for key in ['anchor', 'tag', 'implicit', 'flow_style', 'style']:
                v = getattr(self, key, None)
                if v is not None:
                    arguments.append(_F('{key!s}={v!r}', key=key, v=v))
            if self.comment not in [None, CommentCheck]:
                arguments.append('comment={!r}'.format(self.comment))
            if SHOW_LINES:
                arguments.append(
                    '({}:{}/{}:{})'.format(
                        self.start_mark.line,
                        self.start_mark.column,
                        self.end_mark.line,
                        self.end_mark.column,
                    )
                )
            arguments = ', '.join(arguments)  # type: ignore
        else:
            attributes = [
                key
                for key in ['anchor', 'tag', 'implicit', 'value', 'flow_style', 'style']
                if hasattr(self, key)
            ]
            arguments = ', '.join(
                [_F('{k!s}={attr!r}', k=key, attr=getattr(self, key)) for key in attributes]
            )
            if self.comment not in [None, CommentCheck]:
                arguments += ', comment={!r}'.format(self.comment)
        return _F(
            '{self_class_name!s}({arguments!s})',
            self_class_name=self.__class__.__name__,
            arguments=arguments,
        )


class NodeEvent(Event):
    __slots__ = ('anchor',)

    def __init__(self, anchor, start_mark=None, end_mark=None, comment=None):
        # type: (Any, Any, Any, Any) -> None
        Event.__init__(self, start_mark, end_mark, comment)
        self.anchor = anchor


class CollectionStartEvent(NodeEvent):
    __slots__ = 'tag', 'implicit', 'flow_style', 'nr_items'

    def __init__(
        self,
        anchor,
        tag,
        implicit,
        start_mark=None,
        end_mark=None,
        flow_style=None,
        comment=None,
        nr_items=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any, Optional[int]) -> None
        NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
        self.tag = tag
        self.implicit = implicit
        self.flow_style = flow_style
        self.nr_items = nr_items


class CollectionEndEvent(Event):
    __slots__ = ()


# Implementations.


class StreamStartEvent(Event):
    __slots__ = ('encoding',)

    def __init__(self, start_mark=None, end_mark=None, encoding=None, comment=None):
        # type: (Any, Any, Any, Any) -> None
        Event.__init__(self, start_mark, end_mark, comment)
        self.encoding = encoding


class StreamEndEvent(Event):
    __slots__ = ()


class DocumentStartEvent(Event):
    __slots__ = 'explicit', 'version', 'tags'

    def __init__(
        self,
        start_mark=None,
        end_mark=None,
        explicit=None,
        version=None,
        tags=None,
        comment=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        Event.__init__(self, start_mark, end_mark, comment)
        self.explicit = explicit
        self.version = version
        self.tags = tags


class DocumentEndEvent(Event):
    __slots__ = ('explicit',)

    def __init__(self, start_mark=None, end_mark=None, explicit=None, comment=None):
        # type: (Any, Any, Any, Any) -> None
        Event.__init__(self, start_mark, end_mark, comment)
        self.explicit = explicit


class AliasEvent(NodeEvent):
    __slots__ = 'style'

    def __init__(self, anchor, start_mark=None, end_mark=None, style=None, comment=None):
        # type: (Any, Any, Any, Any, Any) -> None
        NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
        self.style = style


class ScalarEvent(NodeEvent):
    __slots__ = 'tag', 'implicit', 'value', 'style'

    def __init__(
        self,
        anchor,
        tag,
        implicit,
        value,
        start_mark=None,
        end_mark=None,
        style=None,
        comment=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any, Any) -> None
        NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
        self.tag = tag
        self.implicit = implicit
        self.value = value
        self.style = style


class SequenceStartEvent(CollectionStartEvent):
    __slots__ = ()


class SequenceEndEvent(CollectionEndEvent):
    __slots__ = ()


class MappingStartEvent(CollectionStartEvent):
    __slots__ = ()


class MappingEndEvent(CollectionEndEvent):
    __slots__ = ()
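These event classes are the flat stream that the parser emits and the emitter consumes; every composed document reduces to this sequence. A quick way to inspect them is the old-style parse() helper from main.py (suppressed below); a sketch, assuming the vendored package is importable as ruamel.yaml:

    import ruamel.yaml

    for event in ruamel.yaml.parse('a: [1, 2]\n'):
        print(event)
    # StreamStartEvent(), DocumentStartEvent(), MappingStartEvent(...),
    # ScalarEvent(value='a', ...), SequenceStartEvent(...), ... and the
    # matching End events, in document order.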
75 lib/spack/external/_vendoring/ruamel/yaml/loader.py vendored Normal file
@@ -0,0 +1,75 @@
# coding: utf-8

from ruamel.yaml.reader import Reader
from ruamel.yaml.scanner import Scanner, RoundTripScanner
from ruamel.yaml.parser import Parser, RoundTripParser
from ruamel.yaml.composer import Composer
from ruamel.yaml.constructor import (
    BaseConstructor,
    SafeConstructor,
    Constructor,
    RoundTripConstructor,
)
from ruamel.yaml.resolver import VersionedResolver

if False:  # MYPY
    from typing import Any, Dict, List, Union, Optional  # NOQA
    from ruamel.yaml.compat import StreamTextType, VersionType  # NOQA

__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']


class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedResolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        self.comment_handling = None
        Reader.__init__(self, stream, loader=self)
        Scanner.__init__(self, loader=self)
        Parser.__init__(self, loader=self)
        Composer.__init__(self, loader=self)
        BaseConstructor.__init__(self, loader=self)
        VersionedResolver.__init__(self, version, loader=self)


class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedResolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        self.comment_handling = None
        Reader.__init__(self, stream, loader=self)
        Scanner.__init__(self, loader=self)
        Parser.__init__(self, loader=self)
        Composer.__init__(self, loader=self)
        SafeConstructor.__init__(self, loader=self)
        VersionedResolver.__init__(self, version, loader=self)


class Loader(Reader, Scanner, Parser, Composer, Constructor, VersionedResolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        self.comment_handling = None
        Reader.__init__(self, stream, loader=self)
        Scanner.__init__(self, loader=self)
        Parser.__init__(self, loader=self)
        Composer.__init__(self, loader=self)
        Constructor.__init__(self, loader=self)
        VersionedResolver.__init__(self, version, loader=self)


class RoundTripLoader(
    Reader,
    RoundTripScanner,
    RoundTripParser,
    Composer,
    RoundTripConstructor,
    VersionedResolver,
):
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        # self.reader = Reader.__init__(self, stream)
        self.comment_handling = None  # issue 385
        Reader.__init__(self, stream, loader=self)
        RoundTripScanner.__init__(self, loader=self)
        RoundTripParser.__init__(self, loader=self)
        Composer.__init__(self, loader=self)
        RoundTripConstructor.__init__(self, preserve_quotes=preserve_quotes, loader=self)
        VersionedResolver.__init__(self, version, loader=self)
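Each loader here is the same pipeline (Reader -> Scanner -> Parser -> Composer -> Constructor -> Resolver) with safer or richer mixins swapped in; RoundTripLoader is the one that keeps comments and quoting. A usage sketch with the old-style API (newer code would go through a YAML() instance instead), assuming the vendored package imports as ruamel.yaml:

    import ruamel.yaml

    doc = 'a: 1  # keep me\nb: [2, 3]\n'
    data = ruamel.yaml.load(doc, Loader=ruamel.yaml.RoundTripLoader,
                            preserve_quotes=True)
    out = ruamel.yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper)
    assert '# keep me' in out  # the comment survives the round trip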
1667 lib/spack/external/_vendoring/ruamel/yaml/main.py vendored Normal file
File diff suppressed because it is too large
135 lib/spack/external/_vendoring/ruamel/yaml/nodes.py vendored Normal file
@@ -0,0 +1,135 @@
# coding: utf-8

import sys

from ruamel.yaml.compat import _F

if False:  # MYPY
    from typing import Dict, Any, Text  # NOQA


class Node:
    __slots__ = 'tag', 'value', 'start_mark', 'end_mark', 'comment', 'anchor'

    def __init__(self, tag, value, start_mark, end_mark, comment=None, anchor=None):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.comment = comment
        self.anchor = anchor

    def __repr__(self):
        # type: () -> Any
        value = self.value
        # if isinstance(value, list):
        #     if len(value) == 0:
        #         value = '<empty>'
        #     elif len(value) == 1:
        #         value = '<1 item>'
        #     else:
        #         value = f'<{len(value)} items>'
        # else:
        #     if len(value) > 75:
        #         value = repr(value[:70]+' ... ')
        #     else:
        #         value = repr(value)
        value = repr(value)
        return _F(
            '{class_name!s}(tag={self_tag!r}, value={value!s})',
            class_name=self.__class__.__name__,
            self_tag=self.tag,
            value=value,
        )

    def dump(self, indent=0):
        # type: (int) -> None
        if isinstance(self.value, str):
            sys.stdout.write(
                '{}{}(tag={!r}, value={!r})\n'.format(
                    ' ' * indent, self.__class__.__name__, self.tag, self.value
                )
            )
            if self.comment:
                sys.stdout.write('    {}comment: {})\n'.format(' ' * indent, self.comment))
            return
        sys.stdout.write(
            '{}{}(tag={!r})\n'.format(' ' * indent, self.__class__.__name__, self.tag)
        )
        if self.comment:
            sys.stdout.write('    {}comment: {})\n'.format(' ' * indent, self.comment))
        for v in self.value:
            if isinstance(v, tuple):
                for v1 in v:
                    v1.dump(indent + 1)
            elif isinstance(v, Node):
                v.dump(indent + 1)
            else:
                sys.stdout.write('Node value type? {}\n'.format(type(v)))


class ScalarNode(Node):
    """
    styles:
      ? -> set() ? key, no value
      " -> double quoted
      ' -> single quoted
      | -> literal style
      > -> folding style
    """

    __slots__ = ('style',)
    id = 'scalar'

    def __init__(
        self, tag, value, start_mark=None, end_mark=None, style=None, comment=None, anchor=None
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any) -> None
        Node.__init__(self, tag, value, start_mark, end_mark, comment=comment, anchor=anchor)
        self.style = style


class CollectionNode(Node):
    __slots__ = ('flow_style',)

    def __init__(
        self,
        tag,
        value,
        start_mark=None,
        end_mark=None,
        flow_style=None,
        comment=None,
        anchor=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any) -> None
        Node.__init__(self, tag, value, start_mark, end_mark, comment=comment)
        self.flow_style = flow_style
        self.anchor = anchor


class SequenceNode(CollectionNode):
    __slots__ = ()
    id = 'sequence'


class MappingNode(CollectionNode):
    __slots__ = ('merge',)
    id = 'mapping'

    def __init__(
        self,
        tag,
        value,
        start_mark=None,
        end_mark=None,
        flow_style=None,
        comment=None,
        anchor=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any) -> None
        CollectionNode.__init__(
            self, tag, value, start_mark, end_mark, flow_style, comment, anchor
        )
        self.merge = None
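Node.dump() above is a small debugging aid that prints the composed node tree to stdout. A sketch using the old-style compose() helper, assuming it is exposed as in upstream ruamel.yaml:

    import ruamel.yaml

    node = ruamel.yaml.compose('a: [1, 2]\n')  # returns the root MappingNode
    node.dump()
    # MappingNode(tag='tag:yaml.org,2002:map')
    #  ScalarNode(tag='tag:yaml.org,2002:str', value='a')
    #  SequenceNode(tag='tag:yaml.org,2002:seq')
    #   ScalarNode(tag='tag:yaml.org,2002:int', value='1')
    #   ...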
884 lib/spack/external/_vendoring/ruamel/yaml/parser.py vendored Normal file
@@ -0,0 +1,884 @@
# coding: utf-8

# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream            ::= STREAM-START implicit_document? explicit_document*
#                       STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
#                       ALIAS
#                       | properties (block_content |
#                         indentless_block_sequence)?
#                       | block_content
#                       | indentless_block_sequence
# block_node        ::= ALIAS
#                       | properties block_content?
#                       | block_content
# flow_node         ::= ALIAS
#                       | properties flow_content?
#                       | flow_content
# properties        ::= TAG ANCHOR? | ANCHOR TAG?
# block_content     ::= block_collection | flow_collection | SCALAR
# flow_content      ::= flow_collection | SCALAR
# block_collection  ::= block_sequence | block_mapping
# flow_collection   ::= flow_sequence | flow_mapping
# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
#                       BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping     ::= BLOCK-MAPPING_START
#                       ((KEY block_node_or_indentless_sequence?)?
#                       (VALUE block_node_or_indentless_sequence?)?)*
#                       BLOCK-END
# flow_sequence     ::= FLOW-SEQUENCE-START
#                       (flow_sequence_entry FLOW-ENTRY)*
#                       flow_sequence_entry?
#                       FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping      ::= FLOW-MAPPING-START
#                       (flow_mapping_entry FLOW-ENTRY)*
#                       flow_mapping_entry?
#                       FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START <}
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START
#               BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START
#                  FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR
#               BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START
#               FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                        FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                       FLOW-MAPPING-START KEY }

# need to have full path with import, as pkg_resources tries to load parser.py in __init__.py
# only to not do anything with the package afterwards
# and for Jython too


from ruamel.yaml.error import MarkedYAMLError
from ruamel.yaml.tokens import *  # NOQA
from ruamel.yaml.events import *  # NOQA
from ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError  # NOQA
from ruamel.yaml.scanner import BlankLineComment
from ruamel.yaml.comments import C_PRE, C_POST, C_SPLIT_ON_FIRST_BLANK
from ruamel.yaml.compat import _F, nprint, nprintf  # NOQA

if False:  # MYPY
    from typing import Any, Dict, Optional, List  # NOQA

__all__ = ['Parser', 'RoundTripParser', 'ParserError']


def xprintf(*args, **kw):
    # type: (Any, Any) -> Any
    return nprintf(*args, **kw)


class ParserError(MarkedYAMLError):
    pass
class Parser:
    # Since writing a recursive descent parser is a straightforward task, we
    # do not give many comments here.

    DEFAULT_TAGS = {'!': '!', '!!': 'tag:yaml.org,2002:'}

    def __init__(self, loader):
        # type: (Any) -> None
        self.loader = loader
        if self.loader is not None and getattr(self.loader, '_parser', None) is None:
            self.loader._parser = self
        self.reset_parser()

    def reset_parser(self):
        # type: () -> None
        # Reset the state attributes (to clear self-references)
        self.current_event = self.last_event = None
        self.tag_handles = {}  # type: Dict[Any, Any]
        self.states = []  # type: List[Any]
        self.marks = []  # type: List[Any]
        self.state = self.parse_stream_start  # type: Any

    def dispose(self):
        # type: () -> None
        self.reset_parser()

    @property
    def scanner(self):
        # type: () -> Any
        if hasattr(self.loader, 'typ'):
            return self.loader.scanner
        return self.loader._scanner

    @property
    def resolver(self):
        # type: () -> Any
        if hasattr(self.loader, 'typ'):
            return self.loader.resolver
        return self.loader._resolver

    def check_event(self, *choices):
        # type: (Any) -> bool
        # Check the type of the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        if self.current_event is not None:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.current_event, choice):
                    return True
        return False

    def peek_event(self):
        # type: () -> Any
        # Get the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        return self.current_event

    def get_event(self):
        # type: () -> Any
        # Get the next event and proceed further.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        # assert self.current_event is not None
        # if self.current_event.end_mark.line != self.peek_event().start_mark.line:
        xprintf('get_event', repr(self.current_event), self.peek_event().start_mark.line)
        self.last_event = value = self.current_event
        self.current_event = None
        return value

    # stream    ::= STREAM-START implicit_document? explicit_document*
    #               STREAM-END
    # implicit_document ::= block_node DOCUMENT-END*
    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

    def parse_stream_start(self):
        # type: () -> Any
        # Parse the stream start.
        token = self.scanner.get_token()
        self.move_token_comment(token)
        event = StreamStartEvent(token.start_mark, token.end_mark, encoding=token.encoding)

        # Prepare the next state.
        self.state = self.parse_implicit_document_start

        return event

    def parse_implicit_document_start(self):
        # type: () -> Any
        # Parse an implicit document.
        if not self.scanner.check_token(DirectiveToken, DocumentStartToken, StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.scanner.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark, explicit=False)

            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node

            return event

        else:
            return self.parse_document_start()

    def parse_document_start(self):
        # type: () -> Any
        # Parse any extra document end indicators.
        while self.scanner.check_token(DocumentEndToken):
            self.scanner.get_token()
        # Parse an explicit document.
        if not self.scanner.check_token(StreamEndToken):
            version, tags = self.process_directives()
            if not self.scanner.check_token(DocumentStartToken):
                raise ParserError(
                    None,
                    None,
                    _F(
                        "expected '<document start>', but found {pt!r}",
                        pt=self.scanner.peek_token().id,
                    ),
                    self.scanner.peek_token().start_mark,
                )
            token = self.scanner.get_token()
            start_mark = token.start_mark
            end_mark = token.end_mark
            # if self.loader is not None and \
            #    end_mark.line != self.scanner.peek_token().start_mark.line:
            #     self.loader.scalar_after_indicator = False
            event = DocumentStartEvent(
                start_mark, end_mark, explicit=True, version=version, tags=tags,
                comment=token.comment
            )  # type: Any
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.scanner.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark, comment=token.comment)
            assert not self.states
            assert not self.marks
            self.state = None
        return event

    def parse_document_end(self):
        # type: () -> Any
        # Parse the document end.
        token = self.scanner.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.scanner.check_token(DocumentEndToken):
            token = self.scanner.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)

        # Prepare the next state.
        if self.resolver.processing_version == (1, 1):
            self.state = self.parse_document_start
        else:
            self.state = self.parse_implicit_document_start

        return event

    def parse_document_content(self):
        # type: () -> Any
        if self.scanner.check_token(
            DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
        ):
            event = self.process_empty_scalar(self.scanner.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()

    def process_directives(self):
        # type: () -> Any
        yaml_version = None
        self.tag_handles = {}
        while self.scanner.check_token(DirectiveToken):
            token = self.scanner.get_token()
            if token.name == 'YAML':
                if yaml_version is not None:
                    raise ParserError(
                        None, None, 'found duplicate YAML directive', token.start_mark
                    )
                major, minor = token.value
                if major != 1:
                    raise ParserError(
                        None,
                        None,
                        'found incompatible YAML document (version 1.* is required)',
                        token.start_mark,
                    )
                yaml_version = token.value
            elif token.name == 'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(
                        None,
                        None,
                        _F('duplicate tag handle {handle!r}', handle=handle),
                        token.start_mark,
                    )
                self.tag_handles[handle] = prefix
        if bool(self.tag_handles):
            value = yaml_version, self.tag_handles.copy()  # type: Any
        else:
            value = yaml_version, None
        if self.loader is not None and hasattr(self.loader, 'tags'):
            self.loader.version = yaml_version
            if self.loader.tags is None:
                self.loader.tags = {}
            for k in self.tag_handles:
                self.loader.tags[k] = self.tag_handles[k]
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value

    # block_node_or_indentless_sequence ::= ALIAS
    #               | properties (block_content | indentless_block_sequence)?
    #               | block_content
    #               | indentless_block_sequence
    # block_node    ::= ALIAS
    #                   | properties block_content?
    #                   | block_content
    # flow_node     ::= ALIAS
    #                   | properties flow_content?
    #                   | flow_content
    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
    # block_content     ::= block_collection | flow_collection | SCALAR
    # flow_content      ::= flow_collection | SCALAR
    # block_collection  ::= block_sequence | block_mapping
    # flow_collection   ::= flow_sequence | flow_mapping

    def parse_block_node(self):
        # type: () -> Any
        return self.parse_node(block=True)

    def parse_flow_node(self):
        # type: () -> Any
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        # type: () -> Any
        return self.parse_node(block=True, indentless_sequence=True)

    def transform_tag(self, handle, suffix):
        # type: (Any, Any) -> Any
        return self.tag_handles[handle] + suffix

    def parse_node(self, block=False, indentless_sequence=False):
        # type: (bool, bool) -> Any
        if self.scanner.check_token(AliasToken):
            token = self.scanner.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)  # type: Any
            self.state = self.states.pop()
            return event

        anchor = None
        tag = None
        start_mark = end_mark = tag_mark = None
        if self.scanner.check_token(AnchorToken):
            token = self.scanner.get_token()
            self.move_token_comment(token)
            start_mark = token.start_mark
            end_mark = token.end_mark
            anchor = token.value
            if self.scanner.check_token(TagToken):
                token = self.scanner.get_token()
                tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
        elif self.scanner.check_token(TagToken):
            token = self.scanner.get_token()
            start_mark = tag_mark = token.start_mark
            end_mark = token.end_mark
            tag = token.value
            if self.scanner.check_token(AnchorToken):
                token = self.scanner.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
        if tag is not None:
            handle, suffix = tag
            if handle is not None:
                if handle not in self.tag_handles:
                    raise ParserError(
                        'while parsing a node',
                        start_mark,
                        _F('found undefined tag handle {handle!r}', handle=handle),
                        tag_mark,
                    )
                tag = self.transform_tag(handle, suffix)
            else:
                tag = suffix
        # if tag == '!':
        #     raise ParserError("while parsing a node", start_mark,
        #             "found non-specific tag '!'", tag_mark,
        #             "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
        #             and share your opinion.")
        if start_mark is None:
            start_mark = end_mark = self.scanner.peek_token().start_mark
        event = None
        implicit = tag is None or tag == '!'
        if indentless_sequence and self.scanner.check_token(BlockEntryToken):
            comment = None
            pt = self.scanner.peek_token()
            if self.loader and self.loader.comment_handling is None:
                if pt.comment and pt.comment[0]:
                    comment = [pt.comment[0], []]
                    pt.comment[0] = None
            elif self.loader:
                if pt.comment:
                    comment = pt.comment
            end_mark = self.scanner.peek_token().end_mark
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_indentless_sequence_entry
            return event

        if self.scanner.check_token(ScalarToken):
            token = self.scanner.get_token()
            # self.scanner.peek_token_same_line_comment(token)
            end_mark = token.end_mark
            if (token.plain and tag is None) or tag == '!':
                implicit = (True, False)
            elif tag is None:
                implicit = (False, True)
            else:
                implicit = (False, False)
            # nprint('se', token.value, token.comment)
            event = ScalarEvent(
                anchor,
                tag,
                implicit,
                token.value,
                start_mark,
                end_mark,
                style=token.style,
                comment=token.comment,
            )
            self.state = self.states.pop()
        elif self.scanner.check_token(FlowSequenceStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = SequenceStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_sequence_first_entry
        elif self.scanner.check_token(FlowMappingStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = MappingStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_mapping_first_key
        elif block and self.scanner.check_token(BlockSequenceStartToken):
            end_mark = self.scanner.peek_token().start_mark
            # should inserting the comment be dependent on the
            # indentation?
            pt = self.scanner.peek_token()
            comment = pt.comment
            # nprint('pt0', type(pt))
            if comment is None or comment[1] is None:
                comment = pt.split_old_comment()
            # nprint('pt1', comment)
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_sequence_first_entry
        elif block and self.scanner.check_token(BlockMappingStartToken):
            end_mark = self.scanner.peek_token().start_mark
            comment = self.scanner.peek_token().comment
            event = MappingStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_mapping_first_key
        elif anchor is not None or tag is not None:
            # Empty scalars are allowed even if a tag or an anchor is
            # specified.
            event = ScalarEvent(anchor, tag, (implicit, False), "", start_mark, end_mark)
            self.state = self.states.pop()
        else:
            if block:
                node = 'block'
            else:
                node = 'flow'
            token = self.scanner.peek_token()
            raise ParserError(
                _F('while parsing a {node!s} node', node=node),
                start_mark,
                _F('expected the node content, but found {token_id!r}', token_id=token.id),
                token.start_mark,
            )
        return event
    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
    #                    BLOCK-END

    def parse_block_sequence_first_entry(self):
        # type: () -> Any
        token = self.scanner.get_token()
        # move any comment from start token
        # self.move_token_comment(token)
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()

    def parse_block_sequence_entry(self):
        # type: () -> Any
        if self.scanner.check_token(BlockEntryToken):
            token = self.scanner.get_token()
            self.move_token_comment(token)
            if not self.scanner.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.scanner.check_token(BlockEndToken):
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a block collection',
                self.marks[-1],
                _F('expected <block end>, but found {token_id!r}', token_id=token.id),
                token.start_mark,
            )
        token = self.scanner.get_token()  # BlockEndToken
        event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    # indentless_sequence?
    # sequence:
    # - entry
    #  - nested

    def parse_indentless_sequence_entry(self):
        # type: () -> Any
        if self.scanner.check_token(BlockEntryToken):
            token = self.scanner.get_token()
            self.move_token_comment(token)
            if not self.scanner.check_token(
                BlockEntryToken, KeyToken, ValueToken, BlockEndToken
            ):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.scanner.peek_token()
        c = None
        if self.loader and self.loader.comment_handling is None:
            c = token.comment
            start_mark = token.start_mark
        else:
            start_mark = self.last_event.end_mark  # type: ignore
            c = self.distribute_comment(token.comment, start_mark.line)  # type: ignore
        event = SequenceEndEvent(start_mark, start_mark, comment=c)
        self.state = self.states.pop()
        return event

    # block_mapping     ::= BLOCK-MAPPING_START
    #                       ((KEY block_node_or_indentless_sequence?)?
    #                       (VALUE block_node_or_indentless_sequence?)?)*
    #                       BLOCK-END

    def parse_block_mapping_first_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()

    def parse_block_mapping_key(self):
        # type: () -> Any
        if self.scanner.check_token(KeyToken):
            token = self.scanner.get_token()
            self.move_token_comment(token)
            if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if self.resolver.processing_version > (1, 1) and self.scanner.check_token(ValueToken):
            self.state = self.parse_block_mapping_value
            return self.process_empty_scalar(self.scanner.peek_token().start_mark)
        if not self.scanner.check_token(BlockEndToken):
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a block mapping',
                self.marks[-1],
                _F('expected <block end>, but found {token_id!r}', token_id=token.id),
                token.start_mark,
            )
        token = self.scanner.get_token()
        self.move_token_comment(token)
        event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_block_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            # the value token might have a post comment; move it to e.g. the block
            if self.scanner.check_token(ValueToken):
                self.move_token_comment(token)
            else:
                if not self.scanner.check_token(KeyToken):
                    self.move_token_comment(token, empty=True)
                # else: empty value for this key, cannot move token.comment
            if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_key
                comment = token.comment
                if comment is None:
                    token = self.scanner.peek_token()
                    comment = token.comment
                    if comment:
                        token._comment = [None, comment[1]]
                        comment = [comment[0], None]
                return self.process_empty_scalar(token.end_mark, comment=comment)
        else:
            self.state = self.parse_block_mapping_key
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

    # flow_sequence     ::= FLOW-SEQUENCE-START
    #                       (flow_sequence_entry FLOW-ENTRY)*
    #                       flow_sequence_entry?
    #                       FLOW-SEQUENCE-END
    # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    #
    # Note that while production rules for both flow_sequence_entry and
    # flow_mapping_entry are equal, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).

    def parse_flow_sequence_first_entry(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)

    def parse_flow_sequence_entry(self, first=False):
        # type: (bool) -> Any
        if not self.scanner.check_token(FlowSequenceEndToken):
            if not first:
                if self.scanner.check_token(FlowEntryToken):
                    self.scanner.get_token()
                else:
                    token = self.scanner.peek_token()
                    raise ParserError(
                        'while parsing a flow sequence',
                        self.marks[-1],
                        _F("expected ',' or ']', but got {token_id!r}", token_id=token.id),
                        token.start_mark,
                    )

            if self.scanner.check_token(KeyToken):
                token = self.scanner.peek_token()
                event = MappingStartEvent(
                    None, None, True, token.start_mark, token.end_mark, flow_style=True
                )  # type: Any
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.scanner.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.scanner.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_sequence_entry_mapping_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        if not self.scanner.check_token(ValueToken, FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)

    def parse_flow_sequence_entry_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            if not self.scanner.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_sequence_entry_mapping_end(self):
        # type: () -> Any
        self.state = self.parse_flow_sequence_entry
        token = self.scanner.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)

    # flow_mapping  ::= FLOW-MAPPING-START
    #                   (flow_mapping_entry FLOW-ENTRY)*
    #                   flow_mapping_entry?
    #                   FLOW-MAPPING-END
    # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?

    def parse_flow_mapping_first_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)

    def parse_flow_mapping_key(self, first=False):
        # type: (Any) -> Any
        if not self.scanner.check_token(FlowMappingEndToken):
            if not first:
                if self.scanner.check_token(FlowEntryToken):
                    self.scanner.get_token()
                else:
                    token = self.scanner.peek_token()
                    raise ParserError(
                        'while parsing a flow mapping',
                        self.marks[-1],
                        _F("expected ',' or '}}', but got {token_id!r}", token_id=token.id),
                        token.start_mark,
                    )
            if self.scanner.check_token(KeyToken):
                token = self.scanner.get_token()
                if not self.scanner.check_token(
                    ValueToken, FlowEntryToken, FlowMappingEndToken
                ):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif self.resolver.processing_version > (1, 1) and self.scanner.check_token(
                ValueToken
            ):
                self.state = self.parse_flow_mapping_value
                return self.process_empty_scalar(self.scanner.peek_token().end_mark)
            elif not self.scanner.check_token(FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.scanner.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            if not self.scanner.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_mapping_empty_value(self):
        # type: () -> Any
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.scanner.peek_token().start_mark)

    def process_empty_scalar(self, mark, comment=None):
        # type: (Any, Any) -> Any
        return ScalarEvent(None, None, (True, False), "", mark, mark, comment=comment)

    def move_token_comment(self, token, nt=None, empty=False):
        # type: (Any, Optional[Any], Optional[bool]) -> Any
        pass


class RoundTripParser(Parser):
    """roundtrip is a safe loader, that wants to see the unmangled tag"""

    def transform_tag(self, handle, suffix):
        # type: (Any, Any) -> Any
        # return self.tag_handles[handle]+suffix
        if handle == '!!' and suffix in (
            'null',
            'bool',
            'int',
            'float',
            'binary',
            'timestamp',
            'omap',
            'pairs',
            'set',
            'str',
            'seq',
            'map',
        ):
            return Parser.transform_tag(self, handle, suffix)
        return handle + suffix

    def move_token_comment(self, token, nt=None, empty=False):
        # type: (Any, Optional[Any], Optional[bool]) -> Any
        token.move_old_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)


class RoundTripParserSC(RoundTripParser):
    """roundtrip is a safe loader, that wants to see the unmangled tag"""

    # some of the differences are based on the superclass testing
    # if self.loader.comment_handling is not None

    def move_token_comment(self, token, nt=None, empty=False):
        # type: (Any, Optional[Any], Optional[bool]) -> None
        token.move_new_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)

    def distribute_comment(self, comment, line):
        # type: (Any, Any) -> Any
        # ToDo, look at indentation of the comment to determine attachment
        if comment is None:
            return None
        if not comment[0]:
            return None
        if comment[0][0] != line + 1:
            nprintf('>>>dcxxx', comment, line)
        assert comment[0][0] == line + 1
        # if comment[0] - line > 1:
        #     return
        typ = self.loader.comment_handling & 0b11
        # nprintf('>>>dca', comment, line, typ)
        if typ == C_POST:
            return None
        if typ == C_PRE:
            c = [None, None, comment[0]]
            comment[0] = None
            return c
        # nprintf('>>>dcb', comment[0])
        for _idx, cmntidx in enumerate(comment[0]):
            # nprintf('>>>dcb', cmntidx)
            if isinstance(self.scanner.comments[cmntidx], BlankLineComment):
                break
        else:
            return None  # no space found
        if _idx == 0:
            return None  # first line was blank
        # nprintf('>>>dcc', idx)
        if typ == C_SPLIT_ON_FIRST_BLANK:
            c = [None, None, comment[0][:_idx]]
            comment[0] = comment[0][_idx:]
            return c
        raise NotImplementedError  # reserved
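The parser is a pushdown state machine: each parse_* method returns exactly one event and leaves the next method in self.state, so check_event()/peek_event()/get_event() are the whole public surface. A sketch of driving it by hand through a composed loader (assuming ruamel.yaml is importable; normally the composer does this for you):

    import ruamel.yaml
    from ruamel.yaml.events import ScalarEvent

    loader = ruamel.yaml.Loader('a: 1\n')
    while loader.check_event():      # runs one state to peek the next event
        event = loader.get_event()   # consumes it and advances the machine
        if isinstance(event, ScalarEvent):
            print(event.value)       # -> a, then 1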
302 lib/spack/external/_vendoring/ruamel/yaml/reader.py vendored Normal file
@@ -0,0 +1,302 @@
# coding: utf-8

# This module contains abstractions for the input stream. You don't have to
# look further; there is no pretty code.
#
# We define two classes here.
#
#   Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
#   Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
#   reader.peek(length=1) - return the next `length` characters
#   reader.forward(length=1) - move the current position `length`
#     characters forward.
#   reader.index - the number of the current character.
#   reader.line, reader.column - the line and the column of the current
#     character.

import codecs

from ruamel.yaml.error import YAMLError, FileMark, StringMark, YAMLStreamError
from ruamel.yaml.compat import _F  # NOQA
from ruamel.yaml.util import RegExp

if False:  # MYPY
    from typing import Any, Dict, Optional, List, Union, Text, Tuple, Optional  # NOQA
    # from ruamel.yaml.compat import StreamTextType  # NOQA

__all__ = ['Reader', 'ReaderError']


class ReaderError(YAMLError):
    def __init__(self, name, position, character, encoding, reason):
        # type: (Any, Any, Any, Any, Any) -> None
        self.name = name
        self.character = character
        self.position = position
        self.encoding = encoding
        self.reason = reason

    def __str__(self):
        # type: () -> Any
        if isinstance(self.character, bytes):
            return _F(
                "'{self_encoding!s}' codec can't decode byte #x{ord_self_character:02x}: "
                '{self_reason!s}\n'
                '  in "{self_name!s}", position {self_position:d}',
                self_encoding=self.encoding,
                ord_self_character=ord(self.character),
                self_reason=self.reason,
                self_name=self.name,
                self_position=self.position,
            )
        else:
            return _F(
                'unacceptable character #x{self_character:04x}: {self_reason!s}\n'
                '  in "{self_name!s}", position {self_position:d}',
                self_character=self.character,
                self_reason=self.reason,
                self_name=self.name,
                self_position=self.position,
            )


class Reader:
    # Reader:
    # - determines the data encoding and converts it to a unicode string,
    # - checks if characters are in allowed range,
    # - adds '\0' to the end.

    # Reader accepts
    #  - a `bytes` object,
    #  - a `str` object,
    #  - a file-like object with its `read` method returning `str`,
    #  - a file-like object with its `read` method returning `unicode`.

    # Yeah, it's ugly and slow.

    def __init__(self, stream, loader=None):
        # type: (Any, Any) -> None
        self.loader = loader
        if self.loader is not None and getattr(self.loader, '_reader', None) is None:
            self.loader._reader = self
        self.reset_reader()
        self.stream = stream  # type: Any  # as .read is called

    def reset_reader(self):
        # type: () -> None
        self.name = None  # type: Any
        self.stream_pointer = 0
        self.eof = True
        self.buffer = ""
        self.pointer = 0
        self.raw_buffer = None  # type: Any
        self.raw_decode = None
        self.encoding = None  # type: Optional[Text]
        self.index = 0
        self.line = 0
        self.column = 0

    @property
    def stream(self):
        # type: () -> Any
        try:
            return self._stream
        except AttributeError:
            raise YAMLStreamError('input stream needs to be specified')

    @stream.setter
    def stream(self, val):
        # type: (Any) -> None
        if val is None:
            return
        self._stream = None
        if isinstance(val, str):
            self.name = '<unicode string>'
            self.check_printable(val)
            self.buffer = val + '\0'
        elif isinstance(val, bytes):
            self.name = '<byte string>'
            self.raw_buffer = val
            self.determine_encoding()
        else:
            if not hasattr(val, 'read'):
                raise YAMLStreamError('stream argument needs to have a read() method')
            self._stream = val
            self.name = getattr(self.stream, 'name', '<file>')
            self.eof = False
            self.raw_buffer = None
            self.determine_encoding()

    def peek(self, index=0):
        # type: (int) -> Text
        try:
            return self.buffer[self.pointer + index]
        except IndexError:
            self.update(index + 1)
            return self.buffer[self.pointer + index]

    def prefix(self, length=1):
        # type: (int) -> Any
        if self.pointer + length >= len(self.buffer):
            self.update(length)
        return self.buffer[self.pointer : self.pointer + length]

    def forward_1_1(self, length=1):
        # type: (int) -> None
        if self.pointer + length + 1 >= len(self.buffer):
            self.update(length + 1)
        while length != 0:
            ch = self.buffer[self.pointer]
            self.pointer += 1
            self.index += 1
            if ch in '\n\x85\u2028\u2029' or (
                ch == '\r' and self.buffer[self.pointer] != '\n'
            ):
                self.line += 1
                self.column = 0
            elif ch != '\uFEFF':
                self.column += 1
            length -= 1

    def forward(self, length=1):
        # type: (int) -> None
        if self.pointer + length + 1 >= len(self.buffer):
            self.update(length + 1)
        while length != 0:
            ch = self.buffer[self.pointer]
            self.pointer += 1
            self.index += 1
            if ch == '\n' or (ch == '\r' and self.buffer[self.pointer] != '\n'):
                self.line += 1
                self.column = 0
            elif ch != '\uFEFF':
                self.column += 1
            length -= 1

    def get_mark(self):
        # type: () -> Any
        if self.stream is None:
            return StringMark(
                self.name, self.index, self.line, self.column, self.buffer, self.pointer
            )
        else:
            return FileMark(self.name, self.index, self.line, self.column)

    def determine_encoding(self):
        # type: () -> None
        while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
            self.update_raw()
        if isinstance(self.raw_buffer, bytes):
            if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
                self.raw_decode = codecs.utf_16_le_decode  # type: ignore
                self.encoding = 'utf-16-le'
            elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
                self.raw_decode = codecs.utf_16_be_decode  # type: ignore
                self.encoding = 'utf-16-be'
            else:
                self.raw_decode = codecs.utf_8_decode  # type: ignore
                self.encoding = 'utf-8'
        self.update(1)

    NON_PRINTABLE = RegExp(
        '[^\x09\x0A\x0D\x20-\x7E\x85' '\xA0-\uD7FF' '\uE000-\uFFFD' '\U00010000-\U0010FFFF' ']'
    )

    _printable_ascii = ('\x09\x0A\x0D' + "".join(map(chr, range(0x20, 0x7F)))).encode('ascii')

    @classmethod
    def _get_non_printable_ascii(cls, data):  # type: ignore
        # type: (Text, bytes) -> Optional[Tuple[int, Text]]
        ascii_bytes = data.encode('ascii')  # type: ignore
        non_printables = ascii_bytes.translate(None, cls._printable_ascii)  # type: ignore
        if not non_printables:
            return None
        non_printable = non_printables[:1]
        return ascii_bytes.index(non_printable), non_printable.decode('ascii')

    @classmethod
    def _get_non_printable_regex(cls, data):
        # type: (Text) -> Optional[Tuple[int, Text]]
        match = cls.NON_PRINTABLE.search(data)
        if not bool(match):
            return None
        return match.start(), match.group()

    @classmethod
    def _get_non_printable(cls, data):
        # type: (Text) -> Optional[Tuple[int, Text]]
        try:
            return cls._get_non_printable_ascii(data)  # type: ignore
        except UnicodeEncodeError:
            return cls._get_non_printable_regex(data)

    def check_printable(self, data):
        # type: (Any) -> None
        non_printable_match = self._get_non_printable(data)
        if non_printable_match is not None:
            start, character = non_printable_match
            position = self.index + (len(self.buffer) - self.pointer) + start
            raise ReaderError(
                self.name,
                position,
                ord(character),
                'unicode',
                'special characters are not allowed',
            )

    def update(self, length):
        # type: (int) -> None
        if self.raw_buffer is None:
            return
        self.buffer = self.buffer[self.pointer :]
        self.pointer = 0
        while len(self.buffer) < length:
            if not self.eof:
                self.update_raw()
            if self.raw_decode is not None:
                try:
                    data, converted = self.raw_decode(self.raw_buffer, 'strict', self.eof)
                except UnicodeDecodeError as exc:
                    character = self.raw_buffer[exc.start]
                    if self.stream is not None:
                        position = self.stream_pointer - len(self.raw_buffer) + exc.start
                    else:
                        position = exc.start
                    raise ReaderError(self.name, position, character, exc.encoding, exc.reason)
            else:
                data = self.raw_buffer
                converted = len(data)
            self.check_printable(data)
            self.buffer += data
            self.raw_buffer = self.raw_buffer[converted:]
            if self.eof:
                self.buffer += '\0'
                self.raw_buffer = None
                break

    def update_raw(self, size=None):
        # type: (Optional[int]) -> None
        if size is None:
            size = 4096
        data = self.stream.read(size)
        if self.raw_buffer is None:
            self.raw_buffer = data
        else:
            self.raw_buffer += data
        self.stream_pointer += len(data)
        if not data:
            self.eof = True


# try:
#     import psyco
#     psyco.bind(Reader)
# except ImportError:
#     pass
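A minimal sketch of the Reader API described in the module comment, using only what the class above defines (string input, so marks are StringMarks):

from ruamel.yaml.reader import Reader

r = Reader('a: 1\n')            # str input: checked for printability, '\0'-terminated
assert r.peek() == 'a'          # look at the current character without consuming it
assert r.prefix(4) == 'a: 1'    # look at the next four characters
r.forward(3)                    # consume 'a: ', updating index/line/column bookkeeping
mark = r.get_mark()             # a StringMark at index 3, line 0, column 3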
lib/spack/external/_vendoring/ruamel/yaml/representer.py (vendored, new file, +1156 lines; diff suppressed because it is too large)
lib/spack/external/_vendoring/ruamel/yaml/resolver.py (vendored)
@@ -1,54 +1,166 @@
# coding: utf-8

from __future__ import absolute_import

import re

try:
    from .error import *  # NOQA
    from .nodes import *  # NOQA
    from .compat import string_types
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.error import *  # NOQA
    from ruamel.yaml.nodes import *  # NOQA
    from ruamel.yaml.compat import string_types
if False:  # MYPY
    from typing import Any, Dict, List, Union, Text, Optional  # NOQA
    from ruamel.yaml.compat import VersionType  # NOQA

from ruamel.yaml.compat import _DEFAULT_YAML_VERSION, _F  # NOQA
from ruamel.yaml.error import *  # NOQA
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode  # NOQA
from ruamel.yaml.util import RegExp  # NOQA

__all__ = ['BaseResolver', 'Resolver', 'VersionedResolver']


_DEFAULT_VERSION = (1, 2)
# fmt: off
# resolvers consist of
# - a list of applicable versions
# - a tag
# - a regexp
# - a list of first characters to match
implicit_resolvers = [
    ([(1, 2)],
     'tag:yaml.org,2002:bool',
     RegExp('''^(?:true|True|TRUE|false|False|FALSE)$''', re.X),
     list('tTfF')),
    ([(1, 1)],
     'tag:yaml.org,2002:bool',
     RegExp('''^(?:y|Y|yes|Yes|YES|n|N|no|No|NO
     |true|True|TRUE|false|False|FALSE
     |on|On|ON|off|Off|OFF)$''', re.X),
     list('yYnNtTfFoO')),
    ([(1, 2)],
     'tag:yaml.org,2002:float',
     RegExp('''^(?:
     [-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
     |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
     |[-+]?\\.[0-9_]+(?:[eE][-+][0-9]+)?
     |[-+]?\\.(?:inf|Inf|INF)
     |\\.(?:nan|NaN|NAN))$''', re.X),
     list('-+0123456789.')),
    ([(1, 1)],
     'tag:yaml.org,2002:float',
     RegExp('''^(?:
     [-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
     |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
     |\\.[0-9_]+(?:[eE][-+][0-9]+)?
     |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*  # sexagesimal float
     |[-+]?\\.(?:inf|Inf|INF)
     |\\.(?:nan|NaN|NAN))$''', re.X),
     list('-+0123456789.')),
    ([(1, 2)],
     'tag:yaml.org,2002:int',
     RegExp('''^(?:[-+]?0b[0-1_]+
     |[-+]?0o?[0-7_]+
     |[-+]?[0-9_]+
     |[-+]?0x[0-9a-fA-F_]+)$''', re.X),
     list('-+0123456789')),
    ([(1, 1)],
     'tag:yaml.org,2002:int',
     RegExp('''^(?:[-+]?0b[0-1_]+
     |[-+]?0?[0-7_]+
     |[-+]?(?:0|[1-9][0-9_]*)
     |[-+]?0x[0-9a-fA-F_]+
     |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),  # sexagesimal int
     list('-+0123456789')),
    ([(1, 2), (1, 1)],
     'tag:yaml.org,2002:merge',
     RegExp('^(?:<<)$'),
     ['<']),
    ([(1, 2), (1, 1)],
     'tag:yaml.org,2002:null',
     RegExp('''^(?: ~
     |null|Null|NULL
     | )$''', re.X),
     ['~', 'n', 'N', '']),
    ([(1, 2), (1, 1)],
     'tag:yaml.org,2002:timestamp',
     RegExp('''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
     |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
     (?:[Tt]|[ \\t]+)[0-9][0-9]?
     :[0-9][0-9] :[0-9][0-9] (?:\\.[0-9]*)?
     (?:[ \\t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
     list('0123456789')),
    ([(1, 2), (1, 1)],
     'tag:yaml.org,2002:value',
     RegExp('^(?:=)$'),
     ['=']),
    # The following resolver is only for documentation purposes. It cannot work
    # because plain scalars cannot start with '!', '&', or '*'.
    ([(1, 2), (1, 1)],
     'tag:yaml.org,2002:yaml',
     RegExp('^(?:!|&|\\*)$'),
     list('!&*')),
]
# fmt: on


class ResolverError(YAMLError):
    pass


class BaseResolver(object):
class BaseResolver:

    DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
    DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
    DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
    DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str'
    DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq'
    DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'

    yaml_implicit_resolvers = {}
    yaml_path_resolvers = {}
    yaml_implicit_resolvers = {}  # type: Dict[Any, Any]
    yaml_path_resolvers = {}  # type: Dict[Any, Any]

    def __init__(self):
        self._loader_version = None
        self.resolver_exact_paths = []
        self.resolver_prefix_paths = []
    def __init__(self, loadumper=None):
        # type: (Any, Any) -> None
        self.loadumper = loadumper
        if self.loadumper is not None and getattr(self.loadumper, '_resolver', None) is None:
            self.loadumper._resolver = self.loadumper
        self._loader_version = None  # type: Any
        self.resolver_exact_paths = []  # type: List[Any]
        self.resolver_prefix_paths = []  # type: List[Any]

    @property
    def parser(self):
        # type: () -> Any
        if self.loadumper is not None:
            if hasattr(self.loadumper, 'typ'):
                return self.loadumper.parser
            return self.loadumper._parser
        return None

    @classmethod
    def add_implicit_resolver(cls, tag, regexp, first):
    def add_implicit_resolver_base(cls, tag, regexp, first):
        # type: (Any, Any, Any) -> None
        if 'yaml_implicit_resolvers' not in cls.__dict__:
            cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
            # deepcopy doesn't work here
            cls.yaml_implicit_resolvers = dict(
                (k, cls.yaml_implicit_resolvers[k][:]) for k in cls.yaml_implicit_resolvers
            )
        if first is None:
            first = [None]
        for ch in first:
            cls.yaml_implicit_resolvers.setdefault(ch, []).append(
                (tag, regexp))
            cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))

    @classmethod
    def add_implicit_resolver(cls, tag, regexp, first):
        # type: (Any, Any, Any) -> None
        if 'yaml_implicit_resolvers' not in cls.__dict__:
            # deepcopy doesn't work here
            cls.yaml_implicit_resolvers = dict(
                (k, cls.yaml_implicit_resolvers[k][:]) for k in cls.yaml_implicit_resolvers
            )
        if first is None:
            first = [None]
        for ch in first:
            cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
        implicit_resolvers.append(([(1, 2), (1, 1)], tag, regexp, first))

    # @classmethod
    # def add_implicit_resolver(cls, tag, regexp, first):

    @classmethod
    def add_path_resolver(cls, tag, path, kind=None):
        # type: (Any, Any, Any) -> None
        # Note: `add_path_resolver` is experimental.  The API could be changed.
        # `new_path` is a pattern that is matched against the path from the
        # root to the node that is being considered.  `node_path` elements are
@@ -63,7 +175,7 @@ def add_path_resolver(cls, tag, path, kind=None):
        # against a sequence value with the index equal to `index_check`.
        if 'yaml_path_resolvers' not in cls.__dict__:
            cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
        new_path = []
        new_path = []  # type: List[Any]
        for element in path:
            if isinstance(element, (list, tuple)):
                if len(element) == 2:
@@ -72,7 +184,9 @@ def add_path_resolver(cls, tag, path, kind=None):
                    node_check = element[0]
                    index_check = True
                else:
                    raise ResolverError("Invalid path element: %s" % element)
                    raise ResolverError(
                        _F('Invalid path element: {element!s}', element=element)
                    )
            else:
                node_check = None
                index_check = element
@@ -82,13 +196,18 @@ def add_path_resolver(cls, tag, path, kind=None):
                node_check = SequenceNode
            elif node_check is dict:
                node_check = MappingNode
            elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
                    and not isinstance(node_check, string_types) \
                    and node_check is not None:
                raise ResolverError("Invalid node checker: %s" % node_check)
            if not isinstance(index_check, (string_types, int)) \
                    and index_check is not None:
                raise ResolverError("Invalid index checker: %s" % index_check)
            elif (
                node_check not in [ScalarNode, SequenceNode, MappingNode]
                and not isinstance(node_check, str)
                and node_check is not None
            ):
                raise ResolverError(
                    _F('Invalid node checker: {node_check!s}', node_check=node_check)
                )
            if not isinstance(index_check, (str, int)) and index_check is not None:
                raise ResolverError(
                    _F('Invalid index checker: {index_check!s}', index_check=index_check)
                )
            new_path.append((node_check, index_check))
        if kind is str:
            kind = ScalarNode
@@ -96,12 +215,12 @@ def add_path_resolver(cls, tag, path, kind=None):
            kind = SequenceNode
        elif kind is dict:
            kind = MappingNode
        elif kind not in [ScalarNode, SequenceNode, MappingNode] \
                and kind is not None:
            raise ResolverError("Invalid node kind: %s" % kind)
        elif kind not in [ScalarNode, SequenceNode, MappingNode] and kind is not None:
            raise ResolverError(_F('Invalid node kind: {kind!s}', kind=kind))
        cls.yaml_path_resolvers[tuple(new_path), kind] = tag

    def descend_resolver(self, current_node, current_index):
        # type: (Any, Any) -> None
        if not self.yaml_path_resolvers:
            return
        exact_paths = {}
@@ -109,13 +228,11 @@ def descend_resolver(self, current_node, current_index):
        if current_node:
            depth = len(self.resolver_prefix_paths)
            for path, kind in self.resolver_prefix_paths[-1]:
                if self.check_resolver_prefix(depth, path, kind,
                                              current_node, current_index):
                if self.check_resolver_prefix(depth, path, kind, current_node, current_index):
                    if len(path) > depth:
                        prefix_paths.append((path, kind))
                    else:
                        exact_paths[kind] = self.yaml_path_resolvers[path,
                                                                     kind]
                        exact_paths[kind] = self.yaml_path_resolvers[path, kind]
        else:
            for path, kind in self.yaml_path_resolvers:
                if not path:
@@ -126,39 +243,40 @@ def descend_resolver(self, current_node, current_index):
        self.resolver_prefix_paths.append(prefix_paths)

    def ascend_resolver(self):
        # type: () -> None
        if not self.yaml_path_resolvers:
            return
        self.resolver_exact_paths.pop()
        self.resolver_prefix_paths.pop()

    def check_resolver_prefix(self, depth, path, kind,
                              current_node, current_index):
        node_check, index_check = path[depth-1]
        if isinstance(node_check, string_types):
    def check_resolver_prefix(self, depth, path, kind, current_node, current_index):
        # type: (int, Any, Any, Any, Any) -> bool
        node_check, index_check = path[depth - 1]
        if isinstance(node_check, str):
            if current_node.tag != node_check:
                return
                return False
        elif node_check is not None:
            if not isinstance(current_node, node_check):
                return
                return False
        if index_check is True and current_index is not None:
            return
        if (index_check is False or index_check is None) \
                and current_index is None:
            return
        if isinstance(index_check, string_types):
            if not (isinstance(current_index, ScalarNode) and
                    index_check == current_index.value):
                return
        elif isinstance(index_check, int) and not isinstance(index_check,
                                                             bool):
            return False
        if (index_check is False or index_check is None) and current_index is None:
            return False
        if isinstance(index_check, str):
            if not (
                isinstance(current_index, ScalarNode) and index_check == current_index.value
            ):
                return False
        elif isinstance(index_check, int) and not isinstance(index_check, bool):
            if index_check != current_index:
                return
                return False
        return True

    def resolve(self, kind, value, implicit):
        # type: (Any, Any, Any) -> Any
        if kind is ScalarNode and implicit[0]:
            if value == u'':
                resolvers = self.yaml_implicit_resolvers.get(u'', [])
            if value == "":
                resolvers = self.yaml_implicit_resolvers.get("", [])
            else:
                resolvers = self.yaml_implicit_resolvers.get(value[0], [])
            resolvers += self.yaml_implicit_resolvers.get(None, [])
@@ -166,7 +284,7 @@ def resolve(self, kind, value, implicit):
                if regexp.match(value):
                    return tag
            implicit = implicit[1]
        if self.yaml_path_resolvers:
        if bool(self.yaml_path_resolvers):
            exact_paths = self.resolver_exact_paths[-1]
            if kind in exact_paths:
                return exact_paths[kind]
@@ -181,158 +299,37 @@ def resolve(self, kind, value, implicit):

    @property
    def processing_version(self):
        # type: () -> Any
        return None


class Resolver(BaseResolver):
    pass

Resolver.add_implicit_resolver(
    u'tag:yaml.org,2002:bool',
    re.compile(u'''^(?:yes|Yes|YES|no|No|NO
    |true|True|TRUE|false|False|FALSE
    |on|On|ON|off|Off|OFF)$''', re.X),
    list(u'yYnNtTfFoO'))

Resolver.add_implicit_resolver(
    u'tag:yaml.org,2002:float',
    re.compile(u'''^(?:
    [-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
    |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
    |\\.[0-9_]+(?:[eE][-+][0-9]+)?
    |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
    |[-+]?\\.(?:inf|Inf|INF)
    |\\.(?:nan|NaN|NAN))$''', re.X),
    list(u'-+0123456789.'))

Resolver.add_implicit_resolver(
    u'tag:yaml.org,2002:int',
    re.compile(u'''^(?:[-+]?0b[0-1_]+
    |[-+]?0o?[0-7_]+
    |[-+]?(?:0|[1-9][0-9_]*)
    |[-+]?0x[0-9a-fA-F_]+
    |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
    list(u'-+0123456789'))

Resolver.add_implicit_resolver(
    u'tag:yaml.org,2002:merge',
    re.compile(u'^(?:<<)$'),
    [u'<'])

Resolver.add_implicit_resolver(
    u'tag:yaml.org,2002:null',
    re.compile(u'''^(?: ~
    |null|Null|NULL
    | )$''', re.X),
    [u'~', u'n', u'N', u''])

Resolver.add_implicit_resolver(
    u'tag:yaml.org,2002:timestamp',
    re.compile(u'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
    |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
    (?:[Tt]|[ \\t]+)[0-9][0-9]?
    :[0-9][0-9] :[0-9][0-9] (?:\\.[0-9]*)?
    (?:[ \\t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
    list(u'0123456789'))

Resolver.add_implicit_resolver(
    u'tag:yaml.org,2002:value',
    re.compile(u'^(?:=)$'),
    [u'='])

# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
Resolver.add_implicit_resolver(
    u'tag:yaml.org,2002:yaml',
    re.compile(u'^(?:!|&|\\*)$'),
    list(u'!&*'))

# resolvers consist of
# - a list of applicable versions
# - a tag
# - a regexp
# - a list of first characters to match
implicit_resolvers = [
    ([(1, 2)],
     u'tag:yaml.org,2002:bool',
     re.compile(u'''^(?:true|True|TRUE|false|False|FALSE)$''', re.X),
     list(u'tTfF')),
    ([(1, 1)],
     u'tag:yaml.org,2002:bool',
     re.compile(u'''^(?:yes|Yes|YES|no|No|NO
     |true|True|TRUE|false|False|FALSE
     |on|On|ON|off|Off|OFF)$''', re.X),
     list(u'yYnNtTfFoO')),
    ([(1, 2), (1, 1)],
     u'tag:yaml.org,2002:float',
     re.compile(u'''^(?:
     [-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
     |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
     |\\.[0-9_]+(?:[eE][-+][0-9]+)?
     |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
     |[-+]?\\.(?:inf|Inf|INF)
     |\\.(?:nan|NaN|NAN))$''', re.X),
     list(u'-+0123456789.')),
    ([(1, 2)],
     u'tag:yaml.org,2002:int',
     re.compile(u'''^(?:[-+]?0b[0-1_]+
     |[-+]?0o?[0-7_]+
     |[-+]?(?:0|[1-9][0-9_]*)
     |[-+]?0x[0-9a-fA-F_]+)$''', re.X),
     list(u'-+0123456789')),
    ([(1, 1)],
     u'tag:yaml.org,2002:int',
     re.compile(u'''^(?:[-+]?0b[0-1_]+
     |[-+]?0o?[0-7_]+
     |[-+]?(?:0|[1-9][0-9_]*)
     |[-+]?0x[0-9a-fA-F_]+
     |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
     list(u'-+0123456789')),
    ([(1, 2), (1, 1)],
     u'tag:yaml.org,2002:merge',
     re.compile(u'^(?:<<)$'),
     [u'<']),
    ([(1, 2), (1, 1)],
     u'tag:yaml.org,2002:null',
     re.compile(u'''^(?: ~
     |null|Null|NULL
     | )$''', re.X),
     [u'~', u'n', u'N', u'']),
    ([(1, 2), (1, 1)],
     u'tag:yaml.org,2002:timestamp',
     re.compile(u'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
     |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
     (?:[Tt]|[ \\t]+)[0-9][0-9]?
     :[0-9][0-9] :[0-9][0-9] (?:\\.[0-9]*)?
     (?:[ \\t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
     list(u'0123456789')),
    ([(1, 2), (1, 1)],
     u'tag:yaml.org,2002:value',
     re.compile(u'^(?:=)$'),
     [u'=']),
    # The following resolver is only for documentation purposes. It cannot work
    # because plain scalars cannot start with '!', '&', or '*'.
    ([(1, 2), (1, 1)],
     u'tag:yaml.org,2002:yaml',
     re.compile(u'^(?:!|&|\\*)$'),
     list(u'!&*')),
]
for ir in implicit_resolvers:
    if (1, 2) in ir[0]:
        Resolver.add_implicit_resolver_base(*ir[1:])
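The loop above registers every rule applicable to YAML 1.2 on `Resolver`; `add_implicit_resolver` is the hook for callers to add their own rules on top. A minimal sketch (the tag/regexp chosen here are illustrative, not part of the library):

import re
from ruamel.yaml.resolver import Resolver
from ruamel.yaml.util import RegExp

# Make bare 'yes'/'no' resolve to bool again under the 1.2 resolver.
Resolver.add_implicit_resolver(
    'tag:yaml.org,2002:bool',
    RegExp('^(?:yes|no)$', re.X),
    list('yn'))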


class VersionedResolver(BaseResolver):
    """
    contrary to the "normal" resolver, the smart resolver delays loading
    the pattern matching rules. That way it can decide to load 1.1 rules
    or the (default) 1.2 that no longer support octal without 0o, sexagesimals
    or the (default) 1.2 rules, that no longer support octal without 0o, sexagesimals
    and Yes/No/On/Off booleans.
    """

    def __init__(self, version=None):
        BaseResolver.__init__(self)
    def __init__(self, version=None, loader=None, loadumper=None):
        # type: (Optional[VersionType], Any, Any) -> None
        if loader is None and loadumper is not None:
            loader = loadumper
        BaseResolver.__init__(self, loader)
        self._loader_version = self.get_loader_version(version)
        self._version_implicit_resolver = {}
        self._version_implicit_resolver = {}  # type: Dict[Any, Any]

    def add_version_implicit_resolver(self, version, tag, regexp, first):
        # type: (VersionType, Any, Any, Any) -> None
        if first is None:
            first = [None]
        impl_resolver = self._version_implicit_resolver.setdefault(version, {})
@@ -340,19 +337,23 @@ def add_version_implicit_resolver(self, version, tag, regexp, first):
            impl_resolver.setdefault(ch, []).append((tag, regexp))

    def get_loader_version(self, version):
        # type: (Optional[VersionType]) -> Any
        if version is None or isinstance(version, tuple):
            return version
        if isinstance(version, list):
            return tuple(version)
        # assume string
        return tuple(map(int, version.split(u'.')))
        return tuple(map(int, version.split('.')))

    @property
    def resolver(self):
    def versioned_resolver(self):
        # type: () -> Any
        """
        select the resolver based on the version we are parsing
        """
        version = self.processing_version
        if isinstance(version, str):
            version = tuple(map(int, version.split('.')))
        if version not in self._version_implicit_resolver:
            for x in implicit_resolvers:
                if version in x[0]:
@@ -360,17 +361,18 @@ def resolver(self):
        return self._version_implicit_resolver[version]

    def resolve(self, kind, value, implicit):
        # type: (Any, Any, Any) -> Any
        if kind is ScalarNode and implicit[0]:
            if value == u'':
                resolvers = self.resolver.get(u'', [])
            if value == "":
                resolvers = self.versioned_resolver.get("", [])
            else:
                resolvers = self.resolver.get(value[0], [])
            resolvers += self.resolver.get(None, [])
                resolvers = self.versioned_resolver.get(value[0], [])
            resolvers += self.versioned_resolver.get(None, [])
            for tag, regexp in resolvers:
                if regexp.match(value):
                    return tag
            implicit = implicit[1]
        if self.yaml_path_resolvers:
        if bool(self.yaml_path_resolvers):
            exact_paths = self.resolver_exact_paths[-1]
            if kind in exact_paths:
                return exact_paths[kind]
@@ -385,13 +387,19 @@ def resolve(self, kind, value, implicit):

    @property
    def processing_version(self):
        # type: () -> Any
        try:
            version = self.yaml_version
            version = self.loadumper._scanner.yaml_version
        except AttributeError:
            # dumping
            version = self.use_version
            try:
                if hasattr(self.loadumper, 'typ'):
                    version = self.loadumper.version
                else:
                    version = self.loadumper._serializer.use_version  # dumping
            except AttributeError:
                version = None
        if version is None:
            version = self._loader_version
            if version is None:
                version = _DEFAULT_VERSION
                version = _DEFAULT_YAML_VERSION
        return version
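The delayed rule loading in `VersionedResolver` is directly observable: the `%YAML` directive in a document selects which implicit-resolver table is consulted. A minimal sketch (1.1-only booleans are the clearest case):

from ruamel.yaml import YAML

yaml = YAML(typ='safe')
print(yaml.load('yes'))                    # 1.2 rules (default): the plain string 'yes'
print(yaml.load('%YAML 1.1\n--- yes\n'))   # directive selects 1.1 rules: True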
lib/spack/external/_vendoring/ruamel/yaml/scalarbool.py (vendored, new file, +47 lines)
@@ -0,0 +1,47 @@
# coding: utf-8

"""
You cannot subclass bool, and a subclass is necessary for round-tripping anchored
bool values (and also if you want to preserve the original way of writing).

bool.__bases__ is type 'int', so that is what is used as the basis for ScalarBoolean as well.

You can use these in an if statement, but not when testing identity.
"""

from ruamel.yaml.anchor import Anchor

if False:  # MYPY
    from typing import Text, Any, Dict, List  # NOQA

__all__ = ['ScalarBoolean']


class ScalarBoolean(int):
    def __new__(cls, *args, **kw):
        # type: (Any, Any, Any) -> Any
        anchor = kw.pop('anchor', None)
        b = int.__new__(cls, *args, **kw)
        if anchor is not None:
            b.yaml_set_anchor(anchor, always_dump=True)
        return b

    @property
    def anchor(self):
        # type: () -> Any
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self, any=False):
        # type: (bool) -> Any
        if not hasattr(self, Anchor.attrib):
            return None
        if any or self.anchor.always_dump:
            return self.anchor
        return None

    def yaml_set_anchor(self, value, always_dump=False):
        # type: (Any, bool) -> None
        self.anchor.value = value
        self.anchor.always_dump = always_dump
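What the docstring means in practice, in a short sketch using only the class above:

from ruamel.yaml.scalarbool import ScalarBoolean

b = ScalarBoolean(True, anchor='flag')
if b:                       # truthiness works: it is an int subclass with value 1
    pass
assert b == True            # int equality also holds (1 == True)
assert b is not True        # but identity does not: it is not the bool singleton
assert b.yaml_anchor().value == 'flag'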
lib/spack/external/_vendoring/ruamel/yaml/scalarfloat.py (vendored, new file, +124 lines)
@@ -0,0 +1,124 @@
# coding: utf-8

import sys
from ruamel.yaml.anchor import Anchor

if False:  # MYPY
    from typing import Text, Any, Dict, List  # NOQA

__all__ = ['ScalarFloat', 'ExponentialFloat', 'ExponentialCapsFloat']


class ScalarFloat(float):
    def __new__(cls, *args, **kw):
        # type: (Any, Any, Any) -> Any
        width = kw.pop('width', None)
        prec = kw.pop('prec', None)
        m_sign = kw.pop('m_sign', None)
        m_lead0 = kw.pop('m_lead0', 0)
        exp = kw.pop('exp', None)
        e_width = kw.pop('e_width', None)
        e_sign = kw.pop('e_sign', None)
        underscore = kw.pop('underscore', None)
        anchor = kw.pop('anchor', None)
        v = float.__new__(cls, *args, **kw)
        v._width = width
        v._prec = prec
        v._m_sign = m_sign
        v._m_lead0 = m_lead0
        v._exp = exp
        v._e_width = e_width
        v._e_sign = e_sign
        v._underscore = underscore
        if anchor is not None:
            v.yaml_set_anchor(anchor, always_dump=True)
        return v

    def __iadd__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) + a  # the width-preserving code below is currently unreachable
        x = type(self)(self + a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        return x

    def __ifloordiv__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) // a
        x = type(self)(self // a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        return x

    def __imul__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) * a
        x = type(self)(self * a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        x._prec = self._prec  # check for others
        return x

    def __ipow__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) ** a
        x = type(self)(self ** a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        return x

    def __isub__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) - a
        x = type(self)(self - a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        return x

    @property
    def anchor(self):
        # type: () -> Any
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self, any=False):
        # type: (bool) -> Any
        if not hasattr(self, Anchor.attrib):
            return None
        if any or self.anchor.always_dump:
            return self.anchor
        return None

    def yaml_set_anchor(self, value, always_dump=False):
        # type: (Any, bool) -> None
        self.anchor.value = value
        self.anchor.always_dump = always_dump

    def dump(self, out=sys.stdout):
        # type: (Any) -> Any
        out.write(
            'ScalarFloat({}| w:{}, p:{}, s:{}, lz:{}, _:{}|{}, w:{}, s:{})\n'.format(
                self,
                self._width,  # type: ignore
                self._prec,  # type: ignore
                self._m_sign,  # type: ignore
                self._m_lead0,  # type: ignore
                self._underscore,  # type: ignore
                self._exp,  # type: ignore
                self._e_width,  # type: ignore
                self._e_sign,  # type: ignore
            )
        )


class ExponentialFloat(ScalarFloat):
    def __new__(cls, value, width=None, underscore=None):
        # type: (Any, Any, Any) -> Any
        return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)


class ExponentialCapsFloat(ScalarFloat):
    def __new__(cls, value, width=None, underscore=None):
        # type: (Any, Any, Any) -> Any
        return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)
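The formatting attributes (`_width`, `_prec`, sign and exponent fields) exist so a dump can reproduce the source notation of a float. A minimal sketch; the exact semantics of `width`/`prec` are assigned by the round-trip constructor (not shown in this file), so the values here are illustrative only:

import sys
from ruamel.yaml.scalarfloat import ScalarFloat

v = ScalarFloat(2.5, width=4, prec=1)   # e.g. '2.50' as written in a document
assert v == 2.5                         # behaves as an ordinary float in arithmetic
v.dump(sys.stdout)                      # debug helper defined above
v += 1                                  # in-place ops currently return a plain float
assert type(v) is float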
lib/spack/external/_vendoring/ruamel/yaml/scalarint.py (vendored, new file, +127 lines)
@@ -0,0 +1,127 @@
# coding: utf-8

from ruamel.yaml.anchor import Anchor

if False:  # MYPY
    from typing import Text, Any, Dict, List  # NOQA

__all__ = ['ScalarInt', 'BinaryInt', 'OctalInt', 'HexInt', 'HexCapsInt', 'DecimalInt']


class ScalarInt(int):
    def __new__(cls, *args, **kw):
        # type: (Any, Any, Any) -> Any
        width = kw.pop('width', None)
        underscore = kw.pop('underscore', None)
        anchor = kw.pop('anchor', None)
        v = int.__new__(cls, *args, **kw)
        v._width = width
        v._underscore = underscore
        if anchor is not None:
            v.yaml_set_anchor(anchor, always_dump=True)
        return v

    def __iadd__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self + a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    def __ifloordiv__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self // a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    def __imul__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self * a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    def __ipow__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self ** a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    def __isub__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self - a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    @property
    def anchor(self):
        # type: () -> Any
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self, any=False):
        # type: (bool) -> Any
        if not hasattr(self, Anchor.attrib):
            return None
        if any or self.anchor.always_dump:
            return self.anchor
        return None

    def yaml_set_anchor(self, value, always_dump=False):
        # type: (Any, bool) -> None
        self.anchor.value = value
        self.anchor.always_dump = always_dump


class BinaryInt(ScalarInt):
    def __new__(cls, value, width=None, underscore=None, anchor=None):
        # type: (Any, Any, Any, Any) -> Any
        return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)


class OctalInt(ScalarInt):
    def __new__(cls, value, width=None, underscore=None, anchor=None):
        # type: (Any, Any, Any, Any) -> Any
        return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)


# mixed casing of A-F is not supported; when loading, the first non-digit
# determines the case


class HexInt(ScalarInt):
    """uses lower case (a-f)"""

    def __new__(cls, value, width=None, underscore=None, anchor=None):
        # type: (Any, Any, Any, Any) -> Any
        return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)


class HexCapsInt(ScalarInt):
    """uses upper case (A-F)"""

    def __new__(cls, value, width=None, underscore=None, anchor=None):
        # type: (Any, Any, Any, Any) -> Any
        return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)


class DecimalInt(ScalarInt):
    """needed if anchor"""

    def __new__(cls, value, width=None, underscore=None, anchor=None):
        # type: (Any, Any, Any, Any) -> Any
        return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
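Unlike ScalarFloat above, the in-place operators here do keep the subclass and its formatting, which is what makes hex/octal/binary notation survive arithmetic on loaded values. A short sketch:

from ruamel.yaml.scalarint import HexInt

h = HexInt(255, width=4)    # e.g. loaded from '0x00ff'
h += 1
assert type(h) is HexInt    # __iadd__ returns type(self)(...), preserving the subclass
assert h == 256 and h._width == 4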
lib/spack/external/_vendoring/ruamel/yaml/scalarstring.py (vendored, new file, +152 lines)
@@ -0,0 +1,152 @@
# coding: utf-8

from ruamel.yaml.anchor import Anchor

if False:  # MYPY
    from typing import Text, Any, Dict, List  # NOQA

__all__ = [
    'ScalarString',
    'LiteralScalarString',
    'FoldedScalarString',
    'SingleQuotedScalarString',
    'DoubleQuotedScalarString',
    'PlainScalarString',
    # PreservedScalarString is the old name, as it was the first to be preserved on rt,
    # use LiteralScalarString instead
    'PreservedScalarString',
]


class ScalarString(str):
    __slots__ = Anchor.attrib

    def __new__(cls, *args, **kw):
        # type: (Any, Any) -> Any
        anchor = kw.pop('anchor', None)
        ret_val = str.__new__(cls, *args, **kw)
        if anchor is not None:
            ret_val.yaml_set_anchor(anchor, always_dump=True)
        return ret_val

    def replace(self, old, new, maxreplace=-1):
        # type: (Any, Any, int) -> Any
        return type(self)((str.replace(self, old, new, maxreplace)))

    @property
    def anchor(self):
        # type: () -> Any
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self, any=False):
        # type: (bool) -> Any
        if not hasattr(self, Anchor.attrib):
            return None
        if any or self.anchor.always_dump:
            return self.anchor
        return None

    def yaml_set_anchor(self, value, always_dump=False):
        # type: (Any, bool) -> None
        self.anchor.value = value
        self.anchor.always_dump = always_dump


class LiteralScalarString(ScalarString):
    __slots__ = 'comment'  # the comment after the | on the first line

    style = '|'

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


PreservedScalarString = LiteralScalarString


class FoldedScalarString(ScalarString):
    __slots__ = ('fold_pos', 'comment')  # the comment after the > on the first line

    style = '>'

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


class SingleQuotedScalarString(ScalarString):
    __slots__ = ()

    style = "'"

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


class DoubleQuotedScalarString(ScalarString):
    __slots__ = ()

    style = '"'

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


class PlainScalarString(ScalarString):
    __slots__ = ()

    style = ''

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


def preserve_literal(s):
    # type: (Text) -> Text
    return LiteralScalarString(s.replace('\r\n', '\n').replace('\r', '\n'))


def walk_tree(base, map=None):
    # type: (Any, Any) -> None
    """
    the routine here walks over a simple yaml tree (recursing in
    dict values and list items) and converts strings that
    have multiple lines to literal scalars

    You can also provide an explicit (ordered) mapping for multiple transforms
    (the first matching transform is applied):
        map = ruamel.yaml.compat.ordereddict
        map['\n'] = preserve_literal
        map[':'] = SingleQuotedScalarString
        walk_tree(data, map=map)
    """
    from collections.abc import MutableMapping, MutableSequence

    if map is None:
        map = {'\n': preserve_literal}

    if isinstance(base, MutableMapping):
        for k in base:
            v = base[k]  # type: Text
            if isinstance(v, str):
                for ch in map:
                    if ch in v:
                        base[k] = map[ch](v)
                        break
            else:
                walk_tree(v, map=map)
    elif isinstance(base, MutableSequence):
        for idx, elem in enumerate(base):
            if isinstance(elem, str):
                for ch in map:
                    if ch in elem:
                        base[idx] = map[ch](elem)
                        break
            else:
                walk_tree(elem, map=map)
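A short usage sketch for `walk_tree` with its default transform, built from the functions above:

import sys
from ruamel.yaml import YAML
from ruamel.yaml.scalarstring import walk_tree

data = {'msg': 'line one\nline two\n'}
walk_tree(data)                  # the multi-line value becomes a LiteralScalarString
YAML().dump(data, sys.stdout)    # emitted in block style: msg: | followed by the lines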
lib/spack/external/_vendoring/ruamel/yaml/scanner.py (vendored, new file, +2444 lines; diff suppressed because it is too large)
lib/spack/external/_vendoring/ruamel/yaml/serializer.py (vendored, new file, +241 lines)
@@ -0,0 +1,241 @@
# coding: utf-8

from ruamel.yaml.error import YAMLError
from ruamel.yaml.compat import nprint, DBG_NODE, dbg, nprintf  # NOQA
from ruamel.yaml.util import RegExp

from ruamel.yaml.events import (
    StreamStartEvent,
    StreamEndEvent,
    MappingStartEvent,
    MappingEndEvent,
    SequenceStartEvent,
    SequenceEndEvent,
    AliasEvent,
    ScalarEvent,
    DocumentStartEvent,
    DocumentEndEvent,
)
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode

if False:  # MYPY
    from typing import Any, Dict, Union, Text, Optional  # NOQA
    from ruamel.yaml.compat import VersionType  # NOQA

__all__ = ['Serializer', 'SerializerError']


class SerializerError(YAMLError):
    pass


class Serializer:

    # 'id' and 3+ numbers, but not 000
    ANCHOR_TEMPLATE = 'id%03d'
    ANCHOR_RE = RegExp('id(?!000$)\\d{3,}')

    def __init__(
        self,
        encoding=None,
        explicit_start=None,
        explicit_end=None,
        version=None,
        tags=None,
        dumper=None,
    ):
        # type: (Any, Optional[bool], Optional[bool], Optional[VersionType], Any, Any) -> None  # NOQA
        self.dumper = dumper
        if self.dumper is not None:
            self.dumper._serializer = self
        self.use_encoding = encoding
        self.use_explicit_start = explicit_start
        self.use_explicit_end = explicit_end
        if isinstance(version, str):
            self.use_version = tuple(map(int, version.split('.')))
        else:
            self.use_version = version  # type: ignore
        self.use_tags = tags
        self.serialized_nodes = {}  # type: Dict[Any, Any]
        self.anchors = {}  # type: Dict[Any, Any]
        self.last_anchor_id = 0
        self.closed = None  # type: Optional[bool]
        self._templated_id = None

    @property
    def emitter(self):
        # type: () -> Any
        if hasattr(self.dumper, 'typ'):
            return self.dumper.emitter
        return self.dumper._emitter

    @property
    def resolver(self):
        # type: () -> Any
        if hasattr(self.dumper, 'typ'):
            self.dumper.resolver
        return self.dumper._resolver

    def open(self):
        # type: () -> None
        if self.closed is None:
            self.emitter.emit(StreamStartEvent(encoding=self.use_encoding))
            self.closed = False
        elif self.closed:
            raise SerializerError('serializer is closed')
        else:
            raise SerializerError('serializer is already opened')

    def close(self):
        # type: () -> None
        if self.closed is None:
            raise SerializerError('serializer is not opened')
        elif not self.closed:
            self.emitter.emit(StreamEndEvent())
            self.closed = True

    # def __del__(self):
    #     self.close()

    def serialize(self, node):
        # type: (Any) -> None
        if dbg(DBG_NODE):
            nprint('Serializing nodes')
            node.dump()
        if self.closed is None:
            raise SerializerError('serializer is not opened')
        elif self.closed:
            raise SerializerError('serializer is closed')
        self.emitter.emit(
            DocumentStartEvent(
                explicit=self.use_explicit_start, version=self.use_version, tags=self.use_tags
            )
        )
        self.anchor_node(node)
        self.serialize_node(node, None, None)
        self.emitter.emit(DocumentEndEvent(explicit=self.use_explicit_end))
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0

    def anchor_node(self, node):
        # type: (Any) -> None
        if node in self.anchors:
            if self.anchors[node] is None:
                self.anchors[node] = self.generate_anchor(node)
        else:
            anchor = None
            try:
                if node.anchor.always_dump:
                    anchor = node.anchor.value
            except:  # NOQA
                pass
            self.anchors[node] = anchor
            if isinstance(node, SequenceNode):
                for item in node.value:
                    self.anchor_node(item)
            elif isinstance(node, MappingNode):
                for key, value in node.value:
                    self.anchor_node(key)
                    self.anchor_node(value)

    def generate_anchor(self, node):
        # type: (Any) -> Any
        try:
            anchor = node.anchor.value
        except:  # NOQA
            anchor = None
        if anchor is None:
            self.last_anchor_id += 1
            return self.ANCHOR_TEMPLATE % self.last_anchor_id
        return anchor

    def serialize_node(self, node, parent, index):
        # type: (Any, Any, Any) -> None
        alias = self.anchors[node]
        if node in self.serialized_nodes:
            node_style = getattr(node, 'style', None)
            if node_style != '?':
                node_style = None
            self.emitter.emit(AliasEvent(alias, style=node_style))
        else:
            self.serialized_nodes[node] = True
            self.resolver.descend_resolver(parent, index)
            if isinstance(node, ScalarNode):
                # here check if the node.tag equals the one that would result from parsing
                # if not equal quoting is necessary for strings
                detected_tag = self.resolver.resolve(ScalarNode, node.value, (True, False))
                default_tag = self.resolver.resolve(ScalarNode, node.value, (False, True))
                implicit = (
                    (node.tag == detected_tag),
                    (node.tag == default_tag),
                    node.tag.startswith('tag:yaml.org,2002:'),
                )
                self.emitter.emit(
                    ScalarEvent(
                        alias,
                        node.tag,
                        implicit,
                        node.value,
                        style=node.style,
                        comment=node.comment,
                    )
                )
            elif isinstance(node, SequenceNode):
                implicit = node.tag == self.resolver.resolve(SequenceNode, node.value, True)
                comment = node.comment
                end_comment = None
                seq_comment = None
                if node.flow_style is True:
                    if comment:  # eol comment on flow style sequence
                        seq_comment = comment[0]
                        # comment[0] = None
                if comment and len(comment) > 2:
                    end_comment = comment[2]
                else:
                    end_comment = None
                self.emitter.emit(
                    SequenceStartEvent(
                        alias,
                        node.tag,
                        implicit,
                        flow_style=node.flow_style,
                        comment=node.comment,
                    )
                )
                index = 0
                for item in node.value:
                    self.serialize_node(item, node, index)
                    index += 1
                self.emitter.emit(SequenceEndEvent(comment=[seq_comment, end_comment]))
            elif isinstance(node, MappingNode):
                implicit = node.tag == self.resolver.resolve(MappingNode, node.value, True)
                comment = node.comment
                end_comment = None
                map_comment = None
                if node.flow_style is True:
                    if comment:  # eol comment on flow style sequence
                        map_comment = comment[0]
                        # comment[0] = None
                if comment and len(comment) > 2:
                    end_comment = comment[2]
                self.emitter.emit(
                    MappingStartEvent(
                        alias,
                        node.tag,
                        implicit,
                        flow_style=node.flow_style,
                        comment=node.comment,
                        nr_items=len(node.value),
                    )
                )
                for key, value in node.value:
                    self.serialize_node(key, node, None)
                    self.serialize_node(value, node, key)
                self.emitter.emit(MappingEndEvent(comment=[map_comment, end_comment]))
            self.resolver.ascend_resolver()


def templated_id(s):
    # type: (Text) -> Any
    return Serializer.ANCHOR_RE.match(s)
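The anchor-naming rule at the top of the class is self-contained and easy to demonstrate: auto-generated anchors follow `ANCHOR_TEMPLATE`, and `templated_id()` recognizes them (so machine-generated names can be told apart from user-chosen ones):

from ruamel.yaml.serializer import Serializer, templated_id

assert Serializer.ANCHOR_TEMPLATE % 1 == 'id001'
assert templated_id('id001')         # matches: 'id' followed by 3+ digits
assert not templated_id('id000')     # excluded by the (?!000$) negative lookahead
assert not templated_id('myanchor')  # user-chosen names do not match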
lib/spack/external/_vendoring/ruamel/yaml/timestamp.py (vendored, new file, +61 lines)
@@ -0,0 +1,61 @@
|
||||
# coding: utf-8
|
||||
|
||||
import datetime
|
||||
import copy
|
||||
|
||||
# ToDo: at least on PY3 you could probably attach the tzinfo correctly to the object
|
||||
# a more complete datetime might be used by safe loading as well
|
||||
|
||||
if False: # MYPY
|
||||
from typing import Any, Dict, Optional, List # NOQA
|
||||
|
||||
|
||||
class TimeStamp(datetime.datetime):
|
||||
def __init__(self, *args, **kw):
|
||||
# type: (Any, Any) -> None
|
||||
self._yaml = dict(t=False, tz=None, delta=0) # type: Dict[Any, Any]
|
||||
|
||||
def __new__(cls, *args, **kw): # datetime is immutable
|
||||
# type: (Any, Any) -> Any
|
||||
return datetime.datetime.__new__(cls, *args, **kw)
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
# type: (Any) -> Any
|
||||
ts = TimeStamp(self.year, self.month, self.day, self.hour, self.minute, self.second)
|
||||
ts._yaml = copy.deepcopy(self._yaml)
|
||||
return ts
|
||||
|
||||
def replace(
|
||||
self,
|
||||
year=None,
|
||||
month=None,
|
||||
day=None,
|
||||
hour=None,
|
||||
minute=None,
|
||||
second=None,
|
||||
microsecond=None,
|
||||
tzinfo=True,
|
||||
fold=None,
|
||||
):
|
||||
# type: (Any, Any, Any, Any, Any, Any, Any, Any, Any) -> Any
|
||||
if year is None:
|
||||
year = self.year
|
||||
if month is None:
|
||||
month = self.month
|
||||
if day is None:
|
||||
day = self.day
|
||||
if hour is None:
|
||||
hour = self.hour
|
||||
if minute is None:
|
||||
minute = self.minute
|
||||
if second is None:
|
||||
second = self.second
|
||||
if microsecond is None:
|
||||
microsecond = self.microsecond
|
||||
if tzinfo is True:
|
||||
tzinfo = self.tzinfo
|
||||
if fold is None:
|
||||
fold = self.fold
|
||||
ts = type(self)(year, month, day, hour, minute, second, microsecond, tzinfo, fold=fold)
|
||||
ts._yaml = copy.deepcopy(self._yaml)
|
||||
return ts
|
||||
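A short usage sketch (illustrative, not from the diff): TimeStamp behaves like datetime.datetime but carries a _yaml dict so round-tripping can remember how the scalar was written; both replace() and deep copies preserve it:

import copy

ts = TimeStamp(2021, 3, 14, 15, 9, 26)
ts._yaml['t'] = True                 # e.g. remember the 'T' date/time separator
ts2 = ts.replace(minute=10)          # returns a TimeStamp, not a plain datetime
assert isinstance(ts2, TimeStamp) and ts2._yaml['t'] is True
assert copy.deepcopy(ts)._yaml == ts._yaml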
404 lib/spack/external/_vendoring/ruamel/yaml/tokens.py vendored Normal file
@@ -0,0 +1,404 @@
# coding: utf-8

from ruamel.yaml.compat import _F, nprintf  # NOQA

if False:  # MYPY
    from typing import Text, Any, Dict, Optional, List  # NOQA
    from .error import StreamMark  # NOQA

SHOW_LINES = True


class Token:
    __slots__ = 'start_mark', 'end_mark', '_comment'

    def __init__(self, start_mark, end_mark):
        # type: (StreamMark, StreamMark) -> None
        self.start_mark = start_mark
        self.end_mark = end_mark

    def __repr__(self):
        # type: () -> Any
        # attributes = [key for key in self.__slots__ if not key.endswith('_mark') and
        #               hasattr('self', key)]
        attributes = [key for key in self.__slots__ if not key.endswith('_mark')]
        attributes.sort()
        # arguments = ', '.join(
        #     [_F('{key!s}={gattr!r})', key=key, gattr=getattr(self, key)) for key in attributes]
        # )
        arguments = [
            _F('{key!s}={gattr!r}', key=key, gattr=getattr(self, key)) for key in attributes
        ]
        if SHOW_LINES:
            try:
                arguments.append('line: ' + str(self.start_mark.line))
            except:  # NOQA
                pass
        try:
            arguments.append('comment: ' + str(self._comment))
        except:  # NOQA
            pass
        return '{}({})'.format(self.__class__.__name__, ', '.join(arguments))

    @property
    def column(self):
        # type: () -> int
        return self.start_mark.column

    @column.setter
    def column(self, pos):
        # type: (Any) -> None
        self.start_mark.column = pos

    # old style ( <= 0.17) is a TWO element list with first being the EOL
    # comment concatenated with following FLC/BLNK; and second being a list of FLC/BLNK
    # preceding the token
    # new style ( >= 0.17 ) is a THREE element list with the first being a list of
    # preceding FLC/BLNK, the second EOL and the third following FLC/BLNK
    # note that new style has differing order, and does not consist of CommentToken(s)
    # but of CommentInfo instances
    # any non-assigned values in new style are None, but first and last can be empty list
    # new style routines add one comment at a time

    # going to be deprecated in favour of add_comment_eol/post
    def add_post_comment(self, comment):
        # type: (Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [None, None]
        else:
            assert len(self._comment) in [2, 5]  # make sure it is version 0
        # if isinstance(comment, CommentToken):
        #     if comment.value.startswith('# C09'):
        #         raise
        self._comment[0] = comment

    # going to be deprecated in favour of add_comment_pre
    def add_pre_comments(self, comments):
        # type: (Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [None, None]
        else:
            assert len(self._comment) == 2  # make sure it is version 0
        assert self._comment[1] is None
        self._comment[1] = comments
        return

    # new style
    def add_comment_pre(self, comment):
        # type: (Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [[], None, None]  # type: ignore
        else:
            assert len(self._comment) == 3
            if self._comment[0] is None:
                self._comment[0] = []  # type: ignore
        self._comment[0].append(comment)  # type: ignore

    def add_comment_eol(self, comment, comment_type):
        # type: (Any, Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [None, None, None]
        else:
            assert len(self._comment) == 3
            assert self._comment[1] is None
        if self.comment[1] is None:
            self._comment[1] = []  # type: ignore
        self._comment[1].extend([None] * (comment_type + 1 - len(self.comment[1])))  # type: ignore # NOQA
        # nprintf('commy', self.comment, comment_type)
        self._comment[1][comment_type] = comment  # type: ignore

    def add_comment_post(self, comment):
        # type: (Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [None, None, []]  # type: ignore
        else:
            assert len(self._comment) == 3
            if self._comment[2] is None:
                self._comment[2] = []  # type: ignore
        self._comment[2].append(comment)  # type: ignore

    # def get_comment(self):
    #     # type: () -> Any
    #     return getattr(self, '_comment', None)

    @property
    def comment(self):
        # type: () -> Any
        return getattr(self, '_comment', None)

    def move_old_comment(self, target, empty=False):
        # type: (Any, bool) -> Any
        """move a comment from this token to target (normally next token)

        used to combine e.g. comments before a BlockEntryToken to the
        ScalarToken that follows it
        empty is a special for empty values -> comment after key
        """
        c = self.comment
        if c is None:
            return
        # don't push beyond last element
        if isinstance(target, (StreamEndToken, DocumentStartToken)):
            return
        delattr(self, '_comment')
        tc = target.comment
        if not tc:  # target comment, just insert
            # special for empty value in key: value issue 25
            if empty:
                c = [c[0], c[1], None, None, c[0]]
            target._comment = c
            # nprint('mco2:', self, target, target.comment, empty)
            return self
        if c[0] and tc[0] or c[1] and tc[1]:
            raise NotImplementedError(_F('overlap in comment {c!r} {tc!r}', c=c, tc=tc))
        if c[0]:
            tc[0] = c[0]
        if c[1]:
            tc[1] = c[1]
        return self

    def split_old_comment(self):
        # type: () -> Any
        """ split the post part of a comment, and return it
        as comment to be added. Delete second part if [None, None]
         abc:  # this goes to sequence
           # this goes to first element
         - first element
        """
        comment = self.comment
        if comment is None or comment[0] is None:
            return None  # nothing to do
        ret_val = [comment[0], None]
        if comment[1] is None:
            delattr(self, '_comment')
        return ret_val

    def move_new_comment(self, target, empty=False):
        # type: (Any, bool) -> Any
        """move a comment from this token to target (normally next token)

        used to combine e.g. comments before a BlockEntryToken to the
        ScalarToken that follows it
        empty is a special for empty values -> comment after key
        """
        c = self.comment
        if c is None:
            return
        # don't push beyond last element
        if isinstance(target, (StreamEndToken, DocumentStartToken)):
            return
        delattr(self, '_comment')
        tc = target.comment
        if not tc:  # target comment, just insert
            # special for empty value in key: value issue 25
            if empty:
                c = [c[0], c[1], c[2]]
            target._comment = c
            # nprint('mco2:', self, target, target.comment, empty)
            return self
        # if self and target have both pre, eol or post comments, something seems wrong
        for idx in range(3):
            if c[idx] is not None and tc[idx] is not None:
                raise NotImplementedError(_F('overlap in comment {c!r} {tc!r}', c=c, tc=tc))
        # move the comment parts
        for idx in range(3):
            if c[idx]:
                tc[idx] = c[idx]
        return self
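To make the old/new comment layout described above concrete, here is a small sketch (illustrative only; plain strings stand in for the CommentToken/CommentInfo objects the scanner would normally supply):

# old style (<= 0.17): TWO elements
#   [<eol comment (+ following full-line comments)>, [<full-line comments before the token>]]
# new style (>= 0.17): THREE elements
#   [[<comments before>], <eol comment slot(s)>, [<comments after>]]
tok = ScalarToken('x', True, None, None)
tok.add_comment_pre('# before')
tok.add_comment_post('# after')
assert tok.comment == [['# before'], None, ['# after']]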


# class BOMToken(Token):
#     id = '<byte order mark>'


class DirectiveToken(Token):
    __slots__ = 'name', 'value'
    id = '<directive>'

    def __init__(self, name, value, start_mark, end_mark):
        # type: (Any, Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.name = name
        self.value = value


class DocumentStartToken(Token):
    __slots__ = ()
    id = '<document start>'


class DocumentEndToken(Token):
    __slots__ = ()
    id = '<document end>'


class StreamStartToken(Token):
    __slots__ = ('encoding',)
    id = '<stream start>'

    def __init__(self, start_mark=None, end_mark=None, encoding=None):
        # type: (Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.encoding = encoding


class StreamEndToken(Token):
    __slots__ = ()
    id = '<stream end>'


class BlockSequenceStartToken(Token):
    __slots__ = ()
    id = '<block sequence start>'


class BlockMappingStartToken(Token):
    __slots__ = ()
    id = '<block mapping start>'


class BlockEndToken(Token):
    __slots__ = ()
    id = '<block end>'


class FlowSequenceStartToken(Token):
    __slots__ = ()
    id = '['


class FlowMappingStartToken(Token):
    __slots__ = ()
    id = '{'


class FlowSequenceEndToken(Token):
    __slots__ = ()
    id = ']'


class FlowMappingEndToken(Token):
    __slots__ = ()
    id = '}'


class KeyToken(Token):
    __slots__ = ()
    id = '?'

    # def x__repr__(self):
    #     return 'KeyToken({})'.format(
    #         self.start_mark.buffer[self.start_mark.index:].split(None, 1)[0])


class ValueToken(Token):
    __slots__ = ()
    id = ':'


class BlockEntryToken(Token):
    __slots__ = ()
    id = '-'


class FlowEntryToken(Token):
    __slots__ = ()
    id = ','


class AliasToken(Token):
    __slots__ = ('value',)
    id = '<alias>'

    def __init__(self, value, start_mark, end_mark):
        # type: (Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.value = value


class AnchorToken(Token):
    __slots__ = ('value',)
    id = '<anchor>'

    def __init__(self, value, start_mark, end_mark):
        # type: (Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.value = value


class TagToken(Token):
    __slots__ = ('value',)
    id = '<tag>'

    def __init__(self, value, start_mark, end_mark):
        # type: (Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.value = value


class ScalarToken(Token):
    __slots__ = 'value', 'plain', 'style'
    id = '<scalar>'

    def __init__(self, value, plain, start_mark, end_mark, style=None):
        # type: (Any, Any, Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.value = value
        self.plain = plain
        self.style = style


class CommentToken(Token):
    __slots__ = '_value', 'pre_done'
    id = '<comment>'

    def __init__(self, value, start_mark=None, end_mark=None, column=None):
        # type: (Any, Any, Any, Any) -> None
        if start_mark is None:
            assert column is not None
            self._column = column
        Token.__init__(self, start_mark, None)  # type: ignore
        self._value = value

    @property
    def value(self):
        # type: () -> str
        if isinstance(self._value, str):
            return self._value
        return "".join(self._value)

    @value.setter
    def value(self, val):
        # type: (Any) -> None
        self._value = val

    def reset(self):
        # type: () -> None
        if hasattr(self, 'pre_done'):
            delattr(self, 'pre_done')

    def __repr__(self):
        # type: () -> Any
        v = '{!r}'.format(self.value)
        if SHOW_LINES:
            try:
                v += ', line: ' + str(self.start_mark.line)
            except:  # NOQA
                pass
            try:
                v += ', col: ' + str(self.start_mark.column)
            except:  # NOQA
                pass
        return 'CommentToken({})'.format(v)

    def __eq__(self, other):
        # type: (Any) -> bool
        if self.start_mark != other.start_mark:
            return False
        if self.end_mark != other.end_mark:
            return False
        if self.value != other.value:
            return False
        return True

    def __ne__(self, other):
        # type: (Any) -> bool
        return not self.__eq__(other)
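A minimal illustration of CommentToken's value handling: the token accepts either a string or an iterable of string fragments, and a column is required when no start mark is given:

ct = CommentToken('# a comment\n', column=4)  # start_mark omitted -> column required
assert ct.value == '# a comment\n'
ct.value = ['# a ', 'comment\n']              # fragments are joined on access
assert ct.value == '# a comment\n'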
256 lib/spack/external/_vendoring/ruamel/yaml/util.py vendored Normal file
@@ -0,0 +1,256 @@
# coding: utf-8

"""
some helper functions that might be generally useful
"""

import datetime
from functools import partial
import re


if False:  # MYPY
    from typing import Any, Dict, Optional, List, Text  # NOQA
    from .compat import StreamTextType  # NOQA


class LazyEval:
    """
    Lightweight wrapper around lazily evaluated func(*args, **kwargs).

    func is only evaluated when any attribute of its return value is accessed.
    Every attribute access is passed through to the wrapped value.
    (This only excludes special cases like method-wrappers, e.g., __hash__.)
    The sole additional attribute is the lazy_self function which holds the
    return value (or, prior to evaluation, func and arguments), in its closure.
    """

    def __init__(self, func, *args, **kwargs):
        # type: (Any, Any, Any) -> None
        def lazy_self():
            # type: () -> Any
            return_value = func(*args, **kwargs)
            object.__setattr__(self, 'lazy_self', lambda: return_value)
            return return_value

        object.__setattr__(self, 'lazy_self', lazy_self)

    def __getattribute__(self, name):
        # type: (Any) -> Any
        lazy_self = object.__getattribute__(self, 'lazy_self')
        if name == 'lazy_self':
            return lazy_self
        return getattr(lazy_self(), name)

    def __setattr__(self, name, value):
        # type: (Any, Any) -> None
        setattr(self.lazy_self(), name, value)


RegExp = partial(LazyEval, re.compile)
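RegExp therefore yields a pattern object whose re.compile call is deferred until first use, which keeps import time down. A small sketch:

pattern = RegExp('[0-9]+')     # nothing is compiled yet
m = pattern.search('abc123')   # first attribute access triggers re.compile
assert m.group() == '123'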
timestamp_regexp = RegExp(
    """^(?P<year>[0-9][0-9][0-9][0-9])
       -(?P<month>[0-9][0-9]?)
       -(?P<day>[0-9][0-9]?)
       (?:((?P<t>[Tt])|[ \\t]+)   # explictly not retaining extra spaces
       (?P<hour>[0-9][0-9]?)
       :(?P<minute>[0-9][0-9])
       :(?P<second>[0-9][0-9])
       (?:\\.(?P<fraction>[0-9]*))?
       (?:[ \\t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
       (?::(?P<tz_minute>[0-9][0-9]))?))?)?$""",
    re.X,
)


def create_timestamp(
    year, month, day, t, hour, minute, second, fraction, tz, tz_sign, tz_hour, tz_minute
):
    # type: (Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) -> Any
    # create a timestamp from match against timestamp_regexp
    MAX_FRAC = 999999
    year = int(year)
    month = int(month)
    day = int(day)
    if not hour:
        return datetime.date(year, month, day)
    hour = int(hour)
    minute = int(minute)
    second = int(second)
    frac = 0
    if fraction:
        frac_s = fraction[:6]
        while len(frac_s) < 6:
            frac_s += '0'
        frac = int(frac_s)
        if len(fraction) > 6 and int(fraction[6]) > 4:
            frac += 1
        if frac > MAX_FRAC:
            fraction = 0
        else:
            fraction = frac
    else:
        fraction = 0
    delta = None
    if tz_sign:
        tz_hour = int(tz_hour)
        tz_minute = int(tz_minute) if tz_minute else 0
        delta = datetime.timedelta(
            hours=tz_hour, minutes=tz_minute, seconds=1 if frac > MAX_FRAC else 0
        )
        if tz_sign == '-':
            delta = -delta
    elif frac > MAX_FRAC:
        delta = -datetime.timedelta(seconds=1)
    # should do something else instead (or hook this up to the preceding if statement
    # in reverse
    # if delta is None:
    #     return datetime.datetime(year, month, day, hour, minute, second, fraction)
    # return datetime.datetime(year, month, day, hour, minute, second, fraction,
    #                          datetime.timezone.utc)
    # the above is not good enough though, should provide tzinfo. In Python3 that is easily
    # doable drop that kind of support for Python2 as it has not native tzinfo
    data = datetime.datetime(year, month, day, hour, minute, second, fraction)
    if delta:
        data -= delta
    return data
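The regexp's named groups line up one-to-one with create_timestamp's parameters, so a match can be unpacked directly. A sketch (dates without a time part come back as datetime.date, and a timezone offset is folded into a naive UTC datetime):

import datetime

match = timestamp_regexp.match('2001-12-14 21:59:43.10 -5')
dt = create_timestamp(**match.groupdict())
assert dt == datetime.datetime(2001, 12, 15, 2, 59, 43, 100000)  # shifted to UTC
assert create_timestamp(**timestamp_regexp.match('2002-12-14').groupdict()) \
    == datetime.date(2002, 12, 14)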


# originally as comment
# https://github.com/pre-commit/pre-commit/pull/211#issuecomment-186466605
# if you use this in your code, I suggest adding a test in your test suite
# that check this routines output against a known piece of your YAML
# before upgrades to this code break your round-tripped YAML
def load_yaml_guess_indent(stream, **kw):
    # type: (StreamTextType, Any) -> Any
    """guess the indent and block sequence indent of yaml stream/string

    returns round_trip_loaded stream, indent level, block sequence indent
    - block sequence indent is the number of spaces before a dash relative to previous indent
    - if there are no block sequences, indent is taken from nested mappings, block sequence
      indent is unset (None) in that case
    """
    from .main import YAML

    # load a YAML document, guess the indentation, if you use TABs you are on your own
    def leading_spaces(line):
        # type: (Any) -> int
        idx = 0
        while idx < len(line) and line[idx] == ' ':
            idx += 1
        return idx

    if isinstance(stream, str):
        yaml_str = stream  # type: Any
    elif isinstance(stream, bytes):
        # most likely, but the Reader checks BOM for this
        yaml_str = stream.decode('utf-8')
    else:
        yaml_str = stream.read()
    map_indent = None
    indent = None  # default if not found for some reason
    block_seq_indent = None
    prev_line_key_only = None
    key_indent = 0
    for line in yaml_str.splitlines():
        rline = line.rstrip()
        lline = rline.lstrip()
        if lline.startswith('- '):
            l_s = leading_spaces(line)
            block_seq_indent = l_s - key_indent
            idx = l_s + 1
            while line[idx] == ' ':  # this will end as we rstripped
                idx += 1
            if line[idx] == '#':  # comment after -
                continue
            indent = idx - key_indent
            break
        if map_indent is None and prev_line_key_only is not None and rline:
            idx = 0
            while line[idx] in ' -':
                idx += 1
            if idx > prev_line_key_only:
                map_indent = idx - prev_line_key_only
        if rline.endswith(':'):
            key_indent = leading_spaces(line)
            idx = 0
            while line[idx] == ' ':  # this will end on ':'
                idx += 1
            prev_line_key_only = idx
            continue
        prev_line_key_only = None
    if indent is None and map_indent is not None:
        indent = map_indent
    yaml = YAML()
    return yaml.load(yaml_str, **kw), indent, block_seq_indent  # type: ignore
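Typical round-trip use, per the docstring: load once, remember the indentation, and dump with the same settings. A sketch:

from ruamel.yaml import YAML

yaml_str = """\
opts:
      - all        # dash indented 6, value at column 8
      - nothing
"""
data, indent, block_seq_indent = load_yaml_guess_indent(yaml_str)
assert (indent, block_seq_indent) == (8, 6)
yaml = YAML()
yaml.indent(mapping=indent, sequence=indent, offset=block_seq_indent)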
def configobj_walker(cfg):
    # type: (Any) -> Any
    """
    walks over a ConfigObj (INI file with comments) generating
    corresponding YAML output (including comments
    """
    from configobj import ConfigObj  # type: ignore

    assert isinstance(cfg, ConfigObj)
    for c in cfg.initial_comment:
        if c.strip():
            yield c
    for s in _walk_section(cfg):
        if s.strip():
            yield s
    for c in cfg.final_comment:
        if c.strip():
            yield c


def _walk_section(s, level=0):
    # type: (Any, int) -> Any
    from configobj import Section

    assert isinstance(s, Section)
    indent = '  ' * level
    for name in s.scalars:
        for c in s.comments[name]:
            yield indent + c.strip()
        x = s[name]
        if '\n' in x:
            i = indent + '  '
            x = '|\n' + i + x.strip().replace('\n', '\n' + i)
        elif ':' in x:
            x = "'" + x.replace("'", "''") + "'"
        line = '{0}{1}: {2}'.format(indent, name, x)
        c = s.inline_comments[name]
        if c:
            line += ' ' + c
        yield line
    for name in s.sections:
        for c in s.comments[name]:
            yield indent + c.strip()
        line = '{0}{1}:'.format(indent, name)
        c = s.inline_comments[name]
        if c:
            line += ' ' + c
        yield line
        for val in _walk_section(s[name], level=level + 1):
            yield val


# def config_obj_2_rt_yaml(cfg):
#     from .comments import CommentedMap, CommentedSeq
#     from configobj import ConfigObj
#     assert isinstance(cfg, ConfigObj)
#     #for c in cfg.initial_comment:
#     #    if c.strip():
#     #        pass
#     cm = CommentedMap()
#     for name in s.sections:
#         cm[name] = d = CommentedMap()
#
#
#     #for c in cfg.final_comment:
#     #    if c.strip():
#     #        yield c
#     return cm
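configobj_walker yields YAML lines one at a time, so converting an INI document (this requires the third-party configobj package) is just a join. A sketch; the exact comment attachment depends on configobj's parsing:

from configobj import ConfigObj

ini = ConfigObj(['# a comment', 'name = value', '[section]', 'key = 1'])
print('\n'.join(configobj_walker(ini)))
# roughly:
#   # a comment
#   name: value
#   section:
#     key: 1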
8 lib/spack/external/archspec/__main__.py vendored Normal file
@@ -0,0 +1,8 @@
"""
Run the `archspec` CLI as a module.
"""

import sys
from .cli import main

sys.exit(main())
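With this stub in place the package is runnable via `python -m archspec`. The equivalent from Python, shown as a sketch (run_module ends in SystemExit because of the sys.exit call above; the CLI's arguments and output are archspec's, not shown here):

import runpy

try:
    runpy.run_module('archspec', run_name='__main__')
except SystemExit as e:
    print('archspec exited with', e.code)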
@@ -268,15 +268,14 @@ def tuplify(ver):
             return flags

-        msg = (
-            "cannot produce optimized binary for micro-architecture '{0}'"
-            " with {1}@{2} [supported compiler versions are {3}]"
-        )
-        msg = msg.format(
-            self.name,
-            compiler,
-            version,
-            ", ".join([x["versions"] for x in compiler_info]),
-        )
+        msg = "cannot produce optimized binary for micro-architecture '{0}' with {1}@{2}"
+        if compiler_info:
+            versions = [x["versions"] for x in compiler_info]
+            msg += f' [supported compiler versions are {", ".join(versions)}]'
+        else:
+            msg += " [no supported compiler versions]"
+        msg = msg.format(self.name, compiler, version)
         raise UnsupportedMicroarchitecture(msg)

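The net effect of the rewrite: the supported-versions suffix is only rendered when compiler_info is non-empty, presumably to avoid misleading text when no versions are known. An illustrative re-statement of the new logic (hypothetical helper, not archspec API):

def _unsupported_message(name, compiler, version, compiler_info):
    # mirrors the added lines above
    msg = "cannot produce optimized binary for micro-architecture '{0}' with {1}@{2}"
    if compiler_info:
        versions = [x["versions"] for x in compiler_info]
        msg += f' [supported compiler versions are {", ".join(versions)}]'
    else:
        msg += " [no supported compiler versions]"
    return msg.format(name, compiler, version)

print(_unsupported_message('icelake', 'gcc', '4.8', []))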
@@ -102,7 +102,8 @@
           "name": "x86-64",
           "flags": "-march={name} -mtune=generic"
         }
-      ]
+      ],
+      "nvhpc": []
     }
   },
   "x86_64_v2": {
@@ -157,7 +158,8 @@
           "name": "x86-64-v2",
           "flags": "-march={name} -mtune=generic"
         }
-      ]
+      ],
+      "nvhpc": []
     }
   },
   "x86_64_v3": {
@@ -228,6 +230,13 @@
           "name": "x86-64-v3",
           "flags": "-march={name} -mtune=generic"
         }
-      ]
+      ],
+      "nvhpc" : [
+        {
+          "versions": ":",
+          "name": "px",
+          "flags": "-tp {name} -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mxsave"
+        }
+      ]
     }
   },
@@ -304,6 +313,13 @@
           "name": "x86-64-v4",
           "flags": "-march={name} -mtune=generic"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "px",
+          "flags": "-tp {name} -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
+        }
+      ]
     }
   },
@@ -358,7 +374,8 @@
           "versions": ":",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": []
     }
   },
   "core2": {
@@ -412,7 +429,8 @@
           "versions": ":",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": []
     }
   },
   "nehalem": {
@@ -477,7 +495,8 @@
           "name": "corei7",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": []
     }
   },
   "westmere": {
@@ -539,7 +558,8 @@
           "name": "corei7",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": []
     }
   },
   "sandybridge": {
@@ -609,6 +629,12 @@
           "versions": ":",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -681,6 +707,12 @@
           "versions": ":",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -758,6 +790,12 @@
           "versions": ":",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -827,6 +865,13 @@
           "versions": ":",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "haswell",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -899,6 +944,13 @@
           "versions": ":",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "haswell",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1063,6 +1115,13 @@
           "name": "skylake-avx512",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "skylake",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1143,6 +1202,13 @@
           "versions": ":",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "skylake",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1222,6 +1288,13 @@
           "versions": ":",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "skylake",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1329,6 +1402,13 @@
           "name": "icelake-client",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "skylake",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1387,7 +1467,8 @@
           "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
           "flags": "-msse2"
         }
-      ]
+      ],
+      "nvhpc": []
     }
   },
   "bulldozer": {
@@ -1451,6 +1532,12 @@
           "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
           "flags": "-msse3"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1519,6 +1606,12 @@
           "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
           "flags": "-msse3"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1588,6 +1681,13 @@
           "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
           "flags": "-msse4.2"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "piledriver",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1663,6 +1763,13 @@
           "name": "core-avx2",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "piledriver",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1741,6 +1848,12 @@
           "name": "core-avx2",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1820,6 +1933,12 @@
           "name": "core-avx2",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": "20.5:",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1902,6 +2021,12 @@
           "name": "core-avx2",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": "21.11:",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -1958,18 +2083,28 @@
     "compilers": {
       "gcc": [
         {
-          "versions": "10.3:",
+          "versions": "10.3:13.0",
           "name": "znver3",
           "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
+        },
+        {
+          "versions": "13.1:",
+          "name": "znver4",
+          "flags": "-march={name} -mtune={name}"
         }
       ],
       "clang": [
         {
-          "versions": "12.0:",
+          "versions": "12.0:15.9",
           "name": "znver3",
           "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
+        },
+        {
+          "versions": "16.0:",
+          "name": "znver4",
+          "flags": "-march={name} -mtune={name}"
+        }
       ],
       "aocc": [
         {
           "versions": "3.0:3.9",
@@ -1982,7 +2117,15 @@
           "name": "znver4",
           "flags": "-march={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": "21.11:",
+          "name": "zen3",
+          "flags": "-tp {name}",
+          "warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
+        }
+      ]
     }
   },
   "ppc64": {
@@ -2087,7 +2230,8 @@
           "versions": ":",
           "flags": "-mcpu={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": []
     }
   },
   "power8le": {
@@ -2116,6 +2260,13 @@
           "name": "power8",
           "flags": "-mcpu={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "pwr8",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -2139,6 +2290,13 @@
           "name": "power9",
           "flags": "-mcpu={name} -mtune={name}"
         }
-      ]
+      ],
+      "nvhpc": [
+        {
+          "versions": ":",
+          "name": "pwr9",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -2170,7 +2328,8 @@
           "versions": ":",
           "flags": "-march=armv8-a -mtune=generic"
         }
-      ]
+      ],
+      "nvhpc": []
     }
   },
   "armv8.1a": {
@@ -2552,6 +2711,13 @@
           "versions": "20:",
           "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
         }
-      ]
+      ],
+      "nvhpc" : [
+        {
+          "versions": "22.5:",
+          "name": "neoverse-n1",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
@@ -2617,15 +2783,31 @@
           "flags" : "-march=armv8.2-a+crypto+fp16 -mtune=cortex-a72"
         },
         {
-          "versions": "8.0:8.9",
-          "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
-        },
-        {
-          "versions": "9.0:9.9",
-          "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
-        },
-        {
-          "versions": "10.0:",
-          "flags" : "-mcpu=neoverse-v1"
+          "versions": "8.0:8.4",
+          "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
+        },
+        {
+          "versions": "8.5:8.9",
+          "flags" : "-mcpu=neoverse-v1"
+        },
+        {
+          "versions": "9.0:9.3",
+          "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
+        },
+        {
+          "versions": "9.4:9.9",
+          "flags" : "-mcpu=neoverse-v1"
+        },
+        {
+          "versions": "10.0:10.1",
+          "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
+        },
+        {
+          "versions": "10.2:10.2.99",
+          "flags" : "-mcpu=zeus"
+        },
+        {
+          "versions": "10.3:",
+          "flags" : "-mcpu=neoverse-v1"
         }
@@ -2657,6 +2839,13 @@
           "versions": "22:",
           "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
         }
-      ]
+      ],
+      "nvhpc" : [
+        {
+          "versions": "22.5:",
+          "name": "neoverse-n1",
+          "flags": "-tp {name}"
+        }
+      ]
     }
   },
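Throughout these entries, "versions" strings like "10.3:13.0" are archspec version ranges: an optional inclusive lower and upper bound separated by ':'. A rough illustration of the semantics (hypothetical helper, not archspec code):

def in_range(version, spec):
    # "lo:hi" with either side optional; compares dotted numbers piecewise
    lo, _, hi = spec.partition(':')
    as_tuple = lambda v: tuple(int(x) for x in v.split('.'))
    v = as_tuple(version)
    if lo and v < as_tuple(lo):
        return False
    if hi and v > as_tuple(hi):
        return False
    return True

assert in_range('12.2', '10.3:13.0')
assert not in_range('13.1', '10.3:13.0')
assert in_range('99.0', '21.11:')   # open-ended upper bound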
@@ -1,2 +0,0 @@
import pkg_resources
pkg_resources.declare_namespace(__name__)

38 lib/spack/external/ruamel/yaml/README.rst vendored
@@ -1,38 +0,0 @@

ruamel.yaml
===========

``ruamel.yaml`` is a YAML 1.2 loader/dumper package for Python.

* `Overview <http://yaml.readthedocs.org/en/latest/overview.html>`_
* `Installing <http://yaml.readthedocs.org/en/latest/install.html>`_
* `Details <http://yaml.readthedocs.org/en/latest/detail.html>`_
* `Examples <http://yaml.readthedocs.org/en/latest/example.html>`_
* `Differences with PyYAML <http://yaml.readthedocs.org/en/latest/pyyaml.html>`_

.. image:: https://readthedocs.org/projects/yaml/badge/?version=stable
   :target: https://yaml.readthedocs.org/en/stable

ChangeLog
=========

::

  0.11.15 (2016-XX-XX):
    - Change to prevent FutureWarning in NumPy, as reported by tgehring
      ("comparison to None will result in an elementwise object comparison in the future")

  0.11.14 (2016-07-06):
    - fix preserve_quotes missing on original Loaders (as reported
      by Leynos, bitbucket issue 38)

  0.11.13 (2016-07-06):
    - documentation only, automated linux wheels

  0.11.12 (2016-07-06):
    - added support for roundtrip of single/double quoted scalars using:
      ruamel.yaml.round_trip_load(stream, preserve_quotes=True)

  0.11.0 (2016-02-18):
    - RoundTripLoader loads 1.2 by default (no sexagesimals, 012 octals nor
      yes/no/on/off booleans)

85 lib/spack/external/ruamel/yaml/__init__.py vendored
@@ -1,85 +0,0 @@
# coding: utf-8

from __future__ import print_function
from __future__ import absolute_import

# install_requires of ruamel.base is not really required but the old
# ruamel.base installed __init__.py, and thus a new version should
# be installed at some point

_package_data = dict(
    full_package_name="ruamel.yaml",
    version_info=(0, 11, 15),
    author="Anthon van der Neut",
    author_email="a.van.der.neut@ruamel.eu",
    description="ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order",  # NOQA
    entry_points=None,
    install_requires=dict(
        any=[],
        py26=["ruamel.ordereddict"],
        py27=["ruamel.ordereddict"]
    ),
    ext_modules=[dict(
        name="_ruamel_yaml",
        src=["ext/_ruamel_yaml.c", "ext/api.c", "ext/writer.c", "ext/dumper.c",
             "ext/loader.c", "ext/reader.c", "ext/scanner.c", "ext/parser.c",
             "ext/emitter.c"],
        lib=[],
        # test='#include "ext/yaml.h"\n\nint main(int argc, char* argv[])\n{\nyaml_parser_t parser;\nparser = parser; /* prevent warning */\nreturn 0;\n}\n'  # NOQA
    )
    ],
    classifiers=[
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Programming Language :: Python :: Implementation :: Jython",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Text Processing :: Markup"
    ],
    windows_wheels=True,
    read_the_docs='yaml',
    many_linux='libyaml-devel',
)


# < from ruamel.util.new import _convert_version
def _convert_version(tup):
    """create a PEP 386 pseudo-format conformant string from tuple tup"""
    ret_val = str(tup[0])  # first is always digit
    next_sep = "."  # separator for next extension, can be "" or "."
    for x in tup[1:]:
        if isinstance(x, int):
            ret_val += next_sep + str(x)
            next_sep = '.'
            continue
        first_letter = x[0].lower()
        next_sep = ''
        if first_letter in 'abcr':
            ret_val += 'rc' if first_letter == 'r' else first_letter
        elif first_letter in 'pd':
            ret_val += '.post' if first_letter == 'p' else '.dev'
    return ret_val
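For the record, the removed helper produced PEP 386-style strings from the version tuple, e.g.:

assert _convert_version((0, 11, 15)) == '0.11.15'
assert _convert_version((0, 12, 0, 'beta')) == '0.12.0b'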


# <
version_info = _package_data['version_info']
__version__ = _convert_version(version_info)

del _convert_version

try:
    from .cyaml import *  # NOQA
    __with_libyaml__ = True
except (ImportError, ValueError):  # for Jython
    __with_libyaml__ = False


# body extracted to main.py
try:
    from .main import *  # NOQA
except ImportError:
    from ruamel.yaml.main import *  # NOQA

486 lib/spack/external/ruamel/yaml/comments.py vendored
@@ -1,486 +0,0 @@
# coding: utf-8

from __future__ import absolute_import
from __future__ import print_function

"""
stuff to deal with comments and formatting on dict/list/ordereddict/set
these are not really related, formatting could be factored out as
a separate base
"""

import sys

if sys.version_info >= (3, 3):
    from collections.abc import MutableSet
else:
    from collections import MutableSet

__all__ = ["CommentedSeq", "CommentedMap", "CommentedOrderedMap",
           "CommentedSet", 'comment_attrib', 'merge_attrib']


try:
    from .compat import ordereddict
except ImportError:
    from ruamel.yaml.compat import ordereddict

comment_attrib = '_yaml_comment'
format_attrib = '_yaml_format'
line_col_attrib = '_yaml_line_col'
anchor_attrib = '_yaml_anchor'
merge_attrib = '_yaml_merge'
tag_attrib = '_yaml_tag'


class Comment(object):
    # sys.getsize tested the Comment objects, __slots__ make them bigger
    # and adding self.end did not matter
    attrib = comment_attrib

    def __init__(self):
        self.comment = None  # [post, [pre]]
        # map key (mapping/omap/dict) or index (sequence/list) to a list of
        # dict: post_key, pre_key, post_value, pre_value
        # list: pre item, post item
        self._items = {}
        # self._start = [] # should not put these on first item
        self._end = []  # end of document comments

    def __str__(self):
        if self._end:
            end = ',\n  end=' + str(self._end)
        else:
            end = ''
        return "Comment(comment={0},\n  items={1}{2})".format(
            self.comment, self._items, end)

    @property
    def items(self):
        return self._items

    @property
    def end(self):
        return self._end

    @end.setter
    def end(self, value):
        self._end = value

    @property
    def start(self):
        return self._start

    @start.setter
    def start(self, value):
        self._start = value


# to distinguish key from None
def NoComment():
    pass


class Format(object):
    attrib = format_attrib

    def __init__(self):
        self._flow_style = None

    def set_flow_style(self):
        self._flow_style = True

    def set_block_style(self):
        self._flow_style = False

    def flow_style(self, default=None):
        """if default (the flow_style) is None, the flow style tacked on to
        the object explicitly will be taken. If that is None as well the
        default flow style rules the format down the line, or the type
        of the constituent values (simple -> flow, map/list -> block)"""
        if self._flow_style is None:
            return default
        return self._flow_style


class LineCol(object):
    attrib = line_col_attrib

    def __init__(self):
        self.line = None
        self.col = None
        self.data = None

    def add_kv_line_col(self, key, data):
        if self.data is None:
            self.data = {}
        self.data[key] = data

    def key(self, k):
        return self._kv(k, 0, 1)

    def value(self, k):
        return self._kv(k, 2, 3)

    def _kv(self, k, x0, x1):
        if self.data is None:
            return None
        data = self.data[k]
        return data[x0], data[x1]

    def item(self, idx):
        if self.data is None:
            return None
        return self.data[idx][0], self.data[idx][1]

    def add_idx_line_col(self, key, data):
        if self.data is None:
            self.data = {}
        self.data[key] = data


class Anchor(object):
    attrib = anchor_attrib

    def __init__(self):
        self.value = None
        self.always_dump = False


class Tag(object):
    """store tag information for roundtripping"""
    attrib = tag_attrib

    def __init__(self):
        self.value = None


class CommentedBase(object):
    @property
    def ca(self):
        if not hasattr(self, Comment.attrib):
            setattr(self, Comment.attrib, Comment())
        return getattr(self, Comment.attrib)

    def yaml_end_comment_extend(self, comment, clear=False):
        if clear:
            self.ca.end = []
        self.ca.end.extend(comment)

    def yaml_key_comment_extend(self, key, comment, clear=False):
        l = self.ca._items.setdefault(key, [None, None, None, None])
        if clear or l[1] is None:
            if comment[1] is not None:
                assert isinstance(comment[1], list)
            l[1] = comment[1]
        else:
            l[1].extend(comment[0])
        l[0] = comment[0]

    def yaml_value_comment_extend(self, key, comment, clear=False):
        l = self.ca._items.setdefault(key, [None, None, None, None])
        if clear or l[3] is None:
            if comment[1] is not None:
                assert isinstance(comment[1], list)
            l[3] = comment[1]
        else:
            l[3].extend(comment[0])
        l[2] = comment[0]

    def yaml_set_start_comment(self, comment, indent=0):
        """overwrites any preceding comment lines on an object
        expects comment to be without `#` and possible have mutlple lines
        """
        from .error import Mark
        from .tokens import CommentToken
        pre_comments = self._yaml_get_pre_comment()
        if comment[-1] == '\n':
            comment = comment[:-1]  # strip final newline if there
        start_mark = Mark(None, None, None, indent, None, None)
        for com in comment.split('\n'):
            pre_comments.append(CommentToken('# ' + com + '\n', start_mark, None))

    @property
    def fa(self):
        """format attribute

        set_flow_style()/set_block_style()"""
        if not hasattr(self, Format.attrib):
            setattr(self, Format.attrib, Format())
        return getattr(self, Format.attrib)

    def yaml_add_eol_comment(self, comment, key=NoComment, column=None):
        """
        there is a problem as eol comments should start with ' #'
        (but at the beginning of the line the space doesn't have to be before
        the #. The column index is for the # mark
        """
        from .tokens import CommentToken
        from .error import Mark
        if column is None:
            column = self._yaml_get_column(key)
        if comment[0] != '#':
            comment = '# ' + comment
        if column is None:
            if comment[0] == '#':
                comment = ' ' + comment
                column = 0
        start_mark = Mark(None, None, None, column, None, None)
        ct = [CommentToken(comment, start_mark, None), None]
        self._yaml_add_eol_comment(ct, key=key)

    @property
    def lc(self):
        if not hasattr(self, LineCol.attrib):
            setattr(self, LineCol.attrib, LineCol())
        return getattr(self, LineCol.attrib)

    def _yaml_set_line_col(self, line, col):
        self.lc.line = line
        self.lc.col = col

    def _yaml_set_kv_line_col(self, key, data):
        self.lc.add_kv_line_col(key, data)

    def _yaml_set_idx_line_col(self, key, data):
        self.lc.add_idx_line_col(key, data)

    @property
    def anchor(self):
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self):
        if not hasattr(self, Anchor.attrib):
            return None
        return self.anchor

    def yaml_set_anchor(self, value, always_dump=False):
        self.anchor.value = value
        self.anchor.always_dump = always_dump

    @property
    def tag(self):
        if not hasattr(self, Tag.attrib):
            setattr(self, Tag.attrib, Tag())
        return getattr(self, Tag.attrib)

    def yaml_set_tag(self, value):
        self.tag.value = value


class CommentedSeq(list, CommentedBase):
    __slots__ = [Comment.attrib, ]

    def _yaml_add_comment(self, comment, key=NoComment):
        if key is not NoComment:
            self.yaml_key_comment_extend(key, comment)
        else:
            self.ca.comment = comment

    def _yaml_add_eol_comment(self, comment, key):
        self._yaml_add_comment(comment, key=key)

    def _yaml_get_columnX(self, key):
        return self.ca.items[key][0].start_mark.column

    def insert(self, idx, val):
        """the comments after the insertion have to move forward"""
        list.insert(self, idx, val)
        for list_index in sorted(self.ca.items, reverse=True):
            if list_index < idx:
                break
            self.ca.items[list_index+1] = self.ca.items.pop(list_index)

    def pop(self, idx):
        res = list.pop(self, idx)
        self.ca.items.pop(idx, None)  # might not be there -> default value
        for list_index in sorted(self.ca.items):
            if list_index < idx:
                continue
            self.ca.items[list_index-1] = self.ca.items.pop(list_index)
        return res

    def _yaml_get_column(self, key):
        column = None
        sel_idx = None
        pre, post = key-1, key+1
        if pre in self.ca.items:
            sel_idx = pre
        elif post in self.ca.items:
            sel_idx = post
        else:
            # self.ca.items is not ordered
            for row_idx, k1 in enumerate(self):
                if row_idx >= key:
                    break
                if row_idx not in self.ca.items:
                    continue
                sel_idx = row_idx
        if sel_idx is not None:
            column = self._yaml_get_columnX(sel_idx)
        return column

    def _yaml_get_pre_comment(self):
        if self.ca.comment is None:
            pre_comments = []
            self.ca.comment = [None, pre_comments]
        else:
            pre_comments = self.ca.comment[1] = []
        return pre_comments


class CommentedMap(ordereddict, CommentedBase):
    __slots__ = [Comment.attrib, ]

    def _yaml_add_comment(self, comment, key=NoComment, value=NoComment):
        """values is set to key to indicate a value attachment of comment"""
        if key is not NoComment:
            self.yaml_key_comment_extend(key, comment)
            return
        if value is not NoComment:
            self.yaml_value_comment_extend(value, comment)
        else:
            self.ca.comment = comment

    def _yaml_add_eol_comment(self, comment, key):
        """add on the value line, with value specified by the key"""
        self._yaml_add_comment(comment, value=key)

    def _yaml_get_columnX(self, key):
        return self.ca.items[key][2].start_mark.column

    def _yaml_get_column(self, key):
        column = None
        sel_idx = None
        pre, post, last = None, None, None
        for x in self:
            if pre is not None and x != key:
                post = x
                break
            if x == key:
                pre = last
            last = x
        if pre in self.ca.items:
            sel_idx = pre
        elif post in self.ca.items:
            sel_idx = post
        else:
            # self.ca.items is not ordered
            for row_idx, k1 in enumerate(self):
                if k1 >= key:
                    break
                if k1 not in self.ca.items:
                    continue
                sel_idx = k1
        if sel_idx is not None:
            column = self._yaml_get_columnX(sel_idx)
        return column

    def _yaml_get_pre_comment(self):
        if self.ca.comment is None:
            pre_comments = []
            self.ca.comment = [None, pre_comments]
        else:
            pre_comments = self.ca.comment[1] = []
        return pre_comments

    def update(self, *vals, **kwds):
        try:
            ordereddict.update(self, *vals, **kwds)
        except TypeError:
            # probably a dict that is used
            for x in vals:
                self[x] = vals[x]

    def insert(self, pos, key, value, comment=None):
        """insert key value into given position
        attach comment if provided
        """
        ordereddict.insert(self, pos, key, value)
        if comment is not None:
            self.yaml_add_eol_comment(comment, key=key)

    def mlget(self, key, default=None, list_ok=False):
        """multi-level get that expects dicts within dicts"""
        if not isinstance(key, list):
            return self.get(key, default)
        # assume that the key is a list of recursively accessible dicts

        def get_one_level(key_list, level, d):
            if not list_ok:
                assert isinstance(d, dict)
            if level >= len(key_list):
                if level > len(key_list):
                    raise IndexError
                return d[key_list[level-1]]
            return get_one_level(key_list, level+1, d[key_list[level-1]])

        try:
            return get_one_level(key, 1, self)
        except KeyError:
            return default
        except (TypeError, IndexError):
            if not list_ok:
                raise
            return default
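mlget in action (illustrative only; this class was removed in favour of the _vendoring copy): a list-valued key walks nested mappings one level per element:

cm = CommentedMap()
cm['a'] = CommentedMap()
cm['a']['b'] = 1
assert cm.mlget(['a', 'b']) == 1
assert cm.mlget(['a', 'x'], default=42) == 42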
def __getitem__(self, key):
|
||||
try:
|
||||
return ordereddict.__getitem__(self, key)
|
||||
except KeyError:
|
||||
for merged in getattr(self, merge_attrib, []):
|
||||
if key in merged[1]:
|
||||
return merged[1][key]
|
||||
raise
|
||||
|
||||
def get(self, key, default=None):
|
||||
try:
|
||||
return self.__getitem__(key)
|
||||
except:
|
||||
return default
|
||||
|
||||
@property
|
||||
def merge(self):
|
||||
if not hasattr(self, merge_attrib):
|
||||
setattr(self, merge_attrib, [])
|
||||
return getattr(self, merge_attrib)
|
||||
|
||||
def add_yaml_merge(self, value):
|
||||
self.merge.extend(value)
|
||||
|
||||
|
||||
class CommentedOrderedMap(CommentedMap):
|
||||
__slots__ = [Comment.attrib, ]
|
||||
|
||||
|
||||
class CommentedSet(MutableSet, CommentedMap):
|
||||
__slots__ = [Comment.attrib, 'odict']
|
||||
|
||||
def __init__(self, values=None):
|
||||
self.odict = ordereddict()
|
||||
MutableSet.__init__(self)
|
||||
if values is not None:
|
||||
self |= values
|
||||
|
||||
def add(self, value):
|
||||
"""Add an element."""
|
||||
self.odict[value] = None
|
||||
|
||||
def discard(self, value):
|
||||
"""Remove an element. Do not raise an exception if absent."""
|
||||
del self.odict[value]
|
||||
|
||||
def __contains__(self, x):
|
||||
return x in self.odict
|
||||
|
||||
def __iter__(self):
|
||||
for x in self.odict:
|
||||
yield x
|
||||
|
||||
def __len__(self):
|
||||
return len(self.odict)
|
||||
|
||||
def __repr__(self):
|
||||
return 'set({0!r})'.format(self.odict.keys())
|
||||
123
lib/spack/external/ruamel/yaml/compat.py
vendored
123
lib/spack/external/ruamel/yaml/compat.py
vendored
@@ -1,123 +0,0 @@
|
||||
# coding: utf-8
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
# partially from package six by Benjamin Peterson
|
||||
|
||||
import sys
|
||||
import os
|
||||
import types
|
||||
|
||||
try:
|
||||
from ruamel.ordereddict import ordereddict
|
||||
except:
|
||||
try:
|
||||
from collections.abc import OrderedDict
|
||||
except ImportError:
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
from ordereddict import OrderedDict
|
||||
# to get the right name import ... as ordereddict doesn't do that
|
||||
|
||||
class ordereddict(OrderedDict):
|
||||
if not hasattr(OrderedDict, 'insert'):
|
||||
def insert(self, pos, key, value):
|
||||
if pos >= len(self):
|
||||
self[key] = value
|
||||
return
|
||||
od = ordereddict()
|
||||
od.update(self)
|
||||
for k in od:
|
||||
del self[k]
|
||||
for index, old_key in enumerate(od):
|
||||
if pos == index:
|
||||
self[key] = value
|
||||
self[old_key] = od[old_key]
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
def utf8(s):
|
||||
return s
|
||||
|
||||
def to_str(s):
|
||||
return s
|
||||
|
||||
def to_unicode(s):
|
||||
return s
|
||||
|
||||
else:
|
||||
def utf8(s):
|
||||
return s.encode('utf-8')
|
||||
|
||||
def to_str(s):
|
||||
return str(s)
|
||||
|
||||
def to_unicode(s):
|
||||
return unicode(s)
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
unichr = chr
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
unichr = unichr # to allow importing
|
||||
import StringIO
|
||||
StringIO = StringIO.StringIO
|
||||
import cStringIO
|
||||
BytesIO = cStringIO.StringIO
|
||||
|
||||
if PY3:
|
||||
builtins_module = 'builtins'
|
||||
else:
|
||||
builtins_module = '__builtin__'
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
return meta("NewBase", bases, {})
|
||||
|
||||
DBG_TOKEN = 1
|
||||
DBG_EVENT = 2
|
||||
DBG_NODE = 4
|
||||
|
||||
|
||||
_debug = None
|
||||
|
||||
|
||||
# used from yaml util when testing
|
||||
def dbg(val=None):
|
||||
global _debug
|
||||
if _debug is None:
|
||||
# set to true or false
|
||||
_debug = os.environ.get('YAMLDEBUG')
|
||||
if _debug is None:
|
||||
_debug = 0
|
||||
else:
|
||||
_debug = int(_debug)
|
||||
if val is None:
|
||||
return _debug
|
||||
return _debug & val
|
||||
|
||||
|
||||
def nprint(*args, **kw):
|
||||
if dbg:
|
||||
print(*args, **kw)
|
||||
182
lib/spack/external/ruamel/yaml/composer.py
vendored
182
lib/spack/external/ruamel/yaml/composer.py
vendored
@@ -1,182 +0,0 @@
# coding: utf-8

from __future__ import absolute_import
from __future__ import print_function


try:
    from .error import MarkedYAMLError
    from .compat import utf8
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.error import MarkedYAMLError
    from ruamel.yaml.compat import utf8

from ruamel.yaml.events import (
    StreamStartEvent, StreamEndEvent, MappingStartEvent, MappingEndEvent,
    SequenceStartEvent, SequenceEndEvent, AliasEvent, ScalarEvent,
)
from ruamel.yaml.nodes import (
    MappingNode, ScalarNode, SequenceNode,
)

__all__ = ['Composer', 'ComposerError']


class ComposerError(MarkedYAMLError):
    pass


class Composer(object):
    def __init__(self):
        self.anchors = {}

    def check_node(self):
        # Drop the STREAM-START event.
        if self.check_event(StreamStartEvent):
            self.get_event()

        # If there are more documents available?
        return not self.check_event(StreamEndEvent)

    def get_node(self):
        # Get the root node of the next document.
        if not self.check_event(StreamEndEvent):
            return self.compose_document()

    def get_single_node(self):
        # Drop the STREAM-START event.
        self.get_event()

        # Compose a document if the stream is not empty.
        document = None
        if not self.check_event(StreamEndEvent):
            document = self.compose_document()

        # Ensure that the stream contains no more documents.
        if not self.check_event(StreamEndEvent):
            event = self.get_event()
            raise ComposerError(
                "expected a single document in the stream",
                document.start_mark, "but found another document",
                event.start_mark)

        # Drop the STREAM-END event.
        self.get_event()

        return document

    def compose_document(self):
        # Drop the DOCUMENT-START event.
        self.get_event()

        # Compose the root node.
        node = self.compose_node(None, None)

        # Drop the DOCUMENT-END event.
        self.get_event()

        self.anchors = {}
        return node

    def compose_node(self, parent, index):
        if self.check_event(AliasEvent):
            event = self.get_event()
            alias = event.anchor
            if alias not in self.anchors:
                raise ComposerError(
                    None, None, "found undefined alias %r"
                    % utf8(alias), event.start_mark)
            return self.anchors[alias]
        event = self.peek_event()
        anchor = event.anchor
        if anchor is not None:  # have an anchor
            if anchor in self.anchors:
                raise ComposerError(
                    "found duplicate anchor %r; first occurrence"
                    % utf8(anchor), self.anchors[anchor].start_mark,
                    "second occurrence", event.start_mark)
        self.descend_resolver(parent, index)
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.ascend_resolver()
        return node

    def compose_scalar_node(self, anchor):
        event = self.get_event()
        tag = event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(tag, event.value,
                          event.start_mark, event.end_mark, style=event.style,
                          comment=event.comment)
        if anchor is not None:
            self.anchors[anchor] = node
        return node

    def compose_sequence_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(tag, [],
                            start_event.start_mark, None,
                            flow_style=start_event.flow_style,
                            comment=start_event.comment, anchor=anchor)
        if anchor is not None:
            self.anchors[anchor] = node
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end_event = self.get_event()
        if node.flow_style is True and end_event.comment is not None:
            if node.comment is not None:
                print('Warning: unexpected end_event comment in sequence '
                      'node {0}'.format(node.flow_style))
            node.comment = end_event.comment
        node.end_mark = end_event.end_mark
        self.check_end_doc_comment(end_event, node)
        return node

    def compose_mapping_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(tag, [],
                           start_event.start_mark, None,
                           flow_style=start_event.flow_style,
                           comment=start_event.comment, anchor=anchor)
        if anchor is not None:
            self.anchors[anchor] = node
        while not self.check_event(MappingEndEvent):
            # key_event = self.peek_event()
            item_key = self.compose_node(node, None)
            # if item_key in node.value:
            #     raise ComposerError("while composing a mapping",
            #                         start_event.start_mark,
            #                         "found duplicate key", key_event.start_mark)
            item_value = self.compose_node(node, item_key)
            # node.value[item_key] = item_value
            node.value.append((item_key, item_value))
        end_event = self.get_event()
        if node.flow_style is True and end_event.comment is not None:
            node.comment = end_event.comment
        node.end_mark = end_event.end_mark
        self.check_end_doc_comment(end_event, node)
        return node

    def check_end_doc_comment(self, end_event, node):
        if end_event.comment and end_event.comment[1]:
            # pre comments on an end_event, no following to move to
            if node.comment is None:
                node.comment = [None, None]
            assert not isinstance(node, ScalarEvent)
            # this is a post comment on a mapping node, add as third element
            # in the list
            node.comment.append(end_event.comment[1])
            end_event.comment[1] = None
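Composer.anchors is why aliases are cheap: compose_node returns the very node object already stored for the anchor instead of copying it. A small sketch, assuming the vendored package is importable as ruamel.yaml (the document text is invented for illustration):

import ruamel.yaml

# &a records the sequence node in Composer.anchors; *a looks it up
tree = ruamel.yaml.compose("base: &a [1, 2]\nref: *a\n")
(key1, val1), (key2, val2) = tree.value
print(val1 is val2)   # True: the alias resolves to the anchored node itself
print(val1.tag)       # tag:yaml.org,2002:seq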
@@ -1,9 +0,0 @@
# coding: utf-8

import warnings

from ruamel.yaml.util import configobj_walker as new_configobj_walker


def configobj_walker(cfg):
    warnings.warn("configobj_walker has moved to ruamel.yaml.util, please update your code")
    return new_configobj_walker(cfg)
1172 lib/spack/external/ruamel/yaml/constructor.py (vendored): diff suppressed, file too large
102 lib/spack/external/ruamel/yaml/dumper.py (vendored)
@@ -1,102 +0,0 @@
# coding: utf-8

from __future__ import absolute_import

__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']

try:
    from .emitter import *  # NOQA
    from .serializer import *  # NOQA
    from .representer import *  # NOQA
    from .resolver import *  # NOQA
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.emitter import *  # NOQA
    from ruamel.yaml.serializer import *  # NOQA
    from ruamel.yaml.representer import *  # NOQA
    from ruamel.yaml.resolver import *  # NOQA


class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
    def __init__(self, stream,
                 default_style=None, default_flow_style=None,
                 canonical=None, indent=None, width=None,
                 allow_unicode=None, line_break=None,
                 encoding=None, explicit_start=None, explicit_end=None,
                 version=None, tags=None, block_seq_indent=None,
                 top_level_colon_align=None, prefix_colon=None):
        Emitter.__init__(self, stream, canonical=canonical,
                         indent=indent, width=width,
                         allow_unicode=allow_unicode, line_break=line_break,
                         block_seq_indent=block_seq_indent)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end,
                            version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                             default_flow_style=default_flow_style)
        Resolver.__init__(self)


class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
    def __init__(self, stream,
                 default_style=None, default_flow_style=None,
                 canonical=None, indent=None, width=None,
                 allow_unicode=None, line_break=None,
                 encoding=None, explicit_start=None, explicit_end=None,
                 version=None, tags=None, block_seq_indent=None,
                 top_level_colon_align=None, prefix_colon=None):
        Emitter.__init__(self, stream, canonical=canonical,
                         indent=indent, width=width,
                         allow_unicode=allow_unicode, line_break=line_break,
                         block_seq_indent=block_seq_indent)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end,
                            version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                                 default_flow_style=default_flow_style)
        Resolver.__init__(self)


class Dumper(Emitter, Serializer, Representer, Resolver):
    def __init__(self, stream,
                 default_style=None, default_flow_style=None,
                 canonical=None, indent=None, width=None,
                 allow_unicode=None, line_break=None,
                 encoding=None, explicit_start=None, explicit_end=None,
                 version=None, tags=None, block_seq_indent=None,
                 top_level_colon_align=None, prefix_colon=None):
        Emitter.__init__(self, stream, canonical=canonical,
                         indent=indent, width=width,
                         allow_unicode=allow_unicode, line_break=line_break,
                         block_seq_indent=block_seq_indent)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end,
                            version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                             default_flow_style=default_flow_style)
        Resolver.__init__(self)


class RoundTripDumper(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
    def __init__(self, stream,
                 default_style=None, default_flow_style=None,
                 canonical=None, indent=None, width=None,
                 allow_unicode=None, line_break=None,
                 encoding=None, explicit_start=None, explicit_end=None,
                 version=None, tags=None, block_seq_indent=None,
                 top_level_colon_align=None, prefix_colon=None):
        Emitter.__init__(self, stream, canonical=canonical,
                         indent=indent, width=width,
                         allow_unicode=allow_unicode, line_break=line_break,
                         block_seq_indent=block_seq_indent,
                         top_level_colon_align=top_level_colon_align,
                         prefix_colon=prefix_colon)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end,
                            version=version, tags=tags)
        RoundTripRepresenter.__init__(self, default_style=default_style,
                                      default_flow_style=default_flow_style)
        VersionedResolver.__init__(self)
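All four dumpers wire the same Emitter and Serializer pipeline to a different representer/resolver pair, so switching output behavior is just a matter of passing a different Dumper class to the top-level functions. A hedged sketch, assuming the vendored ruamel.yaml is importable (the data values are made up):

import ruamel.yaml

data = {'name': 'spack', 'versions': [1, 2]}

# SafeDumper restricts output to basic YAML tags; RoundTripDumper
# uses the comment-preserving representer and versioned resolver.
print(ruamel.yaml.dump(data, Dumper=ruamel.yaml.SafeDumper,
                       default_flow_style=False))
print(ruamel.yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper))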
1282 lib/spack/external/ruamel/yaml/emitter.py (vendored): diff suppressed, file too large
85 lib/spack/external/ruamel/yaml/error.py (vendored)
@@ -1,85 +0,0 @@
# coding: utf-8

from __future__ import absolute_import

__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']

try:
    from .compat import utf8
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.compat import utf8


class Mark(object):
    def __init__(self, name, index, line, column, buffer, pointer):
        self.name = name
        self.index = index
        self.line = line
        self.column = column
        self.buffer = buffer
        self.pointer = pointer

    def get_snippet(self, indent=4, max_length=75):
        if self.buffer is None:
            return None
        head = ''
        start = self.pointer
        while (start > 0 and
               self.buffer[start-1] not in u'\0\r\n\x85\u2028\u2029'):
            start -= 1
            if self.pointer-start > max_length/2-1:
                head = ' ... '
                start += 5
                break
        tail = ''
        end = self.pointer
        while (end < len(self.buffer) and
               self.buffer[end] not in u'\0\r\n\x85\u2028\u2029'):
            end += 1
            if end-self.pointer > max_length/2-1:
                tail = ' ... '
                end -= 5
                break
        snippet = utf8(self.buffer[start:end])
        return ' '*indent + head + snippet + tail + '\n' \
            + ' '*(indent+self.pointer-start+len(head)) + '^'

    def __str__(self):
        snippet = self.get_snippet()
        where = " in \"%s\", line %d, column %d" \
            % (self.name, self.line+1, self.column+1)
        if snippet is not None:
            where += ":\n"+snippet
        return where


class YAMLError(Exception):
    pass


class MarkedYAMLError(YAMLError):
    def __init__(self, context=None, context_mark=None,
                 problem=None, problem_mark=None, note=None):
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note

    def __str__(self):
        lines = []
        if self.context is not None:
            lines.append(self.context)
        if self.context_mark is not None \
            and (self.problem is None or self.problem_mark is None or
                 self.context_mark.name != self.problem_mark.name or
                 self.context_mark.line != self.problem_mark.line or
                 self.context_mark.column != self.problem_mark.column):
            lines.append(str(self.context_mark))
        if self.problem is not None:
            lines.append(self.problem)
        if self.problem_mark is not None:
            lines.append(str(self.problem_mark))
        if self.note is not None:
            lines.append(self.note)
        return '\n'.join(lines)
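Mark.get_snippet is what produces the caret line under the offending column in YAML error messages. A sketch constructing a Mark by hand, assuming the vendored module imports as ruamel.yaml.error (all argument values here are illustrative):

from ruamel.yaml.error import Mark

buf = "key: [1, 2"
# point at the position where a flow sequence was left unclosed
mark = Mark('<unicode string>', 9, 0, 9, buf, 9)
print(str(mark))
# roughly:
#  in "<unicode string>", line 1, column 10:
#     key: [1, 2
#              ^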
106 lib/spack/external/ruamel/yaml/events.py (vendored)
@@ -1,106 +0,0 @@
# coding: utf-8

# Abstract classes.


def CommentCheck():
    pass


class Event(object):
    def __init__(self, start_mark=None, end_mark=None, comment=CommentCheck):
        self.start_mark = start_mark
        self.end_mark = end_mark
        # assert comment is not CommentCheck
        if comment is CommentCheck:
            comment = None
        self.comment = comment

    def __repr__(self):
        attributes = [key for key in ['anchor', 'tag', 'implicit', 'value',
                                      'flow_style', 'style']
                      if hasattr(self, key)]
        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
                               for key in attributes])
        if self.comment not in [None, CommentCheck]:
            arguments += ', comment={!r}'.format(self.comment)
        return '%s(%s)' % (self.__class__.__name__, arguments)


class NodeEvent(Event):
    def __init__(self, anchor, start_mark=None, end_mark=None, comment=None):
        Event.__init__(self, start_mark, end_mark, comment)
        self.anchor = anchor


class CollectionStartEvent(NodeEvent):
    def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
                 flow_style=None, comment=None):
        Event.__init__(self, start_mark, end_mark, comment)
        self.anchor = anchor
        self.tag = tag
        self.implicit = implicit
        self.flow_style = flow_style


class CollectionEndEvent(Event):
    pass


# Implementations.


class StreamStartEvent(Event):
    def __init__(self, start_mark=None, end_mark=None, encoding=None,
                 comment=None):
        Event.__init__(self, start_mark, end_mark, comment)
        self.encoding = encoding


class StreamEndEvent(Event):
    pass


class DocumentStartEvent(Event):
    def __init__(self, start_mark=None, end_mark=None,
                 explicit=None, version=None, tags=None, comment=None):
        Event.__init__(self, start_mark, end_mark, comment)
        self.explicit = explicit
        self.version = version
        self.tags = tags


class DocumentEndEvent(Event):
    def __init__(self, start_mark=None, end_mark=None,
                 explicit=None, comment=None):
        Event.__init__(self, start_mark, end_mark, comment)
        self.explicit = explicit


class AliasEvent(NodeEvent):
    pass


class ScalarEvent(NodeEvent):
    def __init__(self, anchor, tag, implicit, value,
                 start_mark=None, end_mark=None, style=None, comment=None):
        NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
        self.tag = tag
        self.implicit = implicit
        self.value = value
        self.style = style


class SequenceStartEvent(CollectionStartEvent):
    pass


class SequenceEndEvent(CollectionEndEvent):
    pass


class MappingStartEvent(CollectionStartEvent):
    pass


class MappingEndEvent(CollectionEndEvent):
    pass
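These event classes are exactly what the top-level parse() function yields; each carries start/end marks plus the optional comment slot the round-trip machinery uses. A short sketch, assuming the vendored package imports as ruamel.yaml:

import ruamel.yaml

for event in ruamel.yaml.parse("- a\n- b\n"):
    print(event)
# StreamStartEvent(), DocumentStartEvent(), SequenceStartEvent(...),
# ScalarEvent(anchor=None, tag=None, implicit=(True, False), value='a'), ...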
61 lib/spack/external/ruamel/yaml/loader.py (vendored)
@@ -1,61 +0,0 @@
# coding: utf-8

from __future__ import absolute_import

__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']

try:
    from .reader import *  # NOQA
    from .scanner import *  # NOQA
    from .parser import *  # NOQA
    from .composer import *  # NOQA
    from .constructor import *  # NOQA
    from .resolver import *  # NOQA
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.reader import *  # NOQA
    from ruamel.yaml.scanner import *  # NOQA
    from ruamel.yaml.parser import *  # NOQA
    from ruamel.yaml.composer import *  # NOQA
    from ruamel.yaml.constructor import *  # NOQA
    from ruamel.yaml.resolver import *  # NOQA


class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        BaseConstructor.__init__(self)
        BaseResolver.__init__(self)


class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        SafeConstructor.__init__(self)
        Resolver.__init__(self)


class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        Reader.__init__(self, stream)
        Scanner.__init__(self)
        Parser.__init__(self)
        Composer.__init__(self)
        Constructor.__init__(self)
        Resolver.__init__(self)


class RoundTripLoader(Reader, RoundTripScanner, RoundTripParser, Composer,
                      RoundTripConstructor, VersionedResolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        Reader.__init__(self, stream)
        RoundTripScanner.__init__(self)
        RoundTripParser.__init__(self)
        Composer.__init__(self)
        RoundTripConstructor.__init__(self, preserve_quotes=preserve_quotes)
        VersionedResolver.__init__(self, version)
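The loaders differ only in which constructor/resolver mixins they stack on the shared Reader/Scanner/Parser/Composer pipeline, and that determines what Python objects come back: SafeLoader yields plain builtins, while RoundTripLoader yields comment-preserving wrappers that round_trip_dump can re-emit. A sketch, assuming the vendored package imports as ruamel.yaml:

import ruamel.yaml

text = "a: 1  # a comment\n"
plain = ruamel.yaml.safe_load(text)       # SafeLoader: builtin dict
rt = ruamel.yaml.round_trip_load(text)    # RoundTripLoader: comment-aware mapping
print(type(plain))                        # <class 'dict'>
print(ruamel.yaml.round_trip_dump(rt))    # the comment survives the round trip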
395 lib/spack/external/ruamel/yaml/main.py (vendored)
@@ -1,395 +0,0 @@
# coding: utf-8

from __future__ import absolute_import


from ruamel.yaml.error import *  # NOQA

from ruamel.yaml.tokens import *  # NOQA
from ruamel.yaml.events import *  # NOQA
from ruamel.yaml.nodes import *  # NOQA

from ruamel.yaml.loader import *  # NOQA
from ruamel.yaml.dumper import *  # NOQA
from ruamel.yaml.compat import StringIO, BytesIO, with_metaclass, PY3

# import io


def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    try:
        while loader.check_token():
            yield loader.get_token()
    finally:
        loader.dispose()


def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    try:
        while loader.check_event():
            yield loader.get_event()
    finally:
        loader.dispose()


def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_node()
    finally:
        loader.dispose()


def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    try:
        while loader.check_node():
            yield loader.get_node()
    finally:
        loader.dispose()


def load(stream, Loader=Loader, version=None, preserve_quotes=None):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream, version, preserve_quotes=preserve_quotes)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()


def load_all(stream, Loader=Loader, version=None):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream, version)
    try:
        while loader.check_data():
            yield loader.get_data()
    finally:
        loader.dispose()


def safe_load(stream, version=None):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, SafeLoader, version)


def safe_load_all(stream, version=None):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, SafeLoader, version)


def round_trip_load(stream, version=None, preserve_quotes=None):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, RoundTripLoader, version, preserve_quotes=preserve_quotes)


def round_trip_load_all(stream, version=None, preserve_quotes=None):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, RoundTripLoader, version, preserve_quotes=preserve_quotes)


def emit(events, stream=None, Dumper=Dumper,
         canonical=None, indent=None, width=None,
         allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        stream = StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
                    allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            dumper.emit(event)
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

enc = None if PY3 else 'utf-8'


def serialize_all(nodes, stream=None, Dumper=Dumper,
                  canonical=None, indent=None, width=None,
                  allow_unicode=None, line_break=None,
                  encoding=enc, explicit_start=None, explicit_end=None,
                  version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = StringIO()
        else:
            stream = BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
                    allow_unicode=allow_unicode, line_break=line_break,
                    encoding=encoding, version=version, tags=tags,
                    explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()


def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)


def dump_all(documents, stream=None, Dumper=Dumper,
             default_style=None, default_flow_style=None,
             canonical=None, indent=None, width=None,
             allow_unicode=None, line_break=None,
             encoding=enc, explicit_start=None, explicit_end=None,
             version=None, tags=None, block_seq_indent=None,
             top_level_colon_align=None, prefix_colon=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if top_level_colon_align is True:
        top_level_colon_align = max([len(str(x)) for x in documents[0]])
    if stream is None:
        if encoding is None:
            stream = StringIO()
        else:
            stream = BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
                    default_flow_style=default_flow_style,
                    canonical=canonical, indent=indent, width=width,
                    allow_unicode=allow_unicode, line_break=line_break,
                    encoding=encoding, explicit_start=explicit_start,
                    explicit_end=explicit_end, version=version,
                    tags=tags, block_seq_indent=block_seq_indent,
                    top_level_colon_align=top_level_colon_align, prefix_colon=prefix_colon,
                    )
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()


def dump(data, stream=None, Dumper=Dumper,
         default_style=None, default_flow_style=None,
         canonical=None, indent=None, width=None,
         allow_unicode=None, line_break=None,
         encoding=enc, explicit_start=None, explicit_end=None,
         version=None, tags=None, block_seq_indent=None):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.

    default_style ∈ None, '', '"', "'", '|', '>'

    """
    return dump_all([data], stream, Dumper=Dumper,
                    default_style=default_style,
                    default_flow_style=default_flow_style,
                    canonical=canonical,
                    indent=indent, width=width,
                    allow_unicode=allow_unicode,
                    line_break=line_break,
                    encoding=encoding, explicit_start=explicit_start,
                    explicit_end=explicit_end,
                    version=version, tags=tags, block_seq_indent=block_seq_indent)


def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)


def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)


def round_trip_dump(data, stream=None, Dumper=RoundTripDumper,
                    default_style=None, default_flow_style=None,
                    canonical=None, indent=None, width=None,
                    allow_unicode=None, line_break=None,
                    encoding=enc, explicit_start=None, explicit_end=None,
                    version=None, tags=None, block_seq_indent=None,
                    top_level_colon_align=None, prefix_colon=None):
    allow_unicode = True if allow_unicode is None else allow_unicode
    return dump_all([data], stream, Dumper=Dumper,
                    default_style=default_style,
                    default_flow_style=default_flow_style,
                    canonical=canonical,
                    indent=indent, width=width,
                    allow_unicode=allow_unicode,
                    line_break=line_break,
                    encoding=encoding, explicit_start=explicit_start,
                    explicit_end=explicit_end,
                    version=version, tags=tags, block_seq_indent=block_seq_indent,
                    top_level_colon_align=top_level_colon_align, prefix_colon=prefix_colon)


def add_implicit_resolver(tag, regexp, first=None,
                          Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)


def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)


def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)


def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)


def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)


def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Multi-representer is a function accepting a Dumper instance
    and an instance of the given data type or subtype
    and producing the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)


class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)


class YAMLObject(with_metaclass(YAMLObjectMetaclass)):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """
    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                                            flow_style=cls.yaml_flow_style)
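YAMLObjectMetaclass registers the constructor and representer as a side effect of class creation, so subclassing YAMLObject with a non-None yaml_tag is enough to make a type loadable and dumpable. A hedged sketch, assuming the vendored package imports as ruamel.yaml (the Monster class and its fields are invented, and the exact dump formatting may differ):

import ruamel.yaml

class Monster(ruamel.yaml.YAMLObject):
    # registration happens in YAMLObjectMetaclass.__init__ as soon as
    # this class body defines a non-None yaml_tag
    yaml_tag = u'!Monster'

    def __init__(self, name, hp):
        self.name = name
        self.hp = hp

m = ruamel.yaml.load("!Monster {name: Dragon, hp: 50}")
print(m.name, m.hp)          # Dragon 50
print(ruamel.yaml.dump(m))   # something like: !Monster {hp: 50, name: Dragon}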
86 lib/spack/external/ruamel/yaml/nodes.py (vendored)
@@ -1,86 +0,0 @@
# coding: utf-8

from __future__ import print_function


class Node(object):
    def __init__(self, tag, value, start_mark, end_mark, comment=None):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.comment = comment
        self.anchor = None

    def __repr__(self):
        value = self.value
        # if isinstance(value, list):
        #     if len(value) == 0:
        #         value = '<empty>'
        #     elif len(value) == 1:
        #         value = '<1 item>'
        #     else:
        #         value = '<%d items>' % len(value)
        # else:
        #     if len(value) > 75:
        #         value = repr(value[:70]+u' ... ')
        #     else:
        #         value = repr(value)
        value = repr(value)
        return '%s(tag=%r, value=%s)' % (self.__class__.__name__,
                                         self.tag, value)

    def dump(self, indent=0):
        if isinstance(self.value, basestring):
            print('{0}{1}(tag={!r}, value={!r})'.format(
                ' ' * indent, self.__class__.__name__, self.tag, self.value))
            if self.comment:
                print('    {0}comment: {1})'.format(
                    ' ' * indent, self.comment))
            return
        print('{0}{1}(tag={!r})'.format(
            ' ' * indent, self.__class__.__name__, self.tag))
        if self.comment:
            print('    {0}comment: {1})'.format(
                ' ' * indent, self.comment))
        for v in self.value:
            if isinstance(v, tuple):
                for v1 in v:
                    v1.dump(indent+1)
            elif isinstance(v, Node):
                v.dump(indent+1)
            else:
                print('Node value type?', type(v))


class ScalarNode(Node):
    """
    styles:
      ? -> set() ? key, no value
      " -> double quoted
      ' -> single quoted
      | -> literal style
      > -> folded style
    """
    id = 'scalar'

    def __init__(self, tag, value, start_mark=None, end_mark=None, style=None,
                 comment=None):
        Node.__init__(self, tag, value, start_mark, end_mark, comment=comment)
        self.style = style


class CollectionNode(Node):
    def __init__(self, tag, value, start_mark=None, end_mark=None,
                 flow_style=None, comment=None, anchor=None):
        Node.__init__(self, tag, value, start_mark, end_mark, comment=comment)
        self.flow_style = flow_style
        self.anchor = anchor


class SequenceNode(CollectionNode):
    id = 'sequence'


class MappingNode(CollectionNode):
    id = 'mapping'
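The node ids ('scalar', 'sequence', 'mapping') are how the resolver and constructor dispatch on node kind. A small sketch inspecting a composed tree, assuming the vendored package imports as ruamel.yaml:

import ruamel.yaml

root = ruamel.yaml.compose("name: spack\n")
print(root.id)                        # mapping
key, val = root.value[0]              # MappingNode.value is (key, value) node pairs
print(key.id, key.tag, key.value)     # scalar tag:yaml.org,2002:str name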
675 lib/spack/external/ruamel/yaml/parser.py (vendored)
@@ -1,675 +0,0 @@
# coding: utf-8

from __future__ import absolute_import

# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream            ::= STREAM-START implicit_document? explicit_document*
#                       STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
#                       ALIAS
#                       | properties (block_content |
#                       indentless_block_sequence)?
#                       | block_content
#                       | indentless_block_sequence
# block_node        ::= ALIAS
#                       | properties block_content?
#                       | block_content
# flow_node         ::= ALIAS
#                       | properties flow_content?
#                       | flow_content
# properties        ::= TAG ANCHOR? | ANCHOR TAG?
# block_content     ::= block_collection | flow_collection | SCALAR
# flow_content      ::= flow_collection | SCALAR
# block_collection  ::= block_sequence | block_mapping
# flow_collection   ::= flow_sequence | flow_mapping
# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
#                       BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping     ::= BLOCK-MAPPING_START
#                       ((KEY block_node_or_indentless_sequence?)?
#                       (VALUE block_node_or_indentless_sequence?)?)*
#                       BLOCK-END
# flow_sequence     ::= FLOW-SEQUENCE-START
#                       (flow_sequence_entry FLOW-ENTRY)*
#                       flow_sequence_entry?
#                       FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping      ::= FLOW-MAPPING-START
#                       (flow_mapping_entry FLOW-ENTRY)*
#                       flow_mapping_entry?
#                       FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START
#               BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START
#                  FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR
#               BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START
#               FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                        FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                       FLOW-MAPPING-START KEY }

__all__ = ['Parser', 'RoundTripParser', 'ParserError']

# need to have full path, as pkg_resources tries to load parser.py in __init__.py
# only to not do anything with the package afterwards
# and for Jython too
from ruamel.yaml.error import MarkedYAMLError  # NOQA
from ruamel.yaml.tokens import *  # NOQA
from ruamel.yaml.events import *  # NOQA
from ruamel.yaml.scanner import *  # NOQA
from ruamel.yaml.compat import utf8  # NOQA


class ParserError(MarkedYAMLError):
    pass


class Parser(object):
    # Since writing a recursive-descendant parser is a straightforward task, we
    # do not give many comments here.

    DEFAULT_TAGS = {
        u'!': u'!',
        u'!!': u'tag:yaml.org,2002:',
    }

    def __init__(self):
        self.current_event = None
        self.yaml_version = None
        self.tag_handles = {}
        self.states = []
        self.marks = []
        self.state = self.parse_stream_start

    def dispose(self):
        # Reset the state attributes (to clear self-references)
        self.states = []
        self.state = None

    def check_event(self, *choices):
        # Check the type of the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        if self.current_event is not None:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.current_event, choice):
                    return True
        return False

    def peek_event(self):
        # Get the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        return self.current_event

    def get_event(self):
        # Get the next event and proceed further.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        value = self.current_event
        self.current_event = None
        return value

    # stream    ::= STREAM-START implicit_document? explicit_document*
    #               STREAM-END
    # implicit_document ::= block_node DOCUMENT-END*
    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

    def parse_stream_start(self):
        # Parse the stream start.
        token = self.get_token()
        token.move_comment(self.peek_token())
        event = StreamStartEvent(token.start_mark, token.end_mark,
                                 encoding=token.encoding)

        # Prepare the next state.
        self.state = self.parse_implicit_document_start

        return event

    def parse_implicit_document_start(self):
        # Parse an implicit document.
        if not self.check_token(DirectiveToken, DocumentStartToken,
                                StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark,
                                       explicit=False)

            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node

            return event

        else:
            return self.parse_document_start()

    def parse_document_start(self):
        # Parse any extra document end indicators.
        while self.check_token(DocumentEndToken):
            self.get_token()

        # Parse an explicit document.
        if not self.check_token(StreamEndToken):
            token = self.peek_token()
            start_mark = token.start_mark
            version, tags = self.process_directives()
            if not self.check_token(DocumentStartToken):
                raise ParserError(None, None,
                                  "expected '<document start>', but found %r"
                                  % self.peek_token().id,
                                  self.peek_token().start_mark)
            token = self.get_token()
            end_mark = token.end_mark
            event = DocumentStartEvent(
                start_mark, end_mark,
                explicit=True, version=version, tags=tags)
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark,
                                   comment=token.comment)
            assert not self.states
            assert not self.marks
            self.state = None
        return event

    def parse_document_end(self):
        # Parse the document end.
        token = self.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.check_token(DocumentEndToken):
            token = self.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)

        # Prepare the next state.
        self.state = self.parse_document_start

        return event

    def parse_document_content(self):
        if self.check_token(
                DirectiveToken,
                DocumentStartToken, DocumentEndToken, StreamEndToken):
            event = self.process_empty_scalar(self.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()

    def process_directives(self):
        self.yaml_version = None
        self.tag_handles = {}
        while self.check_token(DirectiveToken):
            token = self.get_token()
            if token.name == u'YAML':
                if self.yaml_version is not None:
                    raise ParserError(
                        None, None,
                        "found duplicate YAML directive", token.start_mark)
                major, minor = token.value
                if major != 1:
                    raise ParserError(
                        None, None,
                        "found incompatible YAML document (version 1.* is "
                        "required)",
                        token.start_mark)
                self.yaml_version = token.value
            elif token.name == u'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(None, None,
                                      "duplicate tag handle %r" % utf8(handle),
                                      token.start_mark)
                self.tag_handles[handle] = prefix
        if self.tag_handles:
            value = self.yaml_version, self.tag_handles.copy()
        else:
            value = self.yaml_version, None
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value

    # block_node_or_indentless_sequence ::= ALIAS
    #               | properties (block_content | indentless_block_sequence)?
    #               | block_content
    #               | indentless_block_sequence
    # block_node    ::= ALIAS
    #                   | properties block_content?
    #                   | block_content
    # flow_node     ::= ALIAS
    #                   | properties flow_content?
    #                   | flow_content
    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
    # block_content     ::= block_collection | flow_collection | SCALAR
    # flow_content      ::= flow_collection | SCALAR
    # block_collection  ::= block_sequence | block_mapping
    # flow_collection   ::= flow_sequence | flow_mapping

    def parse_block_node(self):
        return self.parse_node(block=True)

    def parse_flow_node(self):
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        return self.parse_node(block=True, indentless_sequence=True)

    def transform_tag(self, handle, suffix):
        return self.tag_handles[handle] + suffix

    def parse_node(self, block=False, indentless_sequence=False):
        if self.check_token(AliasToken):
            token = self.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)
            self.state = self.states.pop()
        else:
            anchor = None
            tag = None
            start_mark = end_mark = tag_mark = None
            if self.check_token(AnchorToken):
                token = self.get_token()
                start_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
                if self.check_token(TagToken):
                    token = self.get_token()
                    tag_mark = token.start_mark
                    end_mark = token.end_mark
                    tag = token.value
            elif self.check_token(TagToken):
                token = self.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
                if self.check_token(AnchorToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    anchor = token.value
            if tag is not None:
                handle, suffix = tag
                if handle is not None:
                    if handle not in self.tag_handles:
                        raise ParserError(
                            "while parsing a node", start_mark,
                            "found undefined tag handle %r" % utf8(handle),
                            tag_mark)
                    tag = self.transform_tag(handle, suffix)
                else:
                    tag = suffix
            # if tag == u'!':
            #     raise ParserError("while parsing a node", start_mark,
            #             "found non-specific tag '!'", tag_mark,
            #             "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
            #             and share your opinion.")
            if start_mark is None:
                start_mark = end_mark = self.peek_token().start_mark
            event = None
            implicit = (tag is None or tag == u'!')
            if indentless_sequence and self.check_token(BlockEntryToken):
                end_mark = self.peek_token().end_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                                           start_mark, end_mark)
                self.state = self.parse_indentless_sequence_entry
            else:
                if self.check_token(ScalarToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    if (token.plain and tag is None) or tag == u'!':
                        implicit = (True, False)
                    elif tag is None:
                        implicit = (False, True)
                    else:
                        implicit = (False, False)
                    event = ScalarEvent(
                        anchor, tag, implicit, token.value,
                        start_mark, end_mark, style=token.style,
                        comment=token.comment
                    )
                    self.state = self.states.pop()
                elif self.check_token(FlowSequenceStartToken):
                    end_mark = self.peek_token().end_mark
                    event = SequenceStartEvent(
                        anchor, tag, implicit,
                        start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_sequence_first_entry
                elif self.check_token(FlowMappingStartToken):
                    end_mark = self.peek_token().end_mark
                    event = MappingStartEvent(
                        anchor, tag, implicit,
                        start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_mapping_first_key
                elif block and self.check_token(BlockSequenceStartToken):
                    end_mark = self.peek_token().start_mark
                    # should inserting the comment be dependent on the
                    # indentation?
                    pt = self.peek_token()
                    comment = pt.comment
                    # print('pt0', type(pt))
                    if comment is None or comment[1] is None:
                        comment = pt.split_comment()
                    # print('pt1', comment)
                    event = SequenceStartEvent(
                        anchor, tag, implicit, start_mark, end_mark,
                        flow_style=False,
                        comment=comment,
                    )
                    self.state = self.parse_block_sequence_first_entry
                elif block and self.check_token(BlockMappingStartToken):
                    end_mark = self.peek_token().start_mark
                    comment = self.peek_token().comment
                    event = MappingStartEvent(
                        anchor, tag, implicit, start_mark, end_mark,
                        flow_style=False, comment=comment)
                    self.state = self.parse_block_mapping_first_key
                elif anchor is not None or tag is not None:
                    # Empty scalars are allowed even if a tag or an anchor is
                    # specified.
                    event = ScalarEvent(anchor, tag, (implicit, False), u'',
                                        start_mark, end_mark)
                    self.state = self.states.pop()
                else:
                    if block:
                        node = 'block'
                    else:
                        node = 'flow'
                    token = self.peek_token()
                    raise ParserError(
                        "while parsing a %s node" % node, start_mark,
                        "expected the node content, but found %r" % token.id,
                        token.start_mark)
        return event

    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
    #                    BLOCK-END

    def parse_block_sequence_first_entry(self):
        token = self.get_token()
        # move any comment from start token
        # token.move_comment(self.peek_token())
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()

    def parse_block_sequence_entry(self):
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            token.move_comment(self.peek_token())
            if not self.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError(
                "while parsing a block collection", self.marks[-1],
                "expected <block end>, but found %r" %
                token.id, token.start_mark)
        token = self.get_token()  # BlockEndToken
        event = SequenceEndEvent(token.start_mark, token.end_mark,
                                 comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    # indentless_sequence?
    # sequence:
    # - entry
    #   - nested

    def parse_indentless_sequence_entry(self):
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            token.move_comment(self.peek_token())
            if not self.check_token(BlockEntryToken,
                                    KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.peek_token()
        event = SequenceEndEvent(token.start_mark, token.start_mark,
                                 comment=token.comment)
        self.state = self.states.pop()
        return event

    # block_mapping ::= BLOCK-MAPPING_START
    #                   ((KEY block_node_or_indentless_sequence?)?
    #                   (VALUE block_node_or_indentless_sequence?)?)*
    #                   BLOCK-END

    def parse_block_mapping_first_key(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()

    def parse_block_mapping_key(self):
        if self.check_token(KeyToken):
            token = self.get_token()
            token.move_comment(self.peek_token())
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError(
                "while parsing a block mapping", self.marks[-1],
                "expected <block end>, but found %r" % token.id,
                token.start_mark)
        token = self.get_token()
        token.move_comment(self.peek_token())
        event = MappingEndEvent(token.start_mark, token.end_mark,
                                comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_block_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            # value token might have post comment move it to e.g. block
            token.move_comment(self.peek_token())
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_block_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    # flow_sequence     ::= FLOW-SEQUENCE-START
    #                       (flow_sequence_entry FLOW-ENTRY)*
    #                       flow_sequence_entry?
    #                       FLOW-SEQUENCE-END
    # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    #
    # Note that while production rules for both flow_sequence_entry and
    # flow_mapping_entry are equal, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).

    def parse_flow_sequence_first_entry(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)

    def parse_flow_sequence_entry(self, first=False):
        if not self.check_token(FlowSequenceEndToken):
            if not first:
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError(
                        "while parsing a flow sequence", self.marks[-1],
                        "expected ',' or ']', but got %r" % token.id,
                        token.start_mark)

            if self.check_token(KeyToken):
                token = self.peek_token()
                event = MappingStartEvent(None, None, True,
                                          token.start_mark, token.end_mark,
                                          flow_style=True)
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark,
                                 comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_sequence_entry_mapping_key(self):
        token = self.get_token()
        if not self.check_token(ValueToken,
                                FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)

    def parse_flow_sequence_entry_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_sequence_entry_mapping_end(self):
        self.state = self.parse_flow_sequence_entry
        token = self.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)

    # flow_mapping  ::= FLOW-MAPPING-START
    #                   (flow_mapping_entry FLOW-ENTRY)*
    #                   flow_mapping_entry?
    #                   FLOW-MAPPING-END
    # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?

    def parse_flow_mapping_first_key(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)

    def parse_flow_mapping_key(self, first=False):
        if not self.check_token(FlowMappingEndToken):
            if not first:
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError(
                        "while parsing a flow mapping", self.marks[-1],
                        "expected ',' or '}', but got %r" % token.id,
                        token.start_mark)
            if self.check_token(KeyToken):
                token = self.get_token()
                if not self.check_token(ValueToken,
                                        FlowEntryToken, FlowMappingEndToken):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif not self.check_token(FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark,
                                comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_mapping_empty_value(self):
        self.state = self.parse_flow_mapping_key
|
||||
return self.process_empty_scalar(self.peek_token().start_mark)
|
||||
|
||||
def process_empty_scalar(self, mark):
|
||||
return ScalarEvent(None, None, (True, False), u'', mark, mark)
|
||||
|
||||
|
||||
class RoundTripParser(Parser):
|
||||
"""roundtrip is a safe loader, that wants to see the unmangled tag"""
|
||||
def transform_tag(self, handle, suffix):
|
||||
# return self.tag_handles[handle]+suffix
|
||||
if handle == '!!' and suffix in (u'null', u'bool', u'int', u'float', u'binary',
|
||||
u'timestamp', u'omap', u'pairs', u'set', u'str',
|
||||
u'seq', u'map'):
|
||||
return Parser.transform_tag(self, handle, suffix)
|
||||
return handle+suffix
|
||||
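The grammar note above says that `KEY flow_node? (VALUE flow_node?)?` inside a flow sequence generates an inline single-pair mapping. A minimal sketch of the effect, assuming the vendored package is importable as `ruamel.yaml`:

from ruamel.yaml import round_trip_load

data = round_trip_load(u'[a: 1, b]')
print(data[0]['a'], data[1])   # 1 b: the 'a: 1' entry parses as a one-pair mapping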
213 lib/spack/external/ruamel/yaml/reader.py vendored
@@ -1,213 +0,0 @@
# coding: utf-8

from __future__ import absolute_import

# This module contains abstractions for the input stream. You don't have to
# look further, there is no pretty code.
#
# We define two classes here.
#
#   Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
#   Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
#   reader.peek(length=1) - return the next `length` characters
#   reader.forward(length=1) - move the current position `length`
#       characters ahead.
#   reader.index - the number of the current character.
#   reader.line, reader.column - the line and the column of the current
#       character.

import codecs
import re

try:
    from .error import YAMLError, Mark
    from .compat import text_type, binary_type, PY3
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.error import YAMLError, Mark
    from ruamel.yaml.compat import text_type, binary_type, PY3

__all__ = ['Reader', 'ReaderError']


class ReaderError(YAMLError):

    def __init__(self, name, position, character, encoding, reason):
        self.name = name
        self.character = character
        self.position = position
        self.encoding = encoding
        self.reason = reason

    def __str__(self):
        if isinstance(self.character, binary_type):
            return "'%s' codec can't decode byte #x%02x: %s\n" \
                   "  in \"%s\", position %d" \
                   % (self.encoding, ord(self.character), self.reason,
                      self.name, self.position)
        else:
            return "unacceptable character #x%04x: %s\n" \
                   "  in \"%s\", position %d" \
                   % (self.character, self.reason,
                      self.name, self.position)


class Reader(object):
    # Reader:
    # - determines the data encoding and converts it to a unicode string,
    # - checks if characters are in allowed range,
    # - adds '\0' to the end.

    # Reader accepts
    #  - a `str` object (PY2) / a `bytes` object (PY3),
    #  - a `unicode` object (PY2) / a `str` object (PY3),
    #  - a file-like object with its `read` method returning `str`,
    #  - a file-like object with its `read` method returning `unicode`.

    # Yeah, it's ugly and slow.

    def __init__(self, stream):
        self.name = None
        self.stream = None
        self.stream_pointer = 0
        self.eof = True
        self.buffer = u''
        self.pointer = 0
        self.raw_buffer = None
        self.raw_decode = None
        self.encoding = None
        self.index = 0
        self.line = 0
        self.column = 0
        if isinstance(stream, text_type):
            self.name = "<unicode string>"
            self.check_printable(stream)
            self.buffer = stream + u'\0'
        elif isinstance(stream, binary_type):
            self.name = "<byte string>"
            self.raw_buffer = stream
            self.determine_encoding()
        else:
            self.stream = stream
            self.name = getattr(stream, 'name', "<file>")
            self.eof = False
            self.raw_buffer = None
            self.determine_encoding()

    def peek(self, index=0):
        try:
            return self.buffer[self.pointer + index]
        except IndexError:
            self.update(index + 1)
            return self.buffer[self.pointer + index]

    def prefix(self, length=1):
        if self.pointer + length >= len(self.buffer):
            self.update(length)
        return self.buffer[self.pointer:self.pointer + length]

    def forward(self, length=1):
        if self.pointer + length + 1 >= len(self.buffer):
            self.update(length + 1)
        while length:
            ch = self.buffer[self.pointer]
            self.pointer += 1
            self.index += 1
            if ch in u'\n\x85\u2028\u2029' \
                    or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
                self.line += 1
                self.column = 0
            elif ch != u'\uFEFF':
                self.column += 1
            length -= 1

    def get_mark(self):
        if self.stream is None:
            return Mark(self.name, self.index, self.line, self.column,
                        self.buffer, self.pointer)
        else:
            return Mark(self.name, self.index, self.line, self.column,
                        None, None)

    def determine_encoding(self):
        while not self.eof and (self.raw_buffer is None or
                                len(self.raw_buffer) < 2):
            self.update_raw()
        if isinstance(self.raw_buffer, binary_type):
            if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
                self.raw_decode = codecs.utf_16_le_decode
                self.encoding = 'utf-16-le'
            elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
                self.raw_decode = codecs.utf_16_be_decode
                self.encoding = 'utf-16-be'
            else:
                self.raw_decode = codecs.utf_8_decode
                self.encoding = 'utf-8'
        self.update(1)

    NON_PRINTABLE = re.compile(
        u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')

    def check_printable(self, data):
        match = self.NON_PRINTABLE.search(data)
        if match:
            character = match.group()
            position = self.index + (len(self.buffer) - self.pointer) + match.start()
            raise ReaderError(self.name, position, ord(character),
                              'unicode', "special characters are not allowed")

    def update(self, length):
        if self.raw_buffer is None:
            return
        self.buffer = self.buffer[self.pointer:]
        self.pointer = 0
        while len(self.buffer) < length:
            if not self.eof:
                self.update_raw()
            if self.raw_decode is not None:
                try:
                    data, converted = self.raw_decode(self.raw_buffer,
                                                      'strict', self.eof)
                except UnicodeDecodeError as exc:
                    if PY3:
                        character = self.raw_buffer[exc.start]
                    else:
                        character = exc.object[exc.start]
                    if self.stream is not None:
                        position = self.stream_pointer - \
                            len(self.raw_buffer) + exc.start
                    else:
                        position = exc.start
                    raise ReaderError(self.name, position, character,
                                      exc.encoding, exc.reason)
            else:
                data = self.raw_buffer
                converted = len(data)
            self.check_printable(data)
            self.buffer += data
            self.raw_buffer = self.raw_buffer[converted:]
            if self.eof:
                self.buffer += u'\0'
                self.raw_buffer = None
                break

    def update_raw(self, size=None):
        if size is None:
            size = 4096 if PY3 else 1024
        data = self.stream.read(size)
        if self.raw_buffer is None:
            self.raw_buffer = data
        else:
            self.raw_buffer += data
        self.stream_pointer += len(data)
        if not data:
            self.eof = True

# try:
#     import psyco
#     psyco.bind(Reader)
# except ImportError:
#     pass
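Reader's public surface is just peek/prefix/forward plus the index/line/column counters described in the header comment. A small sketch of stepping through a buffer (assuming the module imports under the `ruamel.yaml` package as vendored here):

from ruamel.yaml.reader import Reader

r = Reader(u'ab\ncd')
print(r.peek())                    # 'a'
print(r.prefix(2))                 # 'ab'
r.forward(3)                       # consume 'a', 'b' and the newline
print(r.index, r.line, r.column)   # 3 1 0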
888 lib/spack/external/ruamel/yaml/representer.py vendored
@@ -1,888 +0,0 @@
# coding: utf-8

from __future__ import absolute_import
from __future__ import print_function

try:
    from .error import *  # NOQA
    from .nodes import *  # NOQA
    from .compat import text_type, binary_type, to_unicode, PY2, PY3, ordereddict
    from .scalarstring import *  # NOQA
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.error import *  # NOQA
    from ruamel.yaml.nodes import *  # NOQA
    from ruamel.yaml.compat import text_type, binary_type, to_unicode, PY2, PY3, ordereddict
    from ruamel.yaml.scalarstring import *  # NOQA


import datetime
import sys
import types
if PY3:
    import copyreg
    import base64
else:
    import copy_reg as copyreg


__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
           'RepresenterError', 'RoundTripRepresenter']


class RepresenterError(YAMLError):
    pass


class BaseRepresenter(object):

    yaml_representers = {}
    yaml_multi_representers = {}

    def __init__(self, default_style=None, default_flow_style=None):
        self.default_style = default_style
        self.default_flow_style = default_flow_style
        self.represented_objects = {}
        self.object_keeper = []
        self.alias_key = None

    def represent(self, data):
        node = self.represent_data(data)
        self.serialize(node)
        self.represented_objects = {}
        self.object_keeper = []
        self.alias_key = None

    if PY2:
        def get_classobj_bases(self, cls):
            bases = [cls]
            for base in cls.__bases__:
                bases.extend(self.get_classobj_bases(base))
            return bases

    def represent_data(self, data):
        if self.ignore_aliases(data):
            self.alias_key = None
        else:
            self.alias_key = id(data)
        if self.alias_key is not None:
            if self.alias_key in self.represented_objects:
                node = self.represented_objects[self.alias_key]
                # if node is None:
                #     raise RepresenterError(
                #         "recursive objects are not allowed: %r" % data)
                return node
            # self.represented_objects[alias_key] = None
            self.object_keeper.append(data)
        data_types = type(data).__mro__
        if PY2:
            # if type(data) is types.InstanceType:
            if isinstance(data, types.InstanceType):
                data_types = self.get_classobj_bases(data.__class__) + \
                    list(data_types)
        if data_types[0] in self.yaml_representers:
            node = self.yaml_representers[data_types[0]](self, data)
        else:
            for data_type in data_types:
                if data_type in self.yaml_multi_representers:
                    node = self.yaml_multi_representers[data_type](self, data)
                    break
            else:
                if None in self.yaml_multi_representers:
                    node = self.yaml_multi_representers[None](self, data)
                elif None in self.yaml_representers:
                    node = self.yaml_representers[None](self, data)
                else:
                    node = ScalarNode(None, text_type(data))
        # if alias_key is not None:
        #     self.represented_objects[alias_key] = node
        return node

    def represent_key(self, data):
        """
        David Fraser: Extract a method to represent keys in mappings, so that
        a subclass can choose not to quote them (for example);
        used in represent_mapping.
        https://bitbucket.org/davidfraser/pyyaml/commits/d81df6eb95f20cac4a79eed95ae553b5c6f77b8c
        """
        return self.represent_data(data)

    @classmethod
    def add_representer(cls, data_type, representer):
        if 'yaml_representers' not in cls.__dict__:
            cls.yaml_representers = cls.yaml_representers.copy()
        cls.yaml_representers[data_type] = representer

    @classmethod
    def add_multi_representer(cls, data_type, representer):
        if 'yaml_multi_representers' not in cls.__dict__:
            cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
        cls.yaml_multi_representers[data_type] = representer
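add_representer / add_multi_representer implement a copy-on-write registry: the first registration on a subclass copies the parent's dict, so parents never see the child's entries. A sketch with a made-up type (Celsius and represent_celsius are illustrative, not part of the library):

from ruamel.yaml.representer import BaseRepresenter, SafeRepresenter

class Celsius(float):   # hypothetical user type
    pass

def represent_celsius(representer, data):
    return representer.represent_scalar(u'!celsius', u'%.1f' % data)

SafeRepresenter.add_representer(Celsius, represent_celsius)
assert Celsius in SafeRepresenter.yaml_representers
assert Celsius not in BaseRepresenter.yaml_representers  # parent untouched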
    def represent_scalar(self, tag, value, style=None):
        if style is None:
            style = self.default_style
        node = ScalarNode(tag, value, style=style)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        return node

    def represent_sequence(self, tag, sequence, flow_style=None):
        value = []
        node = SequenceNode(tag, value, flow_style=flow_style)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        best_style = True
        for item in sequence:
            node_item = self.represent_data(item)
            if not (isinstance(node_item, ScalarNode) and not node_item.style):
                best_style = False
            value.append(node_item)
        if flow_style is None:
            if self.default_flow_style is not None:
                node.flow_style = self.default_flow_style
            else:
                node.flow_style = best_style
        return node

    def represent_omap(self, tag, omap, flow_style=None):
        value = []
        node = SequenceNode(tag, value, flow_style=flow_style)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        best_style = True
        for item_key in omap:
            item_val = omap[item_key]
            node_item = self.represent_data({item_key: item_val})
            # if not (isinstance(node_item, ScalarNode) \
            #         and not node_item.style):
            #     best_style = False
            value.append(node_item)
        if flow_style is None:
            if self.default_flow_style is not None:
                node.flow_style = self.default_flow_style
            else:
                node.flow_style = best_style
        return node

    def represent_mapping(self, tag, mapping, flow_style=None):
        value = []
        node = MappingNode(tag, value, flow_style=flow_style)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        best_style = True
        if hasattr(mapping, 'items'):
            mapping = list(mapping.items())
            try:
                mapping = sorted(mapping)
            except TypeError:
                pass
        for item_key, item_value in mapping:
            node_key = self.represent_key(item_key)
            node_value = self.represent_data(item_value)
            if not (isinstance(node_key, ScalarNode) and not node_key.style):
                best_style = False
            if not (isinstance(node_value, ScalarNode) and not
                    node_value.style):
                best_style = False
            value.append((node_key, node_value))
        if flow_style is None:
            if self.default_flow_style is not None:
                node.flow_style = self.default_flow_style
            else:
                node.flow_style = best_style
        return node

    def ignore_aliases(self, data):
        return False


class SafeRepresenter(BaseRepresenter):

    def ignore_aliases(self, data):
        # https://docs.python.org/3/reference/expressions.html#parenthesized-forms :
        # "i.e. two occurrences of the empty tuple may or may not yield the
        # same object", so "data is ()" should not be used
        if data is None or data == ():
            return True
        if isinstance(data, (binary_type, text_type, bool, int, float)):
            return True

    def represent_none(self, data):
        return self.represent_scalar(u'tag:yaml.org,2002:null',
                                     u'null')

    if PY3:
        def represent_str(self, data):
            return self.represent_scalar(u'tag:yaml.org,2002:str', data)

        def represent_binary(self, data):
            if hasattr(base64, 'encodebytes'):
                data = base64.encodebytes(data).decode('ascii')
            else:
                data = base64.encodestring(data).decode('ascii')
            return self.represent_scalar(u'tag:yaml.org,2002:binary', data,
                                         style='|')
    else:
        def represent_str(self, data):
            tag = None
            style = None
            try:
                data = unicode(data, 'ascii')
                tag = u'tag:yaml.org,2002:str'
            except UnicodeDecodeError:
                try:
                    data = unicode(data, 'utf-8')
                    tag = u'tag:yaml.org,2002:str'
                except UnicodeDecodeError:
                    data = data.encode('base64')
                    tag = u'tag:yaml.org,2002:binary'
                    style = '|'
            return self.represent_scalar(tag, data, style=style)

        def represent_unicode(self, data):
            return self.represent_scalar(u'tag:yaml.org,2002:str', data)

    def represent_bool(self, data):
        if data:
            value = u'true'
        else:
            value = u'false'
        return self.represent_scalar(u'tag:yaml.org,2002:bool', value)

    def represent_int(self, data):
        return self.represent_scalar(u'tag:yaml.org,2002:int', text_type(data))

    if PY2:
        def represent_long(self, data):
            return self.represent_scalar(u'tag:yaml.org,2002:int',
                                         text_type(data))

    inf_value = 1e300
    while repr(inf_value) != repr(inf_value * inf_value):
        inf_value *= inf_value

    def represent_float(self, data):
        if data != data or (data == 0.0 and data == 1.0):
            value = u'.nan'
        elif data == self.inf_value:
            value = u'.inf'
        elif data == -self.inf_value:
            value = u'-.inf'
        else:
            value = to_unicode(repr(data)).lower()
            # Note that in some cases `repr(data)` represents a float number
            # without the decimal parts.  For instance:
            #   >>> repr(1e17)
            #   '1e17'
            # Unfortunately, this is not a valid float representation according
            # to the definition of the `!!float` tag.  We fix this by adding
            # '.0' before the 'e' symbol.
            if u'.' not in value and u'e' in value:
                value = value.replace(u'e', u'.0e', 1)
        return self.represent_scalar(u'tag:yaml.org,2002:float', value)
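The '.0e' fix-up in represent_float, isolated (plain Python, no library code involved):

value = repr(1e17).lower()           # '1e+17' on CPython 3
if u'.' not in value and u'e' in value:
    value = value.replace(u'e', u'.0e', 1)
print(value)                         # '1.0e+17', now a valid YAML !!float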
    def represent_list(self, data):
        # pairs = (len(data) > 0 and isinstance(data, list))
        # if pairs:
        #     for item in data:
        #         if not isinstance(item, tuple) or len(item) != 2:
        #             pairs = False
        #             break
        # if not pairs:
        return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
        # value = []
        # for item_key, item_value in data:
        #     value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
        #                                         [(item_key, item_value)]))
        # return SequenceNode(u'tag:yaml.org,2002:pairs', value)

    def represent_dict(self, data):
        return self.represent_mapping(u'tag:yaml.org,2002:map', data)

    def represent_ordereddict(self, data):
        return self.represent_omap(u'tag:yaml.org,2002:omap', data)

    def represent_set(self, data):
        value = {}
        for key in data:
            value[key] = None
        return self.represent_mapping(u'tag:yaml.org,2002:set', value)

    def represent_date(self, data):
        value = to_unicode(data.isoformat())
        return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)

    def represent_datetime(self, data):
        value = to_unicode(data.isoformat(' '))
        return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)

    def represent_yaml_object(self, tag, data, cls, flow_style=None):
        if hasattr(data, '__getstate__'):
            state = data.__getstate__()
        else:
            state = data.__dict__.copy()
        return self.represent_mapping(tag, state, flow_style=flow_style)

    def represent_undefined(self, data):
        raise RepresenterError("cannot represent an object: %s" % data)


SafeRepresenter.add_representer(type(None),
                                SafeRepresenter.represent_none)

SafeRepresenter.add_representer(str,
                                SafeRepresenter.represent_str)

if PY2:
    SafeRepresenter.add_representer(unicode,
                                    SafeRepresenter.represent_unicode)
else:
    SafeRepresenter.add_representer(bytes,
                                    SafeRepresenter.represent_binary)

SafeRepresenter.add_representer(bool,
                                SafeRepresenter.represent_bool)

SafeRepresenter.add_representer(int,
                                SafeRepresenter.represent_int)

if PY2:
    SafeRepresenter.add_representer(long,
                                    SafeRepresenter.represent_long)

SafeRepresenter.add_representer(float,
                                SafeRepresenter.represent_float)

SafeRepresenter.add_representer(list,
                                SafeRepresenter.represent_list)

SafeRepresenter.add_representer(tuple,
                                SafeRepresenter.represent_list)

SafeRepresenter.add_representer(dict,
                                SafeRepresenter.represent_dict)

SafeRepresenter.add_representer(set,
                                SafeRepresenter.represent_set)

SafeRepresenter.add_representer(ordereddict,
                                SafeRepresenter.represent_ordereddict)

SafeRepresenter.add_representer(datetime.date,
                                SafeRepresenter.represent_date)

SafeRepresenter.add_representer(datetime.datetime,
                                SafeRepresenter.represent_datetime)

SafeRepresenter.add_representer(None,
                                SafeRepresenter.represent_undefined)
class Representer(SafeRepresenter):
    if PY2:
        def represent_str(self, data):
            tag = None
            style = None
            try:
                data = unicode(data, 'ascii')
                tag = u'tag:yaml.org,2002:str'
            except UnicodeDecodeError:
                try:
                    data = unicode(data, 'utf-8')
                    tag = u'tag:yaml.org,2002:python/str'
                except UnicodeDecodeError:
                    data = data.encode('base64')
                    tag = u'tag:yaml.org,2002:binary'
                    style = '|'
            return self.represent_scalar(tag, data, style=style)

        def represent_unicode(self, data):
            tag = None
            try:
                data.encode('ascii')
                tag = u'tag:yaml.org,2002:python/unicode'
            except UnicodeEncodeError:
                tag = u'tag:yaml.org,2002:str'
            return self.represent_scalar(tag, data)

        def represent_long(self, data):
            tag = u'tag:yaml.org,2002:int'
            if int(data) is not data:
                tag = u'tag:yaml.org,2002:python/long'
            return self.represent_scalar(tag, to_unicode(data))

    def represent_complex(self, data):
        if data.imag == 0.0:
            data = u'%r' % data.real
        elif data.real == 0.0:
            data = u'%rj' % data.imag
        elif data.imag > 0:
            data = u'%r+%rj' % (data.real, data.imag)
        else:
            data = u'%r%rj' % (data.real, data.imag)
        return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data)

    def represent_tuple(self, data):
        return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data)

    def represent_name(self, data):
        name = u'%s.%s' % (data.__module__, data.__name__)
        return self.represent_scalar(u'tag:yaml.org,2002:python/name:' +
                                     name, u'')

    def represent_module(self, data):
        return self.represent_scalar(
            u'tag:yaml.org,2002:python/module:' + data.__name__, u'')

    if PY2:
        def represent_instance(self, data):
            # For instances of classic classes, we use __getinitargs__ and
            # __getstate__ to serialize the data.

            # If data.__getinitargs__ exists, the object must be
            # reconstructed by calling cls(*args), where args is a tuple
            # returned by __getinitargs__.  Otherwise, the cls.__init__
            # method should never be called and the class instance is
            # created by instantiating a trivial class and assigning to the
            # instance's __class__ variable.

            # If data.__getstate__ exists, it returns the state of the
            # object.  Otherwise, the state of the object is data.__dict__.

            # We produce either a !!python/object or !!python/object/new
            # node.  If data.__getinitargs__ does not exist and state is a
            # dictionary, we produce a !!python/object node.  Otherwise we
            # produce a !!python/object/new node.

            cls = data.__class__
            class_name = u'%s.%s' % (cls.__module__, cls.__name__)
            args = None
            state = None
            if hasattr(data, '__getinitargs__'):
                args = list(data.__getinitargs__())
            if hasattr(data, '__getstate__'):
                state = data.__getstate__()
            else:
                state = data.__dict__
            if args is None and isinstance(state, dict):
                return self.represent_mapping(
                    u'tag:yaml.org,2002:python/object:' + class_name, state)
            if isinstance(state, dict) and not state:
                return self.represent_sequence(
                    u'tag:yaml.org,2002:python/object/new:' +
                    class_name, args)
            value = {}
            if args:
                value['args'] = args
            value['state'] = state
            return self.represent_mapping(
                u'tag:yaml.org,2002:python/object/new:' + class_name, value)
    def represent_object(self, data):
        # We use the __reduce__ API to save the data. data.__reduce__ returns
        # a tuple of length 2-5:
        #   (function, args, state, listitems, dictitems)

        # For reconstructing, we call function(*args), then set its state,
        # listitems, and dictitems if they are not None.

        # A special case is when function.__name__ == '__newobj__'. In this
        # case we create the object with args[0].__new__(*args).

        # Another special case is when __reduce__ returns a string - we don't
        # support it.

        # We produce a !!python/object, !!python/object/new or
        # !!python/object/apply node.

        cls = type(data)
        if cls in copyreg.dispatch_table:
            reduce = copyreg.dispatch_table[cls](data)
        elif hasattr(data, '__reduce_ex__'):
            reduce = data.__reduce_ex__(2)
        elif hasattr(data, '__reduce__'):
            reduce = data.__reduce__()
        else:
            raise RepresenterError("cannot represent object: %r" % data)
        reduce = (list(reduce) + [None] * 5)[:5]
        function, args, state, listitems, dictitems = reduce
        args = list(args)
        if state is None:
            state = {}
        if listitems is not None:
            listitems = list(listitems)
        if dictitems is not None:
            dictitems = dict(dictitems)
        if function.__name__ == '__newobj__':
            function = args[0]
            args = args[1:]
            tag = u'tag:yaml.org,2002:python/object/new:'
            newobj = True
        else:
            tag = u'tag:yaml.org,2002:python/object/apply:'
            newobj = False
        function_name = u'%s.%s' % (function.__module__, function.__name__)
        if not args and not listitems and not dictitems \
                and isinstance(state, dict) and newobj:
            return self.represent_mapping(
                u'tag:yaml.org,2002:python/object:' + function_name, state)
        if not listitems and not dictitems \
                and isinstance(state, dict) and not state:
            return self.represent_sequence(tag + function_name, args)
        value = {}
        if args:
            value['args'] = args
        if state or not isinstance(state, dict):
            value['state'] = state
        if listitems:
            value['listitems'] = listitems
        if dictitems:
            value['dictitems'] = dictitems
        return self.represent_mapping(tag + function_name, value)
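represent_object leans on the pickle reduce protocol; the tuple it pads to five elements looks like this for a simple new-style class (Point is made up for the demonstration):

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

reduce = (list(Point(1, 2).__reduce_ex__(2)) + [None] * 5)[:5]
function, args, state, listitems, dictitems = reduce
print(function.__name__)   # '__newobj__', the special case noted above
print(args[0] is Point)    # True: the object is recreated via Point.__new__
print(state)               # {'x': 1, 'y': 2}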
if PY2:
    Representer.add_representer(str,
                                Representer.represent_str)

    Representer.add_representer(unicode,
                                Representer.represent_unicode)

    Representer.add_representer(long,
                                Representer.represent_long)

Representer.add_representer(complex,
                            Representer.represent_complex)

Representer.add_representer(tuple,
                            Representer.represent_tuple)

Representer.add_representer(type,
                            Representer.represent_name)

if PY2:
    Representer.add_representer(types.ClassType,
                                Representer.represent_name)

Representer.add_representer(types.FunctionType,
                            Representer.represent_name)

Representer.add_representer(types.BuiltinFunctionType,
                            Representer.represent_name)

Representer.add_representer(types.ModuleType,
                            Representer.represent_module)

if PY2:
    Representer.add_multi_representer(types.InstanceType,
                                      Representer.represent_instance)

Representer.add_multi_representer(object,
                                  Representer.represent_object)


try:
    from .comments import CommentedMap, CommentedOrderedMap, CommentedSeq, \
        CommentedSet, comment_attrib, merge_attrib
except ImportError:  # for Jython
    from ruamel.yaml.comments import CommentedMap, CommentedOrderedMap, \
        CommentedSeq, CommentedSet, comment_attrib, merge_attrib


class RoundTripRepresenter(SafeRepresenter):
    # need to add type here and write out the .comment
    # in serializer and emitter

    def __init__(self, default_style=None, default_flow_style=None):
        if default_flow_style is None:
            default_flow_style = False
        SafeRepresenter.__init__(self, default_style=default_style,
                                 default_flow_style=default_flow_style)

    def represent_none(self, data):
        return self.represent_scalar(u'tag:yaml.org,2002:null',
                                     u'')

    def represent_preserved_scalarstring(self, data):
        tag = None
        style = '|'
        if PY2 and not isinstance(data, unicode):
            data = unicode(data, 'ascii')
        tag = u'tag:yaml.org,2002:str'
        return self.represent_scalar(tag, data, style=style)

    def represent_single_quoted_scalarstring(self, data):
        tag = None
        style = "'"
        if PY2 and not isinstance(data, unicode):
            data = unicode(data, 'ascii')
        tag = u'tag:yaml.org,2002:str'
        return self.represent_scalar(tag, data, style=style)

    def represent_double_quoted_scalarstring(self, data):
        tag = None
        style = '"'
        if PY2 and not isinstance(data, unicode):
            data = unicode(data, 'ascii')
        tag = u'tag:yaml.org,2002:str'
        return self.represent_scalar(tag, data, style=style)

    def represent_sequence(self, tag, sequence, flow_style=None):
        value = []
        # if the flow_style is None, the flow style tacked on to the object
        # explicitly will be taken. If that is None as well the default flow
        # style rules
        try:
            flow_style = sequence.fa.flow_style(flow_style)
        except AttributeError:
            flow_style = flow_style
        try:
            anchor = sequence.yaml_anchor()
        except AttributeError:
            anchor = None
        node = SequenceNode(tag, value, flow_style=flow_style, anchor=anchor)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        best_style = True
        try:
            comment = getattr(sequence, comment_attrib)
            item_comments = comment.items
            node.comment = comment.comment
            try:
                node.comment.append(comment.end)
            except AttributeError:
                pass
        except AttributeError:
            item_comments = {}
        for idx, item in enumerate(sequence):
            node_item = self.represent_data(item)
            node_item.comment = item_comments.get(idx)
            if not (isinstance(node_item, ScalarNode) and not node_item.style):
                best_style = False
            value.append(node_item)
        if flow_style is None:
            if self.default_flow_style is not None:
                node.flow_style = self.default_flow_style
            else:
                node.flow_style = best_style
        return node

    def represent_mapping(self, tag, mapping, flow_style=None):
        value = []
        try:
            flow_style = mapping.fa.flow_style(flow_style)
        except AttributeError:
            flow_style = flow_style
        try:
            anchor = mapping.yaml_anchor()
        except AttributeError:
            anchor = None
        node = MappingNode(tag, value, flow_style=flow_style, anchor=anchor)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        best_style = True
        # no sorting! !!
        try:
            comment = getattr(mapping, comment_attrib)
            node.comment = comment.comment
            if node.comment and node.comment[1]:
                for ct in node.comment[1]:
                    ct.reset()
            item_comments = comment.items
            for v in item_comments.values():
                if v and v[1]:
                    for ct in v[1]:
                        ct.reset()
            try:
                node.comment.append(comment.end)
            except AttributeError:
                pass
        except AttributeError:
            item_comments = {}
        for item_key, item_value in mapping.items():
            node_key = self.represent_key(item_key)
            node_value = self.represent_data(item_value)
            item_comment = item_comments.get(item_key)
            if item_comment:
                assert getattr(node_key, 'comment', None) is None
                node_key.comment = item_comment[:2]
                nvc = getattr(node_value, 'comment', None)
                if nvc is not None:  # end comment already there
                    nvc[0] = item_comment[2]
                    nvc[1] = item_comment[3]
                else:
                    node_value.comment = item_comment[2:]
            if not (isinstance(node_key, ScalarNode) and not node_key.style):
                best_style = False
            if not (isinstance(node_value, ScalarNode) and not
                    node_value.style):
                best_style = False
            value.append((node_key, node_value))
        if flow_style is None:
            if self.default_flow_style is not None:
                node.flow_style = self.default_flow_style
            else:
                node.flow_style = best_style
        merge_list = [m[1] for m in getattr(mapping, merge_attrib, [])]
        if merge_list:
            # because of the call to represent_data here, the anchors
            # are marked as being used and thereby created
            if len(merge_list) == 1:
                arg = self.represent_data(merge_list[0])
            else:
                arg = self.represent_data(merge_list)
            arg.flow_style = True
            value.insert(0,
                         (ScalarNode(u'tag:yaml.org,2002:merge', '<<'), arg))
        return node
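The merge_attrib handling at the end of represent_mapping is what re-emits `<<` keys. A round-trip sketch (hedged; exact dump formatting can vary):

from ruamel.yaml import round_trip_load, round_trip_dump

src = u"""\
defaults: &defaults
  a: 1
merged:
  <<: *defaults
  b: 2
"""
data = round_trip_load(src)
print(data['merged']['a'])     # 1, resolved through the merge
print(round_trip_dump(data))   # the '<<: *defaults' line is written back out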
    def represent_omap(self, tag, omap, flow_style=None):
        value = []
        try:
            flow_style = omap.fa.flow_style(flow_style)
        except AttributeError:
            flow_style = flow_style
        try:
            anchor = omap.yaml_anchor()
        except AttributeError:
            anchor = None
        node = SequenceNode(tag, value, flow_style=flow_style, anchor=anchor)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        best_style = True
        try:
            comment = getattr(omap, comment_attrib)
            node.comment = comment.comment
            if node.comment and node.comment[1]:
                for ct in node.comment[1]:
                    ct.reset()
            item_comments = comment.items
            for v in item_comments.values():
                if v and v[1]:
                    for ct in v[1]:
                        ct.reset()
            try:
                node.comment.append(comment.end)
            except AttributeError:
                pass
        except AttributeError:
            item_comments = {}
        for item_key in omap:
            item_val = omap[item_key]
            node_item = self.represent_data({item_key: item_val})
            # node_item has two scalars in value: node_key and node_value
            item_comment = item_comments.get(item_key)
            if item_comment:
                if item_comment[1]:
                    node_item.comment = [None, item_comment[1]]
                assert getattr(node_item.value[0][0], 'comment', None) is None
                node_item.value[0][0].comment = [item_comment[0], None]
                nvc = getattr(node_item.value[0][1], 'comment', None)
                if nvc is not None:  # end comment already there
                    nvc[0] = item_comment[2]
                    nvc[1] = item_comment[3]
                else:
                    node_item.value[0][1].comment = item_comment[2:]
            # if not (isinstance(node_item, ScalarNode) \
            #         and not node_item.style):
            #     best_style = False
            value.append(node_item)
        if flow_style is None:
            if self.default_flow_style is not None:
                node.flow_style = self.default_flow_style
            else:
                node.flow_style = best_style
        return node

    def represent_set(self, setting):
        flow_style = False
        tag = u'tag:yaml.org,2002:set'
        # return self.represent_mapping(tag, value)
        value = []
        flow_style = setting.fa.flow_style(flow_style)
        try:
            anchor = setting.yaml_anchor()
        except AttributeError:
            anchor = None
        node = MappingNode(tag, value, flow_style=flow_style, anchor=anchor)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        best_style = True
        # no sorting! !!
        try:
            comment = getattr(setting, comment_attrib)
            node.comment = comment.comment
            if node.comment and node.comment[1]:
                for ct in node.comment[1]:
                    ct.reset()
            item_comments = comment.items
            for v in item_comments.values():
                if v and v[1]:
                    for ct in v[1]:
                        ct.reset()
            try:
                node.comment.append(comment.end)
            except AttributeError:
                pass
        except AttributeError:
            item_comments = {}
        for item_key in setting.odict:
            node_key = self.represent_key(item_key)
            node_value = self.represent_data(None)
            item_comment = item_comments.get(item_key)
            if item_comment:
                assert getattr(node_key, 'comment', None) is None
                node_key.comment = item_comment[:2]
            node_key.style = node_value.style = "?"
            if not (isinstance(node_key, ScalarNode) and not node_key.style):
                best_style = False
            if not (isinstance(node_value, ScalarNode) and not
                    node_value.style):
                best_style = False
            value.append((node_key, node_value))
        best_style = best_style
        return node

    def represent_dict(self, data):
        """write out tag if saved on loading"""
        try:
            t = data.tag.value
        except AttributeError:
            t = None
        if t:
            while t and t[0] == '!':
                t = t[1:]
            tag = 'tag:yaml.org,2002:' + t
        else:
            tag = u'tag:yaml.org,2002:map'
        return self.represent_mapping(tag, data)


RoundTripRepresenter.add_representer(type(None),
                                     RoundTripRepresenter.represent_none)

RoundTripRepresenter.add_representer(
    PreservedScalarString,
    RoundTripRepresenter.represent_preserved_scalarstring)

RoundTripRepresenter.add_representer(
    SingleQuotedScalarString,
    RoundTripRepresenter.represent_single_quoted_scalarstring)

RoundTripRepresenter.add_representer(
    DoubleQuotedScalarString,
    RoundTripRepresenter.represent_double_quoted_scalarstring)

RoundTripRepresenter.add_representer(CommentedSeq,
                                     RoundTripRepresenter.represent_list)

RoundTripRepresenter.add_representer(CommentedMap,
                                     RoundTripRepresenter.represent_dict)

RoundTripRepresenter.add_representer(CommentedOrderedMap,
                                     RoundTripRepresenter.represent_ordereddict)

if sys.version_info >= (2, 7):
    import collections
    RoundTripRepresenter.add_representer(collections.OrderedDict,
                                         RoundTripRepresenter.represent_ordereddict)

RoundTripRepresenter.add_representer(CommentedSet,
                                     RoundTripRepresenter.represent_set)
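Everything this class collects through comment_attrib exists so a later dump can replay it; the shortest possible demonstration (hedged):

from ruamel.yaml import round_trip_load, round_trip_dump

src = u'a: 1  # keep me\nb: 2\n'
data = round_trip_load(src)
data['b'] = 3
print(round_trip_dump(data))
# a: 1  # keep me
# b: 3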
60 lib/spack/external/ruamel/yaml/scalarstring.py vendored
@@ -1,60 +0,0 @@
# coding: utf-8

from __future__ import absolute_import
from __future__ import print_function

__all__ = ["ScalarString", "PreservedScalarString", "SingleQuotedScalarString",
           "DoubleQuotedScalarString"]

try:
    from .compat import text_type
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.compat import text_type


class ScalarString(text_type):
    def __new__(cls, *args, **kw):
        return text_type.__new__(cls, *args, **kw)


class PreservedScalarString(ScalarString):
    def __new__(cls, value):
        return ScalarString.__new__(cls, value)


class SingleQuotedScalarString(ScalarString):
    def __new__(cls, value):
        return ScalarString.__new__(cls, value)


class DoubleQuotedScalarString(ScalarString):
    def __new__(cls, value):
        return ScalarString.__new__(cls, value)


def preserve_literal(s):
    return PreservedScalarString(s.replace('\r\n', '\n').replace('\r', '\n'))


def walk_tree(base):
    """
    the routine here walks over a simple yaml tree (recursing in
    dict values and list items) and converts strings that
    have multiple lines to literal scalars
    """
    from ruamel.yaml.compat import string_types

    if isinstance(base, dict):
        for k in base:
            v = base[k]
            if isinstance(v, string_types) and '\n' in v:
                base[k] = preserve_literal(v)
            else:
                walk_tree(v)
    elif isinstance(base, list):
        for idx, elem in enumerate(base):
            if isinstance(elem, string_types) and '\n' in elem:
                print(elem)
                base[idx] = preserve_literal(elem)
            else:
                walk_tree(elem)
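walk_tree combined with the RoundTripRepresenter's handling of PreservedScalarString gives literal block scalars for multi-line strings; a sketch:

from ruamel.yaml import round_trip_dump
from ruamel.yaml.scalarstring import walk_tree

data = {'msg': 'line one\nline two\n'}
walk_tree(data)                # 'msg' becomes a PreservedScalarString
print(round_trip_dump(data))
# msg: |
#   line one
#   line two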
1661 lib/spack/external/ruamel/yaml/scanner.py vendored
File diff suppressed because it is too large
178 lib/spack/external/ruamel/yaml/serializer.py vendored
@@ -1,178 +0,0 @@
# coding: utf-8

from __future__ import absolute_import

import re

try:
    from .error import YAMLError
    from .compat import nprint, DBG_NODE, dbg, string_types
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.error import YAMLError
    from ruamel.yaml.compat import nprint, DBG_NODE, dbg, string_types

from ruamel.yaml.events import (
    StreamStartEvent, StreamEndEvent, MappingStartEvent, MappingEndEvent,
    SequenceStartEvent, SequenceEndEvent, AliasEvent, ScalarEvent,
    DocumentStartEvent, DocumentEndEvent,
)
from ruamel.yaml.nodes import (
    MappingNode, ScalarNode, SequenceNode,
)

__all__ = ['Serializer', 'SerializerError']


class SerializerError(YAMLError):
    pass


class Serializer(object):

    # 'id' and 3+ numbers, but not 000
    ANCHOR_TEMPLATE = u'id%03d'
    ANCHOR_RE = re.compile(u'id(?!000$)\\d{3,}')

    def __init__(self, encoding=None, explicit_start=None, explicit_end=None,
                 version=None, tags=None):
        self.use_encoding = encoding
        self.use_explicit_start = explicit_start
        self.use_explicit_end = explicit_end
        if isinstance(version, string_types):
            self.use_version = tuple(map(int, version.split('.')))
        else:
            self.use_version = version
        self.use_tags = tags
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0
        self.closed = None
        self._templated_id = None

    def open(self):
        if self.closed is None:
            self.emit(StreamStartEvent(encoding=self.use_encoding))
            self.closed = False
        elif self.closed:
            raise SerializerError("serializer is closed")
        else:
            raise SerializerError("serializer is already opened")

    def close(self):
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif not self.closed:
            self.emit(StreamEndEvent())
            self.closed = True

    # def __del__(self):
    #     self.close()

    def serialize(self, node):
        if dbg(DBG_NODE):
            nprint('Serializing nodes')
            node.dump()
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif self.closed:
            raise SerializerError("serializer is closed")
        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
                                     version=self.use_version,
                                     tags=self.use_tags))
        self.anchor_node(node)
        self.serialize_node(node, None, None)
        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0

    def anchor_node(self, node):
        if node in self.anchors:
            if self.anchors[node] is None:
                self.anchors[node] = self.generate_anchor(node)
        else:
            anchor = None
            try:
                if node.anchor.always_dump:
                    anchor = node.anchor.value
            except AttributeError:
                pass
            self.anchors[node] = anchor
            if isinstance(node, SequenceNode):
                for item in node.value:
                    self.anchor_node(item)
            elif isinstance(node, MappingNode):
                for key, value in node.value:
                    self.anchor_node(key)
                    self.anchor_node(value)

    def generate_anchor(self, node):
        try:
            anchor = node.anchor.value
        except AttributeError:
            anchor = None
        if anchor is None:
            self.last_anchor_id += 1
            return self.ANCHOR_TEMPLATE % self.last_anchor_id
        return anchor

    def serialize_node(self, node, parent, index):
        alias = self.anchors[node]
        if node in self.serialized_nodes:
            self.emit(AliasEvent(alias))
        else:
            self.serialized_nodes[node] = True
            self.descend_resolver(parent, index)
            if isinstance(node, ScalarNode):
                # here check if the node.tag equals the one that would result
                # from parsing; if not equal, quoting is necessary for strings
                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
                default_tag = self.resolve(ScalarNode, node.value, (False, True))
                implicit = (node.tag == detected_tag), (node.tag == default_tag)
                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
                                      style=node.style, comment=node.comment))
            elif isinstance(node, SequenceNode):
                implicit = (node.tag == self.resolve(SequenceNode, node.value, True))
                comment = node.comment
                # print('comment >>>>>>>>>>>>>.', comment, node.flow_style)
                end_comment = None
                seq_comment = None
                if node.flow_style is True:
                    if comment:  # eol comment on flow style sequence
                        seq_comment = comment[0]
                        # comment[0] = None
                if comment and len(comment) > 2:
                    end_comment = comment[2]
                else:
                    end_comment = None
                self.emit(SequenceStartEvent(alias, node.tag, implicit,
                                             flow_style=node.flow_style,
                                             comment=node.comment))
                index = 0
                for item in node.value:
                    self.serialize_node(item, node, index)
                    index += 1
                self.emit(SequenceEndEvent(comment=[seq_comment, end_comment]))
            elif isinstance(node, MappingNode):
                implicit = (node.tag == self.resolve(MappingNode, node.value, True))
                comment = node.comment
                end_comment = None
                map_comment = None
                if node.flow_style is True:
                    if comment:  # eol comment on flow style mapping
                        map_comment = comment[0]
                        # comment[0] = None
                if comment and len(comment) > 2:
                    end_comment = comment[2]
                self.emit(MappingStartEvent(alias, node.tag, implicit,
                                            flow_style=node.flow_style,
                                            comment=node.comment))
                for key, value in node.value:
                    self.serialize_node(key, node, None)
                    self.serialize_node(value, node, key)
                self.emit(MappingEndEvent(comment=[map_comment, end_comment]))
            self.ascend_resolver()


def templated_id(s):
    return Serializer.ANCHOR_RE.match(s)
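The ANCHOR_TEMPLATE/ANCHOR_RE pair above defines what a generated anchor looks like; the regex on its own (plain re, no library code needed):

import re

ANCHOR_RE = re.compile(u'id(?!000$)\\d{3,}')   # same pattern as above
for s in (u'id001', u'id000', u'id1234', u'foo'):
    print(s, bool(ANCHOR_RE.match(s)))
# id001 True, id000 False ('000' is excluded), id1234 True, foo False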
5 lib/spack/external/ruamel/yaml/setup.cfg vendored
@@ -1,5 +0,0 @@
[egg_info]
tag_build =
tag_date = 0
tag_svn_revision = 0
195 lib/spack/external/ruamel/yaml/tokens.py vendored
@@ -1,195 +0,0 @@
# # header
# coding: utf-8


class Token(object):
    def __init__(self, start_mark, end_mark):
        self.start_mark = start_mark
        self.end_mark = end_mark

    def __repr__(self):
        attributes = [key for key in self.__dict__
                      if not key.endswith('_mark')]
        attributes.sort()
        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
                               for key in attributes])
        return '%s(%s)' % (self.__class__.__name__, arguments)

    def add_post_comment(self, comment):
        if not hasattr(self, '_comment'):
            self._comment = [None, None]
        self._comment[0] = comment

    def add_pre_comments(self, comments):
        if not hasattr(self, '_comment'):
            self._comment = [None, None]
        assert self._comment[1] is None
        self._comment[1] = comments

    def get_comment(self):
        return getattr(self, '_comment', None)

    @property
    def comment(self):
        return getattr(self, '_comment', None)

    def move_comment(self, target):
        """move a comment from this token to target (normally the next token),
        used to combine e.g. comments before a BlockEntryToken with the
        ScalarToken that follows it
        """
        c = self.comment
        if c is None:
            return
        # don't push beyond last element
        if isinstance(target, StreamEndToken):
            return
        delattr(self, '_comment')
        tc = target.comment
        if not tc:  # no target comment, just insert
            target._comment = c
            return self
        if c[0] and tc[0] or c[1] and tc[1]:
            raise NotImplementedError('overlap in comment %r %r' % (c, tc))
        if c[0]:
            tc[0] = c[0]
        if c[1]:
            tc[1] = c[1]
        return self

    def split_comment(self):
        """split the post part of a comment and return it as a comment
        to be added; delete this token's comment if it ends up as
        [None, None]
            abc:  # this goes to the sequence
              # this goes to the first element
              - first element
        """
        comment = self.comment
        if comment is None or comment[0] is None:
            return None  # nothing to do
        ret_val = [comment[0], None]
        if comment[1] is None:
            delattr(self, '_comment')
        return ret_val


# class BOMToken(Token):
#     id = '<byte order mark>'

class DirectiveToken(Token):
    id = '<directive>'

    def __init__(self, name, value, start_mark, end_mark):
        Token.__init__(self, start_mark, end_mark)
        self.name = name
        self.value = value


class DocumentStartToken(Token):
    id = '<document start>'


class DocumentEndToken(Token):
    id = '<document end>'


class StreamStartToken(Token):
    id = '<stream start>'

    def __init__(self, start_mark=None, end_mark=None, encoding=None):
        Token.__init__(self, start_mark, end_mark)
        self.encoding = encoding


class StreamEndToken(Token):
    id = '<stream end>'


class BlockSequenceStartToken(Token):
    id = '<block sequence start>'


class BlockMappingStartToken(Token):
    id = '<block mapping start>'


class BlockEndToken(Token):
    id = '<block end>'


class FlowSequenceStartToken(Token):
    id = '['


class FlowMappingStartToken(Token):
    id = '{'


class FlowSequenceEndToken(Token):
    id = ']'


class FlowMappingEndToken(Token):
    id = '}'


class KeyToken(Token):
    id = '?'


class ValueToken(Token):
    id = ':'


class BlockEntryToken(Token):
    id = '-'


class FlowEntryToken(Token):
    id = ','


class AliasToken(Token):
    id = '<alias>'

    def __init__(self, value, start_mark, end_mark):
        Token.__init__(self, start_mark, end_mark)
        self.value = value


class AnchorToken(Token):
    id = '<anchor>'

    def __init__(self, value, start_mark, end_mark):
        Token.__init__(self, start_mark, end_mark)
        self.value = value


class TagToken(Token):
    id = '<tag>'

    def __init__(self, value, start_mark, end_mark):
        Token.__init__(self, start_mark, end_mark)
        self.value = value


class ScalarToken(Token):
    id = '<scalar>'

    def __init__(self, value, plain, start_mark, end_mark, style=None):
        Token.__init__(self, start_mark, end_mark)
        self.value = value
        self.plain = plain
        self.style = style


class CommentToken(Token):
    id = '<comment>'

    def __init__(self, value, start_mark, end_mark):
        Token.__init__(self, start_mark, end_mark)
        self.value = value

    def reset(self):
        if hasattr(self, 'pre_done'):
            delattr(self, 'pre_done')
lib/spack/external/ruamel/yaml/util.py (vendored)
@@ -1,139 +0,0 @@
# coding: utf-8

"""
some helper functions that might be generally useful
"""

from __future__ import print_function
from __future__ import absolute_import

from .compat import text_type, binary_type
from .main import round_trip_load


# originally as comment
# https://github.com/pre-commit/pre-commit/pull/211#issuecomment-186466605
# if you use this in your code, I suggest adding a test in your test suite
# that check this routines output against a known piece of your YAML
# before upgrades to this code break your round-tripped YAML
def load_yaml_guess_indent(stream, **kw):
    """guess the indent and block sequence indent of yaml stream/string

    returns round_trip_loaded stream, indent level, block sequence indent
    - block sequence indent is the number of spaces before a dash relative to previous indent
    - if there are no block sequences, indent is taken from nested mappings, block sequence
      indent is unset (None) in that case
    """
    # load a yaml file guess the indentation, if you use TABs ...
    def leading_spaces(l):
        idx = 0
        while idx < len(l) and l[idx] == ' ':
            idx += 1
        return idx

    if isinstance(stream, text_type):
        yaml_str = stream
    elif isinstance(stream, binary_type):
        yaml_str = stream.decode('utf-8')  # most likely, but the Reader checks BOM for this
    else:
        yaml_str = stream.read()
    map_indent = None
    indent = None  # default if not found for some reason
    block_seq_indent = None
    prev_line_key_only = None
    key_indent = 0
    for line in yaml_str.splitlines():
        rline = line.rstrip()
        lline = rline.lstrip()
        if lline.startswith('- '):
            l_s = leading_spaces(line)
            block_seq_indent = l_s - key_indent
            idx = l_s + 1
            while line[idx] == ' ':  # this will end as we rstripped
                idx += 1
            if line[idx] == '#':  # comment after -
                continue
            indent = idx - key_indent
            break
        if map_indent is None and prev_line_key_only is not None and rline:
            idx = 0
            while line[idx] in ' -':
                idx += 1
            if idx > prev_line_key_only:
                map_indent = idx - prev_line_key_only
        if rline.endswith(':'):
            key_indent = leading_spaces(line)
            idx = 0
            while line[idx] == ' ':  # this will end on ':'
                idx += 1
            prev_line_key_only = idx
            continue
        prev_line_key_only = None
    if indent is None and map_indent is not None:
        indent = map_indent
    return round_trip_load(yaml_str, **kw), indent, block_seq_indent
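For clarity, a small usage sketch of the helper above; the YAML snippet is invented and the top-level round_trip_dump API is an assumption about the older ruamel interface:

# Round-trip edit that preserves the file's own indentation (sketch).
from ruamel.yaml.util import load_yaml_guess_indent
from ruamel.yaml import round_trip_dump

yaml_text = "servers:\n  - name: web\n    port: 80\n"
data, indent, block_seq_indent = load_yaml_guess_indent(yaml_text)
# For this sample: indent == 4, block_seq_indent == 2.
data["servers"][0]["port"] = 8080
print(round_trip_dump(data, indent=indent, block_seq_indent=block_seq_indent))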
def configobj_walker(cfg):
    """
    walks over a ConfigObj (INI file with comments) generating
    corresponding YAML output (including comments)
    """
    from configobj import ConfigObj
    assert isinstance(cfg, ConfigObj)
    for c in cfg.initial_comment:
        if c.strip():
            yield c
    for s in _walk_section(cfg):
        if s.strip():
            yield s
    for c in cfg.final_comment:
        if c.strip():
            yield c


def _walk_section(s, level=0):
    from configobj import Section
    assert isinstance(s, Section)
    indent = u'  ' * level
    for name in s.scalars:
        for c in s.comments[name]:
            yield indent + c.strip()
        x = s[name]
        if u'\n' in x:
            i = indent + u'  '
            x = u'|\n' + i + x.strip().replace(u'\n', u'\n' + i)
        elif ':' in x:
            x = u"'" + x.replace(u"'", u"''") + u"'"
        line = u'{0}{1}: {2}'.format(indent, name, x)
        c = s.inline_comments[name]
        if c:
            line += u' ' + c
        yield line
    for name in s.sections:
        for c in s.comments[name]:
            yield indent + c.strip()
        line = u'{0}{1}:'.format(indent, name)
        c = s.inline_comments[name]
        if c:
            line += u' ' + c
        yield line
        for val in _walk_section(s[name], level=level+1):
            yield val
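A rough usage sketch for the walker above (assumes the configobj package is installed; the INI content is invented):

from configobj import ConfigObj

cfg = ConfigObj(["[db]", "host = localhost  # local only", "port = 5432"])
for yaml_line in configobj_walker(cfg):
    print(yaml_line)
# prints, roughly:
#   db:
#     host: localhost # local only
#     port: 5432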
# def config_obj_2_rt_yaml(cfg):
#     from .comments import CommentedMap, CommentedSeq
#     from configobj import ConfigObj
#     assert isinstance(cfg, ConfigObj)
#     #for c in cfg.initial_comment:
#     #    if c.strip():
#     #        pass
#     cm = CommentedMap()
#     for name in s.sections:
#         cm[name] = d = CommentedMap()
#
#
#     #for c in cfg.final_comment:
#     #    if c.strip():
#     #        yield c
#     return cm
lib/spack/external/vendor.txt (vendored)
@@ -7,3 +7,4 @@ jinja2==3.0.3
six==1.16.0
macholib==1.16.2
altgraph==0.17.3
ruamel.yaml==0.17.21

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.20.0.dev0"
__version__ = "0.21.0.dev0"
spack_version = __version__

@@ -289,9 +289,14 @@ def _check_build_test_callbacks(pkgs, error_cls):
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

        if test_callbacks and "test" in test_callbacks:
            msg = '{0} package contains "test" method in ' "build_time_test_callbacks"
            instr = 'Remove "test" from: [{0}]'.format(", ".join(test_callbacks))
        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
        # TODO (post-34236): "test"->"test_" once remove deprecated methods
        has_test_method = test_callbacks and any([m.startswith("test") for m in test_callbacks])
        if has_test_method:
            msg = '{0} package contains "test*" method(s) in ' "build_time_test_callbacks"
            instr = 'Remove all methods whose names start with "test" from: [{0}]'.format(
                ", ".join(test_callbacks)
            )
            errors.append(error_cls(msg.format(pkg_name), [instr]))

    return errors
@@ -24,11 +24,9 @@
import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import Union
from typing import List, NamedTuple, Optional, Union
from urllib.error import HTTPError, URLError

import ruamel.yaml as yaml

import llnl.util.filesystem as fsys
import llnl.util.lang
import llnl.util.tty as tty
@@ -195,10 +193,17 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
            db_root_dir = os.path.join(tmpdir, "db_root")
            db = spack_db.Database(None, db_dir=db_root_dir, enable_transaction_locking=False)

            self._index_file_cache.init_entry(cache_key)
            cache_path = self._index_file_cache.cache_path(cache_key)
            with self._index_file_cache.read_transaction(cache_key):
                db._read_from_file(cache_path)
            try:
                self._index_file_cache.init_entry(cache_key)
                cache_path = self._index_file_cache.cache_path(cache_key)
                with self._index_file_cache.read_transaction(cache_key):
                    db._read_from_file(cache_path)
            except spack_db.InvalidDatabaseVersionError as e:
                msg = (
                    f"you need a newer Spack version to read the buildcache index for the "
                    f"following mirror: '{mirror_url}'. {e.database_version_message}"
                )
                raise BuildcacheIndexError(msg) from e

            spec_list = db.query_local(installed=False, in_buildcache=True)
@@ -421,7 +426,7 @@ def update(self, with_cooldown=False):

        self._write_local_index_cache()

        if all_methods_failed:
        if configured_mirror_urls and all_methods_failed:
            raise FetchCacheError(fetch_errors)
        if fetch_errors:
            tty.warn(
@@ -509,13 +514,11 @@ def _binary_index():


class NoOverwriteException(spack.error.SpackError):
    """
    Raised when a file exists and must be overwritten.
    """
    """Raised when a file would be overwritten"""

    def __init__(self, file_path):
        super(NoOverwriteException, self).__init__(
            '"{}" exists in buildcache. Use --force flag to overwrite.'.format(file_path)
            f"Refusing to overwrite the following file: {file_path}"
        )
@@ -618,7 +621,7 @@ def read_buildinfo_file(prefix):
    filename = buildinfo_file_name(prefix)
    with open(filename, "r") as inputfile:
        content = inputfile.read()
        buildinfo = yaml.load(content)
        buildinfo = syaml.load(content)
    return buildinfo
@@ -746,12 +749,14 @@ def get_buildfile_manifest(spec):
    return data


def prefixes_to_hashes(spec):
def hashes_to_prefixes(spec):
    """Return a dictionary of hashes to prefixes for a spec and its deps, excluding externals"""
    return {
        str(s.prefix): s.dag_hash()
        s.dag_hash(): str(s.prefix)
        for s in itertools.chain(
            spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
        )
        if not s.external
    }
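For orientation, a sketch of how a caller might consume the inverted mapping; `spec` is assumed to be a concrete spack.spec.Spec and the hash and path values are invented:

hash_to_prefix = hashes_to_prefixes(spec)
# e.g. {"aszk3jq...": "/opt/spack/.../zlib-1.2.13-aszk3jq..."}  (values invented)
for dag_hash, prefix in hash_to_prefix.items():
    print(dag_hash[:7], "->", prefix)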
@@ -769,7 +774,7 @@ def get_buildinfo_dict(spec, rel=False):
        "relocate_binaries": manifest["binary_to_relocate"],
        "relocate_links": manifest["link_to_relocate"],
        "hardlinks_deduped": manifest["hardlinks_deduped"],
        "prefix_to_hash": prefixes_to_hashes(spec),
        "hash_to_prefix": hashes_to_prefixes(spec),
    }
@@ -778,11 +783,10 @@ def tarball_directory_name(spec):
    Return name of the tarball directory according to the convention
    <os>-<architecture>/<compiler>/<package>-<version>/
    """
    return "%s/%s/%s-%s" % (
        spec.architecture,
        str(spec.compiler).replace("@", "-"),
        spec.name,
        spec.version,
    return os.path.join(
        str(spec.architecture),
        f"{spec.compiler.name}-{spec.compiler.version}",
        f"{spec.name}-{spec.version}",
    )


@@ -791,13 +795,9 @@ def tarball_name(spec, ext):
    Return the name of the tarfile according to the convention
    <os>-<architecture>-<package>-<dag_hash><ext>
    """
    return "%s-%s-%s-%s-%s%s" % (
        spec.architecture,
        str(spec.compiler).replace("@", "-"),
        spec.name,
        spec.version,
        spec.dag_hash(),
        ext,
    return (
        f"{spec.architecture}-{spec.compiler.name}-{spec.compiler.version}-"
        f"{spec.name}-{spec.version}-{spec.dag_hash()}{ext}"
    )
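As a concrete illustration of the two naming conventions above (all values invented):

# For a hypothetical concrete spec with:
#   architecture = linux-ubuntu22.04-x86_64, compiler = gcc@11.3.0,
#   name = zlib, version = 1.2.13, dag_hash = aszk3jqpkrm...
#
# tarball_directory_name(spec) gives:
#   linux-ubuntu22.04-x86_64/gcc-11.3.0/zlib-1.2.13
# tarball_name(spec, ".spack") gives:
#   linux-ubuntu22.04-x86_64-gcc-11.3.0-zlib-1.2.13-aszk3jqpkrm....spack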
@@ -1205,48 +1205,42 @@ def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
        tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)


def _build_tarball(
    spec,
    out_url,
    force=False,
    relative=False,
    unsigned=False,
    allow_root=False,
    key=None,
    regenerate_index=False,
):
class PushOptions(NamedTuple):
    #: Overwrite existing tarball/metadata files in buildcache
    force: bool = False

    #: Whether to use relative RPATHs
    relative: bool = False

    #: Allow absolute paths to package prefixes when creating a tarball
    allow_root: bool = False

    #: Regenerate indices after pushing
    regenerate_index: bool = False

    #: Whether to sign or not.
    unsigned: bool = False

    #: What key to use for signing
    key: Optional[str] = None


def push_or_raise(spec: Spec, out_url: str, options: PushOptions):
    """
    Build a tarball from given spec and put it into the directory structure
    used at the mirror (following <tarball_directory_name>).

    This method raises :py:class:`NoOverwriteException` when ``force=False`` and the tarball or
    spec.json file already exist in the buildcache.
    """
    if not spec.concrete:
        raise ValueError("spec must be concrete to build tarball")

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        _build_tarball_in_stage_dir(
            spec,
            out_url,
            stage_dir=tmpdir,
            force=force,
            relative=relative,
            unsigned=unsigned,
            allow_root=allow_root,
            key=key,
            regenerate_index=regenerate_index,
        )
        _build_tarball_in_stage_dir(spec, out_url, stage_dir=tmpdir, options=options)
def _build_tarball_in_stage_dir(
    spec,
    out_url,
    stage_dir,
    force=False,
    relative=False,
    unsigned=False,
    allow_root=False,
    key=None,
    regenerate_index=False,
):
def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, options: PushOptions):
    cache_prefix = build_cache_prefix(stage_dir)
    tarfile_name = tarball_name(spec, ".spack")
    tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
@@ -1256,7 +1250,7 @@ def _build_tarball_in_stage_dir(

    mkdirp(tarfile_dir)
    if web_util.url_exists(remote_spackfile_path):
        if force:
        if options.force:
            web_util.remove_url(remote_spackfile_path)
        else:
            raise NoOverwriteException(url_util.format(remote_spackfile_path))

@@ -1276,7 +1270,7 @@ def _build_tarball_in_stage_dir(
    remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)

    # If force and exists, overwrite. Otherwise raise exception on collision.
    if force:
    if options.force:
        if web_util.url_exists(remote_specfile_path):
            web_util.remove_url(remote_specfile_path)
        if web_util.url_exists(remote_signed_specfile_path):

@@ -1293,7 +1287,7 @@ def _build_tarball_in_stage_dir(
    # mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
    # For now, we only make a full copy of the spec prefix when in relative mode.

    if relative:
    if options.relative:
        # tarfile is used because it preserves hardlink etc best.
        binaries_dir = workdir
        temp_tarfile_name = tarball_name(spec, ".tar")

@@ -1307,19 +1301,19 @@ def _build_tarball_in_stage_dir(
        binaries_dir = spec.prefix

    # create info for later relocation and create tar
    buildinfo = get_buildinfo_dict(spec, relative)
    buildinfo = get_buildinfo_dict(spec, options.relative)

    # optionally make the paths in the binaries relative to each other
    # in the spack install tree before creating tarball
    if relative:
        make_package_relative(workdir, spec, buildinfo, allow_root)
    elif not allow_root:
    if options.relative:
        make_package_relative(workdir, spec, buildinfo, options.allow_root)
    elif not options.allow_root:
        ensure_package_relocatable(buildinfo, binaries_dir)

    _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)

    # remove copy of install directory
    if relative:
    if options.relative:
        shutil.rmtree(workdir)

    # get the sha256 checksum of the tarball

@@ -1342,7 +1336,7 @@ def _build_tarball_in_stage_dir(
    # This will be used to determine if the directory layout has changed.
    buildinfo = {}
    buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
    buildinfo["relative_rpaths"] = relative
    buildinfo["relative_rpaths"] = options.relative
    spec_dict["buildinfo"] = buildinfo

    with open(specfile_path, "w") as outfile:

@@ -1353,40 +1347,40 @@ def _build_tarball_in_stage_dir(
        json.dump(spec_dict, outfile, indent=0, separators=(",", ":"))

    # sign the tarball and spec file with gpg
    if not unsigned:
        key = select_signing_key(key)
        sign_specfile(key, force, specfile_path)
    if not options.unsigned:
        key = select_signing_key(options.key)
        sign_specfile(key, options.force, specfile_path)

    # push tarball and signed spec json to remote mirror
    web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
    web_util.push_to_url(
        signed_specfile_path if not unsigned else specfile_path,
        remote_signed_specfile_path if not unsigned else remote_specfile_path,
        signed_specfile_path if not options.unsigned else specfile_path,
        remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
        keep_original=False,
    )

    tty.debug('Buildcache for "{0}" written to \n    {1}'.format(spec, remote_spackfile_path))

    # push the key to the build cache's _pgp directory so it can be
    # imported
    if not unsigned:
        push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=stage_dir)
    if not options.unsigned:
        push_keys(out_url, keys=[key], regenerate_index=options.regenerate_index, tmpdir=stage_dir)

    # create an index.json for the build_cache directory so specs can be
    # found
    if regenerate_index:
    if options.regenerate_index:
        generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))

    return None
def nodes_to_be_packaged(specs, root=True, dependencies=True):
def specs_to_be_packaged(
    specs: List[Spec], root: bool = True, dependencies: bool = True
) -> List[Spec]:
    """Return the list of nodes to be packaged, given a list of specs.

    Args:
        specs (List[spack.spec.Spec]): list of root specs to be processed
        root (bool): include the root of each spec in the nodes
        dependencies (bool): include the dependencies of each
        specs: list of root specs to be processed
        root: include the root of each spec in the nodes
        dependencies: include the dependencies of each
            spec in the nodes
    """
    if not root and not dependencies:

@@ -1404,32 +1398,26 @@ def nodes_to_be_packaged(specs, root=True, dependencies=True):
    return list(filter(packageable, nodes))
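For example, under the new signature a caller might select only the dependency closure (a sketch; `env_roots` is a placeholder list of concrete specs):

# Hypothetical: package everything needed by some environment roots,
# excluding the roots themselves.
to_push = specs_to_be_packaged(env_roots, root=False, dependencies=True)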
def push(specs, push_url, include_root: bool = True, include_dependencies: bool = True, **kwargs):
    """Create a binary package for each of the specs passed as input and push them
    to a given push URL.
def push(spec: Spec, mirror_url: str, options: PushOptions):
    """Create and push binary package for a single spec to the specified
    mirror url.

    Args:
        specs (List[spack.spec.Spec]): installed specs to be packaged
        push_url (str): url where to push the binary package
        include_root (bool): include the root of each spec in the nodes
        include_dependencies (bool): include the dependencies of each
            spec in the nodes
        **kwargs: TODO
        spec: Spec to package and push
        mirror_url: Desired destination url for binary package
        options:

    Returns:
        True if package was pushed, False otherwise.

    """
    # Be explicit about the argument type
    if type(include_root) != bool or type(include_dependencies) != bool:
        raise ValueError("Expected include_root/include_dependencies to be True/False")
    try:
        push_or_raise(spec, mirror_url, options)
    except NoOverwriteException as e:
        warnings.warn(str(e))
        return False

    nodes = nodes_to_be_packaged(specs, root=include_root, dependencies=include_dependencies)

    # TODO: This seems to be an easy target for task
    # TODO: distribution using a parallel pool
    for node in nodes:
        try:
            _build_tarball(node, push_url, **kwargs)
        except NoOverwriteException as e:
            warnings.warn(str(e))

    return True
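A minimal caller-side sketch of the options-based API introduced above; the spec and mirror URL are placeholders:

import spack.binary_distribution as bindist
import spack.spec

spec = spack.spec.Spec("zlib").concretized()  # hypothetical concrete spec
options = bindist.PushOptions(force=False, unsigned=True, regenerate_index=True)
if not bindist.push(spec, "file:///tmp/mymirror", options):
    print("already in buildcache; rerun with force=True to overwrite")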
def try_verify(specfile_path):

@@ -1675,7 +1663,7 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
        buildinfo[key] = new_list


def relocate_package(spec, allow_root):
def relocate_package(spec):
    """
    Relocate the given package
    """

@@ -1693,23 +1681,23 @@ def relocate_package(spec, allow_root):
    old_spack_prefix = str(buildinfo.get("spackprefix"))
    old_rel_prefix = buildinfo.get("relative_prefix")
    old_prefix = os.path.join(old_layout_root, old_rel_prefix)
    rel = buildinfo.get("relative_rpaths")
    prefix_to_hash = buildinfo.get("prefix_to_hash", None)
    if old_rel_prefix != new_rel_prefix and not prefix_to_hash:
    rel = buildinfo.get("relative_rpaths", False)

    # In the past prefix_to_hash was the default and externals were not dropped, so prefixes
    # were not unique.
    if "hash_to_prefix" in buildinfo:
        hash_to_old_prefix = buildinfo["hash_to_prefix"]
    elif "prefix_to_hash" in buildinfo:
        hash_to_old_prefix = dict((v, k) for (k, v) in buildinfo["prefix_to_hash"].items())
    else:
        hash_to_old_prefix = dict()

    if old_rel_prefix != new_rel_prefix and not hash_to_old_prefix:
        msg = "Package tarball was created from an install "
        msg += "prefix with a different directory layout and an older "
        msg += "buildcache create implementation. It cannot be relocated."
        raise NewLayoutException(msg)
    # older buildcaches do not have the prefix_to_hash dictionary
    # need to set an empty dictionary and add one entry to
    # prefix_to_prefix to reproduce the old behavior
    if not prefix_to_hash:
        prefix_to_hash = dict()
    hash_to_prefix = dict()
    hash_to_prefix[spec.format("{hash}")] = str(spec.package.prefix)
    new_deps = spack.build_environment.get_rpath_deps(spec.package)
    for d in new_deps + spec.dependencies(deptype="run"):
        hash_to_prefix[d.format("{hash}")] = str(d.prefix)
    # Spurious replacements (e.g. sbang) will cause issues with binaries
    # For example, the new sbang can be longer than the old one.
    # Hence 2 dictionaries are maintained here.

@@ -1723,9 +1711,11 @@ def relocate_package(spec, allow_root):
    # First match specific prefix paths. Possibly the *local* install prefix
    # of some dependency is in an upstream, so we cannot assume the original
    # spack store root can be mapped uniformly to the new spack store root.
    for orig_prefix, hash in prefix_to_hash.items():
        prefix_to_prefix_text[orig_prefix] = hash_to_prefix.get(hash, None)
        prefix_to_prefix_bin[orig_prefix] = hash_to_prefix.get(hash, None)
    for dag_hash, new_dep_prefix in hashes_to_prefixes(spec).items():
        if dag_hash in hash_to_old_prefix:
            old_dep_prefix = hash_to_old_prefix[dag_hash]
            prefix_to_prefix_bin[old_dep_prefix] = new_dep_prefix
            prefix_to_prefix_text[old_dep_prefix] = new_dep_prefix

    # Only then add the generic fallback of install prefix -> install prefix.
    prefix_to_prefix_text[old_prefix] = new_prefix

@@ -1864,7 +1854,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
    return tarfile_path
def extract_tarball(spec, download_result, allow_root=False, unsigned=False, force=False):
def extract_tarball(spec, download_result, unsigned=False, force=False):
    """
    extract binary tarball for given package into install area
    """

@@ -1960,7 +1950,7 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
        os.remove(specfile_path)

    try:
        relocate_package(spec, allow_root)
        relocate_package(spec)
    except Exception as e:
        shutil.rmtree(spec.prefix)
        raise e

@@ -1979,7 +1969,7 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
        _delete_staged_downloads(download_result)
def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None):
def install_root_node(spec, unsigned=False, force=False, sha256=None):
    """Install the root node of a concrete spec from a buildcache.

    Checking the sha256 sum of a node before installation is usually needed only

@@ -1988,8 +1978,6 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None

    Args:
        spec: spec to be installed (note that only the root node will be installed)
        allow_root (bool): allows the root directory to be present in binaries
            (may affect relocation)
        unsigned (bool): if True allows installing unsigned binaries
        force (bool): force installation if the spec is already present in the
            local store

@@ -2025,24 +2013,22 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
    # don't print long padded paths while extracting/relocating binaries
    with spack.util.path.filter_padding():
        tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
        extract_tarball(spec, download_result, allow_root, unsigned, force)
        extract_tarball(spec, download_result, unsigned, force)
        spack.hooks.post_install(spec, False)
        spack.store.db.add(spec, spack.store.layout)


def install_single_spec(spec, allow_root=False, unsigned=False, force=False):
def install_single_spec(spec, unsigned=False, force=False):
    """Install a single concrete spec from a buildcache.

    Args:
        spec (spack.spec.Spec): spec to be installed
        allow_root (bool): allows the root directory to be present in binaries
            (may affect relocation)
        unsigned (bool): if True allows installing unsigned binaries
        force (bool): force installation if the spec is already present in the
            local store
    """
    for node in spec.traverse(root=True, order="post", deptype=("link", "run")):
        install_root_node(node, allow_root=allow_root, unsigned=unsigned, force=force)
        install_root_node(node, unsigned=unsigned, force=force)
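A small caller-side sketch of the simplified signature (placeholder spec; assumes a configured mirror with a matching buildcache entry):

import spack.binary_distribution as bindist
import spack.spec

# Install a concrete spec and its link/run closure from a buildcache.
spec = spack.spec.Spec("zlib").concretized()  # hypothetical
bindist.install_single_spec(spec, unsigned=True, force=False)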
def try_direct_fetch(spec, mirrors=None):

@@ -2429,6 +2415,10 @@ def __init__(self, all_architectures):
        self.possible_specs = specs

    def __call__(self, spec, **kwargs):
        """
        Args:
            spec (str): The spec being searched for in its string representation or hash.
        """
        matches = []
        if spec.startswith("/"):
            # Matching a DAG hash

@@ -2450,6 +2440,10 @@ def __str__(self):
        return "{}, due to: {}".format(self.args[0], self.args[1])


class BuildcacheIndexError(spack.error.SpackError):
    """Raised when a buildcache cannot be read for any reason"""


FetchIndexResult = collections.namedtuple("FetchIndexResult", "etag hash data fresh")
@@ -200,7 +200,7 @@ def _install_by_hash(
        matches = spack.store.find([spec_str], multiple=False, query_fn=query)
        for match in matches:
            spack.binary_distribution.install_root_node(
                match, allow_root=True, unsigned=True, force=True, sha256=pkg_sha256
                match, unsigned=True, force=True, sha256=pkg_sha256
            )

    def _install_and_test(
@@ -19,6 +19,7 @@
import spack.environment
import spack.tengine
import spack.util.executable
from spack.environment import depfile

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path

@@ -121,15 +122,12 @@ def update_syspath_and_environ(self) -> None:
        )

    def _install_with_depfile(self) -> None:
        spackcmd = spack.util.executable.which("spack")
        spackcmd(
            "-e",
            str(self.environment_root()),
            "env",
            "depfile",
            "-o",
            str(self.environment_root().joinpath("Makefile")),
        model = depfile.MakefileModel.from_env(self)
        template = spack.tengine.make_environment().get_template(
            os.path.join("depfile", "Makefile")
        )
        makefile = self.environment_root() / "Makefile"
        makefile.write_text(template.render(model.to_dict()))
        make = spack.util.executable.which("make")
        kwargs = {}
        if not tty.is_debug():
@@ -43,6 +43,7 @@
from typing import List, Tuple

import llnl.util.tty as tty
from llnl.util.filesystem import join_path
from llnl.util.lang import dedupe
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize

@@ -53,7 +54,6 @@
import spack.build_systems.python
import spack.builder
import spack.config
import spack.install_test
import spack.main
import spack.package_base
import spack.paths

@@ -66,6 +66,7 @@
import spack.util.path
import spack.util.pattern
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError
from spack.util.cpus import cpus_available
from spack.util.environment import (

@@ -588,7 +589,6 @@ def set_module_variables_for_package(pkg):

    # TODO: make these build deps that can be installed if not found.
    m.make = MakeExecutable("make", jobs)
    m.gmake = MakeExecutable("gmake", jobs)
    m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
    # TODO: johnwparent: add package or builder support to define these build tools
    # for now there is no entrypoint for builders to define these on their
@@ -1075,19 +1075,18 @@ def _setup_pkg_and_run(
            # 'pkg' is not defined yet
            pass
        elif context == "test":
            logfile = os.path.join(
                pkg.test_suite.stage, spack.install_test.TestSuite.test_log_name(pkg.spec)
            )
            logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))

        error_msg = str(exc)
        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
            process = "test the installation" if context == "test" else "build from sources"
            error_msg = (
                "The '{}' package cannot find an attribute while trying to build "
                "from sources. This might be due to a change in Spack's package format "
                "The '{}' package cannot find an attribute while trying to {}. "
                "This might be due to a change in Spack's package format "
                "to support multiple build-systems for a single package. You can fix this "
                "by updating the build recipe, and you can also report the issue as a bug. "
                "by updating the {} recipe, and you can also report the issue as a bug. "
                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
            ).format(pkg.name)
            ).format(pkg.name, process, context)
            error_msg = colorize("@*R{{{}}}".format(error_msg))
            error_msg = "{}\n\n{}".format(str(exc), error_msg)

@@ -1216,6 +1215,9 @@ def child_fun():
    return child_result
CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)


def get_package_context(traceback, context=3):
    """Return some context for an error message when the build fails.

@@ -1244,32 +1246,38 @@ def make_stack(tb, stack=None):

    stack = make_stack(traceback)

    basenames = tuple(base.__name__ for base in CONTEXT_BASES)
    for tb in stack:
        frame = tb.tb_frame
        if "self" in frame.f_locals:
            # Find the first proper subclass of PackageBase.
            # Find the first proper subclass of the PackageBase or BaseBuilder, but
            # don't provide context if the code is actually in the base classes.
            obj = frame.f_locals["self"]
            if isinstance(obj, spack.package_base.PackageBase):
            func = getattr(obj, tb.tb_frame.f_code.co_name, "")
            if func:
                typename, *_ = func.__qualname__.partition(".")

            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
                break
    else:
        return None

    # We found obj, the Package implementation we care about.
    # Point out the location in the install method where we failed.
    lines = [
        "{0}:{1:d}, in {2}:".format(
            inspect.getfile(frame.f_code),
            frame.f_lineno - 1,  # subtract 1 because f_lineno is 0-indexed
            frame.f_code.co_name,
        )
    ]
    filename = inspect.getfile(frame.f_code)
    lineno = frame.f_lineno
    if os.path.basename(filename) == "package.py":
        # subtract 1 because we inject a magic import at the top of package files.
        # TODO: get rid of the magic import.
        lineno -= 1

    lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]

    # Build a message showing context in the install method.
    sourcelines, start = inspect.getsourcelines(frame)

    # Calculate lineno of the error relative to the start of the function.
    # Subtract 1 because f_lineno is 0-indexed.
    fun_lineno = frame.f_lineno - start - 1
    fun_lineno = lineno - start
    start_ctx = max(0, fun_lineno - context)
    sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]

@@ -1360,6 +1368,13 @@ def long_message(self):
        out.write("See {0} log for details:\n".format(self.log_type))
        out.write("  {0}\n".format(self.log_name))

        # Also output the test log path IF it exists
        if self.context != "test":
            test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
            if os.path.isfile(test_log):
                out.write("\nSee test log for details:\n")
                out.write("  {0}\n".format(test_log))

        return out.getvalue()

    def __str__(self):
@@ -108,7 +108,10 @@ def execute_build_time_tests(builder: spack.builder.Builder):
        builder: builder prescribing the test callbacks. The name of the callbacks is
            stored as a list of strings in the ``build_time_test_callbacks`` attribute.
    """
    builder.pkg.run_test_callbacks(builder, builder.build_time_test_callbacks, "build")
    if not builder.pkg.run_tests or not builder.build_time_test_callbacks:
        return

    builder.pkg.tester.phase_tests(builder, "build", builder.build_time_test_callbacks)


def execute_install_time_tests(builder: spack.builder.Builder):

@@ -118,7 +121,10 @@ def execute_install_time_tests(builder: spack.builder.Builder):
        builder: builder prescribing the test callbacks. The name of the callbacks is
            stored as a list of strings in the ``install_time_test_callbacks`` attribute.
    """
    builder.pkg.run_test_callbacks(builder, builder.install_time_test_callbacks, "install")
    if not builder.pkg.run_tests or not builder.install_time_test_callbacks:
        return

    builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)
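For context, a package opts into these callbacks roughly like so; the package name is invented, and the attribute names come from the docstrings above:

class Mylib(MakefilePackage):
    """Hypothetical package illustrating build-time test callbacks."""

    # Methods run after the build phase, but only when the user requested
    # tests (e.g. spack install --test=root), i.e. when pkg.run_tests is set.
    build_time_test_callbacks = ["check"]

    def check(self):
        make("check")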
class BaseBuilder(spack.builder.Builder):

@@ -90,9 +90,13 @@ class CMakePackage(spack.package_base.PackageBase):

    with when("build_system=cmake"):
        # https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
        # See https://github.com/spack/spack/pull/36679 and related issues for a
        # discussion of the trade-offs between Release and RelWithDebInfo for default
        # builds. Release is chosen to maximize performance and reduce disk-space burden,
        # at the cost of more difficulty in debugging.
        variant(
            "build_type",
            default="RelWithDebInfo",
            default="Release",
            description="CMake build type",
            values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
        )
@@ -137,11 +137,12 @@ def cuda_flags(arch_list):
    conflicts("%gcc@11:", when="+cuda ^cuda@:11.4.0")
    conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.0")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.1")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
    conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -142,7 +142,7 @@ def license_required(self):
        # The Intel libraries are provided without requiring a license as of
        # version 2017.2. Trying to specify one anyway will fail. See:
        # https://software.intel.com/en-us/articles/free-ipsxe-tools-and-libraries
        return self._has_compilers or self.version < ver("2017.2")
        return self._has_compilers or self.version < Version("2017.2")

    #: Comment symbol used in the license.lic file
    license_comment = "#"

@@ -341,7 +341,7 @@ def version_yearlike(self):
                v_year = year
                break

        return ver("%s.%s" % (v_year, v_tail))
        return Version("%s.%s" % (v_year, v_tail))

    # ---------------------------------------------------------------------
    # Directory handling common to all Intel components

@@ -764,9 +764,9 @@ def _tbb_abi(self):
        elif matches:
            # TODO: Confirm that this covers clang (needed on Linux only)
            gcc_version = Version(matches.groups()[1])
            if gcc_version >= ver("4.7"):
            if gcc_version >= Version("4.7"):
                abi = "gcc4.7"
            elif gcc_version >= ver("4.4"):
            elif gcc_version >= Version("4.4"):
                abi = "gcc4.4"
            else:
                abi = "gcc4.1"  # unlikely, one hopes.

@@ -1019,7 +1019,7 @@ def libs(self):
        # Intel MPI since 2019 depends on libfabric which is not in the
        # lib directory but in a directory of its own which should be
        # included in the rpath
        if self.version_yearlike >= ver("2019"):
        if self.version_yearlike >= Version("2019"):
            d = ancestor(self.component_lib_dir("mpi"))
            if "+external-libfabric" in self.spec:
                result += self.spec["libfabric"].libs

@@ -33,7 +33,7 @@ class MesonPackage(spack.package_base.PackageBase):
    with when("build_system=meson"):
        variant(
            "buildtype",
            default="debugoptimized",
            default="release",
            description="Meson build type",
            values=("plain", "debug", "debugoptimized", "release", "minsize"),
        )
@@ -4,13 +4,16 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common utilities for managing intel oneapi packages."""
import getpass
import os
import platform
import shutil
from os.path import basename, dirname, isdir

from llnl.util.filesystem import find_headers, find_libraries, join_path
from llnl.util.link_tree import LinkTree

from spack.directives import conflicts, variant
from spack.package import mkdirp
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable

@@ -125,6 +128,31 @@ def setup_run_environment(self, env):
                )
            )

    def symlink_dir(self, src, dest):
        # Taken from: https://github.com/spack/spack/pull/31285/files
        # oneapi bin/lib directories are 2 or 3 levels below the
        # prefix, but it is more typical to have them directly in the
        # prefix. The location has changed over time. Rather than make
        # every package that needs to know where include/lib are
        # located be aware of this, put in symlinks to conform. This
        # is good enough for some, but not all packages.

        # If we symlink top-level directories directly, files won't
        # show up in views. Create real dirs and symlink files instead.

        # Create a real directory at dest
        mkdirp(dest)

        # Symlink all files in src to dest, keeping directories as dirs
        for entry in os.listdir(src):
            src_path = join_path(src, entry)
            dest_path = join_path(dest, entry)
            if isdir(src_path) and os.access(src_path, os.X_OK):
                link_tree = LinkTree(src_path)
                link_tree.merge(dest_path)
            else:
                os.symlink(src_path, dest_path)
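A sketch of a possible call site inside an install step; the nested path is invented and will differ across oneapi releases:

# Hypothetical: expose a deeply nested oneapi bin/ directory at the
# conventional prefix.bin location so dependents find tools where expected.
self.symlink_dir(
    join_path(self.prefix, "compiler", "latest", "linux", "bin"), self.prefix.bin
)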
class IntelOneApiLibraryPackage(IntelOneApiPackage):
    """Base class for Intel oneAPI library packages.

@@ -290,7 +290,7 @@ def get_external_python_for_prefix(self):

        python_external_config = spack.config.get("packages:python:externals", [])
        python_externals_configured = [
            spack.spec.Spec(item["spec"])
            spack.spec.parse_with_version_concrete(item["spec"])
            for item in python_external_config
            if item["prefix"] == self.spec.external_path
        ]

@@ -7,7 +7,7 @@

import llnl.util.lang as lang

from spack.directives import extends, maintainers
from spack.directives import extends

from .generic import GenericBuilder, Package

@@ -71,8 +71,6 @@ class RPackage(Package):

    GenericBuilder = RBuilder

    maintainers("glennpj")

    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = "RPackage"
Some files were not shown because too many files have changed in this diff.