# Compare commits: `v0.19.1...features/i`

768 commits
*(Commit list: 768 commits identified by abbreviated SHA only; the Author and Date columns of the table were empty and the avatar images and commit messages did not survive extraction.)*
## `.github/workflows/audit.yaml` (vendored; 4 changes)

```diff
@@ -19,8 +19,8 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: ${{inputs.python_version}}
    - name: Install Python packages
```
## `.github/workflows/bootstrap.yml` (vendored; 24 changes)

Every hunk pins `actions/checkout` to the newer commit:

```diff
@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
       run: |
         brew install cmake bison@2.7 tree
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
    - name: Bootstrap clingo
      run: |
        source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
       run: |
         brew install tree
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
    - name: Bootstrap clingo
      run: |
        set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -214,7 +214,7 @@ jobs:
     - name: Bootstrap clingo
       run: |
         set -ex
-        for ver in '2.7' '3.6' '3.7' '3.8' '3.9' '3.10' ; do
+        for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
          not_found=1
          ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
          echo "Testing $ver_dir"
@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
```
## `.github/workflows/build-containers.yml` (vendored; 4 changes)

```diff
@@ -50,7 +50,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2

      - name: Set Container Tag Normal (Nightly)
        run: |
@@ -80,7 +80,7 @@ jobs:
         fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
        with:
          name: dockerfiles
          path: dockerfiles
```
## `.github/workflows/ci.yaml` (vendored; 10 changes)

The Python 2.7 audit job is removed entirely:

```diff
@@ -20,12 +20,6 @@ jobs:
     uses: ./.github/workflows/valid-style.yml
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
-  audit-ancient-python:
-    uses: ./.github/workflows/audit.yaml
-    needs: [ changes ]
-    with:
-      with_coverage: ${{ needs.changes.outputs.core }}
-      python_version: 2.7
   all-prechecks:
     needs: [ prechecks ]
     runs-on: ubuntu-latest
@@ -41,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
@@ -85,7 +79,7 @@ jobs:
     needs: [ prechecks ]
     uses: ./.github/workflows/windows_python.yml
   all:
-    needs: [ windows, unit-tests, bootstrap, audit-ancient-python ]
+    needs: [ windows, unit-tests, bootstrap ]
    runs-on: ubuntu-latest
    steps:
    - name: Success
```
## `.github/workflows/setup_git.ps1` (vendored; 8 changes)

The hunk arithmetic (15 lines before, 9 after) indicates the copyright-year bump, the removal of `Set-Location spack` (the workflows no longer check out into a `spack` subdirectory), and the removal of the CVE-2022-39253 workaround:

```diff
@@ -1,15 +1,9 @@
-# (c) 2021 Lawrence Livermore National Laboratory
-
-Set-Location spack
+# (c) 2022 Lawrence Livermore National Laboratory

 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
 git config --global core.longpaths true

-# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
-# This is needed to let some fixture in our unit-test suite run
-git config --global protocol.file.allow always
-
 if ($(git branch --show-current) -ne "develop")
 {
   git branch develop origin/develop
```
## `.github/workflows/setup_git.sh` (vendored; 4 changes)

The same CVE-2022-39253 workaround is removed here (10 lines before, 6 after — exactly the four-line block):

```diff
@@ -2,10 +2,6 @@
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"

-# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
-# This is needed to let some fixture in our unit-test suite run
-git config --global protocol.file.allow always
-
 # create a local pr base branch
 if [[ -n $GITHUB_BASE_REF ]]; then
     git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}"
```
## `.github/workflows/unit_tests.yaml` (vendored; 64 changes)

Python 2.7 is dropped from the test matrix, Python 3.6 moves to an `ubuntu-20.04` runner, and the Python-2 conditionals in the install steps disappear:

```diff
@@ -11,39 +11,46 @@ concurrency:
 jobs:
   # Run unit tests with different configurations on linux
   ubuntu:
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11']
+        os: [ubuntu-latest]
+        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
         concretizer: ['clingo']
         on_develop:
         - ${{ github.ref == 'refs/heads/develop' }}
         include:
-        - python-version: 2.7
+        - python-version: '3.11'
+          os: ubuntu-latest
           concretizer: original
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
-        - python-version: '3.11'
-          concretizer: original
-          on_develop: ${{ github.ref == 'refs/heads/develop' }}
+        - python-version: '3.6'
+          os: ubuntu-20.04
+          concretizer: clingo
+          on_develop: ${{ github.ref == 'refs/heads/develop' }}
         exclude:
         - python-version: '3.7'
+          os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false
         - python-version: '3.8'
+          os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false
         - python-version: '3.9'
+          os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false
         - python-version: '3.10'
+          os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false

     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -52,24 +59,11 @@ jobs:
         # Needed for unit tests
         sudo apt-get -y install \
             coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
-            patchelf cmake bison libbison-dev kcov
+            cmake bison libbison-dev kcov
     - name: Install Python packages
       run: |
-        pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist
-        # Install pytest-cov only on recent Python, to avoid stalling on Python 2.7 due
-        # to bugs on an unmaintained version of the package when used with xdist.
-        if [[ ${{ matrix.python-version }} != "2.7" ]]; then
-          pip install --upgrade pytest-cov
-        fi
-        # ensure style checks are not skipped in unit tests for python >= 3.6
-        # note that true/false (i.e., 1/0) are opposite in conditions in python and bash
-        if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
-          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click==8.0.4" "black<=21.12b0"
-        fi
-    - name: Pin pathlib for Python 2.7
-      if: ${{ matrix.python-version == 2.7 }}
-      run: |
-        pip install -U pathlib2==2.3.6 toml
+        pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist pytest-cov
+        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
@@ -82,6 +76,7 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         spack bootstrap disable spack-install
+        spack bootstrap now
         spack -v solve zlib
     - name: Run unit tests
       env:
@@ -89,7 +84,7 @@ jobs:
         SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
         SPACK_TEST_PARALLEL: 2
         COVERAGE: true
-        UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }}
+        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
      run: |
        share/spack/qa/run-unit-tests
    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -99,10 +94,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -138,7 +133,7 @@ jobs:
       dnf install -y \
         bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
         make patch tcl unzip which xz
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -150,25 +145,22 @@ jobs:
       shell: runuser -u spack-test -- bash {0}
       run: |
         source share/spack/setup-env.sh
-        spack -d solve zlib
+        spack -d bootstrap now --dev
         spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
   # Test for the clingo based solver (using clingo-cffi)
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
      run: |
        sudo apt-get -y update
        # Needed for unit tests
-        sudo apt-get -y install \
-            coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
-            patchelf kcov
+        sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
@@ -193,10 +185,10 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
```
## `.github/workflows/valid-style.yml` (vendored; 14 changes)

`vermin` no longer targets Python 2.7, and the style tools are unpinned:

```diff
@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: '3.11'
        cache: 'pip'
@@ -28,23 +28,23 @@ jobs:
         pip install --upgrade pip
         pip install --upgrade vermin
     - name: vermin (Spack's Core)
-      run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
+      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
-      run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv var/spack/repos
+      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
   # Run style checks on the files that have been changed
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: '3.11'
        cache: 'pip'
    - name: Install Python packages
      run: |
-        python3 -m pip install --upgrade pip six setuptools types-six click==8.0.2 'black==21.12b0' mypy isort clingo flake8
+        python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
```
## `.github/workflows/windows_python.yml` (vendored; 175 changes)

Paths switch to forward slashes and drop the `spack` checkout subdirectory, a `validate_last_exit.ps1` step is added after each unit-test run, and the installer jobs are commented out wholesale:

```diff
@@ -10,15 +10,15 @@ concurrency:
 defaults:
   run:
     shell:
-      powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
+      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
 jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -26,13 +26,11 @@ jobs:
       python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
     - name: Create local develop
       run: |
-        .\spack\.github\workflows\setup_git.ps1
+        ./.github/workflows/setup_git.ps1
     - name: Unit Test
       run: |
-        echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
-        cd spack
-        dir
         spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
+        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -41,10 +39,10 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -52,12 +50,11 @@ jobs:
       python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
     - name: Create local develop
       run: |
-        .\spack\.github\workflows\setup_git.ps1
+        ./.github/workflows/setup_git.ps1
     - name: Command Unit Test
       run: |
-        echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
-        cd spack
         spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
+        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -66,10 +63,10 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -78,81 +75,81 @@ jobs:
     - name: Build Test
       run: |
         spack compiler find
-        echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
         spack external find cmake
         spack external find ninja
         spack -d install abseil-cpp
-  make-installer:
-    runs-on: windows-latest
-    steps:
-    - name: Disable Windows Symlinks
-      run: |
-        git config --global core.symlinks false
-      shell:
-        powershell
-    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip six pywin32 setuptools
-    - name: Add Light and Candle to Path
-      run: |
-        $env:WIX >> $GITHUB_PATH
-    - name: Run Installer
-      run: |
-        .\spack\share\spack\qa\setup_spack.ps1
-        spack make-installer -s spack -g SILENT pkg
-        echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-      env:
-        ProgressPreference: SilentlyContinue
-    - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-      with:
-        name: Windows Spack Installer Bundle
-        path: ${{ env.installer_root }}\pkg\Spack.exe
-    - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-      with:
-        name: Windows Spack Installer
-        path: ${{ env.installer_root}}\pkg\Spack.msi
-  execute-installer:
-    needs: make-installer
-    runs-on: windows-latest
-    defaults:
-      run:
-        shell: pwsh
-    steps:
-    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip six pywin32 setuptools
-    - name: Setup installer directory
-      run: |
-        mkdir -p spack_installer
-        echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-    - uses: actions/download-artifact@v3
-      with:
-        name: Windows Spack Installer Bundle
-        path: ${{ env.spack_installer }}
-    - name: Execute Bundled Installer
-      run: |
-        $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
-        $handle = $proc.Handle # cache proc.Handle
-        $proc.WaitForExit();
-        $LASTEXITCODE
-      env:
-        ProgressPreference: SilentlyContinue
-    - uses: actions/download-artifact@v3
-      with:
-        name: Windows Spack Installer
-        path: ${{ env.spack_installer }}
-    - name: Execute MSI
-      run: |
-        $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
-        $handle = $proc.Handle # cache proc.Handle
-        $proc.WaitForExit();
-        $LASTEXITCODE
+# TODO: johnwparent - reduce the size of the installer operations
+#  make-installer:
+#    runs-on: windows-latest
+#    steps:
+#    - name: Disable Windows Symlinks
+#      run: |
+#        git config --global core.symlinks false
+#      shell:
+#        powershell
+#    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+#      with:
+#        fetch-depth: 0
+#    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
+#      with:
+#        python-version: 3.9
+#    - name: Install Python packages
+#      run: |
+#        python -m pip install --upgrade pip six pywin32 setuptools
+#    - name: Add Light and Candle to Path
+#      run: |
+#        $env:WIX >> $GITHUB_PATH
+#    - name: Run Installer
+#      run: |
+#        ./share/spack/qa/setup_spack_installer.ps1
+#        spack make-installer -s . -g SILENT pkg
+#        echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+#      env:
+#        ProgressPreference: SilentlyContinue
+#    - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+#      with:
+#        name: Windows Spack Installer Bundle
+#        path: ${{ env.installer_root }}\pkg\Spack.exe
+#    - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+#      with:
+#        name: Windows Spack Installer
+#        path: ${{ env.installer_root}}\pkg\Spack.msi
+#  execute-installer:
+#    needs: make-installer
+#    runs-on: windows-latest
+#    defaults:
+#      run:
+#        shell: pwsh
+#    steps:
+#    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
+#      with:
+#        python-version: 3.9
+#    - name: Install Python packages
+#      run: |
+#        python -m pip install --upgrade pip six pywin32 setuptools
+#    - name: Setup installer directory
+#      run: |
+#        mkdir -p spack_installer
+#        echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+#    - uses: actions/download-artifact@v3
+#      with:
+#        name: Windows Spack Installer Bundle
+#        path: ${{ env.spack_installer }}
+#    - name: Execute Bundled Installer
+#      run: |
+#        $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
+#        $handle = $proc.Handle # cache proc.Handle
+#        $proc.WaitForExit();
+#        $LASTEXITCODE
+#      env:
+#        ProgressPreference: SilentlyContinue
+#    - uses: actions/download-artifact@v3
+#      with:
+#        name: Windows Spack Installer
+#        path: ${{ env.spack_installer }}
+#    - name: Execute MSI
+#      run: |
+#        $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
+#        $handle = $proc.Handle # cache proc.Handle
+#        $proc.WaitForExit();
+#        $LASTEXITCODE
```
## `CHANGELOG.md` (274 changes)

The `@@ -1,16 +1,284 @@` hunk adds the `v0.19.0` release notes ahead of the existing `v0.18.1` entry:

# v0.19.0 (2022-11-11)

`v0.19.0` is a major feature release.

## Major features in this release

1. **Package requirements**

   Spack's traditional [package preferences](https://spack.readthedocs.io/en/latest/build_settings.html#package-preferences) are soft, but we've added hard requirements to `packages.yaml` and `spack.yaml` (#32528, #32369). Package requirements use the same syntax as specs:

   ```yaml
   packages:
     libfabric:
       require: "@1.13.2"
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]
   ```

   More details in [the docs](https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements).

2. **Environment UI Improvements**

   * Fewer surprising modifications to `spack.yaml` (#33711):

     * `spack install` in an environment will no longer add to the `specs:` list; you'll need to either use `spack add <spec>` or `spack install --add <spec>`.

     * Similarly, `spack uninstall` will not remove from your environment's `specs:` list; you'll need to use `spack remove` or `spack uninstall --remove`.

     This makes an environment easier to manage, as there is a clear separation between the stack to be installed (`spack.yaml`/`spack.lock`) and which parts of it should be installed (`spack install` / `spack uninstall`); see the sketch at the end of this item.

   * `concretizer:unify:true` is now the default mode for new environments (#31787)

     We see more users creating `unify:true` environments now. Users who need the old behavior can add `unify:false` to their environment, which concretizes every spec in the environment independently.

   * Include environment configuration from URLs (#29026, [docs](https://spack.readthedocs.io/en/latest/environments.html#included-configurations))

     You can now include configuration in your environment directly from a URL:

     ```yaml
     spack:
       include:
       - https://github.com/path/to/raw/config/compilers.yaml
     ```
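
   Taken together, the new workflow looks like this (a minimal sketch; `myenv` and the package names are hypothetical):

   ```console
   $ spack env activate myenv
   $ spack add zlib                 # record zlib in the specs: list without installing
   $ spack install --add hdf5      # record hdf5 in specs: and install it in one step
   $ spack install                  # install everything the environment lists
   $ spack uninstall --remove hdf5 # uninstall hdf5 and drop it from specs:
   ```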

3. **Multiple Build Systems**

   An increasing number of packages in the ecosystem need the ability to support multiple build systems (#30738, [docs](https://spack.readthedocs.io/en/latest/packaging_guide.html#multiple-build-systems)), either across versions, across platforms, or within the same version of the software. This has been hard to support through multiple inheritance, as methods from different build system superclasses would conflict. `package.py` files can now define separate builder classes with installation logic for different build systems, e.g.:

   ```python
   class ArpackNg(CMakePackage, AutotoolsPackage):

       build_system(
           conditional("cmake", when="@0.64:"),
           conditional("autotools", when="@:0.63"),
           default="cmake",
       )


   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           pass


   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           pass
   ```

4. **Compiler and variant propagation**

   Compiler flags and variants used to behave inconsistently: compiler flags set for a package were inherited by its dependencies, while variants were not. The two are now consistent, with propagation that can be enabled or disabled for both variants and compiler flags.

   Example syntax:
   - `package ++variant`: enabled variant that will be propagated to dependencies
   - `package +variant`: enabled variant that will NOT be propagated to dependencies
   - `package ~~variant`: disabled variant that will be propagated to dependencies
   - `package ~variant`: disabled variant that will NOT be propagated to dependencies
   - `package cflags==-g`: `cflags` will be propagated to dependencies
   - `package cflags=-g`: `cflags` will NOT be propagated to dependencies

   Syntax for non-boolean variants is similar to compiler flags. More in the docs for [variants](https://spack.readthedocs.io/en/latest/basic_usage.html#variants) and [compiler flags](https://spack.readthedocs.io/en/latest/basic_usage.html#compiler-flags).
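
   For example (a sketch; `hdf5` stands in for any package and `debug` for any boolean variant it defines):

   ```console
   $ spack spec hdf5 ++debug       # +debug on hdf5, propagated to its dependencies
   $ spack spec hdf5 +debug        # +debug on hdf5 only
   $ spack spec hdf5 cflags=="-g"  # -g propagated to the cflags of dependencies too
   ```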

5. **Enhancements to git version specifiers**

   * `v0.18.0` added the ability to use git commits as versions. You can now use the `git.` prefix to specify git tags or branches as versions. All of these are valid git versions in `v0.19` (#31200):

     ```console
     foo@abcdef1234abcdef1234abcdef1234abcdef1234      # raw commit
     foo@git.abcdef1234abcdef1234abcdef1234abcdef1234  # commit with git prefix
     foo@git.develop                                   # the develop branch
     foo@git.0.19                                      # use the 0.19 tag
     ```

   * `v0.19` also gives you more control over how Spack interprets git versions, in case Spack cannot detect the version from the git repository. You can suffix a git version with `=<version>` to force Spack to concretize it as a particular version (#30998, #31914, #32257):

     ```console
     # use mybranch, but treat it as version 3.2 for version comparison
     foo@git.mybranch=3.2

     # use the given commit, but treat it as develop for version comparison
     foo@git.abcdef1234abcdef1234abcdef1234abcdef1234=develop
     ```

     More in [the docs](https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier).

6. **Changes to Cray EX Support**

   Cray machines have historically had their own "platform" within Spack, because we needed to go through the module system to leverage compilers and MPI installations on these machines. The Cray EX programming environment now provides standalone `craycc` executables and proper `mpicc` wrappers, so Spack can treat EX machines like Linux with extra packages (#29392).

   We expect this to greatly reduce bugs, as external packages and compilers can now be used by prefix instead of through modules. We will also no longer be subject to reproducibility issues when modules change from Cray PE release to release and from site to site. This also simplifies dealing with the underlying Linux OS on Cray systems, as Spack will properly model the machine's OS as either SuSE or RHEL.

7. **Improvements to tests and testing in CI**

   * `spack ci generate --tests` will generate a `.gitlab-ci.yml` file that not only does builds but also runs tests for built packages (#27877). Public GitHub pipelines now also run tests in CI.

   * `spack test run --explicit` will only run tests for packages that are explicitly installed, instead of all packages.
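
   Both commands sketched together (`--output-file` is an assumption about how you name the generated file; adapt to your pipeline):

   ```console
   $ spack ci generate --tests --output-file .gitlab-ci.yml  # builds plus package tests
   $ spack test run --explicit                               # only explicitly installed packages
   ```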

8. **Experimental binding link model**

   You can add a new option to `config.yaml` to make Spack embed absolute paths to needed shared libraries in ELF executables and shared libraries on Linux (#31948, [docs](https://spack.readthedocs.io/en/latest/config_yaml.html#shared-linking-bind)):

   ```yaml
   config:
     shared_linking:
       type: rpath
       bind: true
   ```

   This can improve launch time at scale for parallel applications, and it can make installations less susceptible to environment variables like `LD_LIBRARY_PATH`, especially when dealing with external libraries that use `RUNPATH`. You can think of this as a faster, even higher-precedence version of `RPATH`.
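
   The same setting can likely be applied with `spack config add`, which accepts the colon-separated paths used elsewhere in these notes (a sketch, not a captured session):

   ```console
   $ spack config add config:shared_linking:type:rpath
   $ spack config add config:shared_linking:bind:true
   ```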

## Other new features of note

* `spack spec` prints dependencies more legibly. Dependencies in the output now appear at the *earliest* level of indentation possible (#33406)
* You can override `package.py` attributes like `url` directly in `packages.yaml` (#33275, [docs](https://spack.readthedocs.io/en/latest/build_settings.html#assigning-package-attributes)); see the sketch after this list
* There are a number of new architecture-related format strings you can use in Spack configuration files to specify paths (#29810, [docs](https://spack.readthedocs.io/en/latest/configuration.html#config-file-variables))
* Spack now supports bootstrapping Clingo on Windows (#33400)
* There is now support for an `RPATH`-like library model on Windows (#31930)
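
A minimal sketch of the `packages.yaml` attribute override (the `package_attributes` key follows the linked docs; the package choice and mirror URL are hypothetical):

```yaml
packages:
  openmpi:
    package_attributes:
      # fetch this package from a site mirror instead of the canonical URL
      url: https://mirror.example.com/openmpi/openmpi-4.1.4.tar.bz2
```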

## Performance Improvements

* Major performance improvements for installation from binary caches (#27610, #33628, #33636, #33608, #33590, #33496)
* Test suite can now be parallelized using `xdist` (used in GitHub Actions) (#32361)
* Reduce lock contention for parallel builds in environments (#31643)

## New binary caches and stacks

* We now build nearly all of E4S with `oneapi` in our buildcache (#31781, #31804, #31803, #31840, #31991, #32117, #32107, #32239)
* Added 3 new machine learning-centric stacks to binary cache: `x86_64_v3`, CUDA, ROCm (#31592, #33463)

## Removals and Deprecations

* Support for Python 3.5 is dropped (#31908). Only Python 2.7 and 3.6+ are officially supported.

* This is the last Spack release that will support Python 2 (#32615). Spack `v0.19` will emit a deprecation warning if you run it with Python 2, and Python 2 support will soon be removed from the `develop` branch.

* `LD_LIBRARY_PATH` is no longer set by default by `spack load` or module loads.

  Setting `LD_LIBRARY_PATH` in Spack environments/modules can cause binaries from outside of Spack to crash, and Spack's own builds use `RPATH` and do not need `LD_LIBRARY_PATH` set in order to run. If you still want the old behavior, you can run these commands to configure Spack to set `LD_LIBRARY_PATH`:

  ```console
  spack config add modules:prefix_inspections:lib64:[LD_LIBRARY_PATH]
  spack config add modules:prefix_inspections:lib:[LD_LIBRARY_PATH]
  ```

* The `spack:concretization:[together|separately]` option has been removed after being deprecated in `v0.18`. Use `concretizer:unify:[true|false]` instead; see the sketch after this list.
* `config:module_roots` is no longer supported after being deprecated in `v0.18`. Use configuration in module sets instead (#28659, [docs](https://spack.readthedocs.io/en/latest/module_file_support.html)).
* `spack activate` and `spack deactivate` are no longer supported, having been deprecated in `v0.18`. Use an environment with a view instead of activating/deactivating ([docs](https://spack.readthedocs.io/en/latest/environments.html#configuration-in-spack-yaml)).
* The old YAML format for buildcaches is now deprecated (#33707). If you are using an old buildcache with YAML metadata you will need to regenerate it with JSON metadata.
* `spack bootstrap trust` and `spack bootstrap untrust` are deprecated in favor of `spack bootstrap enable` and `spack bootstrap disable` and will be removed in `v0.20` (#33600).
* The `graviton2` architecture has been renamed to `neoverse_n1`, and `graviton3` is now `neoverse_v1`. Buildcaches using the old architecture names will need to be rebuilt.
* The terms `blacklist` and `whitelist` have been replaced with `include` and `exclude` in all configuration files (#31569). You can use `spack config update` to automatically fix your configuration files.
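
In `spack.yaml`, the `concretizer:unify` replacement looks like this (a sketch; `true` corresponds to the removed `concretization: together`, `false` to `separately`):

```yaml
spack:
  concretizer:
    unify: true
```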
|
||||
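  A minimal sketch of updating one configuration section in place:

  ```console
  spack config update modules
  ```
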

## Notable Bugfixes

* Permission setting on installation now handles effective uid properly (#19980)
* `buildable:true` for an MPI implementation now overrides `buildable:false` for `mpi` (#18269)
* Improved error messages when attempting to use an unconfigured compiler (#32084)
* Do not punish explicitly requested compiler mismatches in the solver (#30074)
* `spack stage`: add missing `--fresh` and `--reuse` flags (#31626)
* Fixes for adding build system executables like `cmake` to package scope (#31739)
* Bugfix for binary relocation with aliased strings produced by newer `binutils` (#32253)

## Spack community stats

* 6,751 total packages, 335 new since `v0.18.0`
    * 141 new Python packages
    * 89 new R packages
* 303 people contributed to this release
    * 287 committers to packages
    * 57 committers to core


# v0.18.1 (2022-07-19)

### Spack Bugfixes

* Fix several bugs related to bootstrapping (#30834,#31042,#31180)
* Fix a regression that was causing spec hashes to differ between
  Python 2 and Python 3 (#31092)
* Fixed compiler flags for oneAPI and DPC++ (#30856)
* Fixed several issues related to concretization (#31142,#31153,#31170,#31226)
* Improved support for Cray manifest file and `spack external find` (#31144,#31201,#31173,#31186)
* Assign a version to openSUSE Tumbleweed according to the GLIBC version
  in the system (#19895)
* Improved Dockerfile generation for `spack containerize` (#29741,#31321)
* Fixed a few bugs related to concurrent execution of commands (#31509,#31493,#31477)

### Package updates

* WarpX: add v22.06, fixed libs property (#30866,#31102)

@@ -10,8 +10,8 @@ For more on Spack's release structure, see

| Version | Supported |
| ------- | ------------------ |
| develop | :white_check_mark: |
-| 0.17.x | :white_check_mark: |
-| 0.16.x | :white_check_mark: |
+| 0.19.x | :white_check_mark: |
+| 0.18.x | :white_check_mark: |

## Reporting a Vulnerability

@@ -10,6 +10,7 @@ def getpywin():
    try:
        import win32con  # noqa: F401
    except ImportError:
        print("pyWin32 not installed but is required...\nInstalling via pip:")
+       subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])
        subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "pywin32"])

@@ -31,13 +31,11 @@ import os
import os.path
import sys

-min_python3 = (3, 5)
+min_python3 = (3, 6)

-if sys.version_info[:2] < (2, 7) or (
-    sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < min_python3
-):
+if sys.version_info[:2] < min_python3:
     v_info = sys.version_info[:3]
-    msg = "Spack requires Python 2.7 or %d.%d or higher " % min_python3
+    msg = "Spack requires Python %d.%d or higher " % min_python3
     msg += "You are running spack with Python %d.%d.%d." % v_info
     sys.exit(msg)

@@ -52,7 +52,6 @@ if defined py_path (

if defined py_exe (
  "%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
- "%py_exe%" "%SPACK_ROOT%\bin\spack" external find python >NUL
)

set "EDITOR=notepad"

@@ -19,7 +19,7 @@ config:
  install_tree:
    root: $spack/opt/spack
    projections:
-     all: "${ARCHITECTURE}/${COMPILERNAME}-${COMPILERVER}/${PACKAGE}-${VERSION}-${HASH}"
+     all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
  # install_tree can include an optional padded length (int or boolean)
  # default is False (do not pad)
  # if padded_length is True, Spack will pad as close to the system max path
@@ -214,4 +214,8 @@ config:

  # Number of seconds a buildcache's index.json is cached locally before probing
  # for updates, within a single Spack invocation. Defaults to 10 minutes.
  binary_index_ttl: 600

  flags:
    # Whether to keep -Werror flags active in package builds.
    keep_werror: 'none'

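A hedged aside: rather than editing this defaults file, the same setting can be
overridden from the command line ('all' is an assumed valid value alongside the
'none' default shown above):

```console
spack config add config:flags:keep_werror:all
```
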
etc/spack/defaults/windows/packages.yaml (new file)
@@ -0,0 +1,21 @@
# -------------------------------------------------------------------------
# This file controls default concretization preferences for Spack.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
#   $SPACK_ROOT/etc/spack/packages.yaml
#
# Per-user settings (overrides default and site settings):
#   ~/.spack/packages.yaml
# -------------------------------------------------------------------------
packages:
  all:
    compiler:
      - msvc
    providers:
      mpi: [msmpi]
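A hedged usage note: the merged preferences resulting from this file and any
user overrides can be inspected with:

```console
spack config get packages
```
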
@@ -1,162 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _analyze:

=======
Analyze
=======


The analyze command is a front-end to various tools that let us analyze
package installations. Each analyzer is a module for a different kind
of analysis that can be done on a package installation, including (but not
limited to) binary, log, or text analysis. Thus, the analyze command group
allows you to take an existing package install, choose an analyzer,
and extract some output for the package using it.


-----------------
Analyzer Metadata
-----------------

For all analyzers, we write to an ``analyzers`` folder in ``~/.spack``, or the
value that you specify in your spack config at ``config:analyzers_dir``.
For example, here we see the results of running an analysis on zlib:

.. code-block:: console

   $ tree ~/.spack/analyzers/
   └── linux-ubuntu20.04-skylake
       └── gcc-9.3.0
           └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
               ├── environment_variables
               │   └── spack-analyzer-environment-variables.json
               ├── install_files
               │   └── spack-analyzer-install-files.json
               └── libabigail
                   └── spack-analyzer-libabigail-libz.so.1.2.11.xml


This means that you can always find analyzer output in this folder, and it
is organized with the same logic as the package install it was run for.
If you want to customize this top level folder, simply provide the ``--path``
argument to ``spack analyze run``. The nested organization will be maintained
within your custom root.

-----------------
Listing Analyzers
-----------------

If you aren't familiar with Spack's analyzers, you can quickly list those that
are available:

.. code-block:: console

   $ spack analyze list-analyzers
   install_files : install file listing read from install_manifest.json
   environment_variables : environment variables parsed from spack-build-env.txt
   config_args : config args loaded from spack-configure-args.txt
   libabigail : Application Binary Interface (ABI) features for objects


In the above, the first three are fairly simple, parsing metadata files from
a package install directory and saving them as analyzer output.

-------------------
Analyzing a Package
-------------------

The analyze command, akin to install, will accept a package spec to perform
an analysis for. The package must be installed. Let's walk through an example
with zlib. We first ask to analyze it. However, since we have more than one
install, we are asked to disambiguate:

.. code-block:: console

   $ spack analyze run zlib
   ==> Error: zlib matches multiple packages.
     Matching packages:
       fz2bs56 zlib@1.2.11%gcc@7.5.0 arch=linux-ubuntu18.04-skylake
       sl7m27m zlib@1.2.11%gcc@9.3.0 arch=linux-ubuntu20.04-skylake
     Use a more specific spec.


We can then specify the spec version that we want to analyze:

.. code-block:: console

   $ spack analyze run zlib/fz2bs56

If you don't provide any specific analyzer names, by default all analyzers
(shown in the ``list-analyzers`` subcommand list) will be run. If an analyzer does not
have any result, it will be skipped. For example, here is a result running for
zlib:

.. code-block:: console

   $ ls ~/.spack/analyzers/linux-ubuntu20.04-skylake/gcc-9.3.0/zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2/
   spack-analyzer-environment-variables.json
   spack-analyzer-install-files.json
   spack-analyzer-libabigail-libz.so.1.2.11.xml

If you want to run a specific analyzer, ask for it with ``--analyzer``. Here we run
spack analyze on libabigail (already installed) *using* libabigail:

.. code-block:: console

   $ spack analyze run --analyzer abigail libabigail


.. _analyze_monitoring:

----------------------
Monitoring An Analysis
----------------------

For any kind of analysis, you can
use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
server to upload the same run metadata to. You can
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

You should first export your spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky


By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

   $ spack analyze run --monitor wget

If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack analyze run --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io wget

If your server doesn't have authentication, you can skip it:

.. code-block:: console

   $ spack analyze run --monitor --monitor-disable-auth wget

Regardless of your choice, when you run analyze on an installed package (whether
it was installed with ``--monitor`` or not), you'll see the results generating as they did
before, and a message that the monitor server was pinged:

.. code-block:: console

   $ spack analyze --monitor wget
   ...
   ==> Sending result for wget bin/wget to monitor.
@@ -1244,8 +1244,8 @@ For example, for the ``stackstart`` variant:

.. code-block:: sh

-   mpileaks stackstart=4  # variant will be propagated to dependencies
-   mpileaks stackstart==4 # only mpileaks will have this variant value
+   mpileaks stackstart==4 # variant will be propagated to dependencies
+   mpileaks stackstart=4  # only mpileaks will have this variant value

^^^^^^^^^^^^^^
Compiler Flags
@@ -1672,9 +1672,13 @@ own install prefix. However, certain packages are typically installed
`Python <https://www.python.org>`_ packages are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.

-Spack has support for this type of installation as well. In Spack,
-a package that can live inside the prefix of another package is called
-an *extension*. Suppose you have Python installed like so:
+In Spack, installation prefixes are immutable, so this type of installation
+is not directly supported. However, it is possible to create views that
+allow you to merge install prefixes of multiple packages into a single new prefix.
+Views are a convenient way to get a more traditional filesystem structure.
+Using *extensions*, you can ensure that Python packages always share the
+same prefix in the view as Python itself. Suppose you have
+Python installed like so:

.. code-block:: console

@@ -1712,8 +1716,6 @@ You can find extensions for your Python installation like this:
   py-ipython@2.3.1    py-pygments@2.0.1  py-setuptools@11.3.1
   py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0

-==> None activated.
-
The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
prefixes, and you can see this with ``spack find --paths``:
@@ -1741,32 +1743,72 @@ directly when you run ``python``:
   ImportError: No module named numpy
   >>>

-^^^^^^^^^^^^^^^^
-Using Extensions
-^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Using Extensions in Environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-There are multiple ways to get ``numpy`` working in Python. The first is
-to use :ref:`shell-support`. You can simply ``load`` the extension,
-and it will be added to the ``PYTHONPATH`` in your current shell, and
-Python itself will be available in the ``PATH``:
+The recommended way of working with extensions such as ``py-numpy``
+above is through :ref:`Environments <environments>`. For example,
+the following creates an environment in the current working directory
+with a filesystem view in the ``./view`` directory:

.. code-block:: console

+   $ spack env create --with-view view --dir .
+   $ spack -e . add py-numpy
+   $ spack -e . concretize
+   $ spack -e . install
+
+We recommend environments for two reasons. Firstly, environments
+can be activated (requires :ref:`shell-support`):
+
+.. code-block:: console
+
+   $ spack env activate .
+
+which sets all the right environment variables such as ``PATH`` and
+``PYTHONPATH``. This ensures that
+
+.. code-block:: console
+
+   $ python
+   >>> import numpy
+
+works. Secondly, even without shell support, the view ensures
+that Python can locate its extensions:
+
+.. code-block:: console
+
+   $ ./view/bin/python
+   >>> import numpy
+
+See :ref:`environments` for a more in-depth description of Spack
+environments and customizations to views.
+
+^^^^^^^^^^^^^^^^^^^^
+Using ``spack load``
+^^^^^^^^^^^^^^^^^^^^
+
+A more traditional way of using Spack and extensions is ``spack load``
+(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
+in your current shell, and Python itself will be available in the ``PATH``:
+
+.. code-block:: console
+
   $ spack load py-numpy
   $ python
   >>> import numpy

Now ``import numpy`` will succeed for as long as you keep your current
session open.
+The loaded packages can be checked using ``spack find --loaded``.
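+For example:
+
+.. code-block:: console
+
+   $ spack find --loaded
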
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Loading Extensions via Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-Instead of using Spack's environment modification capabilities through
-the ``spack load`` command, you can load numpy through your
-environment modules (using ``environment-modules`` or ``lmod``). This
-will also add the extension to the ``PYTHONPATH`` in your current
-shell.
+Apart from ``spack env activate`` and ``spack load``, you can load numpy
+through your environment modules (using ``environment-modules`` or
+``lmod``). This will also add the extension to the ``PYTHONPATH`` in
+your current shell.

.. code-block:: console

@@ -1776,15 +1818,6 @@ If you do not know the name of the specific numpy module you wish to
load, you can use the ``spack module tcl|lmod loads`` command to get
the name of the module from the Spack spec.

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Extensions in an Environment
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Another way to use extensions is to create a view, which merges the
-python installation along with the extensions into a single prefix.
-See :ref:`environments` for a more in-depth description
-of environment views.
-
-----------------------
Filesystem requirements
-----------------------

@@ -5,9 +5,9 @@

.. _cachedcmakepackage:

-------------------
-CachedCMakePackage
-------------------
+-----------
+CachedCMake
+-----------

The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is

@@ -5,9 +5,9 @@

.. _cudapackage:

------------
-CudaPackage
------------
+----
+Cuda
+----

Different from other packages, ``CudaPackage`` does not represent a build system.
Instead its goal is to simplify and unify usage of ``CUDA`` in other packages by providing a `mixin-class <https://en.wikipedia.org/wiki/Mixin>`_.
@@ -80,7 +80,7 @@ standard CUDA compiler flags.

**cuda_flags**

This built-in static method returns a list of command line flags
for the chosen ``cuda_arch`` value(s). The flags are intended to
be passed to the CUDA compiler driver (i.e., ``nvcc``).

@@ -6,9 +6,9 @@

.. _inteloneapipackage:


-====================
-IntelOneapiPackage
-====================
+===========
+IntelOneapi
+===========


.. contents::
@@ -36,7 +36,7 @@ For more information on a specific package, do::

Intel no longer releases new versions of Parallel Studio, which can be
used in Spack via the :ref:`intelpackage`. All of its components can
now be found in oneAPI.

Examples
========

@@ -5,9 +5,9 @@

.. _intelpackage:

-------------
-IntelPackage
-------------
+-----
+Intel
+-----

.. contents::

@@ -5,9 +5,9 @@

.. _pythonpackage:

--------------
-PythonPackage
--------------
+------
+Python
+------

Python packages and modules have their own special build system. This
documentation covers everything you'll need to know in order to write
@@ -724,10 +724,9 @@ extends vs. depends_on

This is very similar to the naming dilemma above, with a slight twist.
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
-``extends`` and ``depends_on`` are very similar, but ``extends`` adds
-the ability to *activate* the package. Activation involves symlinking
-everything in the installation prefix of the package to the installation
-prefix of Python. This allows the user to import a Python module without
+``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
+that the extension and extendee share the same prefix in views.
+This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``.

When deciding between ``extends`` and ``depends_on``, the best rule of
@@ -735,7 +734,7 @@ thumb is to check the installation prefix. If Python libraries are
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
should use ``extends``. If Python libraries are installed elsewhere
or the only files that get installed reside in ``<prefix>/bin``, then
-don't use ``extends``, as symlinking the package wouldn't be useful.
+don't use ``extends``.

^^^^^^^^^^^^^^^^^^^^^
Alternatives to Spack

@@ -5,9 +5,9 @@

.. _rocmpackage:

------------
-ROCmPackage
------------
+----
+ROCm
+----

The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPackage``,
it provides standard variants, dependencies, and conflicts to facilitate building
@@ -25,7 +25,7 @@ This package provides the following variants:

* **rocm**

  This variant is used to enable/disable building with ``rocm``.
  The default is disabled (or ``False``).

* **amdgpu_target**

@@ -5,9 +5,9 @@

.. _rpackage:

---------
-RPackage
---------
+--
+R
+--

Like Python, R has its own built-in build system.

@@ -193,10 +193,10 @@ Build system dependencies

As an extension of the R ecosystem, your package will obviously depend
on R to build and run. Normally, we would use ``depends_on`` to express
-this, but for R packages, we use ``extends``. ``extends`` is similar to
-``depends_on``, but adds an additional feature: the ability to "activate"
-the package by symlinking it to the R installation directory. Since
-every R package needs this, the ``RPackage`` base class contains:
+this, but for R packages, we use ``extends``. This implies a special
+dependency on R, which is used to set environment variables such as
+``R_LIBS`` uniformly. Since every R package needs this, the ``RPackage``
+base class contains:

.. code-block:: python

@@ -5,15 +5,15 @@

.. _sourceforgepackage:

-------------------
-SourceforgePackage
-------------------
+-----------
+Sourceforge
+-----------

``SourceforgePackage`` is a
`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
sets the URL based on a list of Sourceforge mirrors listed in
`sourceforge_mirror_path`, which defaults to a half dozen known mirrors.
Refer to the package source
(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.


@@ -29,7 +29,7 @@ This package provides a method for populating mirror URLs.

It is decorated with `property` so its results are treated as
a package attribute.

Refer to
`<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
for information on how Spack uses the `urls` attribute during
fetching.

@@ -36,13 +36,7 @@
if not os.path.exists(link_name):
    os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
-sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
-
-if sys.version_info[0] < 3:
-    sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib"))
-else:
-    sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib3"))
-
+sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/_vendoring"))
sys.path.append(os.path.abspath("_spack_root/lib/spack/"))

# Add the Spack bin directory to the path so that we can use its output in docs.
@@ -80,8 +74,16 @@
    "--force",  # Overwrite existing files
    "--no-toc",  # Don't create a table of contents file
    "--output-dir=.",  # Directory to place all output
+    "--module-first",  # emit module docs before submodule docs
]
-sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/spack"])
+sphinx_apidoc(
+    apidoc_args
+    + [
+        "_spack_root/lib/spack/spack",
+        "_spack_root/lib/spack/spack/test/*.py",
+        "_spack_root/lib/spack/spack/test/cmd/*.py",
+    ]
+)
sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])

# Enable todo items
@@ -160,8 +162,8 @@ def setup(sphinx):
master_doc = "index"

# General information about the project.
-project = u"Spack"
-copyright = u"2013-2021, Lawrence Livermore National Laboratory."
+project = "Spack"
+copyright = "2013-2021, Lawrence Livermore National Laboratory."

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -206,12 +208,14 @@ def setup(sphinx):
    ("py:class", "_frozen_importlib_external.SourceFileLoader"),
    ("py:class", "clingo.Control"),
    ("py:class", "six.moves.urllib.parse.ParseResult"),
    ("py:class", "TextIO"),
    # Spack classes that are private and we don't want to expose
    ("py:class", "spack.provider_index._IndexBase"),
    ("py:class", "spack.repo._PrependFileLoader"),
    ("py:class", "spack.build_systems._checks.BaseBuilder"),
    # Spack classes that intersphinx is unable to resolve
    ("py:class", "spack.version.VersionBase"),
    ("py:class", "spack.spec.DependencySpec"),
]

# The reST default role (used for this markup: `text`) to use for all documents.
@@ -350,7 +354,7 @@ class SpackStyle(DefaultStyle):
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
-    ("index", "Spack.tex", u"Spack Documentation", u"Todd Gamblin", "manual"),
+    ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
]

# The name of an image file (relative to this directory) to place at the top of
@@ -378,7 +382,7 @@ class SpackStyle(DefaultStyle):

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [("index", "spack", u"Spack Documentation", [u"Todd Gamblin"], 1)]
+man_pages = [("index", "spack", "Spack Documentation", ["Todd Gamblin"], 1)]

# If true, show URL addresses after external links.
# man_show_urls = False
@@ -393,8 +397,8 @@ class SpackStyle(DefaultStyle):
    (
        "index",
        "Spack",
-        u"Spack Documentation",
-        u"Todd Gamblin",
+        "Spack Documentation",
+        "Todd Gamblin",
        "Spack",
        "One line description of project.",
        "Miscellaneous",

@@ -394,7 +394,7 @@ are indicated at the start of the path with ``~`` or ``~user``.
Spack-specific variables
^^^^^^^^^^^^^^^^^^^^^^^^

-Spack understands several special variables. These are:
+Spack understands over a dozen special variables. These are:

* ``$env``: name of the currently active :ref:`environment <environments>`
* ``$spack``: path to the prefix of this Spack installation
@@ -416,6 +416,8 @@ Spack understands several special variables. These are:
  ArchSpec. E.g. ``skylake`` or ``neoverse-n1``.
* ``$target_family``. The target family for the current host, as
  detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
+* ``$date``: the current date in the format YYYY-MM-DD

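+As a hypothetical illustration (the install-tree path here is invented), the
+new ``$date`` variable could stamp per-day install roots:
+
+.. code-block:: console
+
+   $ spack config add 'config:install_tree:root:$spack/installs/$date'
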
Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters:

@@ -175,14 +175,11 @@ Spec-related modules
^^^^^^^^^^^^^^^^^^^^

:mod:`spack.spec`
-  Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
-  Also implements most of the logic for normalization and concretization
+  Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
   of specs.

-:mod:`spack.parse`
-  Contains some base classes for implementing simple recursive descent
-  parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
-  Used by :class:`~spack.spec.SpecParser`.
+:mod:`spack.parser`
+  Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.

:mod:`spack.concretize`
   Contains :class:`~spack.concretize.Concretizer` implementation,
@@ -235,7 +232,7 @@ Spack Subcommands
Unit tests
^^^^^^^^^^

-:mod:`spack.test`
+``spack.test``
   Implements Spack's test suite. Add a module and put its name in
   the test suite in ``__init__.py`` to add more unit tests.
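+  For example, the test files that make up the suite can be listed with the
+  built-in runner (a hedged sketch; ``spack unit-test`` wraps ``pytest``):
+
+  .. code-block:: console
+
+     $ spack unit-test --list
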
@@ -1070,19 +1070,23 @@ the include is conditional.
Building a subset of the environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-The generated ``Makefile``\s contain install targets for each spec. Given the hash
-of a particular spec, you can use the ``.install/<hash>`` target to install the
-spec with its dependencies. There is also ``.install-deps/<hash>`` to *only* install
+The generated ``Makefile``\s contain install targets for each spec, identified
+by ``<name>-<version>-<hash>``. This allows you to install only a subset of the
+packages in the environment. When packages are unique in the environment, it's
+enough to know the name and let tab-completion fill out the version and hash.
+
+The following phony targets are available: ``install/<spec>`` to install the
+spec with its dependencies, and ``install-deps/<spec>`` to *only* install
its dependencies. This can be useful when certain flags should only apply to
dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:

.. code:: console

-   $ spack env depfile -o Makefile --make-target-prefix my_env
+   $ spack env depfile -o Makefile

   # Install dependencies in parallel, only show a log on error.
-   $ make -j16 my_env/.install-deps/<hash> SPACK_INSTALL_FLAGS=--show-log-on-error
+   $ make -j16 install-deps/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--show-log-on-error

   # Install the root spec with verbose output.
-   $ make -j16 my_env/.install/<hash> SPACK_INSTALL_FLAGS=--verbose
+   $ make -j16 install/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--verbose
@@ -21,8 +21,9 @@ be present on the machine where Spack is run:
   :header-rows: 1

These requirements can be easily installed on most modern Linux systems;
-on macOS, XCode is required. Spack is designed to run on HPC
-platforms like Cray. Not all packages should be expected
+on macOS, the Command Line Tools package is required, and a full XCode suite
+may be necessary for some packages such as Qt and apple-gl. Spack is designed
+to run on HPC platforms like Cray. Not all packages should be expected
to work on all platforms.

A build matrix showing which packages are working on which systems is shown below.
@@ -1704,9 +1705,11 @@ dependencies or incompatible build tools like autoconf. Here are several
packages known to work on Windows:

* abseil-cpp
+* bzip2
* clingo
* cpuinfo
* cmake
+* hdf5
* glm
* nasm
* netlib-lapack (requires Intel Fortran)

@@ -67,7 +67,6 @@ or refer to the full manual below.
   build_settings
   environments
   containers
-   monitoring
   mirrors
   module_file_support
   repositories
@@ -78,12 +77,6 @@ or refer to the full manual below.
   extensions
   pipelines

-.. toctree::
-   :maxdepth: 2
-   :caption: Research
-
-   analyze
-
.. toctree::
   :maxdepth: 2
   :caption: Contributing

@@ -1,265 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _monitoring:

==========
Monitoring
==========

You can use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
server to store a database of your packages, builds, and associated metadata
for provenance, research, or some other kind of development. You should
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

-------------------
Analysis Monitoring
-------------------

To read about how to monitor an analysis (meaning you want to send analysis results
to a server) see :ref:`analyze_monitoring`.

---------------------
Monitoring An Install
---------------------

Since an install is typically when you build packages, we logically want
to tell spack to monitor during this step. Let's start with an example
where we want to monitor the install of hdf5. Unless you have disabled authentication
for the server, we first want to export our spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky


By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

   $ spack install --monitor hdf5


If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack install --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io hdf5


As a precaution, we cut out early in the spack client if you have not provided
authentication credentials. For example, if you run the command above without
exporting your username or token, you'll see:

.. code-block:: console

   ==> Error: You are required to export SPACKMON_TOKEN and SPACKMON_USER

This extra check is to ensure that we don't start any builds,
and then discover that you forgot to export your token. However, if
your monitoring server has authentication disabled, you can tell this to
the client to skip this step:

.. code-block:: console

   $ spack install --monitor --monitor-disable-auth hdf5

If the service is not running, you'll cleanly exit early - the install will
not continue if you've asked it to monitor and there is no service.
For example, here is what you'll see if the monitoring service is not running:

.. code-block:: console

   [Errno 111] Connection refused


If you want to continue builds (and stop monitoring) you can set the ``--monitor-keep-going``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-keep-going hdf5

This could mean that if a request fails, you only have partial or no data
added to your monitoring database. This setting will not be applied to the
first request to check if the server is running, but to subsequent requests.
If you don't have a monitor server running and you want to build, simply
don't provide the ``--monitor`` flag! Finally, if you want to provide one or
more tags to your build, you can do:

.. code-block:: console

   # Add one tag, "pizza"
   $ spack install --monitor --monitor-tags pizza hdf5

   # Add two tags, "pizza" and "pasta"
   $ spack install --monitor --monitor-tags pizza,pasta hdf5


----------------------------
Monitoring with Containerize
----------------------------

The same argument group is available to add to a containerize command.

^^^^^^
Docker
^^^^^^

To add monitoring to a Docker container recipe generation using the defaults,
and assuming a monitor server running on localhost, you would
start with a spack.yaml in your present working directory:

.. code-block:: yaml

   spack:
     specs:
       - samtools

And then do:

.. code-block:: console

   # preview first
   spack containerize --monitor

   # and then write to a Dockerfile
   spack containerize --monitor > Dockerfile


The install command will be edited to include commands for enabling monitoring.
However, getting secrets into the container for your monitor server is something
that should be done carefully. Specifically you should:

- Never try to define secrets as ENV, ARG, or using ``--build-arg``
- Do not try to get the secret into the container via a "temporary" file that you remove (it in fact will still exist in a layer)

Instead, it's recommended to use buildkit `as explained here <https://pythonspeed.com/articles/docker-build-secrets/>`_.
You'll need to again export environment variables for your spack monitor server:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky

And then use buildkit along with your build and identifying the name of the secret:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .

The secrets are expected to come from your environment, and then will be temporarily mounted and available
at ``/run/secrets/<name>``. If you forget to supply them (and authentication is required) the build
will fail. If you need to build on your host (and interact with a spack monitor at localhost) you'll
need to tell Docker to use the host network:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --network="host" --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .


^^^^^^^^^^^
Singularity
^^^^^^^^^^^

To add monitoring to a Singularity container build, the spack.yaml needs to
be modified slightly to specify wanting a different format:


.. code-block:: yaml

   spack:
     specs:
       - samtools
     container:
       format: singularity


Again, generate the recipe:


.. code-block:: console

   # preview first
   $ spack containerize --monitor

   # then write to a Singularity recipe
   $ spack containerize --monitor > Singularity


Singularity doesn't have a direct way to define secrets at build time, so we have
to do a bit of a manual command to add a file, source secrets in it, and remove it.
Since Singularity doesn't have layers like Docker, deleting a file will truly
remove it from the container and history. So let's say we have this file,
``secrets.sh``:

.. code-block:: console

   # secrets.sh
   export SPACKMON_USER=spack
   export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438


We would then generate the Singularity recipe, and add a files section,
a source of that file at the start of ``%post``, and **importantly**
a removal of the file at the end of that same section.

.. code-block::

   Bootstrap: docker
   From: spack/ubuntu-bionic:latest
   Stage: build

   %files
       secrets.sh /opt/secrets.sh

   %post
       . /opt/secrets.sh

       # spack install commands are here
       ...

       # Don't forget to remove here!
       rm /opt/secrets.sh


You can then build the container as you normally would.

.. code-block:: console

   $ sudo singularity build container.sif Singularity


------------------
Monitoring Offline
------------------

In the case that you want to save monitor results to your filesystem
and then upload them later (perhaps you are in an environment where you don't
have credentials or it isn't safe to use them) you can use the ``--monitor-save-local``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-save-local hdf5

This will save results in a subfolder, "monitor" in your designated spack
reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When
you are ready to upload them to a spack monitor server:


.. code-block:: console

   $ spack monitor upload ~/.spack/reports/monitor


You can choose the root directory of results as shown above, or a specific
subdirectory. The command accepts other arguments to specify configuration
for the monitor.
@@ -34,6 +34,15 @@ ubiquitous in the scientific software community. Second, it's a modern
language and has many powerful features to help make package writing
easy.

+.. warning::
+
+   As a general rule, packages should install the software *from source*.
+   The only exception is for proprietary software (e.g., vendor compilers).
+
+   If a special build system needs to be added in order to support building
+   a package from source, then the associated code and recipe need to be added
+   first.
+

.. _installation_procedure:

@@ -2397,13 +2406,15 @@ this because uninstalling the dependency would break the package.

``build``, ``link``, and ``run`` dependencies all affect the hash of Spack
packages (along with ``sha256`` sums of patches and archives used to build the
-package, and a [canonical hash](https://github.com/spack/spack/pull/28156) of
+package, and a `canonical hash <https://github.com/spack/spack/pull/28156>`_ of
the ``package.py`` recipes). ``test`` dependencies do not affect the package
hash, as they are only used to construct a test environment *after* building and
installing a given package installation. Older versions of Spack did not include
-build dependencies in the hash, but this has been
-[fixed](https://github.com/spack/spack/pull/28504) as of [Spack
-``v0.18``](https://github.com/spack/spack/releases/tag/v0.18.0)
+build dependencies in the hash, but this has been
+`fixed <https://github.com/spack/spack/pull/28504>`_ as of |Spack v0.18|_.
+
+.. |Spack v0.18| replace:: Spack ``v0.18``
+.. _Spack v0.18: https://github.com/spack/spack/releases/tag/v0.18.0

If the dependency type is not specified, Spack uses a default of
``('build', 'link')``. This is the common case for compiler languages.
@@ -2634,9 +2645,12 @@ extendable package:
    extends('python')
    ...

-Now, the ``py-numpy`` package can be used as an argument to ``spack
-activate``. When it is activated, all the files in its prefix will be
-symbolically linked into the prefix of the python package.
+This accomplishes a few things. Firstly, the Python package can set special
+variables such as ``PYTHONPATH`` for all extensions when the run or build
+environment is set up. Secondly, filesystem views can ensure that extensions
+are put in the same prefix as their extendee. This ensures that Python in
+a view can always locate its Python packages, even without environment
+variables set.

A package can only extend one other package at a time. To support packages
that may extend one of a list of other packages, Spack supports multiple
@@ -2684,9 +2698,8 @@ variant(s) are selected. This may be accomplished with conditional
    ...

Sometimes, certain files in one package will conflict with those in
-another, which means they cannot both be activated (symlinked) at the
-same time. In this case, you can tell Spack to ignore those files
-when it does the activation:
+another, which means they cannot both be used in a view at the
+same time. In this case, you can tell Spack to ignore those files:

.. code-block:: python

@@ -2698,7 +2711,7 @@ when it does the activation:
    ...

The code above will prevent everything in the ``$prefix/bin/`` directory
-from being linked in at activation time.
+from being linked in a view.

.. note::

@@ -3523,7 +3536,7 @@ will likely contain some overriding of default builder methods:
    def cmake_args(self):
        pass

-class Autotoolsbuilder(spack.build_systems.autotools.AutotoolsBuilder):
+class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
    def configure_args(self):
        pass

@@ -184,13 +184,48 @@ simply run the following commands:

.. code-block:: console

   $ spack env activate myenv
-   $ spack concretize --force
+   $ spack concretize --fresh --force
   $ spack install

-The ``--force`` flag tells Spack to overwrite its previous concretization
-decisions, allowing you to choose a new version of Python. If any of the new
-packages like Bash are already installed, ``spack install`` won't re-install
-them, it will keep the symlinks in place.
+The ``--fresh`` flag tells Spack to use the latest version of every package
+where possible instead of trying to optimize for reuse of existing installed
+packages.
+
+The ``--force`` flag in addition tells Spack to overwrite its previous
+concretization decisions, allowing you to choose a new version of Python.
+If any of the new packages like Bash are already installed, ``spack install``
+won't re-install them, it will keep the symlinks in place.
+
+-----------------------------------
+Updating & Cleaning Up Old Packages
+-----------------------------------
+
+If you're looking to mimic the behavior of Homebrew, you may also want to
+clean up out-of-date packages from your environment after an upgrade. To
+upgrade your entire software stack within an environment and clean up old
+package versions, simply run the following commands:
+
+.. code-block:: console
+
+   $ spack env activate myenv
+   $ spack mark -i --all
+   $ spack concretize --fresh --force
+   $ spack install
+   $ spack gc
+
+Running ``spack mark -i --all`` tells Spack to mark all of the existing
+packages within an environment as "implicitly" installed. This tells
+spack's garbage collection system that these packages should be cleaned up.
+
+Don't worry however, this will not remove your entire environment.
+Running ``spack install`` will reexamine your spack environment after
+a fresh concretization and will re-mark any packages that should remain
+installed as "explicitly" installed.
+
+**Note:** if you use multiple spack environments you should re-run ``spack install``
+in each of your environments prior to running ``spack gc`` to prevent spack
+from uninstalling any shared packages that are no longer required by the
+environment you just upgraded.

--------------
Uninstallation
@@ -1,5 +1,5 @@
Name, Supported Versions, Notes, Requirement Reason
-Python, 2.7/3.6-3.11, , Interpreter for Spack
+Python, 3.6--3.11, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software

lib/spack/env/cc (vendored)
@@ -440,6 +440,47 @@ while [ $# -ne 0 ]; do
        continue
    fi

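    # the keep list is a `|`-separated set of case patterns; any argument
    # matching one of them is appended to other_args_list untouched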
    if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
        # NOTE: the eval is required to allow `|` alternatives inside the variable
        eval "\
            case \"\$1\" in
            $SPACK_COMPILER_FLAGS_KEEP)
                append other_args_list \"\$1\"
                shift
                continue
                ;;
            esac
        "
    fi
    # the replace list is a space-separated list of pipe-separated pairs,
    # the first in each pair is the original prefix to be matched, the
    # second is the replacement prefix
    if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
        for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
            before=${rep%|*}
            after=${rep#*|}
            eval "\
                stripped=\"\${1##$before}\"
            "
            if [ "$stripped" = "$1" ] ; then
                continue
            fi

            replaced="$after$stripped"

            # it matched, remove it
            shift

            if [ -z "$replaced" ] ; then
                # completely removed, continue OUTER loop
                continue 2
            fi

            # re-build argument list with replacement
            set -- "$replaced" "$@"
        done
    fi

    case "$1" in
        -isystem*)
            arg="${1#-isystem}"

lib/spack/external/__init__.py (vendored)
@@ -11,25 +11,14 @@

* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
* Usage: dependency of macholib
* Version: 0.17.2
* Version: 0.17.3

archspec
--------

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045)

argparse
--------

* Homepage: https://pypi.python.org/pypi/argparse
* Usage: We include our own version to be Python 3.X compatible.
* Version: 1.4.0
* Note: This package has been slightly modified to improve
  error message formatting. See the following commit if the
  vendored copy ever needs to be updated again:
  https://github.com/spack/spack/pull/6786/commits/dfcef577b77249106ea4e4c69a6cd9e64fa6c418
* Version: 0.2.0 (commit e44bad9c7b6defac73696f64078b2fe634719b62)

astunparse
----------------
@@ -52,7 +41,7 @@

* Homepage: https://github.com/python-attrs/attrs
* Usage: Needed by jsonschema.
* Version: 21.2.0 (83d3cd70f90a3f4d19ee8b508e58d1c58821c0ad)
* Version: 22.1.0

ctest_log_parser
----------------
@@ -67,21 +56,14 @@

* Homepage: https://pypi.python.org/pypi/distro
* Usage: Provides a more stable linux distribution detection.
* Version: 1.6.0 (64946a1e2a9ff529047070657728600e006c99ff)
* Note: Last version supporting Python 2.7

functools32
-----------
* Homepage: https://github.com/MiCHiLU/python-functools32
* Usage: Needed by jsonschema when using Python 2.7.
* Version: 3.2.3-2
* Version: 1.8.0

jinja2
------

* Homepage: https://pypi.python.org/pypi/Jinja2
* Usage: A modern and designer-friendly templating language for Python.
* Version: 2.11.3 (last version supporting Python 2.7)
* Version: 3.0.3 (last version supporting Python 3.6)

jsonschema
----------
@@ -96,44 +78,21 @@

* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
* Version: 1.15.2
* Version: 1.16.2

markupsafe
----------

* Homepage: https://pypi.python.org/pypi/MarkupSafe
* Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
* Version: 1.1.1 (last version supporting Python 2.7)

py
--

* Homepage: https://pypi.python.org/pypi/py
* Usage: Needed by pytest. Library with cross-python path,
  ini-parsing, io, code, and log facilities.
* Version: 1.4.34 (last version supporting Python 2.6)
* Note: This packages has been modified:
  * https://github.com/pytest-dev/py/pull/186 was backported
* Version: 2.0.1 (last version supporting Python 3.6)

pyrsistent
----------

* Homepage: http://github.com/tobgu/pyrsistent/
* Usage: Needed by `jsonschema`
* Version: 0.16.1 (last version supporting Python 2.7)
* Note: We only include the parts needed for `jsonschema`.

pytest
------

* Homepage: https://pypi.python.org/pypi/pytest
* Usage: Testing framework used by Spack.
* Version: 3.2.5 (last version supporting Python 2.6)
* Note: This package has been slightly modified:
  * We improve Python 2.6 compatibility. See:
    https://github.com/spack/spack/pull/6801.
  * We have patched pytest not to depend on setuptools. See:
    https://github.com/spack/spack/pull/15612
* Version: 0.18.0

ruamel.yaml
------

lib/spack/external/_vendoring/_pyrsistent_version.py (new vendored file)
@@ -0,0 +1 @@
__version__ = '0.18.0'
lib/spack/external/_vendoring/_pyrsistent_version.pyi (new vendored file)
@@ -0,0 +1 @@
from _pyrsistent_version import *
lib/spack/external/_vendoring/altgraph.pyi (new vendored file)
@@ -0,0 +1 @@
from altgraph import *
lib/spack/external/_vendoring/altgraph/LICENSE (new vendored file)
@@ -0,0 +1,18 @@
Copyright (c) 2004 Istvan Albert unless otherwise noted.
Copyright (c) 2006-2010 Bob Ippolito
Copyright (2) 2010-2020 Ronald Oussoren, et. al.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
@@ -1,4 +1,5 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


import sys

@@ -22,7 +23,7 @@
from ._version_info import VersionInfo


__version__ = "21.2.0"
__version__ = "22.1.0"
__version_info__ = VersionInfo._from_version_string(__version__)

__title__ = "attrs"
@@ -73,6 +74,6 @@
]

if sys.version_info[:2] >= (3, 6):
    from ._next_gen import define, field, frozen, mutable
    from ._next_gen import define, field, frozen, mutable  # noqa: F401

    __all__.extend((define, field, frozen, mutable))
    __all__.extend(("define", "field", "frozen", "mutable"))
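The `__all__` change above matters because star-imports resolve entries by name; a small sketch of the invariant the new line restores (illustrative, not part of the diff):

    # `from attr import *` looks __all__ entries up *by name*, so __all__
    # must contain strings; extending it with the objects themselves (the
    # old line) silently broke star-imports.
    __all__ = ["attrs", "attrib"]
    __all__.extend(("define", "field", "frozen", "mutable"))
    assert all(isinstance(name, str) for name in __all__)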
486 lib/spack/external/_vendoring/attr/__init__.pyi vendored Normal file
@@ -0,0 +1,486 @@
import sys

from typing import (
    Any,
    Callable,
    ClassVar,
    Dict,
    Generic,
    List,
    Mapping,
    Optional,
    Protocol,
    Sequence,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
)

# `import X as X` is required to make these public
from . import converters as converters
from . import exceptions as exceptions
from . import filters as filters
from . import setters as setters
from . import validators as validators
from ._cmp import cmp_using as cmp_using
from ._version_info import VersionInfo

__version__: str
__version_info__: VersionInfo
__title__: str
__description__: str
__url__: str
__uri__: str
__author__: str
__email__: str
__license__: str
__copyright__: str

_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)

_EqOrderType = Union[bool, Callable[[Any], Any]]
_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
_ConverterType = Callable[[Any], Any]
_FilterType = Callable[[Attribute[_T], _T], bool]
_ReprType = Callable[[Any], str]
_ReprArgType = Union[bool, _ReprType]
_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
_OnSetAttrArgType = Union[
    _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
]
_FieldTransformer = Callable[
    [type, List[Attribute[Any]]], List[Attribute[Any]]
]
# FIXME: in reality, if multiple validators are passed they must be in a list
# or tuple, but those are invariant and so would prevent subtypes of
# _ValidatorType from working when passed in a list or tuple.
_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]

# A protocol to be able to statically accept an attrs class.
class AttrsInstance(Protocol):
    __attrs_attrs__: ClassVar[Any]

# _make --

NOTHING: object

# NOTE: Factory lies about its return type to make this possible:
# `x: List[int] # = Factory(list)`
# Work around mypy issue #4554 in the common case by using an overload.
if sys.version_info >= (3, 8):
    from typing import Literal
    @overload
    def Factory(factory: Callable[[], _T]) -> _T: ...
    @overload
    def Factory(
        factory: Callable[[Any], _T],
        takes_self: Literal[True],
    ) -> _T: ...
    @overload
    def Factory(
        factory: Callable[[], _T],
        takes_self: Literal[False],
    ) -> _T: ...

else:
    @overload
    def Factory(factory: Callable[[], _T]) -> _T: ...
    @overload
    def Factory(
        factory: Union[Callable[[Any], _T], Callable[[], _T]],
        takes_self: bool = ...,
    ) -> _T: ...

# Static type inference support via __dataclass_transform__ implemented as per:
# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
# This annotation must be applied to all overloads of "define" and "attrs"
#
# NOTE: This is a typing construct and does not exist at runtime. Extensions
# wrapping attrs decorators should declare a separate __dataclass_transform__
# signature in the extension module using the specification linked above to
# provide pyright support.
def __dataclass_transform__(
    *,
    eq_default: bool = True,
    order_default: bool = False,
    kw_only_default: bool = False,
    field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]: ...

class Attribute(Generic[_T]):
    name: str
    default: Optional[_T]
    validator: Optional[_ValidatorType[_T]]
    repr: _ReprArgType
    cmp: _EqOrderType
    eq: _EqOrderType
    order: _EqOrderType
    hash: Optional[bool]
    init: bool
    converter: Optional[_ConverterType]
    metadata: Dict[Any, Any]
    type: Optional[Type[_T]]
    kw_only: bool
    on_setattr: _OnSetAttrType
    def evolve(self, **changes: Any) -> "Attribute[Any]": ...

# NOTE: We had several choices for the annotation to use for type arg:
# 1) Type[_T]
#   - Pros: Handles simple cases correctly
#   - Cons: Might produce less informative errors in the case of conflicting
#     TypeVars e.g. `attr.ib(default='bad', type=int)`
# 2) Callable[..., _T]
#   - Pros: Better error messages than #1 for conflicting TypeVars
#   - Cons: Terrible error messages for validator checks.
#     e.g. attr.ib(type=int, validator=validate_str)
#     -> error: Cannot infer function type argument
# 3) type (and do all of the work in the mypy plugin)
#   - Pros: Simple here, and we could customize the plugin with our own errors.
#   - Cons: Would need to write mypy plugin code to handle all the cases.
# We chose option #1.

# `attr` lies about its return type to make the following possible:
#     attr() -> Any
#     attr(8) -> int
#     attr(validator=<some callable>) -> Whatever the callable expects.
# This makes this type of assignments possible:
#     x: int = attr(8)
#
# This form catches explicit None or no default but with no other arguments
# returns Any.
@overload
def attrib(
    default: None = ...,
    validator: None = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: None = ...,
    converter: None = ...,
    factory: None = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...

# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def attrib(
    default: None = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: Optional[Type[_T]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form catches an explicit default argument.
@overload
def attrib(
    default: _T,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: Optional[Type[_T]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def attrib(
    default: Optional[_T] = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: object = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
@overload
def field(
    *,
    default: None = ...,
    validator: None = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: None = ...,
    factory: None = ...,
    kw_only: bool = ...,
    eq: Optional[bool] = ...,
    order: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...

# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def field(
    *,
    default: None = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form catches an explicit default argument.
@overload
def field(
    *,
    default: _T,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def field(
    *,
    default: Optional[_T] = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
    maybe_cls: _C,
    these: Optional[Dict[str, Any]] = ...,
    repr_ns: Optional[str] = ...,
    repr: bool = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
    maybe_cls: None = ...,
    these: Optional[Dict[str, Any]] = ...,
    repr_ns: Optional[str] = ...,
    repr: bool = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> Callable[[_C], _C]: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
    maybe_cls: _C,
    *,
    these: Optional[Dict[str, Any]] = ...,
    repr: bool = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[bool] = ...,
    order: Optional[bool] = ...,
    auto_detect: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
    maybe_cls: None = ...,
    *,
    these: Optional[Dict[str, Any]] = ...,
    repr: bool = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[bool] = ...,
    order: Optional[bool] = ...,
    auto_detect: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> Callable[[_C], _C]: ...

mutable = define
frozen = define  # they differ only in their defaults

def fields(cls: Type[AttrsInstance]) -> Any: ...
def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...
def resolve_types(
    cls: _C,
    globalns: Optional[Dict[str, Any]] = ...,
    localns: Optional[Dict[str, Any]] = ...,
    attribs: Optional[List[Attribute[Any]]] = ...,
) -> _C: ...

# TODO: add support for returning a proper attrs class from the mypy plugin
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
# [attr.ib()])` is valid
def make_class(
    name: str,
    attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
    bases: Tuple[type, ...] = ...,
    repr_ns: Optional[str] = ...,
    repr: bool = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    collect_by_mro: bool = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
) -> type: ...

# _funcs --

# TODO: add support for returning TypedDict from the mypy plugin
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
# these:
# https://github.com/python/mypy/issues/4236
# https://github.com/python/typing/issues/253
# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    dict_factory: Type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Optional[
        Callable[[type, Attribute[Any], Any], Any]
    ] = ...,
    tuple_keys: Optional[bool] = ...,
) -> Dict[str, Any]: ...

# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    tuple_factory: Type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...
def has(cls: type) -> bool: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...

# _config --

def set_run_validators(run: bool) -> None: ...
def get_run_validators() -> bool: ...

# aliases --

s = attributes = attrs
ib = attr = attrib
dataclass = attrs  # Technically, partial(attrs, auto_attribs=True) ;)
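A brief sketch of how these stubs are exercised in practice (standard attrs usage; the class and field names are illustrative):

    from typing import List

    import attr

    @attr.s
    class Config:
        # Factory "lies" about its return type (see the stub comments
        # above), so type checkers see `paths: List[str]`.
        paths: List[str] = attr.ib(default=attr.Factory(list))
        retries: int = attr.ib(default=3)

    cfg = Config()
    cfg.paths.append("/tmp/stage")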
@@ -1,8 +1,9 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


import functools
import types

from ._compat import new_class
from ._make import _make_ne


@@ -78,7 +79,9 @@ def cmp_using(
        num_order_functions += 1
        body["__ge__"] = _make_operator("ge", ge)

    type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body))
    type_ = types.new_class(
        class_name, (object,), {}, lambda ns: ns.update(body)
    )

    # Add same type requirement.
    if require_same_type:
13 lib/spack/external/_vendoring/attr/_cmp.pyi vendored Normal file
@@ -0,0 +1,13 @@
from typing import Any, Callable, Optional, Type

_CompareWithType = Callable[[Any, Any], bool]

def cmp_using(
    eq: Optional[_CompareWithType],
    lt: Optional[_CompareWithType],
    le: Optional[_CompareWithType],
    gt: Optional[_CompareWithType],
    ge: Optional[_CompareWithType],
    require_same_type: bool,
    class_name: str,
) -> Type: ...
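The `types.new_class` change above is internal; the public entry point is `attr.cmp_using`. A hedged usage sketch (standard attrs API):

    import attr

    @attr.s
    class Reading:
        # Compare the field with approximate equality instead of bare ==.
        value = attr.ib(eq=attr.cmp_using(eq=lambda a, b: abs(a - b) < 1e-9))

    assert Reading(0.3) == Reading(0.1 + 0.2)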
185 lib/spack/external/_vendoring/attr/_compat.py vendored Normal file
@@ -0,0 +1,185 @@
# SPDX-License-Identifier: MIT


import inspect
import platform
import sys
import threading
import types
import warnings

from collections.abc import Mapping, Sequence  # noqa


PYPY = platform.python_implementation() == "PyPy"
PY36 = sys.version_info[:2] >= (3, 6)
HAS_F_STRINGS = PY36
PY310 = sys.version_info[:2] >= (3, 10)


if PYPY or PY36:
    ordered_dict = dict
else:
    from collections import OrderedDict

    ordered_dict = OrderedDict


def just_warn(*args, **kw):
    warnings.warn(
        "Running interpreter doesn't sufficiently support code object "
        "introspection. Some features like bare super() or accessing "
        "__class__ will not work with slotted classes.",
        RuntimeWarning,
        stacklevel=2,
    )


class _AnnotationExtractor:
    """
    Extract type annotations from a callable, returning None whenever there
    is none.
    """

    __slots__ = ["sig"]

    def __init__(self, callable):
        try:
            self.sig = inspect.signature(callable)
        except (ValueError, TypeError):  # inspect failed
            self.sig = None

    def get_first_param_type(self):
        """
        Return the type annotation of the first argument if it's not empty.
        """
        if not self.sig:
            return None

        params = list(self.sig.parameters.values())
        if params and params[0].annotation is not inspect.Parameter.empty:
            return params[0].annotation

        return None

    def get_return_type(self):
        """
        Return the return type if it's not empty.
        """
        if (
            self.sig
            and self.sig.return_annotation is not inspect.Signature.empty
        ):
            return self.sig.return_annotation

        return None


def make_set_closure_cell():
    """Return a function of two arguments (cell, value) which sets
    the value stored in the closure cell `cell` to `value`.
    """
    # pypy makes this easy. (It also supports the logic below, but
    # why not do the easy/fast thing?)
    if PYPY:

        def set_closure_cell(cell, value):
            cell.__setstate__((value,))

        return set_closure_cell

    # Otherwise gotta do it the hard way.

    # Create a function that will set its first cellvar to `value`.
    def set_first_cellvar_to(value):
        x = value
        return

    # This function will be eliminated as dead code, but
    # not before its reference to `x` forces `x` to be
    # represented as a closure cell rather than a local.
    def force_x_to_be_a_cell():  # pragma: no cover
        return x

    try:
        # Extract the code object and make sure our assumptions about
        # the closure behavior are correct.
        co = set_first_cellvar_to.__code__
        if co.co_cellvars != ("x",) or co.co_freevars != ():
            raise AssertionError  # pragma: no cover

        # Convert this code object to a code object that sets the
        # function's first _freevar_ (not cellvar) to the argument.
        if sys.version_info >= (3, 8):

            def set_closure_cell(cell, value):
                cell.cell_contents = value

        else:
            args = [co.co_argcount]
            args.append(co.co_kwonlyargcount)
            args.extend(
                [
                    co.co_nlocals,
                    co.co_stacksize,
                    co.co_flags,
                    co.co_code,
                    co.co_consts,
                    co.co_names,
                    co.co_varnames,
                    co.co_filename,
                    co.co_name,
                    co.co_firstlineno,
                    co.co_lnotab,
                    # These two arguments are reversed:
                    co.co_cellvars,
                    co.co_freevars,
                ]
            )
            set_first_freevar_code = types.CodeType(*args)

            def set_closure_cell(cell, value):
                # Create a function using the set_first_freevar_code,
                # whose first closure cell is `cell`. Calling it will
                # change the value of that cell.
                setter = types.FunctionType(
                    set_first_freevar_code, {}, "setter", (), (cell,)
                )
                # And call it to set the cell.
                setter(value)

        # Make sure it works on this interpreter:
        def make_func_with_cell():
            x = None

            def func():
                return x  # pragma: no cover

            return func

        cell = make_func_with_cell().__closure__[0]
        set_closure_cell(cell, 100)
        if cell.cell_contents != 100:
            raise AssertionError  # pragma: no cover

    except Exception:
        return just_warn
    else:
        return set_closure_cell


set_closure_cell = make_set_closure_cell()

# Thread-local global to track attrs instances which are already being repr'd.
# This is needed because there is no other (thread-safe) way to pass info
# about the instances that are already being repr'd through the call stack
# in order to ensure we don't perform infinite recursion.
#
# For instance, if an instance contains a dict which contains that instance,
# we need to know that we're already repr'ing the outside instance from within
# the dict's repr() call.
#
# This lives here rather than in _make.py so that the functions in _make.py
# don't have a direct reference to the thread-local in their globals dict.
# If they have such a reference, it breaks cloudpickle.
repr_context = threading.local()
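What the modern branch of make_set_closure_cell relies on, in isolation (plain CPython behavior, not attrs API; names are illustrative):

    def make_counter():
        count = 0

        def counter():
            return count

        return counter

    f = make_counter()
    cell = f.__closure__[0]
    cell.cell_contents = 42  # writable on modern CPython, as used above
    assert f() == 42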
@@ -1,4 +1,4 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


__all__ = ["set_run_validators", "get_run_validators"]
@@ -9,6 +9,10 @@
def set_run_validators(run):
    """
    Set whether or not validators are run. By default, they are run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
        instead.
    """
    if not isinstance(run, bool):
        raise TypeError("'run' must be bool.")
@@ -19,5 +23,9 @@ def set_run_validators(run):
def get_run_validators():
    """
    Return whether or not validators are run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
        instead.
    """
    return _run_validators
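The deprecation notes above point at the newer validators API; a small sketch showing both spellings (standard attrs API, Server is illustrative):

    import attr

    @attr.s
    class Server:
        port = attr.ib(validator=attr.validators.instance_of(int))

    attr.validators.set_disabled(True)   # preferred since 21.3.0
    Server("not-a-port")                 # accepted while disabled
    attr.validators.set_disabled(False)
    assert attr.validators.get_disabled() is False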
@@ -1,8 +1,8 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


import copy

from ._compat import iteritems
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError

@@ -25,7 +25,7 @@ def asdict(
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attr.Attribute` as the first argument and the
        called with the `attrs.Attribute` as the first argument and the
        value as the second argument.
    :param callable dict_factory: A callable to produce dictionaries from. For
        example, to produce ordered dictionaries instead of normal Python
@@ -46,6 +46,8 @@ def asdict(
    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    .. versionadded:: 20.3.0 *value_serializer*
    .. versionadded:: 21.3.0 If a dict has a collection for a key, it is
       serialized as a tuple.
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
@@ -61,11 +63,11 @@ def asdict(
            if has(v.__class__):
                rv[a.name] = asdict(
                    v,
                    True,
                    filter,
                    dict_factory,
                    retain_collection_types,
                    value_serializer,
                    recurse=True,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain_collection_types is True else list
@@ -73,10 +75,11 @@ def asdict(
                    [
                        _asdict_anything(
                            i,
                            filter,
                            dict_factory,
                            retain_collection_types,
                            value_serializer,
                            is_key=False,
                            filter=filter,
                            dict_factory=dict_factory,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        )
                        for i in v
                    ]
@@ -87,20 +90,22 @@ def asdict(
                    (
                        _asdict_anything(
                            kk,
                            filter,
                            df,
                            retain_collection_types,
                            value_serializer,
                            is_key=True,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                        _asdict_anything(
                            vv,
                            filter,
                            df,
                            retain_collection_types,
                            value_serializer,
                            is_key=False,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                    )
                    for kk, vv in iteritems(v)
                    for kk, vv in v.items()
                )
            else:
                rv[a.name] = v
@@ -111,6 +116,7 @@ def asdict(

def _asdict_anything(
    val,
    is_key,
    filter,
    dict_factory,
    retain_collection_types,
@@ -123,22 +129,29 @@ def _asdict_anything(
        # Attrs class.
        rv = asdict(
            val,
            True,
            filter,
            dict_factory,
            retain_collection_types,
            value_serializer,
            recurse=True,
            filter=filter,
            dict_factory=dict_factory,
            retain_collection_types=retain_collection_types,
            value_serializer=value_serializer,
        )
    elif isinstance(val, (tuple, list, set, frozenset)):
        cf = val.__class__ if retain_collection_types is True else list
        if retain_collection_types is True:
            cf = val.__class__
        elif is_key:
            cf = tuple
        else:
            cf = list

        rv = cf(
            [
                _asdict_anything(
                    i,
                    filter,
                    dict_factory,
                    retain_collection_types,
                    value_serializer,
                    is_key=False,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
                for i in val
            ]
@@ -148,13 +161,23 @@ def _asdict_anything(
        rv = df(
            (
                _asdict_anything(
                    kk, filter, df, retain_collection_types, value_serializer
                    kk,
                    is_key=True,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
                _asdict_anything(
                    vv, filter, df, retain_collection_types, value_serializer
                    vv,
                    is_key=False,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
            )
            for kk, vv in iteritems(val)
            for kk, vv in val.items()
        )
    else:
        rv = val
@@ -181,7 +204,7 @@ def astuple(
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attr.Attribute` as the first argument and the
        called with the `attrs.Attribute` as the first argument and the
        value as the second argument.
    :param callable tuple_factory: A callable to produce tuples from. For
        example, to produce lists instead of tuples.
@@ -253,7 +276,7 @@ def astuple(
                        if has(vv.__class__)
                        else vv,
                    )
                    for kk, vv in iteritems(v)
                    for kk, vv in v.items()
                )
            )
        else:
@@ -291,7 +314,9 @@ def assoc(inst, **changes):
        class.

    .. deprecated:: 17.1.0
        Use `evolve` instead.
        Use `attrs.evolve` instead if you can.
        This function will not be removed due to the slightly different approach
        compared to `attrs.evolve`.
    """
    import warnings

@@ -302,7 +327,7 @@ def assoc(inst, **changes):
    )
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in iteritems(changes):
    for k, v in changes.items():
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            raise AttrsAttributeNotFoundError(
@@ -370,18 +395,16 @@ class and you didn't pass any attribs.
    :raise NameError: If types cannot be resolved because of missing variables.

    :returns: *cls* so you can use this function also as a class decorator.
        Please note that you have to apply it **after** `attr.s`. That means
        the decorator has to come in the line **before** `attr.s`.
        Please note that you have to apply it **after** `attrs.define`. That
        means the decorator has to come in the line **before** `attrs.define`.

    .. versionadded:: 20.1.0
    .. versionadded:: 21.1.0 *attribs*

    """
    try:
        # Since calling get_type_hints is expensive we cache whether we've
        # done it already.
        cls.__attrs_types_resolved__
    except AttributeError:
    # Since calling get_type_hints is expensive we cache whether we've
    # done it already.
    if getattr(cls, "__attrs_types_resolved__", None) != cls:
        import typing

        hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
@@ -389,7 +412,9 @@ class and you didn't pass any attribs.
            if field.name in hints:
                # Since fields have been frozen we must work around it.
                _obj_setattr(field, "type", hints[field.name])
        cls.__attrs_types_resolved__ = True
        # We store the class we resolved so that subclasses know they haven't
        # been resolved.
        cls.__attrs_types_resolved__ = cls

    # Return the class so you can use it as a decorator too.
    return cls
File diff suppressed because it is too large
@@ -1,14 +1,24 @@
# SPDX-License-Identifier: MIT

"""
These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
`attr.ib` with different default values.
"""


from functools import partial

from attr.exceptions import UnannotatedAttributeError

from . import setters
from ._make import NOTHING, _frozen_setattrs, attrib, attrs
from ._funcs import asdict as _asdict
from ._funcs import astuple as _astuple
from ._make import (
    NOTHING,
    _frozen_setattrs,
    _ng_default_on_setattr,
    attrib,
    attrs,
)
from .exceptions import UnannotatedAttributeError


def define(
@@ -32,22 +42,45 @@ def define(
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
):
    r"""
    The only behavioral differences are the handling of the *auto_attribs*
    option:
    Define an ``attrs`` class.

    Differences to the classic `attr.s` that it uses underneath:

    - Automatically detect whether or not *auto_attribs* should be `True` (c.f.
      *auto_attribs* parameter).
    - If *frozen* is `False`, run converters and validators when setting an
      attribute by default.
    - *slots=True*

    .. caution::

       Usually this has only upsides and few visible effects in everyday
       programming. But it *can* lead to some surprising behaviors, so please
       make sure to read :term:`slotted classes`.
    - *auto_exc=True*
    - *auto_detect=True*
    - *order=False*
    - Some options that were only relevant on Python 2 or were kept around for
      backwards-compatibility have been removed.

    Please note that these are all defaults and you can change them as you
    wish.

    :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
       exactly like `attr.s`. If left `None`, `attr.s` will try to guess:

       1. If any attributes are annotated and no unannotated `attr.ib`\ s
       1. If any attributes are annotated and no unannotated `attrs.fields`\ s
          are found, it assumes *auto_attribs=True*.
       2. Otherwise it assumes *auto_attribs=False* and tries to collect
          `attr.ib`\ s.
          `attrs.fields`\ s.

    and that mutable classes (``frozen=False``) validate on ``__setattr__``.
    For now, please refer to `attr.s` for the rest of the parameters.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
    """

    def do_it(cls, auto_attribs):
@@ -72,6 +105,7 @@ def do_it(cls, auto_attribs):
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
            match_args=match_args,
        )

    def wrap(cls):
@@ -84,9 +118,9 @@ def wrap(cls):

        had_on_setattr = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes validate on setattr.
        # By default, mutable classes convert & validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = setters.validate
            on_setattr = _ng_default_on_setattr

        # However, if we subclass a frozen class, we inherit the immutability
        # and disable on_setattr.
@@ -156,3 +190,31 @@ def field(
        order=order,
        on_setattr=on_setattr,
    )


def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
    """
    Same as `attr.asdict`, except that collections types are always retained
    and dict is always used as *dict_factory*.

    .. versionadded:: 21.3.0
    """
    return _asdict(
        inst=inst,
        recurse=recurse,
        filter=filter,
        value_serializer=value_serializer,
        retain_collection_types=True,
    )


def astuple(inst, *, recurse=True, filter=None):
    """
    Same as `attr.astuple`, except that collections types are always retained
    and `tuple` is always used as the *tuple_factory*.

    .. versionadded:: 21.3.0
    """
    return _astuple(
        inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
    )
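A sketch of the keyword-only wrappers defined above (assuming attrs >= 21.3, where `attrs.define`/`attrs.asdict` are the public spellings):

    import attrs

    @attrs.define
    class Point:
        x: int
        y: int = 0

    p = Point(1)
    # The NG asdict always retains collection types and uses plain dicts.
    assert attrs.asdict(p) == {"x": 1, "y": 0}
    assert attrs.astuple(p) == (1, 0)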
@@ -1,4 +1,5 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


from functools import total_ordering

@@ -8,7 +9,7 @@

@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo(object):
class VersionInfo:
    """
    A version object that can be compared to tuple of length 1--4:

9 lib/spack/external/_vendoring/attr/_version_info.pyi vendored Normal file
@@ -0,0 +1,9 @@
class VersionInfo:
    @property
    def year(self) -> int: ...
    @property
    def minor(self) -> int: ...
    @property
    def micro(self) -> int: ...
    @property
    def releaselevel(self) -> str: ...
@@ -1,22 +1,21 @@
# SPDX-License-Identifier: MIT

"""
Commonly useful converters.
"""

from __future__ import absolute_import, division, print_function

from ._compat import PY2
import typing

from ._compat import _AnnotationExtractor
from ._make import NOTHING, Factory, pipe


if not PY2:
    import inspect
    import typing


__all__ = [
    "pipe",
    "optional",
    "default_if_none",
    "optional",
    "pipe",
    "to_bool",
]

@@ -39,22 +38,15 @@ def optional_converter(val):
            return None
        return converter(val)

    if not PY2:
        sig = None
        try:
            sig = inspect.signature(converter)
        except (ValueError, TypeError):  # inspect failed
            pass
        if sig:
            params = list(sig.parameters.values())
            if params and params[0].annotation is not inspect.Parameter.empty:
                optional_converter.__annotations__["val"] = typing.Optional[
                    params[0].annotation
                ]
            if sig.return_annotation is not inspect.Signature.empty:
                optional_converter.__annotations__["return"] = typing.Optional[
                    sig.return_annotation
                ]
    xtr = _AnnotationExtractor(converter)

    t = xtr.get_first_param_type()
    if t:
        optional_converter.__annotations__["val"] = typing.Optional[t]

    rt = xtr.get_return_type()
    if rt:
        optional_converter.__annotations__["return"] = typing.Optional[rt]

    return optional_converter

@@ -65,14 +57,14 @@ def default_if_none(default=NOTHING, factory=None):
    result of *factory*.

    :param default: Value to be used if ``None`` is passed. Passing an instance
       of `attr.Factory` is supported, however the ``takes_self`` option
       of `attrs.Factory` is supported, however the ``takes_self`` option
       is *not*.
    :param callable factory: A callable that takes no parameters whose result
       is used if ``None`` is passed.

    :raises TypeError: If **neither** *default* or *factory* is passed.
    :raises TypeError: If **both** *default* and *factory* are passed.
    :raises ValueError: If an instance of `attr.Factory` is passed with
    :raises ValueError: If an instance of `attrs.Factory` is passed with
       ``takes_self=True``.

    .. versionadded:: 18.2.0
@@ -109,3 +101,44 @@ def default_if_none_converter(val):
        return default

    return default_if_none_converter


def to_bool(val):
    """
    Convert "boolean" strings (e.g., from env. vars.) to real booleans.

    Values mapping to :code:`True`:

    - :code:`True`
    - :code:`"true"` / :code:`"t"`
    - :code:`"yes"` / :code:`"y"`
    - :code:`"on"`
    - :code:`"1"`
    - :code:`1`

    Values mapping to :code:`False`:

    - :code:`False`
    - :code:`"false"` / :code:`"f"`
    - :code:`"no"` / :code:`"n"`
    - :code:`"off"`
    - :code:`"0"`
    - :code:`0`

    :raises ValueError: for any other value.

    .. versionadded:: 21.3.0
    """
    if isinstance(val, str):
        val = val.lower()
    truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
    falsy = {False, "false", "f", "no", "n", "off", "0", 0}
    try:
        if val in truthy:
            return True
        if val in falsy:
            return False
    except TypeError:
        # Raised when "val" is not hashable (e.g., lists)
        pass
    raise ValueError("Cannot convert value to bool: {}".format(val))

13 lib/spack/external/_vendoring/attr/converters.pyi vendored Normal file
@@ -0,0 +1,13 @@
from typing import Callable, Optional, TypeVar, overload

from . import _ConverterType

_T = TypeVar("_T")

def pipe(*validators: _ConverterType) -> _ConverterType: ...
def optional(converter: _ConverterType) -> _ConverterType: ...
@overload
def default_if_none(default: _T) -> _ConverterType: ...
@overload
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
def to_bool(val: str) -> bool: ...
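Usage sketch for the converters declared above (standard attrs API; Flags is illustrative):

    import attr
    from attr import converters

    @attr.s
    class Flags:
        verbose = attr.ib(converter=converters.to_bool)
        color = attr.ib(converter=converters.default_if_none(default=True))

    f = Flags(verbose="yes", color=None)
    assert f.verbose is True and f.color is True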
@@ -1,4 +1,4 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


class FrozenError(AttributeError):

17 lib/spack/external/_vendoring/attr/exceptions.pyi vendored Normal file
@@ -0,0 +1,17 @@
from typing import Any

class FrozenError(AttributeError):
    msg: str = ...

class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...

class NotCallableError(TypeError):
    msg: str = ...
    value: Any = ...
    def __init__(self, msg: str, value: Any) -> None: ...
@@ -1,10 +1,9 @@
# SPDX-License-Identifier: MIT

"""
Commonly useful filters for `attr.asdict`.
"""

from __future__ import absolute_import, division, print_function

from ._compat import isclass
from ._make import Attribute


@@ -13,17 +12,17 @@ def _split_what(what):
    Returns a tuple of `frozenset`s of classes and attributes.
    """
    return (
        frozenset(cls for cls in what if isclass(cls)),
        frozenset(cls for cls in what if isinstance(cls, type)),
        frozenset(cls for cls in what if isinstance(cls, Attribute)),
    )


def include(*what):
    """
    Whitelist *what*.
    Include *what*.

    :param what: What to whitelist.
    :type what: `list` of `type` or `attr.Attribute`\\ s
    :param what: What to include.
    :type what: `list` of `type` or `attrs.Attribute`\\ s

    :rtype: `callable`
    """
@@ -37,10 +36,10 @@ def include_(attribute, value):

def exclude(*what):
    """
    Blacklist *what*.
    Exclude *what*.

    :param what: What to blacklist.
    :type what: `list` of classes or `attr.Attribute`\\ s.
    :param what: What to exclude.
    :type what: `list` of classes or `attrs.Attribute`\\ s.

    :rtype: `callable`
    """

6 lib/spack/external/_vendoring/attr/filters.pyi vendored Normal file
@@ -0,0 +1,6 @@
from typing import Any, Union

from . import Attribute, _FilterType

def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
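A usage sketch for these filters (standard attrs API; User is illustrative):

    import attr
    from attr import asdict, filters

    @attr.s
    class User:
        name = attr.ib()
        password = attr.ib()

    u = User("alice", "hunter2")
    # Drop sensitive fields during serialization.
    public = asdict(u, filter=filters.exclude(attr.fields(User).password))
    assert public == {"name": "alice"}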
@@ -1,8 +1,9 @@
# SPDX-License-Identifier: MIT

"""
Commonly used hooks for on_setattr.
"""

from __future__ import absolute_import, division, print_function

from . import _config
from .exceptions import FrozenAttributeError
@@ -67,11 +68,6 @@ def convert(instance, attrib, new_value):
    return new_value


# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
# autodata stopped working, so the docstring is inlined in the API docs.
NO_OP = object()
"""
Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.

Does not work in `pipe` or within lists.

.. versionadded:: 20.1.0
"""

19 lib/spack/external/_vendoring/attr/setters.pyi vendored Normal file
@@ -0,0 +1,19 @@
from typing import Any, NewType, NoReturn, TypeVar, cast

from . import Attribute, _OnSetAttrType

_T = TypeVar("_T")

def frozen(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

# convert is allowed to return Any, because they can be chained using pipe.
def convert(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...

_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType
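A sketch of chaining these hooks with `pipe` (standard attrs API; Port is illustrative):

    import attr
    from attr import setters, validators

    @attr.s
    class Port:
        number = attr.ib(
            converter=int,
            validator=validators.instance_of(int),
            on_setattr=setters.pipe(setters.convert, setters.validate),
        )

    p = Port(8080)
    p.number = "9090"  # converted to int, then validated
    assert p.number == 9090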
@@ -1,30 +1,98 @@
# SPDX-License-Identifier: MIT

"""
Commonly useful validators.
"""

from __future__ import absolute_import, division, print_function

import operator
import re

from contextlib import contextmanager

from ._config import get_run_validators, set_run_validators
from ._make import _AndValidator, and_, attrib, attrs
from .exceptions import NotCallableError


try:
    Pattern = re.Pattern
except AttributeError:  # Python <3.7 lacks a Pattern type.
    Pattern = type(re.compile(""))


__all__ = [
    "and_",
    "deep_iterable",
    "deep_mapping",
    "disabled",
    "ge",
    "get_disabled",
    "gt",
    "in_",
    "instance_of",
    "is_callable",
    "le",
    "lt",
    "matches_re",
    "max_len",
    "min_len",
    "optional",
    "provides",
    "set_disabled",
]


def set_disabled(disabled):
    """
    Globally disable or enable running validators.

    By default, they are run.

    :param disabled: If ``True``, disable running all validators.
    :type disabled: bool

    .. warning::

        This function is not thread-safe!

    .. versionadded:: 21.3.0
    """
    set_run_validators(not disabled)


def get_disabled():
    """
    Return a bool indicating whether validators are currently disabled or not.

    :return: ``True`` if validators are currently disabled.
    :rtype: bool

    .. versionadded:: 21.3.0
    """
    return not get_run_validators()


@contextmanager
def disabled():
    """
    Context manager that disables running validators within its context.

    .. warning::

        This context manager is not thread-safe!

    .. versionadded:: 21.3.0
    """
    set_run_validators(False)
    try:
        yield
    finally:
        set_run_validators(True)


@attrs(repr=False, slots=True, hash=True)
class _InstanceOfValidator(object):
class _InstanceOfValidator:
    type = attrib()

    def __call__(self, inst, attr, value):
@@ -61,16 +129,15 @@ def instance_of(type):
    :type type: type or tuple of types

    :raises TypeError: With a human readable error message, the attribute
        (of type `attr.Attribute`), the expected type, and the value it
        (of type `attrs.Attribute`), the expected type, and the value it
        got.
    """
    return _InstanceOfValidator(type)


@attrs(repr=False, frozen=True, slots=True)
class _MatchesReValidator(object):
    regex = attrib()
    flags = attrib()
class _MatchesReValidator:
    pattern = attrib()
    match_func = attrib()

    def __call__(self, inst, attr, value):
@@ -79,18 +146,18 @@ def __call__(self, inst, attr, value):
        """
        if not self.match_func(value):
            raise ValueError(
                "'{name}' must match regex {regex!r}"
                "'{name}' must match regex {pattern!r}"
                " ({value!r} doesn't)".format(
                    name=attr.name, regex=self.regex.pattern, value=value
                    name=attr.name, pattern=self.pattern.pattern, value=value
                ),
                attr,
                self.regex,
                self.pattern,
                value,
            )

    def __repr__(self):
        return "<matches_re validator for pattern {regex!r}>".format(
            regex=self.regex
        return "<matches_re validator for pattern {pattern!r}>".format(
            pattern=self.pattern
        )


@@ -99,48 +166,51 @@ def matches_re(regex, flags=0, func=None):
    A validator that raises `ValueError` if the initializer is called
    with a string that doesn't match *regex*.

    :param str regex: a regex string to match against
    :param regex: a regex string or precompiled pattern to match against
    :param int flags: flags that will be passed to the underlying re function
        (default 0)
    :param callable func: which underlying `re` function to call (options
        are `re.fullmatch`, `re.search`, `re.match`, default
        is ``None`` which means either `re.fullmatch` or an emulation of
        it on Python 2). For performance reasons, they won't be used directly
        but on a pre-`re.compile`\ ed pattern.
    :param callable func: which underlying `re` function to call. Valid options
        are `re.fullmatch`, `re.search`, and `re.match`; the default ``None``
        means `re.fullmatch`. For performance reasons, the pattern is always
        precompiled using `re.compile`.

    .. versionadded:: 19.2.0
    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
    """
    fullmatch = getattr(re, "fullmatch", None)
    valid_funcs = (fullmatch, None, re.search, re.match)
    valid_funcs = (re.fullmatch, None, re.search, re.match)
    if func not in valid_funcs:
        raise ValueError(
            "'func' must be one of %s."
            % (
            "'func' must be one of {}.".format(
                ", ".join(
                    sorted(
                        e and e.__name__ or "None" for e in set(valid_funcs)
                    )
                ),
            )
        )
    )

    pattern = re.compile(regex, flags)
    if isinstance(regex, Pattern):
        if flags:
            raise TypeError(
                "'flags' can only be used with a string pattern; "
                "pass flags to re.compile() instead"
            )
        pattern = regex
    else:
        pattern = re.compile(regex, flags)

    if func is re.match:
        match_func = pattern.match
    elif func is re.search:
        match_func = pattern.search
    else:
        if fullmatch:
            match_func = pattern.fullmatch
        else:
            pattern = re.compile(r"(?:{})\Z".format(regex), flags)
            match_func = pattern.match
        match_func = pattern.fullmatch

    return _MatchesReValidator(pattern, flags, match_func)
    return _MatchesReValidator(pattern, match_func)
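A sketch of the precompiled-pattern form enabled by this hunk (standard attrs API; Commit is illustrative):

    import re

    import attr
    from attr import validators

    @attr.s
    class Commit:
        # Since 21.3.0 a precompiled pattern is accepted; flags must then
        # be baked into the pattern itself (per the TypeError branch above).
        sha = attr.ib(
            validator=validators.matches_re(re.compile(r"[0-9a-f]{40}"))
        )

    Commit("0" * 40)  # ok; a non-matching string raises ValueError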


@attrs(repr=False, slots=True, hash=True)
class _ProvidesValidator(object):
class _ProvidesValidator:
    interface = attrib()

    def __call__(self, inst, attr, value):
@@ -175,14 +245,14 @@ def provides(interface):
    :type interface: ``zope.interface.Interface``

    :raises TypeError: With a human readable error message, the attribute
        (of type `attr.Attribute`), the expected interface, and the
        (of type `attrs.Attribute`), the expected interface, and the
        value it got.
    """
    return _ProvidesValidator(interface)


@attrs(repr=False, slots=True, hash=True)
class _OptionalValidator(object):
class _OptionalValidator:
    validator = attrib()

    def __call__(self, inst, attr, value):
@@ -216,7 +286,7 @@ def optional(validator):


@attrs(repr=False, slots=True, hash=True)
class _InValidator(object):
class _InValidator:
    options = attrib()

    def __call__(self, inst, attr, value):
@@ -229,7 +299,10 @@ def __call__(self, inst, attr, value):
            raise ValueError(
                "'{name}' must be in {options!r} (got {value!r})".format(
                    name=attr.name, options=self.options, value=value
                )
                ),
                attr,
                self.options,
                value,
            )

    def __repr__(self):
@@ -248,16 +321,20 @@ def in_(options):
    :type options: list, tuple, `enum.Enum`, ...

    :raises ValueError: With a human readable error message, the attribute (of
        type `attr.Attribute`), the expected options, and the value it
        type `attrs.Attribute`), the expected options, and the value it
        got.

    .. versionadded:: 17.1.0
    .. versionchanged:: 22.1.0
       The ValueError was incomplete until now and only contained the human
       readable error message. Now it contains all the information that has
       been promised since 17.1.0.
    """
    return _InValidator(options)


@attrs(repr=False, slots=False, hash=True)
class _IsCallableValidator(object):
class _IsCallableValidator:
    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
@@ -287,14 +364,14 @@ def is_callable():
    .. versionadded:: 19.1.0

    :raises `attr.exceptions.NotCallableError`: With a human readable error
        message containing the attribute (`attr.Attribute`) name,
        message containing the attribute (`attrs.Attribute`) name,
        and the value it got.
    """
    return _IsCallableValidator()


@attrs(repr=False, slots=True, hash=True)
class _DeepIterable(object):
class _DeepIterable:
    member_validator = attrib(validator=is_callable())
    iterable_validator = attrib(
        default=None, validator=optional(is_callable())
@@ -329,7 +406,7 @@ def deep_iterable(member_validator, iterable_validator=None):
    """
    A validator that performs deep validation of an iterable.

    :param member_validator: Validator to apply to iterable members
    :param member_validator: Validator(s) to apply to iterable members
    :param iterable_validator: Validator to apply to iterable itself
        (optional)

@@ -337,11 +414,13 @@ def deep_iterable(member_validator, iterable_validator=None):

    :raises TypeError: if any sub-validators fail
    """
    if isinstance(member_validator, (list, tuple)):
        member_validator = and_(*member_validator)
    return _DeepIterable(member_validator, iterable_validator)


@attrs(repr=False, slots=True, hash=True)
class _DeepMapping(object):
class _DeepMapping:
    key_validator = attrib(validator=is_callable())
    value_validator = attrib(validator=is_callable())
    mapping_validator = attrib(default=None, validator=optional(is_callable()))
@@ -377,3 +456,139 @@ def deep_mapping(key_validator, value_validator, mapping_validator=None):
    :raises TypeError: if any sub-validators fail
    """
    return _DeepMapping(key_validator, value_validator, mapping_validator)


@attrs(repr=False, frozen=True, slots=True)
class _NumberValidator:
    bound = attrib()
    compare_op = attrib()
    compare_func = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.compare_func(value, self.bound):
            raise ValueError(
                "'{name}' must be {op} {bound}: {value}".format(
                    name=attr.name,
                    op=self.compare_op,
                    bound=self.bound,
                    value=value,
                )
            )

    def __repr__(self):
        return "<Validator for x {op} {bound}>".format(
            op=self.compare_op, bound=self.bound
        )


def lt(val):
    """
    A validator that raises `ValueError` if the initializer is called
    with a number larger or equal to *val*.

    :param val: Exclusive upper bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, "<", operator.lt)


def le(val):
    """
    A validator that raises `ValueError` if the initializer is called
    with a number greater than *val*.

    :param val: Inclusive upper bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, "<=", operator.le)
|
||||
|
||||
|
||||
def ge(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a number smaller than *val*.
|
||||
|
||||
:param val: Inclusive lower bound for values
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, ">=", operator.ge)
|
||||
|
||||
|
||||
def gt(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a number smaller or equal to *val*.
|
||||
|
||||
:param val: Exclusive lower bound for values
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, ">", operator.gt)
|
||||
|
||||
|
||||
@attrs(repr=False, frozen=True, slots=True)
|
||||
class _MaxLengthValidator:
|
||||
max_length = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if len(value) > self.max_length:
|
||||
raise ValueError(
|
||||
"Length of '{name}' must be <= {max}: {len}".format(
|
||||
name=attr.name, max=self.max_length, len=len(value)
|
||||
)
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<max_len validator for {max}>".format(max=self.max_length)
|
||||
|
||||
|
||||
def max_len(length):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a string or iterable that is longer than *length*.
|
||||
|
||||
:param int length: Maximum length of the string or iterable
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _MaxLengthValidator(length)
|
||||
|
||||
|
||||
@attrs(repr=False, frozen=True, slots=True)
|
||||
class _MinLengthValidator:
|
||||
min_length = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if len(value) < self.min_length:
|
||||
raise ValueError(
|
||||
"Length of '{name}' must be => {min}: {len}".format(
|
||||
name=attr.name, min=self.min_length, len=len(value)
|
||||
)
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<min_len validator for {min}>".format(min=self.min_length)
|
||||
|
||||
|
||||
def min_len(length):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a string or iterable that is shorter than *length*.
|
||||
|
||||
:param int length: Minimum length of the string or iterable
|
||||
|
||||
.. versionadded:: 22.1.0
|
||||
"""
|
||||
return _MinLengthValidator(length)
|
80
lib/spack/external/_vendoring/attr/validators.pyi
vendored
Normal file
80
lib/spack/external/_vendoring/attr/validators.pyi
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
from typing import (
|
||||
Any,
|
||||
AnyStr,
|
||||
Callable,
|
||||
Container,
|
||||
ContextManager,
|
||||
Iterable,
|
||||
List,
|
||||
Mapping,
|
||||
Match,
|
||||
Optional,
|
||||
Pattern,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
overload,
|
||||
)
|
||||
|
||||
from . import _ValidatorType
|
||||
from . import _ValidatorArgType
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_T1 = TypeVar("_T1")
|
||||
_T2 = TypeVar("_T2")
|
||||
_T3 = TypeVar("_T3")
|
||||
_I = TypeVar("_I", bound=Iterable)
|
||||
_K = TypeVar("_K")
|
||||
_V = TypeVar("_V")
|
||||
_M = TypeVar("_M", bound=Mapping)
|
||||
|
||||
def set_disabled(run: bool) -> None: ...
|
||||
def get_disabled() -> bool: ...
|
||||
def disabled() -> ContextManager[None]: ...
|
||||
|
||||
# To be more precise on instance_of use some overloads.
|
||||
# If there are more than 3 items in the tuple then we fall back to Any
|
||||
@overload
|
||||
def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
|
||||
@overload
|
||||
def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
|
||||
@overload
|
||||
def instance_of(
|
||||
type: Tuple[Type[_T1], Type[_T2]]
|
||||
) -> _ValidatorType[Union[_T1, _T2]]: ...
|
||||
@overload
|
||||
def instance_of(
|
||||
type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
|
||||
) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
|
||||
@overload
|
||||
def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
|
||||
def provides(interface: Any) -> _ValidatorType[Any]: ...
|
||||
def optional(
|
||||
validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
|
||||
) -> _ValidatorType[Optional[_T]]: ...
|
||||
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
|
||||
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
|
||||
def matches_re(
|
||||
regex: Union[Pattern[AnyStr], AnyStr],
|
||||
flags: int = ...,
|
||||
func: Optional[
|
||||
Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
|
||||
] = ...,
|
||||
) -> _ValidatorType[AnyStr]: ...
|
||||
def deep_iterable(
|
||||
member_validator: _ValidatorArgType[_T],
|
||||
iterable_validator: Optional[_ValidatorType[_I]] = ...,
|
||||
) -> _ValidatorType[_I]: ...
|
||||
def deep_mapping(
|
||||
key_validator: _ValidatorType[_K],
|
||||
value_validator: _ValidatorType[_V],
|
||||
mapping_validator: Optional[_ValidatorType[_M]] = ...,
|
||||
) -> _ValidatorType[_M]: ...
|
||||
def is_callable() -> _ValidatorType[_T]: ...
|
||||
def lt(val: _T) -> _ValidatorType[_T]: ...
|
||||
def le(val: _T) -> _ValidatorType[_T]: ...
|
||||
def ge(val: _T) -> _ValidatorType[_T]: ...
|
||||
def gt(val: _T) -> _ValidatorType[_T]: ...
|
||||
def max_len(length: int) -> _ValidatorType[_T]: ...
|
||||
def min_len(length: int) -> _ValidatorType[_T]: ...
|
@@ -1,6 +1,6 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Hynek Schlawack
|
||||
Copyright (c) 2015 Hynek Schlawack and the attrs contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
70
lib/spack/external/_vendoring/attrs/__init__.py
vendored
Normal file
70
lib/spack/external/_vendoring/attrs/__init__.py
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr import (
|
||||
NOTHING,
|
||||
Attribute,
|
||||
Factory,
|
||||
__author__,
|
||||
__copyright__,
|
||||
__description__,
|
||||
__doc__,
|
||||
__email__,
|
||||
__license__,
|
||||
__title__,
|
||||
__url__,
|
||||
__version__,
|
||||
__version_info__,
|
||||
assoc,
|
||||
cmp_using,
|
||||
define,
|
||||
evolve,
|
||||
field,
|
||||
fields,
|
||||
fields_dict,
|
||||
frozen,
|
||||
has,
|
||||
make_class,
|
||||
mutable,
|
||||
resolve_types,
|
||||
validate,
|
||||
)
|
||||
from attr._next_gen import asdict, astuple
|
||||
|
||||
from . import converters, exceptions, filters, setters, validators
|
||||
|
||||
|
||||
__all__ = [
|
||||
"__author__",
|
||||
"__copyright__",
|
||||
"__description__",
|
||||
"__doc__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__title__",
|
||||
"__url__",
|
||||
"__version__",
|
||||
"__version_info__",
|
||||
"asdict",
|
||||
"assoc",
|
||||
"astuple",
|
||||
"Attribute",
|
||||
"cmp_using",
|
||||
"converters",
|
||||
"define",
|
||||
"evolve",
|
||||
"exceptions",
|
||||
"Factory",
|
||||
"field",
|
||||
"fields_dict",
|
||||
"fields",
|
||||
"filters",
|
||||
"frozen",
|
||||
"has",
|
||||
"make_class",
|
||||
"mutable",
|
||||
"NOTHING",
|
||||
"resolve_types",
|
||||
"setters",
|
||||
"validate",
|
||||
"validators",
|
||||
]
|
66
lib/spack/external/_vendoring/attrs/__init__.pyi
vendored
Normal file
66
lib/spack/external/_vendoring/attrs/__init__.pyi
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Mapping,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Type,
|
||||
)
|
||||
|
||||
# Because we need to type our own stuff, we have to make everything from
|
||||
# attr explicitly public too.
|
||||
from attr import __author__ as __author__
|
||||
from attr import __copyright__ as __copyright__
|
||||
from attr import __description__ as __description__
|
||||
from attr import __email__ as __email__
|
||||
from attr import __license__ as __license__
|
||||
from attr import __title__ as __title__
|
||||
from attr import __url__ as __url__
|
||||
from attr import __version__ as __version__
|
||||
from attr import __version_info__ as __version_info__
|
||||
from attr import _FilterType
|
||||
from attr import assoc as assoc
|
||||
from attr import Attribute as Attribute
|
||||
from attr import cmp_using as cmp_using
|
||||
from attr import converters as converters
|
||||
from attr import define as define
|
||||
from attr import evolve as evolve
|
||||
from attr import exceptions as exceptions
|
||||
from attr import Factory as Factory
|
||||
from attr import field as field
|
||||
from attr import fields as fields
|
||||
from attr import fields_dict as fields_dict
|
||||
from attr import filters as filters
|
||||
from attr import frozen as frozen
|
||||
from attr import has as has
|
||||
from attr import make_class as make_class
|
||||
from attr import mutable as mutable
|
||||
from attr import NOTHING as NOTHING
|
||||
from attr import resolve_types as resolve_types
|
||||
from attr import setters as setters
|
||||
from attr import validate as validate
|
||||
from attr import validators as validators
|
||||
|
||||
# TODO: see definition of attr.asdict/astuple
|
||||
def asdict(
|
||||
inst: Any,
|
||||
recurse: bool = ...,
|
||||
filter: Optional[_FilterType[Any]] = ...,
|
||||
dict_factory: Type[Mapping[Any, Any]] = ...,
|
||||
retain_collection_types: bool = ...,
|
||||
value_serializer: Optional[
|
||||
Callable[[type, Attribute[Any], Any], Any]
|
||||
] = ...,
|
||||
tuple_keys: bool = ...,
|
||||
) -> Dict[str, Any]: ...
|
||||
|
||||
# TODO: add support for returning NamedTuple from the mypy plugin
|
||||
def astuple(
|
||||
inst: Any,
|
||||
recurse: bool = ...,
|
||||
filter: Optional[_FilterType[Any]] = ...,
|
||||
tuple_factory: Type[Sequence[Any]] = ...,
|
||||
retain_collection_types: bool = ...,
|
||||
) -> Tuple[Any, ...]: ...
|
3
lib/spack/external/_vendoring/attrs/converters.py
vendored
Normal file
3
lib/spack/external/_vendoring/attrs/converters.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.converters import * # noqa
|
3
lib/spack/external/_vendoring/attrs/exceptions.py
vendored
Normal file
3
lib/spack/external/_vendoring/attrs/exceptions.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.exceptions import * # noqa
|
3
lib/spack/external/_vendoring/attrs/filters.py
vendored
Normal file
3
lib/spack/external/_vendoring/attrs/filters.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.filters import * # noqa
|
0
lib/spack/external/_vendoring/attrs/py.typed
vendored
Normal file
0
lib/spack/external/_vendoring/attrs/py.typed
vendored
Normal file
3
lib/spack/external/_vendoring/attrs/setters.py
vendored
Normal file
3
lib/spack/external/_vendoring/attrs/setters.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.setters import * # noqa
|
3
lib/spack/external/_vendoring/attrs/validators.py
vendored
Normal file
3
lib/spack/external/_vendoring/attrs/validators.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.validators import * # noqa
|
202
lib/spack/external/_vendoring/distro/LICENSE
vendored
Normal file
202
lib/spack/external/_vendoring/distro/LICENSE
vendored
Normal file
@@ -0,0 +1,202 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
54
lib/spack/external/_vendoring/distro/__init__.py
vendored
Normal file
54
lib/spack/external/_vendoring/distro/__init__.py
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
from .distro import (
|
||||
NORMALIZED_DISTRO_ID,
|
||||
NORMALIZED_LSB_ID,
|
||||
NORMALIZED_OS_ID,
|
||||
LinuxDistribution,
|
||||
__version__,
|
||||
build_number,
|
||||
codename,
|
||||
distro_release_attr,
|
||||
distro_release_info,
|
||||
id,
|
||||
info,
|
||||
like,
|
||||
linux_distribution,
|
||||
lsb_release_attr,
|
||||
lsb_release_info,
|
||||
major_version,
|
||||
minor_version,
|
||||
name,
|
||||
os_release_attr,
|
||||
os_release_info,
|
||||
uname_attr,
|
||||
uname_info,
|
||||
version,
|
||||
version_parts,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"NORMALIZED_DISTRO_ID",
|
||||
"NORMALIZED_LSB_ID",
|
||||
"NORMALIZED_OS_ID",
|
||||
"LinuxDistribution",
|
||||
"build_number",
|
||||
"codename",
|
||||
"distro_release_attr",
|
||||
"distro_release_info",
|
||||
"id",
|
||||
"info",
|
||||
"like",
|
||||
"linux_distribution",
|
||||
"lsb_release_attr",
|
||||
"lsb_release_info",
|
||||
"major_version",
|
||||
"minor_version",
|
||||
"name",
|
||||
"os_release_attr",
|
||||
"os_release_info",
|
||||
"uname_attr",
|
||||
"uname_info",
|
||||
"version",
|
||||
"version_parts",
|
||||
]
|
||||
|
||||
__version__ = __version__
|
4
lib/spack/external/_vendoring/distro/__main__.py
vendored
Normal file
4
lib/spack/external/_vendoring/distro/__main__.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
from .distro import main
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@@ -1,3 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2015,2016,2017 Nir Cohen
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@@ -36,40 +37,39 @@
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Iterable,
|
||||
Optional,
|
||||
Sequence,
|
||||
TextIO,
|
||||
Tuple,
|
||||
Type,
|
||||
)
|
||||
|
||||
__version__ = "1.6.0"
|
||||
try:
|
||||
from typing import TypedDict
|
||||
except ImportError:
|
||||
# Python 3.7
|
||||
TypedDict = dict
|
||||
|
||||
# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2
|
||||
# support, can use typing.TYPE_CHECKING instead. See:
|
||||
# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING
|
||||
if False: # pragma: nocover
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Iterable,
|
||||
Optional,
|
||||
Sequence,
|
||||
TextIO,
|
||||
Tuple,
|
||||
Type,
|
||||
TypedDict,
|
||||
Union,
|
||||
)
|
||||
__version__ = "1.8.0"
|
||||
|
||||
VersionDict = TypedDict(
|
||||
"VersionDict", {"major": str, "minor": str, "build_number": str}
|
||||
)
|
||||
InfoDict = TypedDict(
|
||||
"InfoDict",
|
||||
{
|
||||
"id": str,
|
||||
"version": str,
|
||||
"version_parts": VersionDict,
|
||||
"like": str,
|
||||
"codename": str,
|
||||
},
|
||||
)
|
||||
|
||||
class VersionDict(TypedDict):
|
||||
major: str
|
||||
minor: str
|
||||
build_number: str
|
||||
|
||||
|
||||
class InfoDict(TypedDict):
|
||||
id: str
|
||||
version: str
|
||||
version_parts: VersionDict
|
||||
like: str
|
||||
codename: str
|
||||
|
||||
|
||||
_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
|
||||
@@ -85,6 +85,7 @@
|
||||
#: * Value: Normalized value.
|
||||
NORMALIZED_OS_ID = {
|
||||
"ol": "oracle", # Oracle Linux
|
||||
"opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap
|
||||
}
|
||||
|
||||
#: Translation table for normalizing the "Distributor ID" attribute returned by
|
||||
@@ -121,6 +122,26 @@
|
||||
# Pattern for base file name of distro release file
|
||||
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")
|
||||
|
||||
# Base file names to be looked up for if _UNIXCONFDIR is not readable.
|
||||
_DISTRO_RELEASE_BASENAMES = [
|
||||
"SuSE-release",
|
||||
"arch-release",
|
||||
"base-release",
|
||||
"centos-release",
|
||||
"fedora-release",
|
||||
"gentoo-release",
|
||||
"mageia-release",
|
||||
"mandrake-release",
|
||||
"mandriva-release",
|
||||
"mandrivalinux-release",
|
||||
"manjaro-release",
|
||||
"oracle-release",
|
||||
"redhat-release",
|
||||
"rocky-release",
|
||||
"sl-release",
|
||||
"slackware-version",
|
||||
]
|
||||
|
||||
# Base file names to be ignored when searching for distro release file
|
||||
_DISTRO_RELEASE_IGNORE_BASENAMES = (
|
||||
"debian_version",
|
||||
@@ -133,8 +154,7 @@
|
||||
)
|
||||
|
||||
|
||||
def linux_distribution(full_distribution_name=True):
|
||||
# type: (bool) -> Tuple[str, str, str]
|
||||
def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]:
|
||||
"""
|
||||
.. deprecated:: 1.6.0
|
||||
|
||||
@@ -151,7 +171,8 @@ def linux_distribution(full_distribution_name=True):
|
||||
|
||||
* ``version``: The result of :func:`distro.version`.
|
||||
|
||||
* ``codename``: The result of :func:`distro.codename`.
|
||||
* ``codename``: The extra item (usually in parentheses) after the
|
||||
os-release version number, or the result of :func:`distro.codename`.
|
||||
|
||||
The interface of this function is compatible with the original
|
||||
:py:func:`platform.linux_distribution` function, supporting a subset of
|
||||
@@ -176,8 +197,7 @@ def linux_distribution(full_distribution_name=True):
|
||||
return _distro.linux_distribution(full_distribution_name)
|
||||
|
||||
|
||||
def id():
|
||||
# type: () -> str
|
||||
def id() -> str:
|
||||
"""
|
||||
Return the distro ID of the current distribution, as a
|
||||
machine-readable string.
|
||||
@@ -198,8 +218,9 @@ def id():
|
||||
"fedora" Fedora
|
||||
"sles" SUSE Linux Enterprise Server
|
||||
"opensuse" openSUSE
|
||||
"amazon" Amazon Linux
|
||||
"amzn" Amazon Linux
|
||||
"arch" Arch Linux
|
||||
"buildroot" Buildroot
|
||||
"cloudlinux" CloudLinux OS
|
||||
"exherbo" Exherbo Linux
|
||||
"gentoo" GenToo Linux
|
||||
@@ -219,6 +240,9 @@ def id():
|
||||
"netbsd" NetBSD
|
||||
"freebsd" FreeBSD
|
||||
"midnightbsd" MidnightBSD
|
||||
"rocky" Rocky Linux
|
||||
"aix" AIX
|
||||
"guix" Guix System
|
||||
============== =========================================
|
||||
|
||||
If you have a need to get distros for reliable IDs added into this set,
|
||||
@@ -256,8 +280,7 @@ def id():
|
||||
return _distro.id()
|
||||
|
||||
|
||||
def name(pretty=False):
|
||||
# type: (bool) -> str
|
||||
def name(pretty: bool = False) -> str:
|
||||
"""
|
||||
Return the name of the current OS distribution, as a human-readable
|
||||
string.
|
||||
@@ -296,8 +319,7 @@ def name(pretty=False):
|
||||
return _distro.name(pretty)
|
||||
|
||||
|
||||
def version(pretty=False, best=False):
|
||||
# type: (bool, bool) -> str
|
||||
def version(pretty: bool = False, best: bool = False) -> str:
|
||||
"""
|
||||
Return the version of the current OS distribution, as a human-readable
|
||||
string.
|
||||
@@ -313,6 +335,10 @@ def version(pretty=False, best=False):
|
||||
sources in a fixed priority order does not always yield the most precise
|
||||
version (e.g. for Debian 8.2, or CentOS 7.1).
|
||||
|
||||
Some other distributions may not provide this kind of information. In these
|
||||
cases, an empty string would be returned. This behavior can be observed
|
||||
with rolling releases distributions (e.g. Arch Linux).
|
||||
|
||||
The *best* parameter can be used to control the approach for the returned
|
||||
version:
|
||||
|
||||
@@ -341,8 +367,7 @@ def version(pretty=False, best=False):
|
||||
return _distro.version(pretty, best)
|
||||
|
||||
|
||||
def version_parts(best=False):
|
||||
# type: (bool) -> Tuple[str, str, str]
|
||||
def version_parts(best: bool = False) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Return the version of the current OS distribution as a tuple
|
||||
``(major, minor, build_number)`` with items as follows:
|
||||
@@ -359,8 +384,7 @@ def version_parts(best=False):
|
||||
return _distro.version_parts(best)
|
||||
|
||||
|
||||
def major_version(best=False):
|
||||
# type: (bool) -> str
|
||||
def major_version(best: bool = False) -> str:
|
||||
"""
|
||||
Return the major version of the current OS distribution, as a string,
|
||||
if provided.
|
||||
@@ -373,8 +397,7 @@ def major_version(best=False):
|
||||
return _distro.major_version(best)
|
||||
|
||||
|
||||
def minor_version(best=False):
|
||||
# type: (bool) -> str
|
||||
def minor_version(best: bool = False) -> str:
|
||||
"""
|
||||
Return the minor version of the current OS distribution, as a string,
|
||||
if provided.
|
||||
@@ -387,8 +410,7 @@ def minor_version(best=False):
|
||||
return _distro.minor_version(best)
|
||||
|
||||
|
||||
def build_number(best=False):
|
||||
# type: (bool) -> str
|
||||
def build_number(best: bool = False) -> str:
|
||||
"""
|
||||
Return the build number of the current OS distribution, as a string,
|
||||
if provided.
|
||||
@@ -401,8 +423,7 @@ def build_number(best=False):
|
||||
return _distro.build_number(best)
|
||||
|
||||
|
||||
def like():
|
||||
# type: () -> str
|
||||
def like() -> str:
|
||||
"""
|
||||
Return a space-separated list of distro IDs of distributions that are
|
||||
closely related to the current OS distribution in regards to packaging
|
||||
@@ -419,8 +440,7 @@ def like():
|
||||
return _distro.like()
|
||||
|
||||
|
||||
def codename():
|
||||
# type: () -> str
|
||||
def codename() -> str:
|
||||
"""
|
||||
Return the codename for the release of the current OS distribution,
|
||||
as a string.
|
||||
@@ -444,8 +464,7 @@ def codename():
|
||||
return _distro.codename()
|
||||
|
||||
|
||||
def info(pretty=False, best=False):
|
||||
# type: (bool, bool) -> InfoDict
|
||||
def info(pretty: bool = False, best: bool = False) -> InfoDict:
|
||||
"""
|
||||
Return certain machine-readable information items about the current OS
|
||||
distribution in a dictionary, as shown in the following example:
|
||||
@@ -489,8 +508,7 @@ def info(pretty=False, best=False):
|
||||
return _distro.info(pretty, best)
|
||||
|
||||
|
||||
def os_release_info():
|
||||
# type: () -> Dict[str, str]
|
||||
def os_release_info() -> Dict[str, str]:
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information items
|
||||
from the os-release file data source of the current OS distribution.
|
||||
@@ -500,8 +518,7 @@ def os_release_info():
|
||||
return _distro.os_release_info()
|
||||
|
||||
|
||||
def lsb_release_info():
|
||||
# type: () -> Dict[str, str]
|
||||
def lsb_release_info() -> Dict[str, str]:
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information items
|
||||
from the lsb_release command data source of the current OS distribution.
|
||||
@@ -512,8 +529,7 @@ def lsb_release_info():
|
||||
return _distro.lsb_release_info()
|
||||
|
||||
|
||||
def distro_release_info():
|
||||
# type: () -> Dict[str, str]
|
||||
def distro_release_info() -> Dict[str, str]:
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information items
|
||||
from the distro release file data source of the current OS distribution.
|
||||
@@ -523,8 +539,7 @@ def distro_release_info():
|
||||
return _distro.distro_release_info()
|
||||
|
||||
|
||||
def uname_info():
|
||||
# type: () -> Dict[str, str]
|
||||
def uname_info() -> Dict[str, str]:
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information items
|
||||
from the distro release file data source of the current OS distribution.
|
||||
@@ -532,8 +547,7 @@ def uname_info():
|
||||
return _distro.uname_info()
|
||||
|
||||
|
||||
def os_release_attr(attribute):
|
||||
# type: (str) -> str
|
||||
def os_release_attr(attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the os-release file data source
|
||||
of the current OS distribution.
|
||||
@@ -552,8 +566,7 @@ def os_release_attr(attribute):
|
||||
return _distro.os_release_attr(attribute)
|
||||
|
||||
|
||||
def lsb_release_attr(attribute):
|
||||
# type: (str) -> str
|
||||
def lsb_release_attr(attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the lsb_release command output
|
||||
data source of the current OS distribution.
|
||||
@@ -573,8 +586,7 @@ def lsb_release_attr(attribute):
|
||||
return _distro.lsb_release_attr(attribute)
|
||||
|
||||
|
||||
def distro_release_attr(attribute):
|
||||
# type: (str) -> str
|
||||
def distro_release_attr(attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the distro release file
|
||||
data source of the current OS distribution.
|
||||
@@ -593,8 +605,7 @@ def distro_release_attr(attribute):
|
||||
return _distro.distro_release_attr(attribute)
|
||||
|
||||
|
||||
def uname_attr(attribute):
|
||||
# type: (str) -> str
|
||||
def uname_attr(attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the distro release file
|
||||
data source of the current OS distribution.
|
||||
@@ -615,25 +626,23 @@ def uname_attr(attribute):
|
||||
from functools import cached_property
|
||||
except ImportError:
|
||||
# Python < 3.8
|
||||
class cached_property(object): # type: ignore
|
||||
class cached_property: # type: ignore
|
||||
"""A version of @property which caches the value. On access, it calls the
|
||||
underlying function and sets the value in `__dict__` so future accesses
|
||||
will not re-call the property.
|
||||
"""
|
||||
|
||||
def __init__(self, f):
|
||||
# type: (Callable[[Any], Any]) -> None
|
||||
def __init__(self, f: Callable[[Any], Any]) -> None:
|
||||
self._fname = f.__name__
|
||||
self._f = f
|
||||
|
||||
def __get__(self, obj, owner):
|
||||
# type: (Any, Type[Any]) -> Any
|
||||
assert obj is not None, "call {} on an instance".format(self._fname)
|
||||
def __get__(self, obj: Any, owner: Type[Any]) -> Any:
|
||||
assert obj is not None, f"call {self._fname} on an instance"
|
||||
ret = obj.__dict__[self._fname] = self._f(obj)
|
||||
return ret
|
||||
|
||||
|
||||
class LinuxDistribution(object):
|
||||
class LinuxDistribution:
|
||||
"""
|
||||
Provides information about a OS distribution.
|
||||
|
||||
@@ -653,13 +662,13 @@ class LinuxDistribution(object):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
include_lsb=True,
|
||||
os_release_file="",
|
||||
distro_release_file="",
|
||||
include_uname=True,
|
||||
root_dir=None,
|
||||
):
|
||||
# type: (bool, str, str, bool, Optional[str]) -> None
|
||||
include_lsb: Optional[bool] = None,
|
||||
os_release_file: str = "",
|
||||
distro_release_file: str = "",
|
||||
include_uname: Optional[bool] = None,
|
||||
root_dir: Optional[str] = None,
|
||||
include_oslevel: Optional[bool] = None,
|
||||
) -> None:
|
||||
"""
|
||||
The initialization method of this class gathers information from the
|
||||
available data sources, and stores that in private instance attributes.
|
||||
@@ -699,7 +708,13 @@ def __init__(
|
||||
be empty.
|
||||
|
||||
* ``root_dir`` (string): The absolute path to the root directory to use
|
||||
to find distro-related information files.
|
||||
to find distro-related information files. Note that ``include_*``
|
||||
parameters must not be enabled in combination with ``root_dir``.
|
||||
|
||||
* ``include_oslevel`` (bool): Controls whether (AIX) oslevel command
|
||||
output is included as a data source. If the oslevel command is not
|
||||
available in the program execution path the data source will be
|
||||
empty.
|
||||
|
||||
Public instance attributes:
|
||||
|
||||
@@ -718,14 +733,21 @@ def __init__(
|
||||
parameter. This controls whether the uname information will
|
||||
be loaded.
|
||||
|
||||
* ``include_oslevel`` (bool): The result of the ``include_oslevel``
|
||||
parameter. This controls whether (AIX) oslevel information will be
|
||||
loaded.
|
||||
|
||||
* ``root_dir`` (string): The result of the ``root_dir`` parameter.
|
||||
The absolute path to the root directory to use to find distro-related
|
||||
information files.
|
||||
|
||||
Raises:
|
||||
|
||||
* :py:exc:`IOError`: Some I/O issue with an os-release file or distro
|
||||
release file.
|
||||
* :py:exc:`ValueError`: Initialization parameters combination is not
|
||||
supported.
|
||||
|
||||
* :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
|
||||
some issue (other than not being available in the program execution
|
||||
path).
|
||||
* :py:exc:`OSError`: Some I/O issue with an os-release file or distro
|
||||
release file.
|
||||
|
||||
* :py:exc:`UnicodeError`: A data source has unexpected characters or
|
||||
uses an unexpected encoding.
|
||||
@@ -754,11 +776,24 @@ def __init__(
|
||||
self.os_release_file = usr_lib_os_release_file
|
||||
|
||||
self.distro_release_file = distro_release_file or "" # updated later
|
||||
self.include_lsb = include_lsb
|
||||
self.include_uname = include_uname
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
is_root_dir_defined = root_dir is not None
|
||||
if is_root_dir_defined and (include_lsb or include_uname or include_oslevel):
|
||||
raise ValueError(
|
||||
"Including subprocess data sources from specific root_dir is disallowed"
|
||||
" to prevent false information"
|
||||
)
|
||||
self.include_lsb = (
|
||||
include_lsb if include_lsb is not None else not is_root_dir_defined
|
||||
)
|
||||
self.include_uname = (
|
||||
include_uname if include_uname is not None else not is_root_dir_defined
|
||||
)
|
||||
self.include_oslevel = (
|
||||
include_oslevel if include_oslevel is not None else not is_root_dir_defined
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Return repr of all info"""
|
||||
return (
|
||||
"LinuxDistribution("
|
||||
@@ -766,14 +801,18 @@ def __repr__(self):
|
||||
"distro_release_file={self.distro_release_file!r}, "
|
||||
"include_lsb={self.include_lsb!r}, "
|
||||
"include_uname={self.include_uname!r}, "
|
||||
"include_oslevel={self.include_oslevel!r}, "
|
||||
"root_dir={self.root_dir!r}, "
|
||||
"_os_release_info={self._os_release_info!r}, "
|
||||
"_lsb_release_info={self._lsb_release_info!r}, "
|
||||
"_distro_release_info={self._distro_release_info!r}, "
|
||||
"_uname_info={self._uname_info!r})".format(self=self)
|
||||
"_uname_info={self._uname_info!r}, "
|
||||
"_oslevel_info={self._oslevel_info!r})".format(self=self)
|
||||
)
|
||||
|
||||
def linux_distribution(self, full_distribution_name=True):
|
||||
# type: (bool) -> Tuple[str, str, str]
|
||||
def linux_distribution(
|
||||
self, full_distribution_name: bool = True
|
||||
) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Return information about the OS distribution that is compatible
|
||||
with Python's :func:`platform.linux_distribution`, supporting a subset
|
||||
@@ -784,18 +823,16 @@ def linux_distribution(self, full_distribution_name=True):
|
||||
return (
|
||||
self.name() if full_distribution_name else self.id(),
|
||||
self.version(),
|
||||
self.codename(),
|
||||
self._os_release_info.get("release_codename") or self.codename(),
|
||||
)
|
||||
|
||||
def id(self):
|
||||
# type: () -> str
|
||||
def id(self) -> str:
|
||||
"""Return the distro ID of the OS distribution, as a string.
|
||||
|
||||
For details, see :func:`distro.id`.
|
||||
"""
|
||||
|
||||
def normalize(distro_id, table):
|
||||
# type: (str, Dict[str, str]) -> str
|
||||
def normalize(distro_id: str, table: Dict[str, str]) -> str:
|
||||
distro_id = distro_id.lower().replace(" ", "_")
|
||||
return table.get(distro_id, distro_id)
|
||||
|
||||
@@ -817,8 +854,7 @@ def normalize(distro_id, table):
|
||||
|
||||
return ""
|
||||
|
||||
def name(self, pretty=False):
|
||||
# type: (bool) -> str
|
||||
def name(self, pretty: bool = False) -> str:
|
||||
"""
|
||||
Return the name of the OS distribution, as a string.
|
||||
|
||||
@@ -838,11 +874,10 @@ def name(self, pretty=False):
|
||||
name = self.distro_release_attr("name") or self.uname_attr("name")
|
||||
version = self.version(pretty=True)
|
||||
if version:
|
||||
name = name + " " + version
|
||||
name = f"{name} {version}"
|
||||
return name or ""
|
||||
|
||||
def version(self, pretty=False, best=False):
|
||||
# type: (bool, bool) -> str
|
||||
def version(self, pretty: bool = False, best: bool = False) -> str:
|
||||
"""
|
||||
Return the version of the OS distribution, as a string.
|
||||
|
||||
@@ -860,6 +895,12 @@ def version(self, pretty=False, best=False):
|
||||
).get("version_id", ""),
|
||||
self.uname_attr("release"),
|
||||
]
|
||||
if self.uname_attr("id").startswith("aix"):
|
||||
# On AIX platforms, prefer oslevel command output.
|
||||
versions.insert(0, self.oslevel_info())
|
||||
elif self.id() == "debian" or "debian" in self.like().split():
|
||||
# On Debian-like, add debian_version file content to candidates list.
|
||||
versions.append(self._debian_version)
|
||||
version = ""
|
||||
if best:
|
||||
# This algorithm uses the last version in priority order that has
|
||||
@@ -875,11 +916,10 @@ def version(self, pretty=False, best=False):
|
||||
version = v
|
||||
break
|
||||
if pretty and version and self.codename():
|
||||
version = "{0} ({1})".format(version, self.codename())
|
||||
version = f"{version} ({self.codename()})"
|
||||
return version
|
||||
|
||||
def version_parts(self, best=False):
|
||||
# type: (bool) -> Tuple[str, str, str]
|
||||
def version_parts(self, best: bool = False) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Return the version of the OS distribution, as a tuple of version
|
||||
numbers.
|
||||
@@ -895,8 +935,7 @@ def version_parts(self, best=False):
|
||||
return major, minor or "", build_number or ""
|
||||
return "", "", ""
|
||||
|
||||
def major_version(self, best=False):
|
||||
# type: (bool) -> str
|
||||
def major_version(self, best: bool = False) -> str:
|
||||
"""
|
||||
Return the major version number of the current distribution.
|
||||
|
||||
@@ -904,8 +943,7 @@ def major_version(self, best=False):
|
||||
"""
|
||||
return self.version_parts(best)[0]
|
||||
|
||||
def minor_version(self, best=False):
|
||||
# type: (bool) -> str
|
||||
def minor_version(self, best: bool = False) -> str:
|
||||
"""
|
||||
Return the minor version number of the current distribution.
|
||||
|
||||
@@ -913,8 +951,7 @@ def minor_version(self, best=False):
|
||||
"""
|
||||
return self.version_parts(best)[1]
|
||||
|
||||
def build_number(self, best=False):
|
||||
# type: (bool) -> str
|
||||
def build_number(self, best: bool = False) -> str:
|
||||
"""
|
||||
Return the build number of the current distribution.
|
||||
|
||||
@@ -922,8 +959,7 @@ def build_number(self, best=False):
|
||||
"""
|
||||
return self.version_parts(best)[2]
|
||||
|
||||
def like(self):
|
||||
# type: () -> str
|
||||
def like(self) -> str:
|
||||
"""
|
||||
Return the IDs of distributions that are like the OS distribution.
|
||||
|
||||
@@ -931,8 +967,7 @@ def like(self):
|
||||
"""
|
||||
return self.os_release_attr("id_like") or ""
|
||||
|
||||
def codename(self):
|
||||
# type: () -> str
|
||||
def codename(self) -> str:
|
||||
"""
|
||||
Return the codename of the OS distribution.
|
||||
|
||||
@@ -949,8 +984,7 @@ def codename(self):
|
||||
or ""
|
||||
)
|
||||
|
||||
def info(self, pretty=False, best=False):
|
||||
# type: (bool, bool) -> InfoDict
|
||||
def info(self, pretty: bool = False, best: bool = False) -> InfoDict:
|
||||
"""
|
||||
Return certain machine-readable information about the OS
|
||||
distribution.
|
||||
@@ -969,8 +1003,7 @@ def info(self, pretty=False, best=False):
|
||||
codename=self.codename(),
|
||||
)
|
||||
|
||||
def os_release_info(self):
|
||||
# type: () -> Dict[str, str]
|
||||
def os_release_info(self) -> Dict[str, str]:
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information
|
||||
items from the os-release file data source of the OS distribution.
|
||||
@@ -979,8 +1012,7 @@ def os_release_info(self):
|
||||
"""
|
||||
return self._os_release_info
|
||||
|
||||
def lsb_release_info(self):
|
||||
# type: () -> Dict[str, str]
|
||||
def lsb_release_info(self) -> Dict[str, str]:
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information
|
||||
items from the lsb_release command data source of the OS
|
||||
@@ -990,8 +1022,7 @@ def lsb_release_info(self):
|
||||
"""
|
||||
return self._lsb_release_info
|
||||
|
||||
def distro_release_info(self):
|
||||
# type: () -> Dict[str, str]
|
||||
def distro_release_info(self) -> Dict[str, str]:
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information
|
||||
items from the distro release file data source of the OS
|
||||
@@ -1001,8 +1032,7 @@ def distro_release_info(self):
|
||||
"""
|
||||
return self._distro_release_info
|
||||
|
||||
def uname_info(self):
|
||||
# type: () -> Dict[str, str]
|
||||
def uname_info(self) -> Dict[str, str]:
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information
|
||||
items from the uname command data source of the OS distribution.
|
||||
@@ -1011,8 +1041,13 @@ def uname_info(self):
|
||||
"""
|
||||
return self._uname_info
|
||||
|
||||
def os_release_attr(self, attribute):
|
||||
# type: (str) -> str
|
||||
def oslevel_info(self) -> str:
|
||||
"""
|
||||
Return AIX' oslevel command output.
|
||||
"""
|
||||
return self._oslevel_info
|
||||
|
||||
def os_release_attr(self, attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the os-release file data
|
||||
source of the OS distribution.
|
||||
@@ -1021,8 +1056,7 @@ def os_release_attr(self, attribute):
|
||||
"""
|
||||
return self._os_release_info.get(attribute, "")
|
||||
|
||||
def lsb_release_attr(self, attribute):
|
||||
# type: (str) -> str
|
||||
def lsb_release_attr(self, attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the lsb_release command
|
||||
output data source of the OS distribution.
|
||||
@@ -1031,8 +1065,7 @@ def lsb_release_attr(self, attribute):
|
||||
"""
|
||||
return self._lsb_release_info.get(attribute, "")
|
||||
|
||||
def distro_release_attr(self, attribute):
|
||||
# type: (str) -> str
|
||||
def distro_release_attr(self, attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the distro release file
|
||||
data source of the OS distribution.
|
||||
@@ -1041,8 +1074,7 @@ def distro_release_attr(self, attribute):
|
||||
"""
|
||||
return self._distro_release_info.get(attribute, "")
|
||||
|
||||
def uname_attr(self, attribute):
|
||||
# type: (str) -> str
|
||||
    def uname_attr(self, attribute: str) -> str:
        """
        Return a single named information item from the uname command
        output data source of the OS distribution.
@@ -1052,8 +1084,7 @@ def uname_attr(self, attribute):
        return self._uname_info.get(attribute, "")

    @cached_property
    def _os_release_info(self):
        # type: () -> Dict[str, str]
    def _os_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the specified os-release file.

@@ -1061,13 +1092,12 @@ def _os_release_info(self):
        A dictionary containing all information items.
        """
        if os.path.isfile(self.os_release_file):
            with open(self.os_release_file) as release_file:
            with open(self.os_release_file, encoding="utf-8") as release_file:
                return self._parse_os_release_content(release_file)
        return {}

    @staticmethod
    def _parse_os_release_content(lines):
        # type: (TextIO) -> Dict[str, str]
    def _parse_os_release_content(lines: TextIO) -> Dict[str, str]:
        """
        Parse the lines of an os-release file.

@@ -1084,16 +1114,6 @@ def _parse_os_release_content(lines):
        lexer = shlex.shlex(lines, posix=True)
        lexer.whitespace_split = True

        # The shlex module defines its `wordchars` variable using literals,
        # making it dependent on the encoding of the Python source file.
        # In Python 2.6 and 2.7, the shlex source file is encoded in
        # 'iso-8859-1', and the `wordchars` variable is defined as a byte
        # string. This causes a UnicodeDecodeError to be raised when the
        # parsed content is a unicode object. The following fix resolves that
        # (... but it should be fixed in shlex...):
        if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
            lexer.wordchars = lexer.wordchars.decode("iso-8859-1")

        tokens = list(lexer)
        for token in tokens:
            # At this point, all shell-like parsing has been done (i.e.
@@ -1102,12 +1122,17 @@ def _parse_os_release_content(lines):
            # stripped, etc.), so the tokens are now either:
            # * variable assignments: var=value
            # * commands or their arguments (not allowed in os-release)
            # Ignore any tokens that are not variable assignments
            if "=" in token:
                k, v = token.split("=", 1)
                props[k.lower()] = v
            else:
                # Ignore any tokens that are not variable assignments
                pass

        if "version" in props:
            # extract release codename (if any) from version attribute
            match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"])
            if match:
                release_codename = match.group(1) or match.group(2)
                props["codename"] = props["release_codename"] = release_codename

        if "version_codename" in props:
            # os-release added a version_codename field. Use that in
@@ -1118,22 +1143,11 @@ def _parse_os_release_content(lines):
        elif "ubuntu_codename" in props:
            # Same as above but a non-standard field name used on older Ubuntus
            props["codename"] = props["ubuntu_codename"]
        elif "version" in props:
            # If there is no version_codename, parse it from the version
            match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"])
            if match:
                codename = match.group()
                codename = codename.strip("()")
                codename = codename.strip(",")
                codename = codename.strip()
                # codename appears within paranthese.
                props["codename"] = codename

        return props

    @cached_property
    def _lsb_release_info(self):
        # type: () -> Dict[str, str]
    def _lsb_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the lsb_release command output.

@@ -1142,19 +1156,17 @@ def _lsb_release_info(self):
        """
        if not self.include_lsb:
            return {}
        with open(os.devnull, "wb") as devnull:
            try:
                cmd = ("lsb_release", "-a")
                stdout = subprocess.check_output(cmd, stderr=devnull)
            # Command not found or lsb_release returned error
            except (OSError, subprocess.CalledProcessError):
                return {}
        try:
            cmd = ("lsb_release", "-a")
            stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
        # Command not found or lsb_release returned error
        except (OSError, subprocess.CalledProcessError):
            return {}
        content = self._to_str(stdout).splitlines()
        return self._parse_lsb_release_content(content)

    @staticmethod
    def _parse_lsb_release_content(lines):
        # type: (Iterable[str]) -> Dict[str, str]
    def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]:
        """
        Parse the output of the lsb_release command.

@@ -1178,20 +1190,41 @@ def _parse_lsb_release_content(lines):
        return props

    @cached_property
    def _uname_info(self):
        # type: () -> Dict[str, str]
        with open(os.devnull, "wb") as devnull:
            try:
                cmd = ("uname", "-rs")
                stdout = subprocess.check_output(cmd, stderr=devnull)
            except OSError:
                return {}
    def _uname_info(self) -> Dict[str, str]:
        if not self.include_uname:
            return {}
        try:
            cmd = ("uname", "-rs")
            stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
        except OSError:
            return {}
        content = self._to_str(stdout).splitlines()
        return self._parse_uname_content(content)

    @cached_property
    def _oslevel_info(self) -> str:
        if not self.include_oslevel:
            return ""
        try:
            stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL)
        except (OSError, subprocess.CalledProcessError):
            return ""
        return self._to_str(stdout).strip()

    @cached_property
    def _debian_version(self) -> str:
        try:
            with open(
                os.path.join(self.etc_dir, "debian_version"), encoding="ascii"
            ) as fp:
                return fp.readline().rstrip()
        except FileNotFoundError:
            return ""

    @staticmethod
    def _parse_uname_content(lines):
        # type: (Sequence[str]) -> Dict[str, str]
    def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]:
        if not lines:
            return {}
        props = {}
        match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
        if match:
@@ -1208,23 +1241,12 @@ def _parse_uname_content(lines):
        return props

    @staticmethod
    def _to_str(text):
        # type: (Union[bytes, str]) -> str
    def _to_str(bytestring: bytes) -> str:
        encoding = sys.getfilesystemencoding()
        encoding = "utf-8" if encoding == "ascii" else encoding

        if sys.version_info[0] >= 3:
            if isinstance(text, bytes):
                return text.decode(encoding)
        else:
            if isinstance(text, unicode):  # noqa
                return text.encode(encoding)

        return text
        return bytestring.decode(encoding)

    @cached_property
    def _distro_release_info(self):
        # type: () -> Dict[str, str]
    def _distro_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the specified distro release file.

@@ -1241,14 +1263,14 @@ def _distro_release_info(self):
            # file), because we want to use what was specified as best as
            # possible.
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
            if "name" in distro_info and "cloudlinux" in distro_info["name"].lower():
                distro_info["id"] = "cloudlinux"
            elif match:
                distro_info["id"] = match.group(1)
            return distro_info
        else:
            try:
                basenames = os.listdir(self.etc_dir)
                basenames = [
                    basename
                    for basename in os.listdir(self.etc_dir)
                    if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
                    and os.path.isfile(os.path.join(self.etc_dir, basename))
                ]
                # We sort for repeatability in cases where there are multiple
                # distro specific files; e.g. CentOS, Oracle, Enterprise all
                # containing `redhat-release` on top of their own.
@@ -1258,41 +1280,31 @@ def _distro_release_info(self):
                # sure about the *-release files. Check common entries of
                # /etc for information. If they turn out to not be there the
                # error is handled in `_parse_distro_release_file()`.
                basenames = [
                    "SuSE-release",
                    "arch-release",
                    "base-release",
                    "centos-release",
                    "fedora-release",
                    "gentoo-release",
                    "mageia-release",
                    "mandrake-release",
                    "mandriva-release",
                    "mandrivalinux-release",
                    "manjaro-release",
                    "oracle-release",
                    "redhat-release",
                    "sl-release",
                    "slackware-version",
                ]
                basenames = _DISTRO_RELEASE_BASENAMES
            for basename in basenames:
                if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
                    continue
                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
                if match:
                    filepath = os.path.join(self.etc_dir, basename)
                    distro_info = self._parse_distro_release_file(filepath)
                    if "name" in distro_info:
                        # The name is always present if the pattern matches
                        self.distro_release_file = filepath
                        distro_info["id"] = match.group(1)
                        if "cloudlinux" in distro_info["name"].lower():
                            distro_info["id"] = "cloudlinux"
                        return distro_info
            return {}
                if match is None:
                    continue
                filepath = os.path.join(self.etc_dir, basename)
                distro_info = self._parse_distro_release_file(filepath)
                # The name is always present if the pattern matches.
                if "name" not in distro_info:
                    continue
                self.distro_release_file = filepath
                break
            else:  # the loop didn't "break": no candidate.
                return {}

    def _parse_distro_release_file(self, filepath):
        # type: (str) -> Dict[str, str]
        if match is not None:
            distro_info["id"] = match.group(1)

        # CloudLinux < 7: manually enrich info with proper id.
        if "cloudlinux" in distro_info.get("name", "").lower():
            distro_info["id"] = "cloudlinux"

        return distro_info

    def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]:
        """
        Parse a distro release file.

@@ -1304,19 +1316,18 @@ def _parse_distro_release_file(self, filepath):
        A dictionary containing all information items.
        """
        try:
            with open(filepath) as fp:
            with open(filepath, encoding="utf-8") as fp:
                # Only parse the first line. For instance, on SLES there
                # are multiple lines. We don't want them...
                return self._parse_distro_release_content(fp.readline())
        except (OSError, IOError):
        except OSError:
            # Ignore not being able to read a specific, seemingly version
            # related file.
            # See https://github.com/python-distro/distro/issues/162
            return {}

    @staticmethod
    def _parse_distro_release_content(line):
        # type: (str) -> Dict[str, str]
    def _parse_distro_release_content(line: str) -> Dict[str, str]:
        """
        Parse a line from a distro release file.

@@ -1344,8 +1355,7 @@ def _parse_distro_release_content(line):
_distro = LinuxDistribution()


def main():
    # type: () -> None
def main() -> None:
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler(sys.stdout))
@@ -1367,7 +1377,10 @@ def main():

    if args.root_dir:
        dist = LinuxDistribution(
            include_lsb=False, include_uname=False, root_dir=args.root_dir
            include_lsb=False,
            include_uname=False,
            include_oslevel=False,
            root_dir=args.root_dir,
        )
    else:
        dist = _distro
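For orientation, the cached properties changed above feed the public distro API: os-release is consulted first, with lsb_release, uname, and (new in this diff) oslevel/debian_version as fallbacks or supplements. A minimal sketch of the consumer side (illustrative use of the library, not part of the diff; the printed values are examples):

import distro

print(distro.id())        # e.g. "ubuntu"
print(distro.version())   # e.g. "20.04"
print(distro.codename())  # e.g. "focal"
print(distro.os_release_attr("pretty_name"))  # read from /etc/os-release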
lib/spack/external/_vendoring/distro/py.typed (vendored, new file, 0 lines)
lib/spack/external/_vendoring/jinja2/__init__.py (vendored, new file, 45 lines)
@@ -0,0 +1,45 @@
"""Jinja is a template engine written in pure Python. It provides a
non-XML syntax that supports inline expressions and an optional
sandboxed environment.
"""
from .bccache import BytecodeCache as BytecodeCache
from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache
from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache
from .environment import Environment as Environment
from .environment import Template as Template
from .exceptions import TemplateAssertionError as TemplateAssertionError
from .exceptions import TemplateError as TemplateError
from .exceptions import TemplateNotFound as TemplateNotFound
from .exceptions import TemplateRuntimeError as TemplateRuntimeError
from .exceptions import TemplatesNotFound as TemplatesNotFound
from .exceptions import TemplateSyntaxError as TemplateSyntaxError
from .exceptions import UndefinedError as UndefinedError
from .filters import contextfilter
from .filters import environmentfilter
from .filters import evalcontextfilter
from .loaders import BaseLoader as BaseLoader
from .loaders import ChoiceLoader as ChoiceLoader
from .loaders import DictLoader as DictLoader
from .loaders import FileSystemLoader as FileSystemLoader
from .loaders import FunctionLoader as FunctionLoader
from .loaders import ModuleLoader as ModuleLoader
from .loaders import PackageLoader as PackageLoader
from .loaders import PrefixLoader as PrefixLoader
from .runtime import ChainableUndefined as ChainableUndefined
from .runtime import DebugUndefined as DebugUndefined
from .runtime import make_logging_undefined as make_logging_undefined
from .runtime import StrictUndefined as StrictUndefined
from .runtime import Undefined as Undefined
from .utils import clear_caches as clear_caches
from .utils import contextfunction
from .utils import environmentfunction
from .utils import escape
from .utils import evalcontextfunction
from .utils import is_undefined as is_undefined
from .utils import Markup
from .utils import pass_context as pass_context
from .utils import pass_environment as pass_environment
from .utils import pass_eval_context as pass_eval_context
from .utils import select_autoescape as select_autoescape

__version__ = "3.0.3"
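The `from .x import Y as Y` spelling above is the PEP 484 idiom for marking explicit re-exports, so strict type checkers accept these names as part of the package's public interface; the names imported without the alias (contextfilter, escape, Markup, ...) appear to be the deprecated 2.x shims. A short sketch of the consumer side, assuming a mypy setup with implicit re-exports disabled:

# mypy.ini (assumed): implicit_reexport = False
from jinja2 import DictLoader, Environment  # accepted: re-exported as "X as X"

env = Environment(loader=DictLoader({"hello.txt": "Hello {{ name }}!"}))
print(env.get_template("hello.txt").render(name="Spack"))  # Hello Spack!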
lib/spack/external/_vendoring/jinja2/async_utils.py (vendored, new file, 75 lines)
@@ -0,0 +1,75 @@
import inspect
import typing as t
from functools import wraps

from .utils import _PassArg
from .utils import pass_eval_context

V = t.TypeVar("V")


def async_variant(normal_func):  # type: ignore
    def decorator(async_func):  # type: ignore
        pass_arg = _PassArg.from_obj(normal_func)
        need_eval_context = pass_arg is None

        if pass_arg is _PassArg.environment:

            def is_async(args: t.Any) -> bool:
                return t.cast(bool, args[0].is_async)

        else:

            def is_async(args: t.Any) -> bool:
                return t.cast(bool, args[0].environment.is_async)

        @wraps(normal_func)
        def wrapper(*args, **kwargs):  # type: ignore
            b = is_async(args)

            if need_eval_context:
                args = args[1:]

            if b:
                return async_func(*args, **kwargs)

            return normal_func(*args, **kwargs)

        if need_eval_context:
            wrapper = pass_eval_context(wrapper)

        wrapper.jinja_async_variant = True
        return wrapper

    return decorator


_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)}


async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V":
    # Avoid a costly call to isawaitable
    if type(value) in _common_primitives:
        return t.cast("V", value)

    if inspect.isawaitable(value):
        return await t.cast("t.Awaitable[V]", value)

    return t.cast("V", value)


async def auto_aiter(
    iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
) -> "t.AsyncIterator[V]":
    if hasattr(iterable, "__aiter__"):
        async for item in t.cast("t.AsyncIterable[V]", iterable):
            yield item
    else:
        for item in t.cast("t.Iterable[V]", iterable):
            yield item


async def auto_to_list(
    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
) -> t.List["V"]:
    return [x async for x in auto_aiter(value)]
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""The optional bytecode cache system. This is useful if you have very
complex template situations and the compilation of all those templates
slows down your application too much.
@@ -8,22 +7,30 @@
"""
import errno
import fnmatch
import marshal
import os
import pickle
import stat
import sys
import tempfile
import typing as t
from hashlib import sha1
from os import listdir
from os import path
from io import BytesIO
from types import CodeType

from ._compat import BytesIO
from ._compat import marshal_dump
from ._compat import marshal_load
from ._compat import pickle
from ._compat import text_type
from .utils import open_if_exists
if t.TYPE_CHECKING:
    import typing_extensions as te
    from .environment import Environment

bc_version = 4
    class _MemcachedClient(te.Protocol):
        def get(self, key: str) -> bytes:
            ...

        def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None:
            ...


bc_version = 5
# Magic bytes to identify Jinja bytecode cache files. Contains the
# Python major and minor version to avoid loading incompatible bytecode
# if a project upgrades its Python version.
@@ -34,7 +41,7 @@
)


class Bucket(object):
class Bucket:
    """Buckets are used to store the bytecode for one template. It's created
    and initialized by the bytecode cache and passed to the loading functions.

@@ -43,17 +50,17 @@ class Bucket(object):
    cache subclasses don't have to care about cache invalidation.
    """

    def __init__(self, environment, key, checksum):
    def __init__(self, environment: "Environment", key: str, checksum: str) -> None:
        self.environment = environment
        self.key = key
        self.checksum = checksum
        self.reset()

    def reset(self):
    def reset(self) -> None:
        """Resets the bucket (unloads the bytecode)."""
        self.code = None
        self.code: t.Optional[CodeType] = None

    def load_bytecode(self, f):
    def load_bytecode(self, f: t.BinaryIO) -> None:
        """Loads bytecode from a file or file like object."""
        # make sure the magic header is correct
        magic = f.read(len(bc_magic))
@@ -67,31 +74,31 @@ def load_bytecode(self, f):
            return
        # if marshal_load fails then we need to reload
        try:
            self.code = marshal_load(f)
            self.code = marshal.load(f)
        except (EOFError, ValueError, TypeError):
            self.reset()
            return

    def write_bytecode(self, f):
    def write_bytecode(self, f: t.BinaryIO) -> None:
        """Dump the bytecode into the file or file like object passed."""
        if self.code is None:
            raise TypeError("can't write empty bucket")
        f.write(bc_magic)
        pickle.dump(self.checksum, f, 2)
        marshal_dump(self.code, f)
        marshal.dump(self.code, f)

    def bytecode_from_string(self, string):
        """Load bytecode from a string."""
    def bytecode_from_string(self, string: bytes) -> None:
        """Load bytecode from bytes."""
        self.load_bytecode(BytesIO(string))

    def bytecode_to_string(self):
        """Return the bytecode as string."""
    def bytecode_to_string(self) -> bytes:
        """Return the bytecode as bytes."""
        out = BytesIO()
        self.write_bytecode(out)
        return out.getvalue()


class BytecodeCache(object):
class BytecodeCache:
    """To implement your own bytecode cache you have to subclass this class
    and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
    these methods are passed a :class:`~jinja2.bccache.Bucket`.
@@ -120,41 +127,48 @@ def dump_bytecode(self, bucket):
    Jinja.
    """

    def load_bytecode(self, bucket):
    def load_bytecode(self, bucket: Bucket) -> None:
        """Subclasses have to override this method to load bytecode into a
        bucket. If they are not able to find code in the cache for the
        bucket, it must not do anything.
        """
        raise NotImplementedError()

    def dump_bytecode(self, bucket):
    def dump_bytecode(self, bucket: Bucket) -> None:
        """Subclasses have to override this method to write the bytecode
        from a bucket back to the cache. If it unable to do so it must not
        fail silently but raise an exception.
        """
        raise NotImplementedError()

    def clear(self):
    def clear(self) -> None:
        """Clears the cache. This method is not used by Jinja but should be
        implemented to allow applications to clear the bytecode cache used
        by a particular environment.
        """

    def get_cache_key(self, name, filename=None):
    def get_cache_key(
        self, name: str, filename: t.Optional[t.Union[str]] = None
    ) -> str:
        """Returns the unique hash key for this template name."""
        hash = sha1(name.encode("utf-8"))

        if filename is not None:
            filename = "|" + filename
            if isinstance(filename, text_type):
                filename = filename.encode("utf-8")
            hash.update(filename)
            hash.update(f"|{filename}".encode())

        return hash.hexdigest()

    def get_source_checksum(self, source):
    def get_source_checksum(self, source: str) -> str:
        """Returns a checksum for the source."""
        return sha1(source.encode("utf-8")).hexdigest()

    def get_bucket(self, environment, name, filename, source):
    def get_bucket(
        self,
        environment: "Environment",
        name: str,
        filename: t.Optional[str],
        source: str,
    ) -> Bucket:
        """Return a cache bucket for the given template. All arguments are
        mandatory but filename may be `None`.
        """
@@ -164,7 +178,7 @@ def get_bucket(self, environment, name, filename, source):
        self.load_bytecode(bucket)
        return bucket

    def set_bucket(self, bucket):
    def set_bucket(self, bucket: Bucket) -> None:
        """Put the bucket into the cache."""
        self.dump_bytecode(bucket)

@@ -187,14 +201,16 @@ class FileSystemBytecodeCache(BytecodeCache):
    This bytecode cache supports clearing of the cache using the clear method.
    """

    def __init__(self, directory=None, pattern="__jinja2_%s.cache"):
    def __init__(
        self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache"
    ) -> None:
        if directory is None:
            directory = self._get_default_cache_dir()
        self.directory = directory
        self.pattern = pattern

    def _get_default_cache_dir(self):
        def _unsafe_dir():
    def _get_default_cache_dir(self) -> str:
        def _unsafe_dir() -> "te.NoReturn":
            raise RuntimeError(
                "Cannot determine safe temp directory. You "
                "need to explicitly provide one."
@@ -209,7 +225,7 @@ def _unsafe_dir():
        if not hasattr(os, "getuid"):
            _unsafe_dir()

        dirname = "_jinja2-cache-%d" % os.getuid()
        dirname = f"_jinja2-cache-{os.getuid()}"
        actual_dir = os.path.join(tmpdir, dirname)

        try:
@@ -240,34 +256,30 @@ def _unsafe_dir():

        return actual_dir

    def _get_cache_filename(self, bucket):
        return path.join(self.directory, self.pattern % bucket.key)
    def _get_cache_filename(self, bucket: Bucket) -> str:
        return os.path.join(self.directory, self.pattern % (bucket.key,))

    def load_bytecode(self, bucket):
        f = open_if_exists(self._get_cache_filename(bucket), "rb")
        if f is not None:
            try:
    def load_bytecode(self, bucket: Bucket) -> None:
        filename = self._get_cache_filename(bucket)

        if os.path.exists(filename):
            with open(filename, "rb") as f:
                bucket.load_bytecode(f)
            finally:
                f.close()

    def dump_bytecode(self, bucket):
        f = open(self._get_cache_filename(bucket), "wb")
        try:
    def dump_bytecode(self, bucket: Bucket) -> None:
        with open(self._get_cache_filename(bucket), "wb") as f:
            bucket.write_bytecode(f)
        finally:
            f.close()

    def clear(self):
    def clear(self) -> None:
        # imported lazily here because google app-engine doesn't support
        # write access on the file system and the function does not exist
        # normally.
        from os import remove

        files = fnmatch.filter(listdir(self.directory), self.pattern % "*")
        files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",))
        for filename in files:
            try:
                remove(path.join(self.directory, filename))
                remove(os.path.join(self.directory, filename))
            except OSError:
                pass

@@ -284,7 +296,7 @@ class MemcachedBytecodeCache(BytecodeCache):
    - `python-memcached <https://pypi.org/project/python-memcached/>`_

    (Unfortunately the django cache interface is not compatible because it
    does not support storing binary data, only unicode. You can however pass
    does not support storing binary data, only text. You can however pass
    the underlying cache client to the bytecode cache which is available
    as `django.core.cache.cache._client`.)

@@ -319,32 +331,34 @@ class MemcachedBytecodeCache(BytecodeCache):

    def __init__(
        self,
        client,
        prefix="jinja2/bytecode/",
        timeout=None,
        ignore_memcache_errors=True,
        client: "_MemcachedClient",
        prefix: str = "jinja2/bytecode/",
        timeout: t.Optional[int] = None,
        ignore_memcache_errors: bool = True,
    ):
        self.client = client
        self.prefix = prefix
        self.timeout = timeout
        self.ignore_memcache_errors = ignore_memcache_errors

    def load_bytecode(self, bucket):
    def load_bytecode(self, bucket: Bucket) -> None:
        try:
            code = self.client.get(self.prefix + bucket.key)
        except Exception:
            if not self.ignore_memcache_errors:
                raise
            code = None
        if code is not None:
        else:
            bucket.bytecode_from_string(code)

    def dump_bytecode(self, bucket):
        args = (self.prefix + bucket.key, bucket.bytecode_to_string())
        if self.timeout is not None:
            args += (self.timeout,)
    def dump_bytecode(self, bucket: Bucket) -> None:
        key = self.prefix + bucket.key
        value = bucket.bytecode_to_string()

        try:
            self.client.set(*args)
            if self.timeout is not None:
                self.client.set(key, value, self.timeout)
            else:
                self.client.set(key, value)
        except Exception:
            if not self.ignore_memcache_errors:
                raise
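As the BytecodeCache docstring above says, a custom backend only has to override load_bytecode and dump_bytecode. A minimal sketch with an in-memory dict standing in for a real store (DictBytecodeCache is a made-up name for illustration):

from jinja2 import Environment
from jinja2.bccache import Bucket, BytecodeCache

class DictBytecodeCache(BytecodeCache):
    def __init__(self) -> None:
        self._store: dict = {}

    def load_bytecode(self, bucket: Bucket) -> None:
        # Leave the bucket untouched on a miss, per the base class contract.
        code = self._store.get(bucket.key)
        if code is not None:
            bucket.bytecode_from_string(code)

    def dump_bytecode(self, bucket: Bucket) -> None:
        self._store[bucket.key] = bucket.bytecode_to_string()

env = Environment(bytecode_cache=DictBytecodeCache())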
(File diff suppressed because it is too large)
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
#: list of lorem ipsum words used by the lipsum() helper function
LOREM_IPSUM_WORDS = u"""\
LOREM_IPSUM_WORDS = """\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
@@ -1,38 +1,38 @@
import platform
import sys
import typing as t
from types import CodeType
from types import TracebackType

from . import TemplateSyntaxError
from ._compat import PYPY
from .exceptions import TemplateSyntaxError
from .utils import internal_code
from .utils import missing

if t.TYPE_CHECKING:
    from .runtime import Context


def rewrite_traceback_stack(source=None):

def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException:
    """Rewrite the current exception to replace any tracebacks from
    within compiled template code with tracebacks that look like they
    came from the template source.

    This must be called within an ``except`` block.

    :param exc_info: A :meth:`sys.exc_info` tuple. If not provided,
        the current ``exc_info`` is used.
    :param source: For ``TemplateSyntaxError``, the original source if
        known.
    :return: A :meth:`sys.exc_info` tuple that can be re-raised.
    :return: The original exception with the rewritten traceback.
    """
    exc_type, exc_value, tb = sys.exc_info()
    _, exc_value, tb = sys.exc_info()
    exc_value = t.cast(BaseException, exc_value)
    tb = t.cast(TracebackType, tb)

    if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
        exc_value.translated = True
        exc_value.source = source

        try:
            # Remove the old traceback on Python 3, otherwise the frames
            # from the compiler still show up.
            exc_value.with_traceback(None)
        except AttributeError:
            pass

        # Remove the old traceback, otherwise the frames from the
        # compiler still show up.
        exc_value.with_traceback(None)
        # Outside of runtime, so the frame isn't executing template
        # code, but it still needs to point at the template.
        tb = fake_traceback(
@@ -70,10 +70,12 @@ def rewrite_traceback_stack(source=None):
    for tb in reversed(stack):
        tb_next = tb_set_next(tb, tb_next)

    return exc_type, exc_value, tb_next
    return exc_value.with_traceback(tb_next)


def fake_traceback(exc_value, tb, filename, lineno):
def fake_traceback(  # type: ignore
    exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int
) -> TracebackType:
    """Produce a new traceback object that looks like it came from the
    template source instead of the compiled code. The filename, line
    number, and location name will point to the template, and the local
@@ -100,79 +102,60 @@ def fake_traceback(exc_value, tb, filename, lineno):
        "__jinja_exception__": exc_value,
    }
    # Raise an exception at the correct line number.
    code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")
    code: CodeType = compile(
        "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec"
    )

    # Build a new code object that points to the template file and
    # replaces the location with a block name.
    try:
        location = "template"
    location = "template"

        if tb is not None:
            function = tb.tb_frame.f_code.co_name
    if tb is not None:
        function = tb.tb_frame.f_code.co_name

            if function == "root":
                location = "top-level template code"
            elif function.startswith("block_"):
                location = 'block "%s"' % function[6:]
        if function == "root":
            location = "top-level template code"
        elif function.startswith("block_"):
            location = f"block {function[6:]!r}"

        # Collect arguments for the new code object. CodeType only
        # accepts positional arguments, and arguments were inserted in
        # new Python versions.
        code_args = []

        for attr in (
            "argcount",
            "posonlyargcount",  # Python 3.8
            "kwonlyargcount",  # Python 3
            "nlocals",
            "stacksize",
            "flags",
            "code",  # codestring
            "consts",  # constants
            "names",
            "varnames",
            ("filename", filename),
            ("name", location),
            "firstlineno",
            "lnotab",
            "freevars",
            "cellvars",
        ):
            if isinstance(attr, tuple):
                # Replace with given value.
                code_args.append(attr[1])
                continue

            try:
                # Copy original value if it exists.
                code_args.append(getattr(code, "co_" + attr))
            except AttributeError:
                # Some arguments were added later.
                continue

        code = CodeType(*code_args)
    except Exception:
        # Some environments such as Google App Engine don't support
        # modifying code objects.
        pass
    if sys.version_info >= (3, 8):
        code = code.replace(co_name=location)
    else:
        code = CodeType(
            code.co_argcount,
            code.co_kwonlyargcount,
            code.co_nlocals,
            code.co_stacksize,
            code.co_flags,
            code.co_code,
            code.co_consts,
            code.co_names,
            code.co_varnames,
            code.co_filename,
            location,
            code.co_firstlineno,
            code.co_lnotab,
            code.co_freevars,
            code.co_cellvars,
        )

    # Execute the new code, which is guaranteed to raise, and return
    # the new traceback without this frame.
    try:
        exec(code, globals, locals)
    except BaseException:
        return sys.exc_info()[2].tb_next
        return sys.exc_info()[2].tb_next  # type: ignore


def get_template_locals(real_locals):
def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]:
    """Based on the runtime locals, get the context that would be
    available at that point in the template.
    """
    # Start with the current template context.
    ctx = real_locals.get("context")
    ctx: "t.Optional[Context]" = real_locals.get("context")

    if ctx:
        data = ctx.get_all().copy()
    if ctx is not None:
        data: t.Dict[str, t.Any] = ctx.get_all().copy()
    else:
        data = {}

@@ -180,7 +163,7 @@ def get_template_locals(real_locals):
    # rather than pushing a context. Local variables follow the scheme
    # l_depth_name. Find the highest-depth local that has a value for
    # each name.
    local_overrides = {}
    local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {}

    for name, value in real_locals.items():
        if not name.startswith("l_") or value is missing:
@@ -188,8 +171,8 @@ def get_template_locals(real_locals):
            continue

        try:
            _, depth, name = name.split("_", 2)
            depth = int(depth)
            _, depth_str, name = name.split("_", 2)
            depth = int(depth_str)
        except ValueError:
            continue

@@ -210,31 +193,37 @@ def get_template_locals(real_locals):

if sys.version_info >= (3, 7):
    # tb_next is directly assignable as of Python 3.7
    def tb_set_next(tb, tb_next):
    def tb_set_next(
        tb: TracebackType, tb_next: t.Optional[TracebackType]
    ) -> TracebackType:
        tb.tb_next = tb_next
        return tb


elif PYPY:
elif platform.python_implementation() == "PyPy":
    # PyPy might have special support, and won't work with ctypes.
    try:
        import tputil
        import tputil  # type: ignore
    except ImportError:
        # Without tproxy support, use the original traceback.
        def tb_set_next(tb, tb_next):
        def tb_set_next(
            tb: TracebackType, tb_next: t.Optional[TracebackType]
        ) -> TracebackType:
            return tb

    else:
        # With tproxy support, create a proxy around the traceback that
        # returns the new tb_next.
        def tb_set_next(tb, tb_next):
            def controller(op):
        def tb_set_next(
            tb: TracebackType, tb_next: t.Optional[TracebackType]
        ) -> TracebackType:
            def controller(op):  # type: ignore
                if op.opname == "__getattribute__" and op.args[0] == "tb_next":
                    return tb_next

                return op.delegate()

            return tputil.make_proxy(controller, obj=tb)
            return tputil.make_proxy(controller, obj=tb)  # type: ignore


else:
@@ -250,7 +239,9 @@ class _CTraceback(ctypes.Structure):
        ("tb_next", ctypes.py_object),
    ]

    def tb_set_next(tb, tb_next):
    def tb_set_next(
        tb: TracebackType, tb_next: t.Optional[TracebackType]
    ) -> TracebackType:
        c_tb = _CTraceback.from_address(id(tb))

        # Clear out the old tb_next.
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
from ._compat import range_type
import typing as t

from .filters import FILTERS as DEFAULT_FILTERS  # noqa: F401
from .tests import TESTS as DEFAULT_TESTS  # noqa: F401
from .utils import Cycler
@@ -7,6 +7,9 @@
from .utils import Joiner
from .utils import Namespace

if t.TYPE_CHECKING:
    import typing_extensions as te

# defaults for the parser / lexer
BLOCK_START_STRING = "{%"
BLOCK_END_STRING = "%}"
@@ -14,17 +17,17 @@
VARIABLE_END_STRING = "}}"
COMMENT_START_STRING = "{#"
COMMENT_END_STRING = "#}"
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
LINE_STATEMENT_PREFIX: t.Optional[str] = None
LINE_COMMENT_PREFIX: t.Optional[str] = None
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = "\n"
NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n"
KEEP_TRAILING_NEWLINE = False

# default filters, tests and namespace

DEFAULT_NAMESPACE = {
    "range": range_type,
    "range": range,
    "dict": dict,
    "lipsum": generate_lorem_ipsum,
    "cycler": Cycler,
@@ -33,10 +36,11 @@
}

# default policies
DEFAULT_POLICIES = {
DEFAULT_POLICIES: t.Dict[str, t.Any] = {
    "compiler.ascii_str": True,
    "urlize.rel": "noopener",
    "urlize.target": None,
    "urlize.extra_schemes": None,
    "truncate.leeway": 5,
    "json.dumps_function": None,
    "json.dumps_kwargs": {"sort_keys": True},
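These module-level defaults are copied onto each Environment; the policies, for instance, are consulted at runtime by filters such as tojson. A short sketch of adjusting one (illustrative usage, not part of the diff):

from jinja2 import Environment

env = Environment()
# Override a default policy; json.dumps_kwargs feeds the tojson filter.
env.policies["json.dumps_kwargs"] = {"sort_keys": True}
out = env.from_string("{{ data | tojson }}").render(data={"b": 2, "a": 1})
print(out)  # {"a": 1, "b": 2}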
(File diff suppressed because it is too large)
@@ -1,44 +1,20 @@
# -*- coding: utf-8 -*-
from ._compat import imap
from ._compat import implements_to_string
from ._compat import PY2
from ._compat import text_type
import typing as t

if t.TYPE_CHECKING:
    from .runtime import Undefined


class TemplateError(Exception):
    """Baseclass for all template errors."""

    if PY2:
    def __init__(self, message: t.Optional[str] = None) -> None:
        super().__init__(message)

        def __init__(self, message=None):
            if message is not None:
                message = text_type(message).encode("utf-8")
            Exception.__init__(self, message)

        @property
        def message(self):
            if self.args:
                message = self.args[0]
                if message is not None:
                    return message.decode("utf-8", "replace")

        def __unicode__(self):
            return self.message or u""

    else:

        def __init__(self, message=None):
            Exception.__init__(self, message)

        @property
        def message(self):
            if self.args:
                message = self.args[0]
                if message is not None:
                    return message
    @property
    def message(self) -> t.Optional[str]:
        return self.args[0] if self.args else None


@implements_to_string
class TemplateNotFound(IOError, LookupError, TemplateError):
    """Raised if a template does not exist.

@@ -47,11 +23,15 @@ class TemplateNotFound(IOError, LookupError, TemplateError):
    provided, an :exc:`UndefinedError` is raised.
    """

    # looks weird, but removes the warning descriptor that just
    # bogusly warns us about message being deprecated
    message = None
    # Silence the Python warning about message being deprecated since
    # it's not valid here.
    message: t.Optional[str] = None

    def __init__(self, name, message=None):
    def __init__(
        self,
        name: t.Optional[t.Union[str, "Undefined"]],
        message: t.Optional[str] = None,
    ) -> None:
        IOError.__init__(self, name)

        if message is None:
@@ -66,8 +46,8 @@ def __init__(self, name, message=None):
        self.name = name
        self.templates = [name]

    def __str__(self):
        return self.message
    def __str__(self) -> str:
        return str(self.message)


class TemplatesNotFound(TemplateNotFound):
@@ -82,7 +62,11 @@ class TemplatesNotFound(TemplateNotFound):
    .. versionadded:: 2.2
    """

    def __init__(self, names=(), message=None):
    def __init__(
        self,
        names: t.Sequence[t.Union[str, "Undefined"]] = (),
        message: t.Optional[str] = None,
    ) -> None:
        if message is None:
            from .runtime import Undefined

@@ -94,52 +78,57 @@ def __init__(self, names=(), message=None):
            else:
                parts.append(name)

            message = u"none of the templates given were found: " + u", ".join(
                imap(text_type, parts)
            )
        TemplateNotFound.__init__(self, names and names[-1] or None, message)
            parts_str = ", ".join(map(str, parts))
            message = f"none of the templates given were found: {parts_str}"

        super().__init__(names[-1] if names else None, message)
        self.templates = list(names)


@implements_to_string
class TemplateSyntaxError(TemplateError):
    """Raised to tell the user that there is a problem with the template."""

    def __init__(self, message, lineno, name=None, filename=None):
        TemplateError.__init__(self, message)
    def __init__(
        self,
        message: str,
        lineno: int,
        name: t.Optional[str] = None,
        filename: t.Optional[str] = None,
    ) -> None:
        super().__init__(message)
        self.lineno = lineno
        self.name = name
        self.filename = filename
        self.source = None
        self.source: t.Optional[str] = None

        # this is set to True if the debug.translate_syntax_error
        # function translated the syntax error into a new traceback
        self.translated = False

    def __str__(self):
    def __str__(self) -> str:
        # for translated errors we only return the message
        if self.translated:
            return self.message
            return t.cast(str, self.message)

        # otherwise attach some stuff
        location = "line %d" % self.lineno
        location = f"line {self.lineno}"
        name = self.filename or self.name
        if name:
            location = 'File "%s", %s' % (name, location)
        lines = [self.message, "  " + location]
            location = f'File "{name}", {location}'
        lines = [t.cast(str, self.message), "  " + location]

        # if the source is set, add the line to the output
        if self.source is not None:
            try:
                line = self.source.splitlines()[self.lineno - 1]
            except IndexError:
                line = None
            if line:
                pass
            else:
                lines.append("  " + line.strip())

        return u"\n".join(lines)
        return "\n".join(lines)

    def __reduce__(self):
    def __reduce__(self):  # type: ignore
        # https://bugs.python.org/issue1692335 Exceptions that take
        # multiple required arguments have problems with pickling.
        # Without this, raises TypeError: __init__() missing 1 required
@@ -1,53 +1,58 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Extension API for adding custom tags and behavior."""
|
||||
import pprint
|
||||
import re
|
||||
from sys import version_info
|
||||
import typing as t
|
||||
import warnings
|
||||
|
||||
from markupsafe import Markup
|
||||
|
||||
from . import defaults
|
||||
from . import nodes
|
||||
from ._compat import iteritems
|
||||
from ._compat import string_types
|
||||
from ._compat import with_metaclass
|
||||
from .defaults import BLOCK_END_STRING
|
||||
from .defaults import BLOCK_START_STRING
|
||||
from .defaults import COMMENT_END_STRING
|
||||
from .defaults import COMMENT_START_STRING
|
||||
from .defaults import KEEP_TRAILING_NEWLINE
|
||||
from .defaults import LINE_COMMENT_PREFIX
|
||||
from .defaults import LINE_STATEMENT_PREFIX
|
||||
from .defaults import LSTRIP_BLOCKS
|
||||
from .defaults import NEWLINE_SEQUENCE
|
||||
from .defaults import TRIM_BLOCKS
|
||||
from .defaults import VARIABLE_END_STRING
|
||||
from .defaults import VARIABLE_START_STRING
|
||||
from .environment import Environment
|
||||
from .exceptions import TemplateAssertionError
|
||||
from .exceptions import TemplateSyntaxError
|
||||
from .nodes import ContextReference
|
||||
from .runtime import concat
|
||||
from .utils import contextfunction
|
||||
from .runtime import concat # type: ignore
|
||||
from .runtime import Context
|
||||
from .runtime import Undefined
|
||||
from .utils import import_string
|
||||
from .utils import pass_context
|
||||
|
||||
# the only real useful gettext functions for a Jinja template. Note
|
||||
# that ugettext must be assigned to gettext as Jinja doesn't support
|
||||
# non unicode strings.
|
||||
GETTEXT_FUNCTIONS = ("_", "gettext", "ngettext")
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
from .lexer import Token
|
||||
from .lexer import TokenStream
|
||||
from .parser import Parser
|
||||
|
||||
class _TranslationsBasic(te.Protocol):
|
||||
def gettext(self, message: str) -> str:
|
||||
...
|
||||
|
||||
def ngettext(self, singular: str, plural: str, n: int) -> str:
|
||||
pass
|
||||
|
||||
class _TranslationsContext(_TranslationsBasic):
|
||||
def pgettext(self, context: str, message: str) -> str:
|
||||
...
|
||||
|
||||
def npgettext(self, context: str, singular: str, plural: str, n: int) -> str:
|
||||
...
|
||||
|
||||
_SupportedTranslations = t.Union[_TranslationsBasic, _TranslationsContext]
|
||||
|
||||
|
||||
# I18N functions available in Jinja templates. If the I18N library
|
||||
# provides ugettext, it will be assigned to gettext.
|
||||
GETTEXT_FUNCTIONS: t.Tuple[str, ...] = (
|
||||
"_",
|
||||
"gettext",
|
||||
"ngettext",
|
||||
"pgettext",
|
||||
"npgettext",
|
||||
)
|
||||
_ws_re = re.compile(r"\s*\n\s*")
|
||||
|
||||
|
||||
class ExtensionRegistry(type):
|
||||
"""Gives the extension an unique identifier."""
|
||||
|
||||
def __new__(mcs, name, bases, d):
|
||||
rv = type.__new__(mcs, name, bases, d)
|
||||
rv.identifier = rv.__module__ + "." + rv.__name__
|
||||
return rv
|
||||
|
||||
|
||||
class Extension(with_metaclass(ExtensionRegistry, object)):
|
||||
class Extension:
|
||||
"""Extensions can be used to add extra functionality to the Jinja template
|
||||
system at the parser level. Custom extensions are bound to an environment
|
||||
but may not store environment specific data on `self`. The reason for
|
||||
@@ -66,8 +71,13 @@ class Extension(with_metaclass(ExtensionRegistry, object)):
|
||||
name as includes the name of the extension (fragment cache).
|
||||
"""
|
||||
|
||||
identifier: t.ClassVar[str]
|
||||
|
||||
def __init_subclass__(cls) -> None:
|
||||
cls.identifier = f"{cls.__module__}.{cls.__name__}"
|
||||
|
||||
#: if this extension parses this is the list of tags it's listening to.
|
||||
tags = set()
|
||||
tags: t.Set[str] = set()
|
||||
|
||||
#: the priority of that extension. This is especially useful for
|
||||
#: extensions that preprocess values. A lower value means higher
|
||||
@@ -76,24 +86,28 @@ class Extension(with_metaclass(ExtensionRegistry, object)):
|
||||
#: .. versionadded:: 2.4
|
||||
priority = 100
|
||||
|
||||
def __init__(self, environment):
|
||||
def __init__(self, environment: Environment) -> None:
|
||||
self.environment = environment
|
||||
|
||||
def bind(self, environment):
|
||||
def bind(self, environment: Environment) -> "Extension":
|
||||
"""Create a copy of this extension bound to another environment."""
|
||||
rv = object.__new__(self.__class__)
|
||||
rv = t.cast(Extension, object.__new__(self.__class__))
|
||||
rv.__dict__.update(self.__dict__)
|
||||
rv.environment = environment
|
||||
return rv
|
||||
|
||||
def preprocess(self, source, name, filename=None):
|
||||
def preprocess(
|
||||
self, source: str, name: t.Optional[str], filename: t.Optional[str] = None
|
||||
) -> str:
|
||||
"""This method is called before the actual lexing and can be used to
|
||||
preprocess the source. The `filename` is optional. The return value
|
||||
must be the preprocessed source.
|
||||
"""
|
||||
return source
|
||||
|
||||
def filter_stream(self, stream):
|
||||
def filter_stream(
|
||||
self, stream: "TokenStream"
|
||||
) -> t.Union["TokenStream", t.Iterable["Token"]]:
|
||||
"""It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
|
||||
to filter tokens returned. This method has to return an iterable of
|
||||
:class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
|
||||
@@ -101,7 +115,7 @@ def filter_stream(self, stream):
|
||||
"""
|
||||
return stream
|
||||
|
||||
def parse(self, parser):
|
||||
def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
|
||||
"""If any of the :attr:`tags` matched this method is called with the
|
||||
parser as first argument. The token the parser stream is pointing at
|
||||
is the name token that matched. This method has to return one or a
|
||||
@@ -109,7 +123,9 @@ def parse(self, parser):
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def attr(self, name, lineno=None):
|
||||
def attr(
|
||||
self, name: str, lineno: t.Optional[int] = None
|
||||
) -> nodes.ExtensionAttribute:
|
||||
"""Return an attribute node for the current extension. This is useful
|
||||
to pass constants on extensions to generated template code.
|
||||
|
||||
@@ -120,8 +136,14 @@ def attr(self, name, lineno=None):
|
||||
return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
|
||||
|
||||
def call_method(
|
||||
self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None
|
||||
):
|
||||
self,
|
||||
name: str,
|
||||
args: t.Optional[t.List[nodes.Expr]] = None,
|
||||
kwargs: t.Optional[t.List[nodes.Keyword]] = None,
|
||||
dyn_args: t.Optional[nodes.Expr] = None,
|
||||
dyn_kwargs: t.Optional[nodes.Expr] = None,
|
||||
lineno: t.Optional[int] = None,
|
||||
) -> nodes.Call:
|
||||
"""Call a method of the extension. This is a shortcut for
|
||||
:meth:`attr` + :class:`jinja2.nodes.Call`.
|
||||
"""
|
||||
@@ -139,38 +161,88 @@ def call_method(
|
||||
)
|
||||
|
||||
|
||||
@contextfunction
|
||||
def _gettext_alias(__context, *args, **kwargs):
|
||||
@pass_context
|
||||
def _gettext_alias(
|
||||
__context: Context, *args: t.Any, **kwargs: t.Any
|
||||
) -> t.Union[t.Any, Undefined]:
|
||||
return __context.call(__context.resolve("gettext"), *args, **kwargs)
|
||||
|
||||
|
||||
def _make_new_gettext(func):
|
||||
@contextfunction
|
||||
def gettext(__context, __string, **variables):
|
||||
def _make_new_gettext(func: t.Callable[[str], str]) -> t.Callable[..., str]:
|
||||
@pass_context
|
||||
def gettext(__context: Context, __string: str, **variables: t.Any) -> str:
|
||||
rv = __context.call(func, __string)
|
||||
if __context.eval_ctx.autoescape:
|
||||
rv = Markup(rv)
|
||||
# Always treat as a format string, even if there are no
|
||||
# variables. This makes translation strings more consistent
|
||||
# and predictable. This requires escaping
|
||||
return rv % variables
|
||||
return rv % variables # type: ignore
|
||||
|
||||
return gettext
|
||||
|
||||
|
||||
def _make_new_ngettext(func):
|
||||
@contextfunction
|
||||
def ngettext(__context, __singular, __plural, __num, **variables):
|
||||
def _make_new_ngettext(func: t.Callable[[str, str, int], str]) -> t.Callable[..., str]:
|
||||
@pass_context
|
||||
def ngettext(
|
||||
__context: Context,
|
||||
__singular: str,
|
||||
__plural: str,
|
||||
__num: int,
|
||||
**variables: t.Any,
|
||||
) -> str:
|
||||
variables.setdefault("num", __num)
|
||||
rv = __context.call(func, __singular, __plural, __num)
|
||||
if __context.eval_ctx.autoescape:
|
||||
rv = Markup(rv)
|
||||
# Always treat as a format string, see gettext comment above.
|
||||
return rv % variables
|
||||
return rv % variables # type: ignore
|
||||
|
||||
return ngettext
|
||||
|
||||
|
||||
def _make_new_pgettext(func: t.Callable[[str, str], str]) -> t.Callable[..., str]:
|
||||
@pass_context
|
||||
def pgettext(
|
||||
__context: Context, __string_ctx: str, __string: str, **variables: t.Any
|
||||
) -> str:
|
||||
variables.setdefault("context", __string_ctx)
|
||||
rv = __context.call(func, __string_ctx, __string)
|
||||
|
||||
if __context.eval_ctx.autoescape:
|
||||
rv = Markup(rv)
|
||||
|
||||
# Always treat as a format string, see gettext comment above.
|
||||
return rv % variables # type: ignore
|
||||
|
||||
return pgettext
|
||||
|
||||
|
||||
def _make_new_npgettext(
|
||||
func: t.Callable[[str, str, str, int], str]
|
||||
) -> t.Callable[..., str]:
|
||||
@pass_context
|
||||
def npgettext(
|
||||
__context: Context,
|
||||
__string_ctx: str,
|
||||
__singular: str,
|
||||
__plural: str,
|
||||
__num: int,
|
||||
**variables: t.Any,
|
||||
) -> str:
|
||||
variables.setdefault("context", __string_ctx)
|
||||
variables.setdefault("num", __num)
|
||||
rv = __context.call(func, __string_ctx, __singular, __plural, __num)
|
||||
|
||||
if __context.eval_ctx.autoescape:
|
||||
rv = Markup(rv)
|
||||
|
||||
# Always treat as a format string, see gettext comment above.
|
||||
return rv % variables # type: ignore
|
||||
|
||||
return npgettext
|
||||
|
||||
|
||||
class InternationalizationExtension(Extension):
|
||||
"""This extension adds gettext support to Jinja."""
|
||||
|
||||
@@ -183,8 +255,8 @@ class InternationalizationExtension(Extension):
|
||||
# something is called twice here. One time for the gettext value and
|
||||
# the other time for the n-parameter of the ngettext function.
|
||||
|
||||
def __init__(self, environment):
|
||||
Extension.__init__(self, environment)
|
||||
def __init__(self, environment: Environment) -> None:
|
||||
super().__init__(environment)
|
||||
environment.globals["_"] = _gettext_alias
|
||||
environment.extend(
|
||||
install_gettext_translations=self._install,
|
||||
@@ -195,38 +267,92 @@ def __init__(self, environment):
|
||||
newstyle_gettext=False,
|
||||
)
|
||||
|
||||
def _install(self, translations, newstyle=None):
|
||||
def _install(
|
||||
self, translations: "_SupportedTranslations", newstyle: t.Optional[bool] = None
|
||||
) -> None:
|
||||
# ugettext and ungettext are preferred in case the I18N library
|
||||
# is providing compatibility with older Python versions.
|
||||
gettext = getattr(translations, "ugettext", None)
|
||||
if gettext is None:
|
||||
gettext = translations.gettext
|
||||
ngettext = getattr(translations, "ungettext", None)
|
||||
if ngettext is None:
|
||||
ngettext = translations.ngettext
|
||||
self._install_callables(gettext, ngettext, newstyle)
|
||||
|
||||
def _install_null(self, newstyle=None):
|
||||
pgettext = getattr(translations, "pgettext", None)
|
||||
npgettext = getattr(translations, "npgettext", None)
|
||||
self._install_callables(
|
||||
lambda x: x, lambda s, p, n: (n != 1 and (p,) or (s,))[0], newstyle
|
||||
gettext, ngettext, newstyle=newstyle, pgettext=pgettext, npgettext=npgettext
|
||||
)
|
||||
|
||||
def _install_callables(self, gettext, ngettext, newstyle=None):
|
||||
def _install_null(self, newstyle: t.Optional[bool] = None) -> None:
|
||||
import gettext
|
||||
|
||||
translations = gettext.NullTranslations()
|
||||
|
||||
if hasattr(translations, "pgettext"):
|
||||
# Python < 3.8
|
||||
pgettext = translations.pgettext # type: ignore
|
||||
else:
|
||||
|
||||
def pgettext(c: str, s: str) -> str:
|
||||
return s
|
||||
|
||||
if hasattr(translations, "npgettext"):
|
||||
npgettext = translations.npgettext # type: ignore
|
||||
else:
|
||||
|
||||
def npgettext(c: str, s: str, p: str, n: int) -> str:
|
||||
return s if n == 1 else p
|
||||
|
||||
self._install_callables(
|
||||
gettext=translations.gettext,
|
||||
ngettext=translations.ngettext,
|
||||
newstyle=newstyle,
|
||||
pgettext=pgettext,
|
||||
npgettext=npgettext,
|
||||
)
|
||||
|
||||
def _install_callables(
|
||||
self,
|
||||
gettext: t.Callable[[str], str],
|
||||
ngettext: t.Callable[[str, str, int], str],
|
||||
newstyle: t.Optional[bool] = None,
|
||||
pgettext: t.Optional[t.Callable[[str, str], str]] = None,
|
||||
npgettext: t.Optional[t.Callable[[str, str, str, int], str]] = None,
|
||||
) -> None:
|
||||
if newstyle is not None:
|
||||
self.environment.newstyle_gettext = newstyle
|
||||
if self.environment.newstyle_gettext:
|
||||
self.environment.newstyle_gettext = newstyle # type: ignore
|
||||
if self.environment.newstyle_gettext: # type: ignore
|
||||
gettext = _make_new_gettext(gettext)
|
||||
ngettext = _make_new_ngettext(ngettext)
|
||||
self.environment.globals.update(gettext=gettext, ngettext=ngettext)
|
||||
|
||||
def _uninstall(self, translations):
|
||||
for key in "gettext", "ngettext":
|
||||
if pgettext is not None:
|
||||
pgettext = _make_new_pgettext(pgettext)
|
||||
|
||||
if npgettext is not None:
|
||||
npgettext = _make_new_npgettext(npgettext)
|
||||
|
||||
self.environment.globals.update(
|
||||
gettext=gettext, ngettext=ngettext, pgettext=pgettext, npgettext=npgettext
|
||||
)
|
||||
|
||||
def _uninstall(self, translations: "_SupportedTranslations") -> None:
|
||||
for key in ("gettext", "ngettext", "pgettext", "npgettext"):
|
||||
self.environment.globals.pop(key, None)
|
||||
|
||||
def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
|
||||
if isinstance(source, string_types):
|
||||
def _extract(
|
||||
self,
|
||||
source: t.Union[str, nodes.Template],
|
||||
gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
|
||||
) -> t.Iterator[
|
||||
t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
|
||||
]:
|
||||
if isinstance(source, str):
|
||||
source = self.environment.parse(source)
|
||||
return extract_from_ast(source, gettext_functions)
|
||||
|
||||
    def parse(self, parser):
    def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
        """Parse a translatable tag."""
        lineno = next(parser.stream).lineno
        num_called_num = False

@@ -234,9 +360,9 @@ def parse(self, parser):
        # find all the variables referenced. Additionally a variable can be
        # defined in the body of the trans block too, but this is checked at
        # a later state.
        plural_expr = None
        plural_expr_assignment = None
        variables = {}
        plural_expr: t.Optional[nodes.Expr] = None
        plural_expr_assignment: t.Optional[nodes.Assign] = None
        variables: t.Dict[str, nodes.Expr] = {}
        trimmed = None
        while parser.stream.current.type != "block_end":
            if variables:

@@ -246,34 +372,34 @@ def parse(self, parser):
            if parser.stream.skip_if("colon"):
                break

            name = parser.stream.expect("name")
            if name.value in variables:
            token = parser.stream.expect("name")
            if token.value in variables:
                parser.fail(
                    "translatable variable %r defined twice." % name.value,
                    name.lineno,
                    f"translatable variable {token.value!r} defined twice.",
                    token.lineno,
                    exc=TemplateAssertionError,
                )

            # expressions
            if parser.stream.current.type == "assign":
                next(parser.stream)
                variables[name.value] = var = parser.parse_expression()
            elif trimmed is None and name.value in ("trimmed", "notrimmed"):
                trimmed = name.value == "trimmed"
                variables[token.value] = var = parser.parse_expression()
            elif trimmed is None and token.value in ("trimmed", "notrimmed"):
                trimmed = token.value == "trimmed"
                continue
            else:
                variables[name.value] = var = nodes.Name(name.value, "load")
                variables[token.value] = var = nodes.Name(token.value, "load")

            if plural_expr is None:
                if isinstance(var, nodes.Call):
                    plural_expr = nodes.Name("_trans", "load")
                    variables[name.value] = plural_expr
                    variables[token.value] = plural_expr
                    plural_expr_assignment = nodes.Assign(
                        nodes.Name("_trans", "store"), var
                    )
                else:
                    plural_expr = var
                num_called_num = name.value == "num"
                num_called_num = token.value == "num"

        parser.stream.expect("block_end")
@@ -294,15 +420,15 @@ def parse(self, parser):
            have_plural = True
            next(parser.stream)
            if parser.stream.current.type != "block_end":
                name = parser.stream.expect("name")
                if name.value not in variables:
                token = parser.stream.expect("name")
                if token.value not in variables:
                    parser.fail(
                        "unknown variable %r for pluralization" % name.value,
                        name.lineno,
                        f"unknown variable {token.value!r} for pluralization",
                        token.lineno,
                        exc=TemplateAssertionError,
                    )
                plural_expr = variables[name.value]
                num_called_num = name.value == "num"
                plural_expr = variables[token.value]
                num_called_num = token.value == "num"
            parser.stream.expect("block_end")
            plural_names, plural = self._parse_block(parser, False)
            next(parser.stream)

@@ -311,9 +437,9 @@ def parse(self, parser):
            next(parser.stream)

        # register free names as simple name expressions
        for var in referenced:
            if var not in variables:
                variables[var] = nodes.Name(var, "load")
        for name in referenced:
            if name not in variables:
                variables[name] = nodes.Name(name, "load")

        if not have_plural:
            plural_expr = None

@@ -341,14 +467,17 @@ def parse(self, parser):
        else:
            return node
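For orientation, the tag that `parse` implements is exercised like this. A hedged end-to-end sketch using only the documented `{% trans %}` / `{% pluralize %}` syntax:

from jinja2 import Environment

env = Environment(extensions=["jinja2.ext.i18n"])
env.install_null_translations()

tmpl = env.from_string(
    "{% trans count=n %}{{ count }} item"
    "{% pluralize %}{{ count }} items{% endtrans %}"
)
print(tmpl.render(n=1))  # -> 1 item
print(tmpl.render(n=2))  # -> 2 items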
    def _trim_whitespace(self, string, _ws_re=_ws_re):
    def _trim_whitespace(self, string: str, _ws_re: t.Pattern[str] = _ws_re) -> str:
        return _ws_re.sub(" ", string.strip())

    def _parse_block(self, parser, allow_pluralize):
    def _parse_block(
        self, parser: "Parser", allow_pluralize: bool
    ) -> t.Tuple[t.List[str], str]:
        """Parse until the next block tag with a given name."""
        referenced = []
        buf = []
        while 1:

        while True:
            if parser.stream.current.type == "data":
                buf.append(parser.stream.current.value.replace("%", "%%"))
                next(parser.stream)

@@ -356,7 +485,7 @@ def _parse_block(self, parser, allow_pluralize):
                next(parser.stream)
                name = parser.stream.expect("name").value
                referenced.append(name)
                buf.append("%%(%s)s" % name)
                buf.append(f"%({name})s")
                parser.stream.expect("variable_end")
            elif parser.stream.current.type == "block_begin":
                next(parser.stream)

@@ -379,12 +508,21 @@ def _parse_block(self, parser, allow_pluralize):
        return referenced, concat(buf)
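`_trim_whitespace` is what backs the `trimmed` modifier: it strips the block and collapses internal whitespace runs to single spaces. A small sketch of the observable behavior via the documented `ext.i18n.trimmed` policy:

from jinja2 import Environment

env = Environment(extensions=["jinja2.ext.i18n"])
env.policies["ext.i18n.trimmed"] = True  # as if every block said "trimmed"
env.install_null_translations()

tmpl = env.from_string("{% trans %}\n  hello\n  world\n{% endtrans %}")
print(repr(tmpl.render()))  # -> 'hello world'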
    def _make_node(
        self, singular, plural, variables, plural_expr, vars_referenced, num_called_num
    ):
        self,
        singular: str,
        plural: t.Optional[str],
        variables: t.Dict[str, nodes.Expr],
        plural_expr: t.Optional[nodes.Expr],
        vars_referenced: bool,
        num_called_num: bool,
    ) -> nodes.Output:
        """Generates a useful node from the data provided."""
        newstyle = self.environment.newstyle_gettext  # type: ignore
        node: nodes.Expr

        # no variables referenced? no need to escape for old style
        # gettext invocations only if there are vars.
        if not vars_referenced and not self.environment.newstyle_gettext:
        if not vars_referenced and not newstyle:
            singular = singular.replace("%%", "%")
            if plural:
                plural = plural.replace("%%", "%")

@@ -408,8 +546,8 @@ def _make_node(
        # in case newstyle gettext is used, the method is powerful
        # enough to handle the variable expansion and autoescape
        # handling itself
        if self.environment.newstyle_gettext:
            for key, value in iteritems(variables):
        if newstyle:
            for key, value in variables.items():
                # the function adds that later anyways in case num was
                # called num, so just skip it.
                if num_called_num and key == "num":
@@ -439,9 +577,9 @@ class ExprStmtExtension(Extension):
    that it doesn't print the return value.
    """

    tags = set(["do"])
    tags = {"do"}

    def parse(self, parser):
    def parse(self, parser: "Parser") -> nodes.ExprStmt:
        node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
        node.node = parser.parse_tuple()
        return node

@@ -450,9 +588,9 @@ def parse(self, parser):
class LoopControlExtension(Extension):
    """Adds break and continue to the template engine."""

    tags = set(["break", "continue"])
    tags = {"break", "continue"}

    def parse(self, parser):
    def parse(self, parser: "Parser") -> t.Union[nodes.Break, nodes.Continue]:
        token = next(parser.stream)
        if token.value == "break":
            return nodes.Break(lineno=token.lineno)

@@ -460,11 +598,25 @@ def parse(self, parser):


class WithExtension(Extension):
    pass
    def __init__(self, environment: Environment) -> None:
        super().__init__(environment)
        warnings.warn(
            "The 'with' extension is deprecated and will be removed in"
            " Jinja 3.1. This is built in now.",
            DeprecationWarning,
            stacklevel=3,
        )


class AutoEscapeExtension(Extension):
    pass
    def __init__(self, environment: Environment) -> None:
        super().__init__(environment)
        warnings.warn(
            "The 'autoescape' extension is deprecated and will be"
            " removed in Jinja 3.1. This is built in now.",
            DeprecationWarning,
            stacklevel=3,
        )
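`LoopControlExtension` registers the two tags used below. A minimal sketch with the documented `jinja2.ext.loopcontrols` import name:

from jinja2 import Environment

env = Environment(extensions=["jinja2.ext.loopcontrols"])
tmpl = env.from_string(
    "{% for n in range(10) %}"
    "{% if n > 2 %}{% break %}{% endif %}{{ n }}"
    "{% endfor %}"
)
print(tmpl.render())  # -> 012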
class DebugExtension(Extension):

@@ -490,13 +642,13 @@ class DebugExtension(Extension):

    tags = {"debug"}

    def parse(self, parser):
    def parse(self, parser: "Parser") -> nodes.Output:
        lineno = parser.stream.expect("name:debug").lineno
        context = ContextReference()
        context = nodes.ContextReference()
        result = self.call_method("_render", [context], lineno=lineno)
        return nodes.Output([result], lineno=lineno)

    def _render(self, context):
    def _render(self, context: Context) -> str:
        result = {
            "context": context.get_all(),
            "filters": sorted(self.environment.filters.keys()),

@@ -504,13 +656,16 @@ def _render(self, context):
        }

        # Set the depth since the intent is to show the top few names.
        if version_info[:2] >= (3, 4):
            return pprint.pformat(result, depth=3, compact=True)
        else:
            return pprint.pformat(result, depth=3)
        return pprint.pformat(result, depth=3, compact=True)
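`DebugExtension` is used through a single tag. A hedged sketch (the rendered output shape depends on the active context):

from jinja2 import Environment

env = Environment(extensions=["jinja2.ext.debug"])
tmpl = env.from_string("{% debug %}")
# Renders a pprint'ed dict with keys "context", "filters" and "tests".
print(tmpl.render(answer=42))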
def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True):
def extract_from_ast(
    ast: nodes.Template,
    gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
    babel_style: bool = True,
) -> t.Iterator[
    t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
]:
    """Extract localizable strings from the given template node. Per
    default this function returns matches in babel style that means non string
    parameters as well as keyword arguments are returned as `None`. This

@@ -538,23 +693,26 @@ def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True
    * ``lineno`` is the number of the line on which the string was found,
    * ``function`` is the name of the ``gettext`` function used (if the
      string was extracted from embedded Python code), and
    * ``message`` is the string itself (a ``unicode`` object, or a tuple
      of ``unicode`` objects for functions with multiple string arguments).
    * ``message`` is the string, or a tuple of strings for functions
      with multiple string arguments.

    This extraction function operates on the AST and is because of that unable
    to extract any comments. For comment support you have to use the babel
    extraction interface or extract comments yourself.
    """
    for node in node.find_all(nodes.Call):
    out: t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]

    for node in ast.find_all(nodes.Call):
        if (
            not isinstance(node.node, nodes.Name)
            or node.node.name not in gettext_functions
        ):
            continue

        strings = []
        strings: t.List[t.Optional[str]] = []

        for arg in node.args:
            if isinstance(arg, nodes.Const) and isinstance(arg.value, string_types):
            if isinstance(arg, nodes.Const) and isinstance(arg.value, str):
                strings.append(arg.value)
            else:
                strings.append(None)

@@ -567,31 +725,35 @@ def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True
            strings.append(None)

        if not babel_style:
            strings = tuple(x for x in strings if x is not None)
            if not strings:
            out = tuple(x for x in strings if x is not None)

            if not out:
                continue
        else:
            if len(strings) == 1:
                strings = strings[0]
                out = strings[0]
            else:
                strings = tuple(strings)
        yield node.lineno, node.node.name, strings
                out = tuple(strings)

        yield node.lineno, node.node.name, out
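`extract_from_ast` can be tried directly on a parsed template. A minimal sketch mirroring the babel-style output described in the docstring:

from jinja2 import Environment
from jinja2.ext import extract_from_ast

env = Environment(extensions=["jinja2.ext.i18n"])
tree = env.parse('{{ gettext("Hello") }} {{ ngettext("%(n)d bit", "%(n)d bits", n) }}')

for lineno, func, message in extract_from_ast(tree):
    print(lineno, func, message)
# -> 1 gettext Hello
# -> 1 ngettext ('%(n)d bit', '%(n)d bits', None)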
class _CommentFinder(object):
class _CommentFinder:
    """Helper class to find comments in a token stream. Can only
    find comments for gettext calls forwards. Once the comment
    from line 4 is found, a comment for line 1 will not return a
    usable value.
    """

    def __init__(self, tokens, comment_tags):
    def __init__(
        self, tokens: t.Sequence[t.Tuple[int, str, str]], comment_tags: t.Sequence[str]
    ) -> None:
        self.tokens = tokens
        self.comment_tags = comment_tags
        self.offset = 0
        self.last_lineno = 0

    def find_backwards(self, offset):
    def find_backwards(self, offset: int) -> t.List[str]:
        try:
            for _, token_type, token_value in reversed(
                self.tokens[self.offset : offset]

@@ -607,7 +769,7 @@ def find_backwards(self, offset):
        finally:
            self.offset = offset

    def find_comments(self, lineno):
    def find_comments(self, lineno: int) -> t.List[str]:
        if not self.comment_tags or self.last_lineno > lineno:
            return []
        for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):

@@ -616,7 +778,16 @@ def find_comments(self, lineno):
        return self.find_backwards(len(self.tokens))
def babel_extract(fileobj, keywords, comment_tags, options):
def babel_extract(
    fileobj: t.BinaryIO,
    keywords: t.Sequence[str],
    comment_tags: t.Sequence[str],
    options: t.Dict[str, t.Any],
) -> t.Iterator[
    t.Tuple[
        int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]], t.List[str]
    ]
]:
    """Babel extraction method for Jinja templates.

    .. versionchanged:: 2.3

@@ -644,33 +815,37 @@ def babel_extract(fileobj, keywords, comment_tags, options):
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
             (comments will be empty currently)
    """
    extensions = set()
    for extension in options.get("extensions", "").split(","):
        extension = extension.strip()
        if not extension:
            continue
        extensions.add(import_string(extension))
    if InternationalizationExtension not in extensions:
        extensions.add(InternationalizationExtension)
    extensions: t.Dict[t.Type[Extension], None] = {}

    def getbool(options, key, default=False):
        return options.get(key, str(default)).lower() in ("1", "on", "yes", "true")
    for extension_name in options.get("extensions", "").split(","):
        extension_name = extension_name.strip()

        if not extension_name:
            continue

        extensions[import_string(extension_name)] = None

    if InternationalizationExtension not in extensions:
        extensions[InternationalizationExtension] = None

    def getbool(options: t.Mapping[str, str], key: str, default: bool = False) -> bool:
        return options.get(key, str(default)).lower() in {"1", "on", "yes", "true"}

    silent = getbool(options, "silent", True)
    environment = Environment(
        options.get("block_start_string", BLOCK_START_STRING),
        options.get("block_end_string", BLOCK_END_STRING),
        options.get("variable_start_string", VARIABLE_START_STRING),
        options.get("variable_end_string", VARIABLE_END_STRING),
        options.get("comment_start_string", COMMENT_START_STRING),
        options.get("comment_end_string", COMMENT_END_STRING),
        options.get("line_statement_prefix") or LINE_STATEMENT_PREFIX,
        options.get("line_comment_prefix") or LINE_COMMENT_PREFIX,
        getbool(options, "trim_blocks", TRIM_BLOCKS),
        getbool(options, "lstrip_blocks", LSTRIP_BLOCKS),
        NEWLINE_SEQUENCE,
        getbool(options, "keep_trailing_newline", KEEP_TRAILING_NEWLINE),
        frozenset(extensions),
        options.get("block_start_string", defaults.BLOCK_START_STRING),
        options.get("block_end_string", defaults.BLOCK_END_STRING),
        options.get("variable_start_string", defaults.VARIABLE_START_STRING),
        options.get("variable_end_string", defaults.VARIABLE_END_STRING),
        options.get("comment_start_string", defaults.COMMENT_START_STRING),
        options.get("comment_end_string", defaults.COMMENT_END_STRING),
        options.get("line_statement_prefix") or defaults.LINE_STATEMENT_PREFIX,
        options.get("line_comment_prefix") or defaults.LINE_COMMENT_PREFIX,
        getbool(options, "trim_blocks", defaults.TRIM_BLOCKS),
        getbool(options, "lstrip_blocks", defaults.LSTRIP_BLOCKS),
        defaults.NEWLINE_SEQUENCE,
        getbool(options, "keep_trailing_newline", defaults.KEEP_TRAILING_NEWLINE),
        tuple(extensions),
        cache_size=0,
        auto_reload=False,
    )

@@ -678,7 +853,7 @@ def getbool(options, key, default=False):
    if getbool(options, "trimmed"):
        environment.policies["ext.i18n.trimmed"] = True
    if getbool(options, "newstyle_gettext"):
        environment.newstyle_gettext = True
        environment.newstyle_gettext = True  # type: ignore

    source = fileobj.read().decode(options.get("encoding", "utf-8"))
    try:
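Babel normally drives `babel_extract` through a mapping file, but it can also be called directly. A hedged sketch with an options dict matching the keys read above:

import io
from jinja2.ext import babel_extract

src = io.BytesIO(b'{{ gettext("Hello, %(user)s!") }}')
options = {"extensions": "", "newstyle_gettext": "true"}

for lineno, funcname, message, comments in babel_extract(
    src, ("gettext", "ngettext"), [], options
):
    print(lineno, funcname, message, comments)
# -> 1 gettext Hello, %(user)s! []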
File diff suppressed because it is too large
@@ -1,4 +1,6 @@
from ._compat import iteritems
import typing as t

from . import nodes
from .visitor import NodeVisitor

VAR_LOAD_PARAMETER = "param"

@@ -7,7 +9,9 @@
VAR_LOAD_UNDEFINED = "undefined"


def find_symbols(nodes, parent_symbols=None):
def find_symbols(
    nodes: t.Iterable[nodes.Node], parent_symbols: t.Optional["Symbols"] = None
) -> "Symbols":
    sym = Symbols(parent=parent_symbols)
    visitor = FrameSymbolVisitor(sym)
    for node in nodes:

@@ -15,66 +19,79 @@ def find_symbols(nodes, parent_symbols=None):
    return sym


def symbols_for_node(node, parent_symbols=None):
def symbols_for_node(
    node: nodes.Node, parent_symbols: t.Optional["Symbols"] = None
) -> "Symbols":
    sym = Symbols(parent=parent_symbols)
    sym.analyze_node(node)
    return sym
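`symbols_for_node` is the usual entry point into this module. A minimal sketch inspecting what the compiler will see for a small template:

from jinja2 import Environment
from jinja2.idtracking import symbols_for_node

env = Environment()
tree = env.parse("{% set x = y + 1 %}{{ x }}")

sym = symbols_for_node(tree)
print(sym.refs)    # -> {'y': 'l_0_y', 'x': 'l_0_x'}
print(sym.stores)  # -> {'x'}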
class Symbols(object):
    def __init__(self, parent=None, level=None):
class Symbols:
    def __init__(
        self, parent: t.Optional["Symbols"] = None, level: t.Optional[int] = None
    ) -> None:
        if level is None:
            if parent is None:
                level = 0
            else:
                level = parent.level + 1
        self.level = level
        self.parent = parent
        self.refs = {}
        self.loads = {}
        self.stores = set()

    def analyze_node(self, node, **kwargs):
        self.level: int = level
        self.parent = parent
        self.refs: t.Dict[str, str] = {}
        self.loads: t.Dict[str, t.Any] = {}
        self.stores: t.Set[str] = set()

    def analyze_node(self, node: nodes.Node, **kwargs: t.Any) -> None:
        visitor = RootVisitor(self)
        visitor.visit(node, **kwargs)

    def _define_ref(self, name, load=None):
        ident = "l_%d_%s" % (self.level, name)
    def _define_ref(
        self, name: str, load: t.Optional[t.Tuple[str, t.Optional[str]]] = None
    ) -> str:
        ident = f"l_{self.level}_{name}"
        self.refs[name] = ident
        if load is not None:
            self.loads[ident] = load
        return ident

    def find_load(self, target):
    def find_load(self, target: str) -> t.Optional[t.Any]:
        if target in self.loads:
            return self.loads[target]

        if self.parent is not None:
            return self.parent.find_load(target)

    def find_ref(self, name):
        return None

    def find_ref(self, name: str) -> t.Optional[str]:
        if name in self.refs:
            return self.refs[name]

        if self.parent is not None:
            return self.parent.find_ref(name)

    def ref(self, name):
        return None

    def ref(self, name: str) -> str:
        rv = self.find_ref(name)
        if rv is None:
            raise AssertionError(
                "Tried to resolve a name to a reference that "
                "was unknown to the frame (%r)" % name
                "Tried to resolve a name to a reference that was"
                f" unknown to the frame ({name!r})"
            )
        return rv

    def copy(self):
        rv = object.__new__(self.__class__)
    def copy(self) -> "Symbols":
        rv = t.cast(Symbols, object.__new__(self.__class__))
        rv.__dict__.update(self.__dict__)
        rv.refs = self.refs.copy()
        rv.loads = self.loads.copy()
        rv.stores = self.stores.copy()
        return rv

    def store(self, name):
    def store(self, name: str) -> None:
        self.stores.add(name)

        # If we have not seen the name referenced yet, we need to figure

@@ -92,17 +109,16 @@ def store(self, name):
        # Otherwise we can just set it to undefined.
        self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))

    def declare_parameter(self, name):
    def declare_parameter(self, name: str) -> str:
        self.stores.add(name)
        return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))

    def load(self, name):
        target = self.find_ref(name)
        if target is None:
    def load(self, name: str) -> None:
        if self.find_ref(name) is None:
            self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))

    def branch_update(self, branch_symbols):
        stores = {}
    def branch_update(self, branch_symbols: t.Sequence["Symbols"]) -> None:
        stores: t.Dict[str, int] = {}
        for branch in branch_symbols:
            for target in branch.stores:
                if target in self.stores:

@@ -114,10 +130,11 @@ def branch_update(self, branch_symbols):
            self.loads.update(sym.loads)
            self.stores.update(sym.stores)

        for name, branch_count in iteritems(stores):
        for name, branch_count in stores.items():
            if branch_count == len(branch_symbols):
                continue
            target = self.find_ref(name)

            target = self.find_ref(name)  # type: ignore
            assert target is not None, "should not happen"

            if self.parent is not None:

@@ -127,56 +144,64 @@ def branch_update(self, branch_symbols):
                    continue
            self.loads[target] = (VAR_LOAD_RESOLVE, name)

    def dump_stores(self):
        rv = {}
        node = self
    def dump_stores(self) -> t.Dict[str, str]:
        rv: t.Dict[str, str] = {}
        node: t.Optional["Symbols"] = self

        while node is not None:
            for name in node.stores:
            for name in sorted(node.stores):
                if name not in rv:
                    rv[name] = self.find_ref(name)
                    rv[name] = self.find_ref(name)  # type: ignore

            node = node.parent

        return rv

    def dump_param_targets(self):
    def dump_param_targets(self) -> t.Set[str]:
        rv = set()
        node = self
        node: t.Optional["Symbols"] = self

        while node is not None:
            for target, (instr, _) in iteritems(self.loads):
            for target, (instr, _) in self.loads.items():
                if instr == VAR_LOAD_PARAMETER:
                    rv.add(target)

            node = node.parent

        return rv
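The `Symbols` bookkeeping can also be poked at directly. A small sketch showing how loads and stores are recorded (internal API, semantics as implemented above):

from jinja2.idtracking import Symbols

sym = Symbols()
sym.load("user")       # unknown name: resolved from the context at runtime
sym.store("greeting")  # assigned name: starts out undefined at this level
print(sym.refs)               # -> {'user': 'l_0_user', 'greeting': 'l_0_greeting'}
print(sym.loads["l_0_user"])  # -> ('resolve', 'user')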
class RootVisitor(NodeVisitor):
    def __init__(self, symbols):
    def __init__(self, symbols: "Symbols") -> None:
        self.sym_visitor = FrameSymbolVisitor(symbols)

    def _simple_visit(self, node, **kwargs):
    def _simple_visit(self, node: nodes.Node, **kwargs: t.Any) -> None:
        for child in node.iter_child_nodes():
            self.sym_visitor.visit(child)

    visit_Template = (
        visit_Block
    ) = (
        visit_Macro
    ) = (
        visit_FilterBlock
    ) = visit_Scope = visit_If = visit_ScopedEvalContextModifier = _simple_visit
    visit_Template = _simple_visit
    visit_Block = _simple_visit
    visit_Macro = _simple_visit
    visit_FilterBlock = _simple_visit
    visit_Scope = _simple_visit
    visit_If = _simple_visit
    visit_ScopedEvalContextModifier = _simple_visit

    def visit_AssignBlock(self, node, **kwargs):
    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
        for child in node.body:
            self.sym_visitor.visit(child)

    def visit_CallBlock(self, node, **kwargs):
    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
        for child in node.iter_child_nodes(exclude=("call",)):
            self.sym_visitor.visit(child)

    def visit_OverlayScope(self, node, **kwargs):
    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
        for child in node.body:
            self.sym_visitor.visit(child)

    def visit_For(self, node, for_branch="body", **kwargs):
    def visit_For(
        self, node: nodes.For, for_branch: str = "body", **kwargs: t.Any
    ) -> None:
        if for_branch == "body":
            self.sym_visitor.visit(node.target, store_as_param=True)
            branch = node.body

@@ -189,28 +214,30 @@ def visit_For(self, node, for_branch="body", **kwargs):
            return
        else:
            raise RuntimeError("Unknown for branch")
        for item in branch or ():
            self.sym_visitor.visit(item)

    def visit_With(self, node, **kwargs):

        if branch:
            for item in branch:
                self.sym_visitor.visit(item)

    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
        for target in node.targets:
            self.sym_visitor.visit(target)
        for child in node.body:
            self.sym_visitor.visit(child)

    def generic_visit(self, node, *args, **kwargs):
        raise NotImplementedError(
            "Cannot find symbols for %r" % node.__class__.__name__
        )
    def generic_visit(self, node: nodes.Node, *args: t.Any, **kwargs: t.Any) -> None:
        raise NotImplementedError(f"Cannot find symbols for {type(node).__name__!r}")
class FrameSymbolVisitor(NodeVisitor):
    """A visitor for `Frame.inspect`."""

    def __init__(self, symbols):
    def __init__(self, symbols: "Symbols") -> None:
        self.symbols = symbols

    def visit_Name(self, node, store_as_param=False, **kwargs):
    def visit_Name(
        self, node: nodes.Name, store_as_param: bool = False, **kwargs: t.Any
    ) -> None:
        """All assignments to names go through this function."""
        if store_as_param or node.ctx == "param":
            self.symbols.declare_parameter(node.name)

@@ -219,72 +246,73 @@ def visit_Name(self, node, store_as_param=False, **kwargs):
        elif node.ctx == "load":
            self.symbols.load(node.name)

    def visit_NSRef(self, node, **kwargs):
    def visit_NSRef(self, node: nodes.NSRef, **kwargs: t.Any) -> None:
        self.symbols.load(node.name)

    def visit_If(self, node, **kwargs):
    def visit_If(self, node: nodes.If, **kwargs: t.Any) -> None:
        self.visit(node.test, **kwargs)

        original_symbols = self.symbols

        def inner_visit(nodes):
        def inner_visit(nodes: t.Iterable[nodes.Node]) -> "Symbols":
            self.symbols = rv = original_symbols.copy()

            for subnode in nodes:
                self.visit(subnode, **kwargs)

            self.symbols = original_symbols
            return rv

        body_symbols = inner_visit(node.body)
        elif_symbols = inner_visit(node.elif_)
        else_symbols = inner_visit(node.else_ or ())

        self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])

    def visit_Macro(self, node, **kwargs):
    def visit_Macro(self, node: nodes.Macro, **kwargs: t.Any) -> None:
        self.symbols.store(node.name)

    def visit_Import(self, node, **kwargs):
    def visit_Import(self, node: nodes.Import, **kwargs: t.Any) -> None:
        self.generic_visit(node, **kwargs)
        self.symbols.store(node.target)

    def visit_FromImport(self, node, **kwargs):
    def visit_FromImport(self, node: nodes.FromImport, **kwargs: t.Any) -> None:
        self.generic_visit(node, **kwargs)

        for name in node.names:
            if isinstance(name, tuple):
                self.symbols.store(name[1])
            else:
                self.symbols.store(name)

    def visit_Assign(self, node, **kwargs):
    def visit_Assign(self, node: nodes.Assign, **kwargs: t.Any) -> None:
        """Visit assignments in the correct order."""
        self.visit(node.node, **kwargs)
        self.visit(node.target, **kwargs)

    def visit_For(self, node, **kwargs):
    def visit_For(self, node: nodes.For, **kwargs: t.Any) -> None:
        """Visiting stops at for blocks. However the block sequence
        is visited as part of the outer scope.
        """
        self.visit(node.iter, **kwargs)

    def visit_CallBlock(self, node, **kwargs):
    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
        self.visit(node.call, **kwargs)

    def visit_FilterBlock(self, node, **kwargs):
    def visit_FilterBlock(self, node: nodes.FilterBlock, **kwargs: t.Any) -> None:
        self.visit(node.filter, **kwargs)

    def visit_With(self, node, **kwargs):
    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
        for target in node.values:
            self.visit(target)

    def visit_AssignBlock(self, node, **kwargs):
    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
        """Stop visiting at block assigns."""
        self.visit(node.target, **kwargs)

    def visit_Scope(self, node, **kwargs):
    def visit_Scope(self, node: nodes.Scope, **kwargs: t.Any) -> None:
        """Stop visiting at scopes."""

    def visit_Block(self, node, **kwargs):
    def visit_Block(self, node: nodes.Block, **kwargs: t.Any) -> None:
        """Stop visiting at blocks."""

    def visit_OverlayScope(self, node, **kwargs):
    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
        """Do not visit into overlay scopes."""
@@ -1,32 +1,48 @@
# -*- coding: utf-8 -*-
"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
is used to do some preprocessing. It filters out invalid operators like
the bitshift operators we don't allow in templates. It separates
template code and python code in expressions.
"""
import re
import typing as t
from ast import literal_eval
from collections import deque
from operator import itemgetter
from sys import intern

from ._compat import implements_iterator
from ._compat import intern
from ._compat import iteritems
from ._compat import text_type
from ._identifier import pattern as name_re
from .exceptions import TemplateSyntaxError
from .utils import LRUCache

if t.TYPE_CHECKING:
    import typing_extensions as te
    from .environment import Environment

# cache for the lexers. Exists in order to be able to have multiple
# environments with the same lexer
_lexer_cache = LRUCache(50)
_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50)  # type: ignore

# static regular expressions
whitespace_re = re.compile(r"\s+", re.U)
whitespace_re = re.compile(r"\s+")
newline_re = re.compile(r"(\r\n|\r|\n)")
string_re = re.compile(
    r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
)
integer_re = re.compile(r"(\d+_)*\d+")
integer_re = re.compile(
    r"""
    (
        0b(_?[0-1])+ # binary
    |
        0o(_?[0-7])+ # octal
    |
        0x(_?[\da-f])+ # hex
    |
        [1-9](_?\d)* # decimal
    |
        0(_?0)* # decimal zero
    )
    """,
    re.IGNORECASE | re.VERBOSE,
)
float_re = re.compile(
    r"""
    (?<!\.)  # doesn't start with a .

@@ -41,20 +57,6 @@
    re.IGNORECASE | re.VERBOSE,
)
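The widened `integer_re` can be sanity-checked in isolation. A minimal sketch using the module-level pattern (the `wrap` change later in this diff switches to `int(..., 0)` so the prefixes round-trip):

from jinja2.lexer import integer_re

for lit in ["42", "1_000", "0b1011", "0o777", "0x_ff", "012"]:
    print(lit, bool(integer_re.fullmatch(lit)))
# All but "012" match: leading zeros are no longer valid decimal literals.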
try:
    # check if this Python supports Unicode identifiers
    compile("föö", "<unknown>", "eval")
except SyntaxError:
    # Python 2, no Unicode support, use ASCII identifiers
    name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*")
    check_ident = False
else:
    # Unicode support, import generated re pattern and set flag to use
    # str.isidentifier to validate during lexing.
    from ._identifier import pattern as name_re

    check_ident = True

# intern the tokens and keep references to them
TOKEN_ADD = intern("add")
TOKEN_ASSIGN = intern("assign")

@@ -136,10 +138,10 @@
    ";": TOKEN_SEMICOLON,
}

reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
reverse_operators = {v: k for k, v in operators.items()}
assert len(operators) == len(reverse_operators), "operators dropped"
operator_re = re.compile(
    "(%s)" % "|".join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))
    f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})"
)

ignored_tokens = frozenset(

@@ -158,9 +160,10 @@
)
def _describe_token_type(token_type):
def _describe_token_type(token_type: str) -> str:
    if token_type in reverse_operators:
        return reverse_operators[token_type]

    return {
        TOKEN_COMMENT_BEGIN: "begin of comment",
        TOKEN_COMMENT_END: "end of comment",

@@ -177,32 +180,35 @@ def _describe_token_type(token_type):
    }.get(token_type, token_type)


def describe_token(token):
def describe_token(token: "Token") -> str:
    """Returns a description of the token."""
    if token.type == TOKEN_NAME:
        return token.value

    return _describe_token_type(token.type)


def describe_token_expr(expr):
def describe_token_expr(expr: str) -> str:
    """Like `describe_token` but for token expressions."""
    if ":" in expr:
        type, value = expr.split(":", 1)

        if type == TOKEN_NAME:
            return value
    else:
        type = expr

    return _describe_token_type(type)


def count_newlines(value):
def count_newlines(value: str) -> int:
    """Count the number of newline characters in the string. This is
    useful for extensions that filter a stream.
    """
    return len(newline_re.findall(value))


def compile_rules(environment):
def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]:
    """Compiles all the rules from the environment into a list of rules."""
    e = re.escape
    rules = [

@@ -243,36 +249,30 @@ def compile_rules(environment):
    return [x[1:] for x in sorted(rules, reverse=True)]
class Failure(object):
class Failure:
    """Class that raises a `TemplateSyntaxError` if called.
    Used by the `Lexer` to specify known errors.
    """

    def __init__(self, message, cls=TemplateSyntaxError):
    def __init__(
        self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError
    ) -> None:
        self.message = message
        self.error_class = cls

    def __call__(self, lineno, filename):
    def __call__(self, lineno: int, filename: str) -> "te.NoReturn":
        raise self.error_class(self.message, lineno, filename)
class Token(tuple):
    """Token class."""
class Token(t.NamedTuple):
    lineno: int
    type: str
    value: str

    __slots__ = ()
    lineno, type, value = (property(itemgetter(x)) for x in range(3))
    def __str__(self) -> str:
        return describe_token(self)

    def __new__(cls, lineno, type, value):
        return tuple.__new__(cls, (lineno, intern(str(type)), value))

    def __str__(self):
        if self.type in reverse_operators:
            return reverse_operators[self.type]
        elif self.type == "name":
            return self.value
        return self.type

    def test(self, expr):
    def test(self, expr: str) -> bool:
        """Test a token against a token expression. This can either be a
        token type or ``'token_type:token_value'``. This can only test
        against string values and types.

@@ -281,76 +281,75 @@ def test(self, expr):
        # passed an iterable of not interned strings.
        if self.type == expr:
            return True
        elif ":" in expr:

        if ":" in expr:
            return expr.split(":", 1) == [self.type, self.value]

        return False

    def test_any(self, *iterable):
    def test_any(self, *iterable: str) -> bool:
        """Test against multiple token expressions."""
        for expr in iterable:
            if self.test(expr):
                return True
        return False

    def __repr__(self):
        return "Token(%r, %r, %r)" % (self.lineno, self.type, self.value)
        return any(self.test(expr) for expr in iterable)
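Rewriting `Token` as a `t.NamedTuple` keeps the old tuple behavior while gaining typed fields. A small sketch of the API the parser relies on:

from jinja2.lexer import TOKEN_NAME, Token

tok = Token(1, TOKEN_NAME, "user")
print(tok.lineno, tok.type, tok.value)          # -> 1 name user
print(tok.test("name"), tok.test("name:user"))  # -> True True
print(tok.test_any("integer", "name:user"))     # -> True
print(str(tok))                                 # -> user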
@implements_iterator
class TokenStreamIterator(object):
class TokenStreamIterator:
    """The iterator for tokenstreams. Iterate over the stream
    until the eof token is reached.
    """

    def __init__(self, stream):
    def __init__(self, stream: "TokenStream") -> None:
        self.stream = stream

    def __iter__(self):
    def __iter__(self) -> "TokenStreamIterator":
        return self

    def __next__(self):
    def __next__(self) -> Token:
        token = self.stream.current

        if token.type is TOKEN_EOF:
            self.stream.close()
            raise StopIteration()
            raise StopIteration

        next(self.stream)
        return token


@implements_iterator
class TokenStream(object):
class TokenStream:
    """A token stream is an iterable that yields :class:`Token`\\s. The
    parser however does not iterate over it but calls :meth:`next` to go
    one token ahead. The current active token is stored as :attr:`current`.
    """

    def __init__(self, generator, name, filename):
    def __init__(
        self,
        generator: t.Iterable[Token],
        name: t.Optional[str],
        filename: t.Optional[str],
    ):
        self._iter = iter(generator)
        self._pushed = deque()
        self._pushed: "te.Deque[Token]" = deque()
        self.name = name
        self.filename = filename
        self.closed = False
        self.current = Token(1, TOKEN_INITIAL, "")
        next(self)

    def __iter__(self):
    def __iter__(self) -> TokenStreamIterator:
        return TokenStreamIterator(self)

    def __bool__(self):
    def __bool__(self) -> bool:
        return bool(self._pushed) or self.current.type is not TOKEN_EOF

    __nonzero__ = __bool__  # py2
    @property
    def eos(self):
    def eos(self) -> bool:
        """Are we at the end of the stream?"""
        return not self

    def push(self, token):
    def push(self, token: Token) -> None:
        """Push a token back to the stream."""
        self._pushed.append(token)

    def look(self):
    def look(self) -> Token:
        """Look at the next token."""
        old_token = next(self)
        result = self.current

@@ -358,28 +357,31 @@ def look(self):
        self.current = old_token
        return result

    def skip(self, n=1):
    def skip(self, n: int = 1) -> None:
        """Go n tokens ahead."""
        for _ in range(n):
            next(self)

    def next_if(self, expr):
    def next_if(self, expr: str) -> t.Optional[Token]:
        """Perform the token test and return the token if it matched.
        Otherwise the return value is `None`.
        """
        if self.current.test(expr):
            return next(self)

    def skip_if(self, expr):
        return None

    def skip_if(self, expr: str) -> bool:
        """Like :meth:`next_if` but only returns `True` or `False`."""
        return self.next_if(expr) is not None

    def __next__(self):
    def __next__(self) -> Token:
        """Go one token ahead and return the old one.

        Use the built-in :func:`next` instead of calling this directly.
        """
        rv = self.current

        if self._pushed:
            self.current = self._pushed.popleft()
        elif self.current.type is not TOKEN_EOF:

@@ -387,40 +389,41 @@ def __next__(self):
                self.current = next(self._iter)
            except StopIteration:
                self.close()

        return rv

    def close(self):
    def close(self) -> None:
        """Close the stream."""
        self.current = Token(self.current.lineno, TOKEN_EOF, "")
        self._iter = None
        self._iter = iter(())
        self.closed = True

    def expect(self, expr):
    def expect(self, expr: str) -> Token:
        """Expect a given token type and return it. This accepts the same
        argument as :meth:`jinja2.lexer.Token.test`.
        """
        if not self.current.test(expr):
            expr = describe_token_expr(expr)

            if self.current.type is TOKEN_EOF:
                raise TemplateSyntaxError(
                    "unexpected end of template, expected %r." % expr,
                    f"unexpected end of template, expected {expr!r}.",
                    self.current.lineno,
                    self.name,
                    self.filename,
                )

            raise TemplateSyntaxError(
                "expected token %r, got %r" % (expr, describe_token(self.current)),
                f"expected token {expr!r}, got {describe_token(self.current)!r}",
                self.current.lineno,
                self.name,
                self.filename,
            )
        try:
            return self.current
        finally:
            next(self)

        return next(self)
def get_lexer(environment):
def get_lexer(environment: "Environment") -> "Lexer":
    """Return a lexer which is probably cached."""
    key = (
        environment.block_start_string,

@@ -437,9 +440,10 @@ def get_lexer(environment):
        environment.keep_trailing_newline,
    )
    lexer = _lexer_cache.get(key)

    if lexer is None:
        lexer = Lexer(environment)
        _lexer_cache[key] = lexer
        _lexer_cache[key] = lexer = Lexer(environment)

    return lexer
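`get_lexer` plus `Lexer.tokenize` is the full path from source text to a `TokenStream`. A hedged sketch (whitespace tokens are filtered out by `wrap`):

from jinja2 import Environment
from jinja2.lexer import get_lexer

env = Environment()
lexer = get_lexer(env)  # cached per lexer-relevant configuration

for token in lexer.tokenize("Hello {{ name }}!"):
    print(token.lineno, token.type, repr(token.value))
# -> 1 data 'Hello '
# -> 1 variable_begin '{{'
# -> 1 name 'name'
# -> 1 variable_end '}}'
# -> 1 data '!'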
@@ -452,11 +456,17 @@ class OptionalLStrip(tuple):

    # Even though it looks like a no-op, creating instances fails
    # without this.
    def __new__(cls, *members, **kwargs):
        return super(OptionalLStrip, cls).__new__(cls, members)
    def __new__(cls, *members, **kwargs):  # type: ignore
        return super().__new__(cls, members)


class Lexer(object):
class _Rule(t.NamedTuple):
    pattern: t.Pattern[str]
    tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]]
    command: t.Optional[str]


class Lexer:
    """Class that implements a lexer for a given environment. Automatically
    created by the environment class, usually you don't have to do that.

@@ -464,21 +474,21 @@ class Lexer(object):
    Multiple environments can share the same lexer.
    """

    def __init__(self, environment):
    def __init__(self, environment: "Environment") -> None:
        # shortcuts
        e = re.escape

        def c(x):
        def c(x: str) -> t.Pattern[str]:
            return re.compile(x, re.M | re.S)

        # lexing rules for tags
        tag_rules = [
            (whitespace_re, TOKEN_WHITESPACE, None),
            (float_re, TOKEN_FLOAT, None),
            (integer_re, TOKEN_INTEGER, None),
            (name_re, TOKEN_NAME, None),
            (string_re, TOKEN_STRING, None),
            (operator_re, TOKEN_OPERATOR, None),
        tag_rules: t.List[_Rule] = [
            _Rule(whitespace_re, TOKEN_WHITESPACE, None),
            _Rule(float_re, TOKEN_FLOAT, None),
            _Rule(integer_re, TOKEN_INTEGER, None),
            _Rule(name_re, TOKEN_NAME, None),
            _Rule(string_re, TOKEN_STRING, None),
            _Rule(operator_re, TOKEN_OPERATOR, None),
        ]
        # assemble the root lexing rule. because "|" is ungreedy

@@ -489,8 +499,13 @@ def c(x):
        # is required.
        root_tag_rules = compile_rules(environment)

        block_start_re = e(environment.block_start_string)
        block_end_re = e(environment.block_end_string)
        comment_end_re = e(environment.comment_end_string)
        variable_end_re = e(environment.variable_end_string)

        # block suffix if trimming is enabled
        block_suffix_re = environment.trim_blocks and "\\n?" or ""
        block_suffix_re = "\\n?" if environment.trim_blocks else ""

        # If lstrip is enabled, it should not be applied if there is any
        # non-whitespace between the newline and block.

@@ -499,60 +514,44 @@ def c(x):
        self.newline_sequence = environment.newline_sequence
        self.keep_trailing_newline = environment.keep_trailing_newline

        root_raw_re = (
            fr"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*"
            fr"(?:\-{block_end_re}\s*|{block_end_re}))"
        )
        root_parts_re = "|".join(
            [root_raw_re] + [fr"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules]
        )

        # global lexing rules
        self.rules = {
        self.rules: t.Dict[str, t.List[_Rule]] = {
            "root": [
                # directives
                (
                    c(
                        "(.*?)(?:%s)"
                        % "|".join(
                            [
                                r"(?P<raw_begin>%s(\-|\+|)\s*raw\s*(?:\-%s\s*|%s))"
                                % (
                                    e(environment.block_start_string),
                                    e(environment.block_end_string),
                                    e(environment.block_end_string),
                                )
                            ]
                            + [
                                r"(?P<%s>%s(\-|\+|))" % (n, r)
                                for n, r in root_tag_rules
                            ]
                        )
                    ),
                    OptionalLStrip(TOKEN_DATA, "#bygroup"),
                _Rule(
                    c(fr"(.*?)(?:{root_parts_re})"),
                    OptionalLStrip(TOKEN_DATA, "#bygroup"),  # type: ignore
                    "#bygroup",
                ),
                # data
                (c(".+"), TOKEN_DATA, None),
                _Rule(c(".+"), TOKEN_DATA, None),
            ],
            # comments
            TOKEN_COMMENT_BEGIN: [
                (
                _Rule(
                    c(
                        r"(.*?)((?:\-%s\s*|%s)%s)"
                        % (
                            e(environment.comment_end_string),
                            e(environment.comment_end_string),
                            block_suffix_re,
                        )
                        fr"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*"
                        fr"|{comment_end_re}{block_suffix_re}))"
                    ),
                    (TOKEN_COMMENT, TOKEN_COMMENT_END),
                    "#pop",
                ),
                (c("(.)"), (Failure("Missing end of comment tag"),), None),
                _Rule(c(r"(.)"), (Failure("Missing end of comment tag"),), None),
            ],
            # blocks
            TOKEN_BLOCK_BEGIN: [
                (
                _Rule(
                    c(
                        r"(?:\-%s\s*|%s)%s"
                        % (
                            e(environment.block_end_string),
                            e(environment.block_end_string),
                            block_suffix_re,
                        )
                        fr"(?:\+{block_end_re}|\-{block_end_re}\s*"
                        fr"|{block_end_re}{block_suffix_re})"
                    ),
                    TOKEN_BLOCK_END,
                    "#pop",

@@ -561,14 +560,8 @@ def c(x):
            + tag_rules,
            # variables
            TOKEN_VARIABLE_BEGIN: [
                (
                    c(
                        r"\-%s\s*|%s"
                        % (
                            e(environment.variable_end_string),
                            e(environment.variable_end_string),
                        )
                    ),
                _Rule(
                    c(fr"\-{variable_end_re}\s*|{variable_end_re}"),
                    TOKEN_VARIABLE_END,
                    "#pop",
                )

@@ -576,29 +569,25 @@ def c(x):
            + tag_rules,
            # raw block
            TOKEN_RAW_BEGIN: [
                (
                _Rule(
                    c(
                        r"(.*?)((?:%s(\-|\+|))\s*endraw\s*(?:\-%s\s*|%s%s))"
                        % (
                            e(environment.block_start_string),
                            e(environment.block_end_string),
                            e(environment.block_end_string),
                            block_suffix_re,
                        )
                        fr"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*"
                        fr"(?:\+{block_end_re}|\-{block_end_re}\s*"
                        fr"|{block_end_re}{block_suffix_re}))"
                    ),
                    OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),
                    OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),  # type: ignore
                    "#pop",
                ),
                (c("(.)"), (Failure("Missing end of raw directive"),), None),
                _Rule(c(r"(.)"), (Failure("Missing end of raw directive"),), None),
            ],
            # line statements
            TOKEN_LINESTATEMENT_BEGIN: [
                (c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
                _Rule(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
            ]
            + tag_rules,
            # line comments
            TOKEN_LINECOMMENT_BEGIN: [
                (
                _Rule(
                    c(r"(.*?)()(?=\n|$)"),
                    (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
                    "#pop",

@@ -606,23 +595,39 @@ def c(x):
            ],
        }
    def _normalize_newlines(self, value):
        """Called for strings and template data to normalize it to unicode."""
    def _normalize_newlines(self, value: str) -> str:
        """Replace all newlines with the configured sequence in strings
        and template data.
        """
        return newline_re.sub(self.newline_sequence, value)

    def tokenize(self, source, name=None, filename=None, state=None):
    def tokenize(
        self,
        source: str,
        name: t.Optional[str] = None,
        filename: t.Optional[str] = None,
        state: t.Optional[str] = None,
    ) -> TokenStream:
        """Calls tokeniter + tokenize and wraps it in a token stream."""
        stream = self.tokeniter(source, name, filename, state)
        return TokenStream(self.wrap(stream, name, filename), name, filename)

    def wrap(self, stream, name=None, filename=None):
    def wrap(
        self,
        stream: t.Iterable[t.Tuple[int, str, str]],
        name: t.Optional[str] = None,
        filename: t.Optional[str] = None,
    ) -> t.Iterator[Token]:
        """This is called with the stream as returned by `tokenize` and wraps
        every token in a :class:`Token` and converts the value.
        """
        for lineno, token, value in stream:
        for lineno, token, value_str in stream:
            if token in ignored_tokens:
                continue
            elif token == TOKEN_LINESTATEMENT_BEGIN:

            value: t.Any = value_str

            if token == TOKEN_LINESTATEMENT_BEGIN:
                token = TOKEN_BLOCK_BEGIN
            elif token == TOKEN_LINESTATEMENT_END:
                token = TOKEN_BLOCK_END

@@ -630,12 +635,13 @@ def wrap(self, stream, name=None, filename=None):
            elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
                continue
            elif token == TOKEN_DATA:
                value = self._normalize_newlines(value)
                value = self._normalize_newlines(value_str)
            elif token == "keyword":
                token = value
                token = value_str
            elif token == TOKEN_NAME:
                value = str(value)
                if check_ident and not value.isidentifier():
                value = value_str

                if not value.isidentifier():
                    raise TemplateSyntaxError(
                        "Invalid character in identifier", lineno, name, filename
                    )
@@ -643,51 +649,63 @@ def wrap(self, stream, name=None, filename=None):
                # try to unescape string
                try:
                    value = (
                        self._normalize_newlines(value[1:-1])
                        self._normalize_newlines(value_str[1:-1])
                        .encode("ascii", "backslashreplace")
                        .decode("unicode-escape")
                    )
                except Exception as e:
                    msg = str(e).split(":")[-1].strip()
                    raise TemplateSyntaxError(msg, lineno, name, filename)
                    raise TemplateSyntaxError(msg, lineno, name, filename) from e
            elif token == TOKEN_INTEGER:
                value = int(value.replace("_", ""))
                value = int(value_str.replace("_", ""), 0)
            elif token == TOKEN_FLOAT:
                # remove all "_" first to support more Python versions
                value = literal_eval(value.replace("_", ""))
                value = literal_eval(value_str.replace("_", ""))
            elif token == TOKEN_OPERATOR:
                token = operators[value]
                token = operators[value_str]

            yield Token(lineno, token, value)

    def tokeniter(self, source, name, filename=None, state=None):
    def tokeniter(
        self,
        source: str,
        name: t.Optional[str],
        filename: t.Optional[str] = None,
        state: t.Optional[str] = None,
    ) -> t.Iterator[t.Tuple[int, str, str]]:
        """This method tokenizes the text and returns the tokens in a
        generator. Use this method if you just want to tokenize a template.

        .. versionchanged:: 3.0
            Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line
            breaks.
        """
        source = text_type(source)
        lines = source.splitlines()
        if self.keep_trailing_newline and source:
            for newline in ("\r\n", "\r", "\n"):
                if source.endswith(newline):
                    lines.append("")
                    break
        lines = newline_re.split(source)[::2]

        if not self.keep_trailing_newline and lines[-1] == "":
            del lines[-1]

        source = "\n".join(lines)
        pos = 0
        lineno = 1
        stack = ["root"]

        if state is not None and state != "root":
            assert state in ("variable", "block"), "invalid state"
            stack.append(state + "_begin")

        statetokens = self.rules[stack[-1]]
        source_length = len(source)
        balancing_stack = []
        balancing_stack: t.List[str] = []
        lstrip_unless_re = self.lstrip_unless_re
        newlines_stripped = 0
        line_starting = True

        while 1:
        while True:
            # tokenizer loop
            for regex, tokens, new_state in statetokens:
                m = regex.match(source, pos)

                # if no match we try again with the next rule
                if m is None:
                    continue

@@ -711,7 +729,6 @@ def tokeniter(self, source, name, filename=None, state=None):
                    # Rule supports lstrip. Match will look like
                    # text, block type, whitespace control, type, control, ...
                    text = groups[0]

                    # Skipping the text and first type, every other group is the
                    # whitespace control for each type. One of the groups will be
                    # -, +, or empty string instead of None.

@@ -721,7 +738,7 @@ def tokeniter(self, source, name, filename=None, state=None):
                        # Strip all whitespace between the text and the tag.
                        stripped = text.rstrip()
                        newlines_stripped = text[len(stripped) :].count("\n")
                        groups = (stripped,) + groups[1:]
                        groups = [stripped, *groups[1:]]
                    elif (
                        # Not marked for preserving whitespace.
                        strip_sign != "+"

@@ -732,11 +749,12 @@ def tokeniter(self, source, name, filename=None, state=None):
                    ):
                        # The start of text between the last newline and the tag.
                        l_pos = text.rfind("\n") + 1

                        if l_pos > 0 or line_starting:
                            # If there's only whitespace between the newline and the
                            # tag, strip it.
                            if not lstrip_unless_re.search(text, l_pos):
                                groups = (text[:l_pos],) + groups[1:]
                                groups = [text[:l_pos], *groups[1:]]

                for idx, token in enumerate(tokens):
                    # failure group

@@ -746,28 +764,30 @@ def tokeniter(self, source, name, filename=None, state=None):
                    # yield for the current token the first named
                    # group that matched
                    elif token == "#bygroup":
                        for key, value in iteritems(m.groupdict()):
                        for key, value in m.groupdict().items():
                            if value is not None:
                                yield lineno, key, value
                                lineno += value.count("\n")
                                break
                        else:
                            raise RuntimeError(
                                "%r wanted to resolve "
                                "the token dynamically"
                                " but no group matched" % regex
                                f"{regex!r} wanted to resolve the token dynamically"
                                " but no group matched"
                            )
                    # normal group
                    else:
                        data = groups[idx]

                        if data or token not in ignore_if_empty:
                            yield lineno, token, data

                        lineno += data.count("\n") + newlines_stripped
                        newlines_stripped = 0

                # strings as tokens are just yielded as is.
                else:
                    data = m.group()

                    # update brace/parentheses balance
                    if tokens == TOKEN_OPERATOR:
                        if data == "{":

@@ -779,24 +799,26 @@ def tokeniter(self, source, name, filename=None, state=None):
                        elif data in ("}", ")", "]"):
                            if not balancing_stack:
                                raise TemplateSyntaxError(
                                    "unexpected '%s'" % data, lineno, name, filename
                                    f"unexpected '{data}'", lineno, name, filename
                                )

                            expected_op = balancing_stack.pop()

                            if expected_op != data:
                                raise TemplateSyntaxError(
                                    "unexpected '%s', "
                                    "expected '%s'" % (data, expected_op),
                                    f"unexpected '{data}', expected '{expected_op}'",
                                    lineno,
                                    name,
                                    filename,
                                )

                    # yield items
                    if data or tokens not in ignore_if_empty:
                        yield lineno, tokens, data

                    lineno += data.count("\n")

                line_starting = m.group()[-1:] == "\n"

                # fetch new position into new variable so that we can check
                # if there is an internal parsing error which would result
                # in an infinite loop

@@ -809,27 +831,28 @@ def tokeniter(self, source, name, filename=None, state=None):
                        stack.pop()
                    # resolve the new state by group checking
                    elif new_state == "#bygroup":
                        for key, value in iteritems(m.groupdict()):
                        for key, value in m.groupdict().items():
                            if value is not None:
                                stack.append(key)
                                break
                        else:
                            raise RuntimeError(
                                "%r wanted to resolve the "
                                "new state dynamically but"
                                " no group matched" % regex
                                f"{regex!r} wanted to resolve the new state dynamically"
                                f" but no group matched"
                            )
                    # direct state name given
                    else:
                        stack.append(new_state)

                    statetokens = self.rules[stack[-1]]
                # we are still at the same position and no stack change.
                # this means a loop without break condition, avoid that and
                # raise error
                elif pos2 == pos:
                    raise RuntimeError(
                        f"{regex!r} yielded empty string without stack change"
                    )

                # publish new function and start again
                pos = pos2
                break

@@ -839,10 +862,8 @@ def tokeniter(self, source, name, filename=None, state=None):
            # end of text
            if pos >= source_length:
                return

            # something went wrong
            raise TemplateSyntaxError(
                "unexpected char %r at %d" % (source[pos], pos),
                lineno,
                name,
                filename,
                f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename
            )