Compare commits
484 commits: revert-341...features/i
Commits (SHA1):

03084d2ff8, e804671fc0, c0b458e38a, deef4177f7, a1c840b3e8, 57e9e77475,
d260fa59c9, 0da69a45e3, 153b79c2ed, ffae0db115, 37c8ebcafd, 03fc502da5,
b73884d541, 43f337d993, a8188a9f14, c19638bb0d, ccb9969b64, fa93b5219c,
42a0cf9b96, 4b67153613, c65c944853, ea6608edde, 07499f2355, 781c4823e6,
71a609b2f4, 74595de359, 9d70f6e6c7, 7365d138fb, b94030cc5d, 4414161787,
881a0d4462, 8d83af732a, a5a2918202, 8dc3a09104, 8f6f77ac9f, d0bd8f74cf,
9d00e7d15d, cc333b600c, 006969a8cd, d2618cd640, 807b87068a, 6879c35d1c,
f1b8bc97f0, 6fefb92413, 03a2feef88, 416615f30b, b62c4276c9, b9861f7bbb,
856fe5941a, 0f7f600d1f, 9283a94ee4, 726dee9e2d, b5a5af8c05, 289d6df911,
6158e4d8aa, 8c3c8a4e4d, 954e241174, ae3a1618a2, 0d96b0b311, f96f8a99e8,
94cff84df1, 582ee111f6, c55de3f5e0, 0bdd4c10cf, 521cc41db0, c0eb2cf9e6,
9f0bb4301f, 288e728144, 47011f594f, a00f692f20, 68bffd4689, 5acb20f145,
8a1b817978, 86e346e906, a5ef522f98, 800ac7b53d, 691e8c69c4, ea970c8ab8,
c05827e794, 910b13000f, 9474fdf910, 2cdac9392f, 6775c10e82, ea5d5517ce,
8780c718ea, 9687491568, 9c56b97aa9, df10e711f7, 6fdb5dfa0a, ada174afbd,
eace2ebb08, 441c0a4fee, 0a7b0c68cf, 6a44a146af, 033cb86fd6, 5175189412,
86378502f9, 2530c7828b, ab2f842424, b9f48da560, e21c1c5770, 310b6b9466,
a869cfd95d, ddab6c4ac3, 25cff6be14, 5fe1281b14, 43cf60814d, 582f165871,
bf76f1e774, 9cdb862856, 2e8d165120, 6984ee291a, 10d506d61b, 4549312c5e,
80761bdc9d, 4eb853856a, 790929c832, 769378c959, a18e2f98b3, 8f3bdf29dc,
a28bb90b93, 5c9b591439, 778325db42, f65bb62de4, e4881d5465, 31cccdf52b,
2c1523debe, c07881c843, cb807594b8, 0227c0a98a, 20a1cdd95e, 2a779c1234,
3f6109404d, f83d47442d, 08aafe180b, 9cbbe64cf7, 2b006bb6ec, 5509392151,
2001be99b3, dd4409b62c, ca265ea0c2, 7a92579480, 190dfd0269, b549548f69,
79268cedd2, 2004171b7e, 06312ddf18, 3a0db729c7, 9759331f43, ceca97518a,
1929d5e3de, 238e9c3613, d43e7cb5cd, 51a037d52a, c91f8c2f14, 04ad42e5ee,
d02c71e443, ca6e178890, b145085fff, 3a4b96e61c, 36d87a4783, 6d2645f73b,
44f7363fbe, 9d936a2a75, 18438c395d, 28a30bcea6, 536c7709c2, e28738a01e,
5f8c706128, 558695793f, b43a27674b, 3d961b9a1f, d100ac8923, e8fa8c5f01,
be6bb413df, d23c302ca2, ed0c1cea91, ffc42e287d, ba0d182e10, 8d8104de2c,
7975e0afbc, 4a43522763, 30343d65ba, 38c1639c9c, be5033c869, eb67497020,
371268a9aa, 344e8d142a, 161fbfadf4, 3304312b26, 3279ee7068, 8f3f838763,
09864d00c5, 0f7fa27327, a27139c081, 4d4338db16, 6d64ffdd1a, e9ea9e2316,
2a5509ea90, b9d027f0cc, 6d54dc2a44, 6cd9cbf578, 72e81796d1, f116e6762a,
c74bbc6723, 492a603d5e, dab68687bd, 1a32cea114, aaec76652b, f748911ea0,
e60e74694f, 2ef026b8c6, a6c2569b18, 5483b5ff99, 2b78a7099d, 34cdc6f52b,
3aafdb06c9, 4a22c1c699, f021479ef0, 3f374fb62f, 949be42f32, e5abd5abc1,
4473d5d811, c3e61664cf, c3217775c3, 58a7e11db9, ac570bb5c4, b2c806f6fc,
bd613b3124, f1b85bc653, e1fab4dd51, a924079f66, c5aff1d412, 6c9602ee64,
64327bfef0, 05c3cb7cc9, c87b251639, f2332a17d3, c7f24a132e, 96a7af1dd2,
db1caa9e92, 237d26460d, 1020b65297, dceb4c9d65, 50570ea334, 7e836b925d,
cec3da61d2, 7ed53cf083, bdc3ab5b54, 5a985e33ea, 9817593c1c, 1cc78dac38,
e2c5fe4aa3, 1bf87dbb5d, ffe527b141, 642c5b876b, 8b7bd6dc74, 2f97dc7aa6,
958d542f81, b1aae1c2ed, 690f9d69fe, a78c16a609, 7bb2d3cca3, 7216050dd3,
2f26e422d6, 3477d578a3, aa8e1ba606, 08e007e9a6, d6fb65ebc6, 2b5be919dd,
cc2dff48a8, 22922bf74c, 8a02463d7d, c6465bd9bd, 9025caed6e, 7056a4bffd,
d2aa8466eb, 6e4684fbca, fcbf617d38, 1f8b55a021, b5f8ed07fb, 65bd9b9ac5,
6250d84b41, 99056e03bd, 1db849ee5f, 2f82b213df, 2a5f0158bc, 21a1f7dd97,
4b5ed94af4, 06788019a4, cab8f795a7, 2db38bfa38, ea029442e6, 43e38d0d12,
2522c8b754, f64cb29aea, 80e30222e1, 55356e9edb, eec09f791d, 9032179b34,
45b40115fb, e030833129, e055dc0e64, c45729cba1, b02b2f0f00, 3ded50cc8c,
a7280cd5bb, 2837b47ea5, ea2c61c683, 217b34825a, 17d90f4cbc, 7a5bd8cac4,
333da47dc7, 8b68b4ae72, 40a3fdefa8, a61474f2c1, b95a75779b, 0ff6a1bd1c,
f9cfc2f57e, f4fb20e27e, 3ff5d49102, 238d4f72f5, c5bc469eeb, b01e7dca9d,
c62906f781, 94bac8d6dd, cd9c9b47e8, 8560295529, fd248ad0b8, 0578ccc0e6,
fcc2ab8b4b, 76511ac039, e4547982b3, 80722fbaa3, c2fa444344, 088ece1219,
fcdd275564, b6d6a1ab2c, 7efcb5ae73, 06e6389258, b7f0f7879d, f7cfbe2702,
1466f8d602, 9fdb36585f, 1f0a9fdc11, 0baba62900, 4a0e34eda8, 88f2f59d92,
c1d11975f5, cca56291c6, ef155c16f0, 0952d314bd, f29ac34558, 47628521b9,
62da76cb5d, 65c914fff7, dd7b2deb47, 7d72aeb4fe, 43d97afd8b, 39f13853ba,
d65b9c559a, bde5720a81, 2371ec7497, aa3b6e598f, 8035eeb36d, 57383a2294,
9517dab409, 84fa4e6c4c, f33507961d, 46010ef1e1, f9d9d43b63, db8f115013,
09b5476049, 14c4896ec2, b5ef5c2eb5, 675afd884d, 0f5482dc9a, 069e5f874c,
cad01a03cb, f10f8ed013, d991ec90e3, 8353d1539f, bf3d18bf06, 0e69710f41,
ec62150ed7, d37dc37504, 38d37897d4, 606eef43bd, 02a30f8d95, 7e054cb7fc,
d29cb87ecc, f8c0d9728d, f5bff16745, 2d1cb6d64a, c6e35da2c7, f1cd327186,
391ad8cec4, 2c668f4bfd, 52fdae83f0, 0ea81affd1, ddc6e233c7, 7ee4499f2b,
641adae961, aed77efb9a, ab6499ce1e, 412bec45aa, c3dcd94ebc, cb8f642297,
92f19c8491, f3f8b31be5, 63cadf04ea, 541e75350f, 8806e74419, 381f8161b1,
884123b7ce, 35aa875762, 9b0e79fcab, 8ba0faa9ee, d464185bba, 7f4d71252b,
7950311767, 194f9a9ca9, a72021fd63, d910b3725b, 99f209019e, c11a4e0ad3,
4a429ec315, eadccfe332, dfab5b5ceb, 862029215c, 559c3de213, e3bf7358d7,
b58ec9e2b9, 95b5d54129, bcce9c3e9c, 4c05fe569c, e550665df7, d92d34b162,
f27be808a4, 855d3519b6, 37f232e319, ac1c29eac0, 56072172f5, 64d957dece,
3edc85ec21, d8006a9495, a2cfc07412, 1295ea5d40, 4664b3cd1e, dc7e0e3ef6,
9aa615aa98, 85b6bf99a4, 78ec3d5662, a7b5f2ef39, f71701f39d, 54008a2342,
1670c325c6, 534a994b4c, 359efca201, 65809140f3, 3f1622f9e7, 8332a59194,
05abea3a3a, e7fc9ea243, eea3ea7675, 895ac2626d, 94dc86e163, 729b1c9fa6,
82b7fe649f, 76417d6ac6, fe995542ab, 8f5209063d, 241a8f6be6, a8a0a6916a,
8d10dce651, a2938c9348, 8017f4b55b, 588d2e295f, c10b84f08d, 99044bedd7,
3afe6f1adc, fcd9038225, 9d82024f1a, bcefe6a73e
.github/workflows/audit.yaml (vendored) — 4 changes

@@ -19,8 +19,8 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
         with:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages
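One pattern repeats across every workflow file in this comparison: third-party actions are pinned to a full commit SHA rather than a mutable tag, with the tag kept only as a trailing comment, so "bumping" an action means replacing the 40-character SHA. A minimal sketch of the convention, using the new SHAs from this diff (the job name and Python version below are illustrative, not taken from the Spack workflows):

```yaml
# Hypothetical workflow fragment showing SHA-pinned actions.
# The 40-character SHA is the immutable reference; the "# @v2"
# comment only documents which tag the SHA corresponded to.
jobs:
  example:
    runs-on: ubuntu-latest
    steps:
      # Pin by commit SHA, not by tag, so the action cannot change underneath you.
      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
        with:
          python-version: '3.11'
```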
.github/workflows/bootstrap.yml (vendored) — 22 changes

@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
         run: |
           brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
         run: |
           brew install tree
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
      - name: Bootstrap clingo
         run: |
           set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
.github/workflows/build-containers.yml (vendored) — 4 changes

@@ -50,7 +50,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -80,7 +80,7 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
         with:
           name: dockerfiles
           path: dockerfiles
.github/workflows/ci.yaml (vendored) — 2 changes

@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
.github/workflows/setup_git.ps1 (vendored) — 8 changes

@@ -1,15 +1,9 @@
-# (c) 2021 Lawrence Livermore National Laboratory
-
-Set-Location spack
+# (c) 2022 Lawrence Livermore National Laboratory

 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
 git config --global core.longpaths true
-
-# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
-# This is needed to let some fixture in our unit-test suite run
-git config --global protocol.file.allow always

 if ($(git branch --show-current) -ne "develop")
 {
     git branch develop origin/develop
.github/workflows/setup_git.sh (vendored) — 4 changes

@@ -2,10 +2,6 @@
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
-
-# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
-# This is needed to let some fixture in our unit-test suite run
-git config --global protocol.file.allow always

 # create a local pr base branch
 if [[ -n $GITHUB_BASE_REF ]]; then
     git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}"
.github/workflows/unit_tests.yaml (vendored) — 20 changes

@@ -47,10 +47,10 @@ jobs:
         on_develop: false

     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
        with:
          python-version: ${{ matrix.python-version }}
       - name: Install System packages
@@ -94,10 +94,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
        with:
          python-version: '3.11'
       - name: Install System packages
@@ -133,7 +133,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -145,16 +145,16 @@ jobs:
         shell: runuser -u spack-test -- bash {0}
         run: |
           source share/spack/setup-env.sh
-          spack -d solve zlib
+          spack -d bootstrap now --dev
           spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
   # Test for the clingo based solver (using clingo-cffi)
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
        with:
          python-version: '3.11'
       - name: Install System packages
@@ -185,10 +185,10 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
        with:
          python-version: ${{ matrix.python-version }}
       - name: Install Python packages
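Several hunks above reference ``${{ matrix.python-version }}``. For context, a minimal sketch of how such a strategy matrix expands (the version list below is illustrative, not copied from the Spack workflow):

```yaml
# Hypothetical job matrix: GitHub Actions runs one copy of this job per
# listed value, substituting it wherever ${{ matrix.python-version }} appears.
jobs:
  unit-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.8', '3.9', '3.10', '3.11']
    steps:
      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
        with:
          python-version: ${{ matrix.python-version }}
```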
.github/workflows/valid-style.yml (vendored) — 8 changes

@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
.github/workflows/windows_python.yml (vendored) — 175 changes

@@ -10,15 +10,15 @@ concurrency:
 defaults:
   run:
     shell:
-      powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
+      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
 jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
        with:
          python-version: 3.9
       - name: Install Python packages
@@ -26,13 +26,11 @@ jobs:
           python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
       - name: Create local develop
         run: |
-          .\spack\.github\workflows\setup_git.ps1
+          ./.github/workflows/setup_git.ps1
       - name: Unit Test
         run: |
-          echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
-          cd spack
-          dir
           spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
+          ./share/spack/qa/validate_last_exit.ps1
           coverage combine -a
           coverage xml
       - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -41,10 +39,10 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
        with:
          python-version: 3.9
       - name: Install Python packages
@@ -52,12 +50,11 @@ jobs:
           python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
       - name: Create local develop
         run: |
-          .\spack\.github\workflows\setup_git.ps1
+          ./.github/workflows/setup_git.ps1
       - name: Command Unit Test
         run: |
-          echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
-          cd spack
           spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
+          ./share/spack/qa/validate_last_exit.ps1
           coverage combine -a
           coverage xml
       - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -66,10 +63,10 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
        with:
          python-version: 3.9
       - name: Install Python packages
@@ -78,81 +75,81 @@ jobs:
       - name: Build Test
         run: |
           spack compiler find
-          echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
           spack external find cmake
           spack external find ninja
           spack -d install abseil-cpp
-  make-installer:
-    runs-on: windows-latest
-    steps:
-      - name: Disable Windows Symlinks
-        run: |
-          git config --global core.symlinks false
-        shell:
-          powershell
-      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
-        with:
-          python-version: 3.9
-      - name: Install Python packages
-        run: |
-          python -m pip install --upgrade pip six pywin32 setuptools
-      - name: Add Light and Candle to Path
-        run: |
-          $env:WIX >> $GITHUB_PATH
-      - name: Run Installer
-        run: |
-          .\spack\share\spack\qa\setup_spack.ps1
-          spack make-installer -s spack -g SILENT pkg
-          echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-        env:
-          ProgressPreference: SilentlyContinue
-      - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-        with:
-          name: Windows Spack Installer Bundle
-          path: ${{ env.installer_root }}\pkg\Spack.exe
-      - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-        with:
-          name: Windows Spack Installer
-          path: ${{ env.installer_root}}\pkg\Spack.msi
-  execute-installer:
-    needs: make-installer
-    runs-on: windows-latest
-    defaults:
-      run:
-        shell: pwsh
-    steps:
-      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
-        with:
-          python-version: 3.9
-      - name: Install Python packages
-        run: |
-          python -m pip install --upgrade pip six pywin32 setuptools
-      - name: Setup installer directory
-        run: |
-          mkdir -p spack_installer
-          echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-      - uses: actions/download-artifact@v3
-        with:
-          name: Windows Spack Installer Bundle
-          path: ${{ env.spack_installer }}
-      - name: Execute Bundled Installer
-        run: |
-          $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
-          $handle = $proc.Handle # cache proc.Handle
-          $proc.WaitForExit();
-          $LASTEXITCODE
-        env:
-          ProgressPreference: SilentlyContinue
-      - uses: actions/download-artifact@v3
-        with:
-          name: Windows Spack Installer
-          path: ${{ env.spack_installer }}
-      - name: Execute MSI
-        run: |
-          $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
-          $handle = $proc.Handle # cache proc.Handle
-          $proc.WaitForExit();
-          $LASTEXITCODE
+  # TODO: johnwparent - reduce the size of the installer operations
+  # make-installer:
+  #   runs-on: windows-latest
+  #   steps:
+  #     - name: Disable Windows Symlinks
+  #       run: |
+  #         git config --global core.symlinks false
+  #       shell:
+  #         powershell
+  #     - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+  #       with:
+  #         fetch-depth: 0
+  #     - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
+  #       with:
+  #         python-version: 3.9
+  #     - name: Install Python packages
+  #       run: |
+  #         python -m pip install --upgrade pip six pywin32 setuptools
+  #     - name: Add Light and Candle to Path
+  #       run: |
+  #         $env:WIX >> $GITHUB_PATH
+  #     - name: Run Installer
+  #       run: |
+  #         ./share/spack/qa/setup_spack_installer.ps1
+  #         spack make-installer -s . -g SILENT pkg
+  #         echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+  #       env:
+  #         ProgressPreference: SilentlyContinue
+  #     - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+  #       with:
+  #         name: Windows Spack Installer Bundle
+  #         path: ${{ env.installer_root }}\pkg\Spack.exe
+  #     - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+  #       with:
+  #         name: Windows Spack Installer
+  #         path: ${{ env.installer_root}}\pkg\Spack.msi
+  # execute-installer:
+  #   needs: make-installer
+  #   runs-on: windows-latest
+  #   defaults:
+  #     run:
+  #       shell: pwsh
+  #   steps:
+  #     - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
+  #       with:
+  #         python-version: 3.9
+  #     - name: Install Python packages
+  #       run: |
+  #         python -m pip install --upgrade pip six pywin32 setuptools
+  #     - name: Setup installer directory
+  #       run: |
+  #         mkdir -p spack_installer
+  #         echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+  #     - uses: actions/download-artifact@v3
+  #       with:
+  #         name: Windows Spack Installer Bundle
+  #         path: ${{ env.spack_installer }}
+  #     - name: Execute Bundled Installer
+  #       run: |
+  #         $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
+  #         $handle = $proc.Handle # cache proc.Handle
+  #         $proc.WaitForExit();
+  #         $LASTEXITCODE
+  #       env:
+  #         ProgressPreference: SilentlyContinue
+  #     - uses: actions/download-artifact@v3
+  #       with:
+  #         name: Windows Spack Installer
+  #         path: ${{ env.spack_installer }}
+  #     - name: Execute MSI
+  #       run: |
+  #         $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
+  #         $handle = $proc.Handle # cache proc.Handle
+  #         $proc.WaitForExit();
+  #         $LASTEXITCODE
@@ -10,6 +10,7 @@ def getpywin():
     try:
         import win32con  # noqa: F401
     except ImportError:
         print("pyWin32 not installed but is required...\nInstalling via pip:")
+        subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])
         subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "pywin32"])
@@ -52,7 +52,6 @@ if defined py_path (

 if defined py_exe (
   "%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
-  "%py_exe%" "%SPACK_ROOT%\bin\spack" external find python >NUL
 )

 set "EDITOR=notepad"
etc/spack/defaults/windows/packages.yaml (new file) — 21 additions

@@ -0,0 +1,21 @@
# -------------------------------------------------------------------------
# This file controls default concretization preferences for Spack.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
#   $SPACK_ROOT/etc/spack/packages.yaml
#
# Per-user settings (overrides default and site settings):
#   ~/.spack/packages.yaml
# -------------------------------------------------------------------------
packages:
  all:
    compiler:
      - msvc
    providers:
      mpi: [msmpi]
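The comments in this new defaults file describe the override chain: user settings in ``~/.spack/packages.yaml`` take precedence over per-instance settings in ``$SPACK_ROOT/etc/spack/packages.yaml``, which take precedence over these versioned defaults. A minimal sketch of what a user-level override might look like (the ``cmake`` entry, version, and install prefix below are hypothetical, not part of this diff):

```yaml
# Hypothetical ~/.spack/packages.yaml overriding the shipped Windows defaults.
packages:
  all:
    compiler:
      - msvc
  cmake:
    # prefer an externally installed CMake instead of building one
    externals:
    - spec: cmake@3.24.2
      prefix: "C:/Program Files/CMake"
    buildable: false
```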
@@ -1,162 +0,0 @@ (entire file removed)
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _analyze:

=======
Analyze
=======


The analyze command is a front-end to various tools that let us analyze
package installations. Each analyzer is a module for a different kind
of analysis that can be done on a package installation, including (but not
limited to) binary, log, or text analysis. Thus, the analyze command group
allows you to take an existing package install, choose an analyzer,
and extract some output for the package using it.


-----------------
Analyzer Metadata
-----------------

For all analyzers, we write to an ``analyzers`` folder in ``~/.spack``, or the
value that you specify in your spack config at ``config:analyzers_dir``.
For example, here we see the results of running an analysis on zlib:

.. code-block:: console

   $ tree ~/.spack/analyzers/
   └── linux-ubuntu20.04-skylake
       └── gcc-9.3.0
           └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
               ├── environment_variables
               │   └── spack-analyzer-environment-variables.json
               ├── install_files
               │   └── spack-analyzer-install-files.json
               └── libabigail
                   └── spack-analyzer-libabigail-libz.so.1.2.11.xml


This means that you can always find analyzer output in this folder, and it
is organized with the same logic as the package install it was run for.
If you want to customize this top level folder, simply provide the ``--path``
argument to ``spack analyze run``. The nested organization will be maintained
within your custom root.

-----------------
Listing Analyzers
-----------------

If you aren't familiar with Spack's analyzers, you can quickly list those that
are available:

.. code-block:: console

   $ spack analyze list-analyzers
   install_files         : install file listing read from install_manifest.json
   environment_variables : environment variables parsed from spack-build-env.txt
   config_args           : config args loaded from spack-configure-args.txt
   libabigail            : Application Binary Interface (ABI) features for objects


In the above, the first three are fairly simple - parsing metadata files from
a package install directory to save

-------------------
Analyzing a Package
-------------------

The analyze command, akin to install, will accept a package spec to perform
an analysis for. The package must be installed. Let's walk through an example
with zlib. We first ask to analyze it. However, since we have more than one
install, we are asked to disambiguate:

.. code-block:: console

   $ spack analyze run zlib
   ==> Error: zlib matches multiple packages.
     Matching packages:
       fz2bs56 zlib@1.2.11%gcc@7.5.0 arch=linux-ubuntu18.04-skylake
       sl7m27m zlib@1.2.11%gcc@9.3.0 arch=linux-ubuntu20.04-skylake
     Use a more specific spec.


We can then specify the spec version that we want to analyze:

.. code-block:: console

   $ spack analyze run zlib/fz2bs56

If you don't provide any specific analyzer names, by default all analyzers
(shown in the ``list-analyzers`` subcommand list) will be run. If an analyzer does not
have any result, it will be skipped. For example, here is a result running for
zlib:

.. code-block:: console

   $ ls ~/.spack/analyzers/linux-ubuntu20.04-skylake/gcc-9.3.0/zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2/
   spack-analyzer-environment-variables.json
   spack-analyzer-install-files.json
   spack-analyzer-libabigail-libz.so.1.2.11.xml

If you want to run a specific analyzer, ask for it with `--analyzer`. Here we run
spack analyze on libabigail (already installed) _using_ libabigail1

.. code-block:: console

   $ spack analyze run --analyzer abigail libabigail


.. _analyze_monitoring:

----------------------
Monitoring An Analysis
----------------------

For any kind of analysis, you can
use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
as a server to upload the same run metadata to. You can
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

You should first export our spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky


By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

   $ spack analyze run --monitor wget

If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack analyze run --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io wget

If your server doesn't have authentication, you can skip it:

.. code-block:: console

   $ spack analyze run --monitor --monitor-disable-auth wget

Regardless of your choice, when you run analyze on an installed package (whether
it was installed with ``--monitor`` or not, you'll see the results generating as they did
before, and a message that the monitor server was pinged:

.. code-block:: console

   $ spack analyze --monitor wget
   ...
   ==> Sending result for wget bin/wget to monitor.
@@ -36,7 +36,7 @@
 if not os.path.exists(link_name):
     os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
 sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
-sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
+sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/_vendoring"))
 sys.path.append(os.path.abspath("_spack_root/lib/spack/"))

 # Add the Spack bin directory to the path so that we can use its output in docs.
@@ -74,8 +74,16 @@
     "--force",        # Overwrite existing files
     "--no-toc",       # Don't create a table of contents file
     "--output-dir=.", # Directory to place all output
+    "--module-first", # emit module docs before submodule docs
 ]
-sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/spack"])
+sphinx_apidoc(
+    apidoc_args
+    + [
+        "_spack_root/lib/spack/spack",
+        "_spack_root/lib/spack/spack/test/*.py",
+        "_spack_root/lib/spack/spack/test/cmd/*.py",
+    ]
+)
 sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])

 # Enable todo items

@@ -200,12 +208,14 @@ def setup(sphinx):
     ("py:class", "_frozen_importlib_external.SourceFileLoader"),
     ("py:class", "clingo.Control"),
     ("py:class", "six.moves.urllib.parse.ParseResult"),
     ("py:class", "TextIO"),
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
     ("py:class", "spack.build_systems._checks.BaseBuilder"),
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.VersionBase"),
     ("py:class", "spack.spec.DependencySpec"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.
@@ -394,7 +394,7 @@ are indicated at the start of the path with ``~`` or ``~user``.
 Spack-specific variables
 ^^^^^^^^^^^^^^^^^^^^^^^^

-Spack understands several special variables. These are:
+Spack understands over a dozen special variables. These are:

 * ``$env``: name of the currently active :ref:`environment <environments>`
 * ``$spack``: path to the prefix of this Spack installation
@@ -175,14 +175,11 @@ Spec-related modules
 ^^^^^^^^^^^^^^^^^^^^

 :mod:`spack.spec`
-  Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
-  Also implements most of the logic for normalization and concretization
+  Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
   of specs.

-:mod:`spack.parse`
-  Contains some base classes for implementing simple recursive descent
-  parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
-  Used by :class:`~spack.spec.SpecParser`.
+:mod:`spack.parser`
+  Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.

 :mod:`spack.concretize`
   Contains :class:`~spack.concretize.Concretizer` implementation,

@@ -235,7 +232,7 @@ Spack Subcommands
 Unit tests
 ^^^^^^^^^^

-:mod:`spack.test`
+``spack.test``
   Implements Spack's test suite. Add a module and put its name in
   the test suite in ``__init__.py`` to add more unit tests.
@@ -67,7 +67,6 @@ or refer to the full manual below.
    build_settings
    environments
    containers
-   monitoring
    mirrors
    module_file_support
    repositories

@@ -78,12 +77,6 @@ or refer to the full manual below.
    extensions
    pipelines

-.. toctree::
-   :maxdepth: 2
-   :caption: Research
-
-   analyze
-
 .. toctree::
    :maxdepth: 2
    :caption: Contributing
@@ -1,265 +0,0 @@ (entire file removed)
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _monitoring:

==========
Monitoring
==========

You can use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
server to store a database of your packages, builds, and associated metadata
for provenance, research, or some other kind of development. You should
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

-------------------
Analysis Monitoring
-------------------

To read about how to monitor an analysis (meaning you want to send analysis results
to a server) see :ref:`analyze_monitoring`.

---------------------
Monitoring An Install
---------------------

Since an install is typically when you build packages, we logically want
to tell spack to monitor during this step. Let's start with an example
where we want to monitor the install of hdf5. Unless you have disabled authentication
for the server, we first want to export our spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky


By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

   $ spack install --monitor hdf5


If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack install --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io hdf5


As a precaution, we cut out early in the spack client if you have not provided
authentication credentials. For example, if you run the command above without
exporting your username or token, you'll see:

.. code-block:: console

   ==> Error: You are required to export SPACKMON_TOKEN and SPACKMON_USER

This extra check is to ensure that we don't start any builds,
and then discover that you forgot to export your token. However, if
your monitoring server has authentication disabled, you can tell this to
the client to skip this step:

.. code-block:: console

   $ spack install --monitor --monitor-disable-auth hdf5

If the service is not running, you'll cleanly exit early - the install will
not continue if you've asked it to monitor and there is no service.
For example, here is what you'll see if the monitoring service is not running:

.. code-block:: console

   [Errno 111] Connection refused


If you want to continue builds (and stop monitoring) you can set the ``--monitor-keep-going``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-keep-going hdf5

This could mean that if a request fails, you only have partial or no data
added to your monitoring database. This setting will not be applied to the
first request to check if the server is running, but to subsequent requests.
If you don't have a monitor server running and you want to build, simply
don't provide the ``--monitor`` flag! Finally, if you want to provide one or
more tags to your build, you can do:

.. code-block:: console

   # Add one tag, "pizza"
   $ spack install --monitor --monitor-tags pizza hdf5

   # Add two tags, "pizza" and "pasta"
   $ spack install --monitor --monitor-tags pizza,pasta hdf5


----------------------------
Monitoring with Containerize
----------------------------

The same argument group is available to add to a containerize command.

^^^^^^
Docker
^^^^^^

To add monitoring to a Docker container recipe generation using the defaults,
and assuming a monitor server running on localhost, you would
start with a spack.yaml in your present working directory:

.. code-block:: yaml

   spack:
     specs:
       - samtools

And then do:

.. code-block:: console

   # preview first
   spack containerize --monitor

   # and then write to a Dockerfile
   spack containerize --monitor > Dockerfile


The install command will be edited to include commands for enabling monitoring.
However, getting secrets into the container for your monitor server is something
that should be done carefully. Specifically you should:

- Never try to define secrets as ENV, ARG, or using ``--build-arg``
- Do not try to get the secret into the container via a "temporary" file that you remove (it in fact will still exist in a layer)

Instead, it's recommended to use buildkit `as explained here <https://pythonspeed.com/articles/docker-build-secrets/>`_.
You'll need to again export environment variables for your spack monitor server:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky

And then use buildkit along with your build and identifying the name of the secret:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .

The secrets are expected to come from your environment, and then will be temporarily mounted and available
at ``/run/secrets/<name>``. If you forget to supply them (and authentication is required) the build
will fail. If you need to build on your host (and interact with a spack monitor at localhost) you'll
need to tell Docker to use the host network:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --network="host" --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .


^^^^^^^^^^^
Singularity
^^^^^^^^^^^

To add monitoring to a Singularity container build, the spack.yaml needs to
be modified slightly to specify wanting a different format:


.. code-block:: yaml

   spack:
     specs:
       - samtools
     container:
       format: singularity


Again, generate the recipe:


.. code-block:: console

   # preview first
   $ spack containerize --monitor

   # then write to a Singularity recipe
   $ spack containerize --monitor > Singularity


Singularity doesn't have a direct way to define secrets at build time, so we have
to do a bit of a manual command to add a file, source secrets in it, and remove it.
Since Singularity doesn't have layers like Docker, deleting a file will truly
remove it from the container and history. So let's say we have this file,
``secrets.sh``:

.. code-block:: console

   # secrets.sh
   export SPACKMON_USER=spack
   export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438


We would then generate the Singularity recipe, and add a files section,
a source of that file at the start of ``%post``, and **importantly**
a removal of the final at the end of that same section.

.. code-block::

   Bootstrap: docker
   From: spack/ubuntu-bionic:latest
   Stage: build

   %files
       secrets.sh /opt/secrets.sh

   %post
       . /opt/secrets.sh

       # spack install commands are here
       ...

       # Don't forget to remove here!
       rm /opt/secrets.sh


You can then build the container as your normally would.

.. code-block:: console

   $ sudo singularity build container.sif Singularity


------------------
Monitoring Offline
------------------

In the case that you want to save monitor results to your filesystem
and then upload them later (perhaps you are in an environment where you don't
have credentials or it isn't safe to use them) you can use the ``--monitor-save-local``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-save-local hdf5

This will save results in a subfolder, "monitor" in your designated spack
reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When
you are ready to upload them to a spack monitor server:


.. code-block:: console

   $ spack monitor upload ~/.spack/reports/monitor


You can choose the root directory of results as shown above, or a specific
subdirectory. The command accepts other arguments to specify configuration
for the monitor.
|
||||
language and has many powerful features to help make package writing
|
||||
easy.
|
||||
|
||||
.. warning::
|
||||
|
||||
As a general rule, packages should install the software *from source*.
|
||||
The only exception is for proprietary software (e.g., vendor compilers).
|
||||
|
||||
If a special build system needs to be added in order to support building
|
||||
a package from source, then the associated code and recipe need to be added
|
||||
first.
|
||||
|
||||
|
||||
.. _installation_procedure:
|
||||
|
||||
@@ -2397,13 +2406,15 @@ this because uninstalling the dependency would break the package.
|
||||
|
||||
``build``, ``link``, and ``run`` dependencies all affect the hash of Spack
|
||||
packages (along with ``sha256`` sums of patches and archives used to build the
|
||||
package, and a [canonical hash](https://github.com/spack/spack/pull/28156) of
|
||||
package, and a `canonical hash <https://github.com/spack/spack/pull/28156>`_ of
|
||||
the ``package.py`` recipes). ``test`` dependencies do not affect the package
|
||||
hash, as they are only used to construct a test environment *after* building and
|
||||
installing a given package installation. Older versions of Spack did not include
|
||||
build dependencies in the hash, but this has been
|
||||
[fixed](https://github.com/spack/spack/pull/28504) as of [Spack
|
||||
``v0.18``](https://github.com/spack/spack/releases/tag/v0.18.0)
|
||||
build dependencies in the hash, but this has been
|
||||
`fixed <https://github.com/spack/spack/pull/28504>`_ as of |Spack v0.18|_.
|
||||
|
||||
.. |Spack v0.18| replace:: Spack ``v0.18``
|
||||
.. _Spack v0.18: https://github.com/spack/spack/releases/tag/v0.18.0
|
||||
|
||||
If the dependency type is not specified, Spack uses a default of
|
||||
``('build', 'link')``. This is the common case for compiler languages.
|
||||
|
||||
@@ -184,13 +184,48 @@ simply run the following commands:

 .. code-block:: console

    $ spack env activate myenv
-   $ spack concretize --force
+   $ spack concretize --fresh --force
    $ spack install

-The ``--force`` flag tells Spack to overwrite its previous concretization
-decisions, allowing you to choose a new version of Python. If any of the new
-packages like Bash are already installed, ``spack install`` won't re-install
-them, it will keep the symlinks in place.
+The ``--fresh`` flag tells Spack to use the latest version of every package
+where possible instead of trying to optimize for reuse of existing installed
+packages.
+
+The ``--force`` flag in addition tells Spack to overwrite its previous
+concretization decisions, allowing you to choose a new version of Python.
+If any of the new packages like Bash are already installed, ``spack install``
+won't re-install them, it will keep the symlinks in place.
+
+-----------------------------------
+Updating & Cleaning Up Old Packages
+-----------------------------------
+
+If you're looking to mimic the behavior of Homebrew, you may also want to
+clean up out-of-date packages from your environment after an upgrade. To
+upgrade your entire software stack within an environment and clean up old
+package versions, simply run the following commands:
+
+.. code-block:: console
+
+   $ spack env activate myenv
+   $ spack mark -i --all
+   $ spack concretize --fresh --force
+   $ spack install
+   $ spack gc
+
+Running ``spack mark -i --all`` tells Spack to mark all of the existing
+packages within an environment as "implicitly" installed. This tells
+spack's garbage collection system that these packages should be cleaned up.
+
+Don't worry however, this will not remove your entire environment.
+Running ``spack install`` will reexamine your spack environment after
+a fresh concretization and will re-mark any packages that should remain
+installed as "explicitly" installed.
+
+**Note:** if you use multiple spack environments you should re-run ``spack install``
+in each of your environments prior to running ``spack gc`` to prevent spack
+from uninstalling any shared packages that are no longer required by the
+environment you just upgraded.

 --------------
 Uninstallation
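The upgrade-and-clean recipe added above assumes an already-active environment. For readers following along, a minimal sketch of the ``spack.yaml`` such a ``myenv`` environment might contain (the spec list is illustrative; Python and Bash are simply the packages the surrounding guide discusses):

```yaml
# Hypothetical spack.yaml for the "myenv" environment used above.
# `spack concretize --fresh --force` re-resolves every spec listed here
# against the latest available versions instead of reusing old decisions.
spack:
  specs:
  - python
  - bash
```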
57
lib/spack/external/__init__.py
vendored
57
lib/spack/external/__init__.py
vendored
@@ -11,25 +11,14 @@

* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
* Usage: dependency of macholib
* Version: 0.17.2
* Version: 0.17.3

archspec
--------

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045)

argparse
--------

* Homepage: https://pypi.python.org/pypi/argparse
* Usage: We include our own version to be Python 3.X compatible.
* Version: 1.4.0
* Note: This package has been slightly modified to improve
  error message formatting. See the following commit if the
  vendored copy ever needs to be updated again:
  https://github.com/spack/spack/pull/6786/commits/dfcef577b77249106ea4e4c69a6cd9e64fa6c418
* Version: 0.2.0 (commit e44bad9c7b6defac73696f64078b2fe634719b62)

astunparse
----------------

@@ -52,7 +41,7 @@

* Homepage: https://github.com/python-attrs/attrs
* Usage: Needed by jsonschema.
* Version: 21.2.0 (83d3cd70f90a3f4d19ee8b508e58d1c58821c0ad)
* Version: 22.1.0

ctest_log_parser
----------------

@@ -67,21 +56,14 @@

* Homepage: https://pypi.python.org/pypi/distro
* Usage: Provides a more stable Linux distribution detection.
* Version: 1.6.0 (64946a1e2a9ff529047070657728600e006c99ff)
* Note: Last version supporting Python 2.7

functools32
-----------
* Homepage: https://github.com/MiCHiLU/python-functools32
* Usage: Needed by jsonschema when using Python 2.7.
* Version: 3.2.3-2
* Version: 1.8.0

jinja2
------

* Homepage: https://pypi.python.org/pypi/Jinja2
* Usage: A modern and designer-friendly templating language for Python.
* Version: 2.11.3 (last version supporting Python 2.7)
* Version: 3.0.3 (last version supporting Python 3.6)

jsonschema
----------

@@ -96,44 +78,21 @@

* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
* Version: 1.15.2
* Version: 1.16.2

markupsafe
----------

* Homepage: https://pypi.python.org/pypi/MarkupSafe
* Usage: Implements an XML/HTML/XHTML Markup safe string for Python.
* Version: 1.1.1 (last version supporting Python 2.7)

py
--

* Homepage: https://pypi.python.org/pypi/py
* Usage: Needed by pytest. Library with cross-python path,
  ini-parsing, io, code, and log facilities.
* Version: 1.4.34 (last version supporting Python 2.6)
* Note: This package has been modified:
  * https://github.com/pytest-dev/py/pull/186 was backported
* Version: 2.0.1 (last version supporting Python 3.6)

pyrsistent
----------

* Homepage: http://github.com/tobgu/pyrsistent/
* Usage: Needed by `jsonschema`
* Version: 0.16.1 (last version supporting Python 2.7)
* Note: We only include the parts needed for `jsonschema`.

pytest
------

* Homepage: https://pypi.python.org/pypi/pytest
* Usage: Testing framework used by Spack.
* Version: 3.2.5 (last version supporting Python 2.6)
* Note: This package has been slightly modified:
  * We improve Python 2.6 compatibility. See:
    https://github.com/spack/spack/pull/6801.
  * We have patched pytest not to depend on setuptools. See:
    https://github.com/spack/spack/pull/15612
* Version: 0.18.0

ruamel.yaml
------
1  lib/spack/external/_vendoring/_pyrsistent_version.py  (vendored, new file)
@@ -0,0 +1 @@
__version__ = '0.18.0'

1  lib/spack/external/_vendoring/_pyrsistent_version.pyi  (vendored, new file)
@@ -0,0 +1 @@
from _pyrsistent_version import *

1  lib/spack/external/_vendoring/altgraph.pyi  (vendored, new file)
@@ -0,0 +1 @@
from altgraph import *

18  lib/spack/external/_vendoring/altgraph/LICENSE  (vendored, new file)
@@ -0,0 +1,18 @@
Copyright (c) 2004 Istvan Albert unless otherwise noted.
Copyright (c) 2006-2010 Bob Ippolito
Copyright (2) 2010-2020 Ronald Oussoren, et. al.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
@@ -1,4 +1,5 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


import sys

@@ -22,7 +23,7 @@
from ._version_info import VersionInfo


__version__ = "21.2.0"
__version__ = "22.1.0"
__version_info__ = VersionInfo._from_version_string(__version__)

__title__ = "attrs"

@@ -73,6 +74,6 @@
]

if sys.version_info[:2] >= (3, 6):
    from ._next_gen import define, field, frozen, mutable
    from ._next_gen import define, field, frozen, mutable  # noqa: F401

    __all__.extend((define, field, frozen, mutable))
    __all__.extend(("define", "field", "frozen", "mutable"))
486  lib/spack/external/_vendoring/attr/__init__.pyi  (vendored, new file)
@@ -0,0 +1,486 @@
import sys

from typing import (
    Any,
    Callable,
    ClassVar,
    Dict,
    Generic,
    List,
    Mapping,
    Optional,
    Protocol,
    Sequence,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
)

# `import X as X` is required to make these public
from . import converters as converters
from . import exceptions as exceptions
from . import filters as filters
from . import setters as setters
from . import validators as validators
from ._cmp import cmp_using as cmp_using
from ._version_info import VersionInfo

__version__: str
__version_info__: VersionInfo
__title__: str
__description__: str
__url__: str
__uri__: str
__author__: str
__email__: str
__license__: str
__copyright__: str

_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)

_EqOrderType = Union[bool, Callable[[Any], Any]]
_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
_ConverterType = Callable[[Any], Any]
_FilterType = Callable[[Attribute[_T], _T], bool]
_ReprType = Callable[[Any], str]
_ReprArgType = Union[bool, _ReprType]
_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
_OnSetAttrArgType = Union[
    _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
]
_FieldTransformer = Callable[
    [type, List[Attribute[Any]]], List[Attribute[Any]]
]
# FIXME: in reality, if multiple validators are passed they must be in a list
# or tuple, but those are invariant and so would prevent subtypes of
# _ValidatorType from working when passed in a list or tuple.
_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]

# A protocol to be able to statically accept an attrs class.
class AttrsInstance(Protocol):
    __attrs_attrs__: ClassVar[Any]

# _make --

NOTHING: object

# NOTE: Factory lies about its return type to make this possible:
# `x: List[int] # = Factory(list)`
# Work around mypy issue #4554 in the common case by using an overload.
if sys.version_info >= (3, 8):
    from typing import Literal
    @overload
    def Factory(factory: Callable[[], _T]) -> _T: ...
    @overload
    def Factory(
        factory: Callable[[Any], _T],
        takes_self: Literal[True],
    ) -> _T: ...
    @overload
    def Factory(
        factory: Callable[[], _T],
        takes_self: Literal[False],
    ) -> _T: ...

else:
    @overload
    def Factory(factory: Callable[[], _T]) -> _T: ...
    @overload
    def Factory(
        factory: Union[Callable[[Any], _T], Callable[[], _T]],
        takes_self: bool = ...,
    ) -> _T: ...

# Static type inference support via __dataclass_transform__ implemented as per:
# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
# This annotation must be applied to all overloads of "define" and "attrs"
#
# NOTE: This is a typing construct and does not exist at runtime. Extensions
# wrapping attrs decorators should declare a separate __dataclass_transform__
# signature in the extension module using the specification linked above to
# provide pyright support.
def __dataclass_transform__(
    *,
    eq_default: bool = True,
    order_default: bool = False,
    kw_only_default: bool = False,
    field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]: ...

class Attribute(Generic[_T]):
    name: str
    default: Optional[_T]
    validator: Optional[_ValidatorType[_T]]
    repr: _ReprArgType
    cmp: _EqOrderType
    eq: _EqOrderType
    order: _EqOrderType
    hash: Optional[bool]
    init: bool
    converter: Optional[_ConverterType]
    metadata: Dict[Any, Any]
    type: Optional[Type[_T]]
    kw_only: bool
    on_setattr: _OnSetAttrType
    def evolve(self, **changes: Any) -> "Attribute[Any]": ...

# NOTE: We had several choices for the annotation to use for type arg:
# 1) Type[_T]
#   - Pros: Handles simple cases correctly
#   - Cons: Might produce less informative errors in the case of conflicting
#     TypeVars e.g. `attr.ib(default='bad', type=int)`
# 2) Callable[..., _T]
#   - Pros: Better error messages than #1 for conflicting TypeVars
#   - Cons: Terrible error messages for validator checks.
#     e.g. attr.ib(type=int, validator=validate_str)
#         -> error: Cannot infer function type argument
# 3) type (and do all of the work in the mypy plugin)
#   - Pros: Simple here, and we could customize the plugin with our own errors.
#   - Cons: Would need to write mypy plugin code to handle all the cases.
# We chose option #1.

# `attr` lies about its return type to make the following possible:
#     attr()    -> Any
#     attr(8)   -> int
#     attr(validator=<some callable>)  -> Whatever the callable expects.
# This makes this type of assignments possible:
#     x: int = attr(8)
#
# This form catches explicit None or no default but with no other arguments
# returns Any.
@overload
def attrib(
    default: None = ...,
    validator: None = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: None = ...,
    converter: None = ...,
    factory: None = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...

# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def attrib(
    default: None = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: Optional[Type[_T]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form catches an explicit default argument.
@overload
def attrib(
    default: _T,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: Optional[Type[_T]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def attrib(
    default: Optional[_T] = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: object = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
@overload
def field(
    *,
    default: None = ...,
    validator: None = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: None = ...,
    factory: None = ...,
    kw_only: bool = ...,
    eq: Optional[bool] = ...,
    order: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...

# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def field(
    *,
    default: None = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form catches an explicit default argument.
@overload
def field(
    *,
    default: _T,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def field(
    *,
    default: Optional[_T] = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
    maybe_cls: _C,
    these: Optional[Dict[str, Any]] = ...,
    repr_ns: Optional[str] = ...,
    repr: bool = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
    maybe_cls: None = ...,
    these: Optional[Dict[str, Any]] = ...,
    repr_ns: Optional[str] = ...,
    repr: bool = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> Callable[[_C], _C]: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
    maybe_cls: _C,
    *,
    these: Optional[Dict[str, Any]] = ...,
    repr: bool = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[bool] = ...,
    order: Optional[bool] = ...,
    auto_detect: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
    maybe_cls: None = ...,
    *,
    these: Optional[Dict[str, Any]] = ...,
    repr: bool = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[bool] = ...,
    order: Optional[bool] = ...,
    auto_detect: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> Callable[[_C], _C]: ...

mutable = define
frozen = define  # they differ only in their defaults

def fields(cls: Type[AttrsInstance]) -> Any: ...
def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...
def resolve_types(
    cls: _C,
    globalns: Optional[Dict[str, Any]] = ...,
    localns: Optional[Dict[str, Any]] = ...,
    attribs: Optional[List[Attribute[Any]]] = ...,
) -> _C: ...

# TODO: add support for returning a proper attrs class from the mypy plugin
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
# [attr.ib()])` is valid
def make_class(
    name: str,
    attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
    bases: Tuple[type, ...] = ...,
    repr_ns: Optional[str] = ...,
    repr: bool = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    collect_by_mro: bool = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
) -> type: ...

# _funcs --

# TODO: add support for returning TypedDict from the mypy plugin
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
# these:
# https://github.com/python/mypy/issues/4236
# https://github.com/python/typing/issues/253
# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    dict_factory: Type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Optional[
        Callable[[type, Attribute[Any], Any], Any]
    ] = ...,
    tuple_keys: Optional[bool] = ...,
) -> Dict[str, Any]: ...

# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    tuple_factory: Type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...
def has(cls: type) -> bool: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...

# _config --

def set_run_validators(run: bool) -> None: ...
def get_run_validators() -> bool: ...

# aliases --

s = attributes = attrs
ib = attr = attrib
dataclass = attrs  # Technically, partial(attrs, auto_attribs=True) ;)
@@ -1,8 +1,9 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


import functools
import types

from ._compat import new_class
from ._make import _make_ne


@@ -78,7 +79,9 @@ def cmp_using(
        num_order_functions += 1
        body["__ge__"] = _make_operator("ge", ge)

    type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body))
    type_ = types.new_class(
        class_name, (object,), {}, lambda ns: ns.update(body)
    )

    # Add same type requirement.
    if require_same_type:
13  lib/spack/external/_vendoring/attr/_cmp.pyi  (vendored, new file)
@@ -0,0 +1,13 @@
from typing import Any, Callable, Optional, Type

_CompareWithType = Callable[[Any, Any], bool]

def cmp_using(
    eq: Optional[_CompareWithType],
    lt: Optional[_CompareWithType],
    le: Optional[_CompareWithType],
    gt: Optional[_CompareWithType],
    ge: Optional[_CompareWithType],
    require_same_type: bool,
    class_name: str,
) -> Type: ...
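As an aside, a usage sketch for ``cmp_using`` (the cent-rounding comparison is purely illustrative):

```python
import attr

# eq plus one ordering function is enough; cmp_using derives the rest
# via functools.total_ordering.
cents = attr.cmp_using(
    eq=lambda a, b: round(a, 2) == round(b, 2),
    lt=lambda a, b: round(a, 2) < round(b, 2),
)

@attr.s
class Price(object):
    amount = attr.ib(eq=cents, order=cents)

assert Price(1.001) == Price(1.004)  # equal to the nearest cent
```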
185  lib/spack/external/_vendoring/attr/_compat.py  (vendored, new file)
@@ -0,0 +1,185 @@
# SPDX-License-Identifier: MIT


import inspect
import platform
import sys
import threading
import types
import warnings

from collections.abc import Mapping, Sequence  # noqa


PYPY = platform.python_implementation() == "PyPy"
PY36 = sys.version_info[:2] >= (3, 6)
HAS_F_STRINGS = PY36
PY310 = sys.version_info[:2] >= (3, 10)


if PYPY or PY36:
    ordered_dict = dict
else:
    from collections import OrderedDict

    ordered_dict = OrderedDict


def just_warn(*args, **kw):
    warnings.warn(
        "Running interpreter doesn't sufficiently support code object "
        "introspection. Some features like bare super() or accessing "
        "__class__ will not work with slotted classes.",
        RuntimeWarning,
        stacklevel=2,
    )


class _AnnotationExtractor:
    """
    Extract type annotations from a callable, returning None whenever there
    is none.
    """

    __slots__ = ["sig"]

    def __init__(self, callable):
        try:
            self.sig = inspect.signature(callable)
        except (ValueError, TypeError):  # inspect failed
            self.sig = None

    def get_first_param_type(self):
        """
        Return the type annotation of the first argument if it's not empty.
        """
        if not self.sig:
            return None

        params = list(self.sig.parameters.values())
        if params and params[0].annotation is not inspect.Parameter.empty:
            return params[0].annotation

        return None

    def get_return_type(self):
        """
        Return the return type if it's not empty.
        """
        if (
            self.sig
            and self.sig.return_annotation is not inspect.Signature.empty
        ):
            return self.sig.return_annotation

        return None


def make_set_closure_cell():
    """Return a function of two arguments (cell, value) which sets
    the value stored in the closure cell `cell` to `value`.
    """
    # pypy makes this easy. (It also supports the logic below, but
    # why not do the easy/fast thing?)
    if PYPY:

        def set_closure_cell(cell, value):
            cell.__setstate__((value,))

        return set_closure_cell

    # Otherwise gotta do it the hard way.

    # Create a function that will set its first cellvar to `value`.
    def set_first_cellvar_to(value):
        x = value
        return

        # This function will be eliminated as dead code, but
        # not before its reference to `x` forces `x` to be
        # represented as a closure cell rather than a local.
        def force_x_to_be_a_cell():  # pragma: no cover
            return x

    try:
        # Extract the code object and make sure our assumptions about
        # the closure behavior are correct.
        co = set_first_cellvar_to.__code__
        if co.co_cellvars != ("x",) or co.co_freevars != ():
            raise AssertionError  # pragma: no cover

        # Convert this code object to a code object that sets the
        # function's first _freevar_ (not cellvar) to the argument.
        if sys.version_info >= (3, 8):

            def set_closure_cell(cell, value):
                cell.cell_contents = value

        else:
            args = [co.co_argcount]
            args.append(co.co_kwonlyargcount)
            args.extend(
                [
                    co.co_nlocals,
                    co.co_stacksize,
                    co.co_flags,
                    co.co_code,
                    co.co_consts,
                    co.co_names,
                    co.co_varnames,
                    co.co_filename,
                    co.co_name,
                    co.co_firstlineno,
                    co.co_lnotab,
                    # These two arguments are reversed:
                    co.co_cellvars,
                    co.co_freevars,
                ]
            )
            set_first_freevar_code = types.CodeType(*args)

            def set_closure_cell(cell, value):
                # Create a function using the set_first_freevar_code,
                # whose first closure cell is `cell`. Calling it will
                # change the value of that cell.
                setter = types.FunctionType(
                    set_first_freevar_code, {}, "setter", (), (cell,)
                )
                # And call it to set the cell.
                setter(value)

        # Make sure it works on this interpreter:
        def make_func_with_cell():
            x = None

            def func():
                return x  # pragma: no cover

            return func

        cell = make_func_with_cell().__closure__[0]
        set_closure_cell(cell, 100)
        if cell.cell_contents != 100:
            raise AssertionError  # pragma: no cover

    except Exception:
        return just_warn
    else:
        return set_closure_cell


set_closure_cell = make_set_closure_cell()

# Thread-local global to track attrs instances which are already being repr'd.
# This is needed because there is no other (thread-safe) way to pass info
# about the instances that are already being repr'd through the call stack
# in order to ensure we don't perform infinite recursion.
#
# For instance, if an instance contains a dict which contains that instance,
# we need to know that we're already repr'ing the outside instance from within
# the dict's repr() call.
#
# This lives here rather than in _make.py so that the functions in _make.py
# don't have a direct reference to the thread-local in their globals dict.
# If they have such a reference, it breaks cloudpickle.
repr_context = threading.local()
@@ -1,4 +1,4 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


__all__ = ["set_run_validators", "get_run_validators"]

@@ -9,6 +9,10 @@
def set_run_validators(run):
    """
    Set whether or not validators are run. By default, they are run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
        instead.
    """
    if not isinstance(run, bool):
        raise TypeError("'run' must be bool.")

@@ -19,5 +23,9 @@ def set_run_validators(run):
def get_run_validators():
    """
    Return whether or not validators are run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
        instead.
    """
    return _run_validators
@@ -1,8 +1,8 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


import copy

from ._compat import iteritems
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError

@@ -25,7 +25,7 @@ def asdict(
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attr.Attribute` as the first argument and the
        called with the `attrs.Attribute` as the first argument and the
        value as the second argument.
    :param callable dict_factory: A callable to produce dictionaries from. For
        example, to produce ordered dictionaries instead of normal Python
@@ -46,6 +46,8 @@ def asdict(
    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    .. versionadded:: 20.3.0 *value_serializer*
    .. versionadded:: 21.3.0 If a dict has a collection for a key, it is
        serialized as a tuple.
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()

@@ -61,11 +63,11 @@ def asdict(
            if has(v.__class__):
                rv[a.name] = asdict(
                    v,
                    True,
                    filter,
                    dict_factory,
                    retain_collection_types,
                    value_serializer,
                    recurse=True,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain_collection_types is True else list
@@ -73,10 +75,11 @@ def asdict(
                    [
                        _asdict_anything(
                            i,
                            filter,
                            dict_factory,
                            retain_collection_types,
                            value_serializer,
                            is_key=False,
                            filter=filter,
                            dict_factory=dict_factory,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        )
                        for i in v
                    ]
@@ -87,20 +90,22 @@ def asdict(
                    (
                        _asdict_anything(
                            kk,
                            filter,
                            df,
                            retain_collection_types,
                            value_serializer,
                            is_key=True,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                        _asdict_anything(
                            vv,
                            filter,
                            df,
                            retain_collection_types,
                            value_serializer,
                            is_key=False,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                    )
                    for kk, vv in iteritems(v)
                    for kk, vv in v.items()
                )
            else:
                rv[a.name] = v

@@ -111,6 +116,7 @@ def asdict(

def _asdict_anything(
    val,
    is_key,
    filter,
    dict_factory,
    retain_collection_types,
@@ -123,22 +129,29 @@ def _asdict_anything(
        # Attrs class.
        rv = asdict(
            val,
            True,
            filter,
            dict_factory,
            retain_collection_types,
            value_serializer,
            recurse=True,
            filter=filter,
            dict_factory=dict_factory,
            retain_collection_types=retain_collection_types,
            value_serializer=value_serializer,
        )
    elif isinstance(val, (tuple, list, set, frozenset)):
        cf = val.__class__ if retain_collection_types is True else list
        if retain_collection_types is True:
            cf = val.__class__
        elif is_key:
            cf = tuple
        else:
            cf = list

        rv = cf(
            [
                _asdict_anything(
                    i,
                    filter,
                    dict_factory,
                    retain_collection_types,
                    value_serializer,
                    is_key=False,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
                for i in val
            ]
@@ -148,13 +161,23 @@ def _asdict_anything(
        rv = df(
            (
                _asdict_anything(
                    kk, filter, df, retain_collection_types, value_serializer
                    kk,
                    is_key=True,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
                _asdict_anything(
                    vv, filter, df, retain_collection_types, value_serializer
                    vv,
                    is_key=False,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
            )
            for kk, vv in iteritems(val)
            for kk, vv in val.items()
        )
    else:
        rv = val

@@ -181,7 +204,7 @@ def astuple(
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attr.Attribute` as the first argument and the
        called with the `attrs.Attribute` as the first argument and the
        value as the second argument.
    :param callable tuple_factory: A callable to produce tuples from. For
        example, to produce lists instead of tuples.
@@ -253,7 +276,7 @@ def astuple(
                            if has(vv.__class__)
                            else vv,
                        )
                        for kk, vv in iteritems(v)
                        for kk, vv in v.items()
                    )
                )
    else:
@@ -291,7 +314,9 @@ def assoc(inst, **changes):
        class.

    .. deprecated:: 17.1.0
        Use `evolve` instead.
        Use `attrs.evolve` instead if you can.
        This function will not be removed due to the slightly different approach
        compared to `attrs.evolve`.
    """
    import warnings

@@ -302,7 +327,7 @@ def assoc(inst, **changes):
    )
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in iteritems(changes):
    for k, v in changes.items():
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            raise AttrsAttributeNotFoundError(
@@ -370,18 +395,16 @@ class and you didn't pass any attribs.
    :raise NameError: If types cannot be resolved because of missing variables.

    :returns: *cls* so you can use this function also as a class decorator.
        Please note that you have to apply it **after** `attr.s`. That means
        the decorator has to come in the line **before** `attr.s`.
        Please note that you have to apply it **after** `attrs.define`. That
        means the decorator has to come in the line **before** `attrs.define`.

    .. versionadded:: 20.1.0
    .. versionadded:: 21.1.0 *attribs*

    """
    try:
        # Since calling get_type_hints is expensive we cache whether we've
        # done it already.
        cls.__attrs_types_resolved__
    except AttributeError:
    # Since calling get_type_hints is expensive we cache whether we've
    # done it already.
    if getattr(cls, "__attrs_types_resolved__", None) != cls:
        import typing

        hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
@@ -389,7 +412,9 @@ class and you didn't pass any attribs.
            if field.name in hints:
                # Since fields have been frozen we must work around it.
                _obj_setattr(field, "type", hints[field.name])
        cls.__attrs_types_resolved__ = True
        # We store the class we resolved so that subclasses know they haven't
        # been resolved.
        cls.__attrs_types_resolved__ = cls

    # Return the class so you can use it as a decorator too.
    return cls
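To make the caching change above concrete, here is a short sketch of how ``resolve_types`` is normally used to resolve forward references (the class name is illustrative; note the decorator order the docstring describes):

```python
from typing import Optional

import attr

@attr.resolve_types          # applied after attr.s, as the docstring requires
@attr.s(auto_attribs=True)
class Node:
    value: int = 0
    next: Optional["Node"] = None

# The string annotation has been replaced by the real typing object.
assert attr.fields(Node).next.type == Optional[Node]
```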
(File diff suppressed because it is too large.)
@@ -1,14 +1,24 @@
# SPDX-License-Identifier: MIT

"""
These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
`attr.ib` with different default values.
"""


from functools import partial

from attr.exceptions import UnannotatedAttributeError

from . import setters
from ._make import NOTHING, _frozen_setattrs, attrib, attrs
from ._funcs import asdict as _asdict
from ._funcs import astuple as _astuple
from ._make import (
    NOTHING,
    _frozen_setattrs,
    _ng_default_on_setattr,
    attrib,
    attrs,
)
from .exceptions import UnannotatedAttributeError


def define(
@@ -32,22 +42,45 @@ def define(
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
):
    r"""
    The only behavioral differences are the handling of the *auto_attribs*
    option:
    Define an ``attrs`` class.

    Differences to the classic `attr.s` that it uses underneath:

    - Automatically detect whether or not *auto_attribs* should be `True` (c.f.
      *auto_attribs* parameter).
    - If *frozen* is `False`, run converters and validators when setting an
      attribute by default.
    - *slots=True*

      .. caution::

         Usually this has only upsides and few visible effects in everyday
         programming. But it *can* lead to some surprising behaviors, so please
         make sure to read :term:`slotted classes`.
    - *auto_exc=True*
    - *auto_detect=True*
    - *order=False*
    - Some options that were only relevant on Python 2 or were kept around for
      backwards-compatibility have been removed.

    Please note that these are all defaults and you can change them as you
    wish.

    :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
       exactly like `attr.s`. If left `None`, `attr.s` will try to guess:

       1. If any attributes are annotated and no unannotated `attr.ib`\ s
       1. If any attributes are annotated and no unannotated `attrs.fields`\ s
          are found, it assumes *auto_attribs=True*.
       2. Otherwise it assumes *auto_attribs=False* and tries to collect
          `attr.ib`\ s.
          `attrs.fields`\ s.

    and that mutable classes (``frozen=False``) validate on ``__setattr__``.
    For now, please refer to `attr.s` for the rest of the parameters.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
    """

    def do_it(cls, auto_attribs):
@@ -72,6 +105,7 @@ def do_it(cls, auto_attribs):
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
            match_args=match_args,
        )

    def wrap(cls):
@@ -84,9 +118,9 @@ def wrap(cls):

        had_on_setattr = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes validate on setattr.
        # By default, mutable classes convert & validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = setters.validate
            on_setattr = _ng_default_on_setattr

        # However, if we subclass a frozen class, we inherit the immutability
        # and disable on_setattr.
@@ -156,3 +190,31 @@ def field(
        order=order,
        on_setattr=on_setattr,
    )


def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
    """
    Same as `attr.asdict`, except that collection types are always retained
    and dict is always used as *dict_factory*.

    .. versionadded:: 21.3.0
    """
    return _asdict(
        inst=inst,
        recurse=recurse,
        filter=filter,
        value_serializer=value_serializer,
        retain_collection_types=True,
    )


def astuple(inst, *, recurse=True, filter=None):
    """
    Same as `attr.astuple`, except that collection types are always retained
    and `tuple` is always used as the *tuple_factory*.

    .. versionadded:: 21.3.0
    """
    return _astuple(
        inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
    )
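A quick sketch of the behavioral difference these wrappers introduce (assuming an attrs release that ships the ``attrs`` namespace, i.e. 21.3.0+; the class is illustrative):

```python
import attr
import attrs

@attrs.define
class Point:
    coords: tuple = (1, 2)

p = Point()
# The classic helper converts collections to lists by default...
assert attr.asdict(p)["coords"] == [1, 2]
# ...while the keyword-only wrapper retains the original collection type.
assert attrs.asdict(p)["coords"] == (1, 2)
```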
@@ -1,4 +1,5 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


from functools import total_ordering

@@ -8,7 +9,7 @@

@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo(object):
class VersionInfo:
    """
    A version object that can be compared to tuple of length 1--4:

9  lib/spack/external/_vendoring/attr/_version_info.pyi  (vendored, new file)
@@ -0,0 +1,9 @@
class VersionInfo:
    @property
    def year(self) -> int: ...
    @property
    def minor(self) -> int: ...
    @property
    def micro(self) -> int: ...
    @property
    def releaselevel(self) -> str: ...
@@ -1,22 +1,21 @@
# SPDX-License-Identifier: MIT

"""
Commonly useful converters.
"""

from __future__ import absolute_import, division, print_function

from ._compat import PY2
import typing

from ._compat import _AnnotationExtractor
from ._make import NOTHING, Factory, pipe


if not PY2:
    import inspect
    import typing


__all__ = [
    "pipe",
    "optional",
    "default_if_none",
    "optional",
    "pipe",
    "to_bool",
]


@@ -39,22 +38,15 @@ def optional_converter(val):
            return None
        return converter(val)

    if not PY2:
        sig = None
        try:
            sig = inspect.signature(converter)
        except (ValueError, TypeError):  # inspect failed
            pass
        if sig:
            params = list(sig.parameters.values())
            if params and params[0].annotation is not inspect.Parameter.empty:
                optional_converter.__annotations__["val"] = typing.Optional[
                    params[0].annotation
                ]
            if sig.return_annotation is not inspect.Signature.empty:
                optional_converter.__annotations__["return"] = typing.Optional[
                    sig.return_annotation
                ]
    xtr = _AnnotationExtractor(converter)

    t = xtr.get_first_param_type()
    if t:
        optional_converter.__annotations__["val"] = typing.Optional[t]

    rt = xtr.get_return_type()
    if rt:
        optional_converter.__annotations__["return"] = typing.Optional[rt]

    return optional_converter


@@ -65,14 +57,14 @@ def default_if_none(default=NOTHING, factory=None):
    result of *factory*.

    :param default: Value to be used if ``None`` is passed. Passing an instance
       of `attr.Factory` is supported, however the ``takes_self`` option
       of `attrs.Factory` is supported, however the ``takes_self`` option
       is *not*.
    :param callable factory: A callable that takes no parameters whose result
       is used if ``None`` is passed.

    :raises TypeError: If **neither** *default* **nor** *factory* is passed.
    :raises TypeError: If **both** *default* and *factory* are passed.
    :raises ValueError: If an instance of `attr.Factory` is passed with
    :raises ValueError: If an instance of `attrs.Factory` is passed with
       ``takes_self=True``.

    .. versionadded:: 18.2.0
@@ -109,3 +101,44 @@ def default_if_none_converter(val):
            return default

    return default_if_none_converter


def to_bool(val):
    """
    Convert "boolean" strings (e.g., from env. vars.) to real booleans.

    Values mapping to :code:`True`:

    - :code:`True`
    - :code:`"true"` / :code:`"t"`
    - :code:`"yes"` / :code:`"y"`
    - :code:`"on"`
    - :code:`"1"`
    - :code:`1`

    Values mapping to :code:`False`:

    - :code:`False`
    - :code:`"false"` / :code:`"f"`
    - :code:`"no"` / :code:`"n"`
    - :code:`"off"`
    - :code:`"0"`
    - :code:`0`

    :raises ValueError: for any other value.

    .. versionadded:: 21.3.0
    """
    if isinstance(val, str):
        val = val.lower()
    truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
    falsy = {False, "false", "f", "no", "n", "off", "0", 0}
    try:
        if val in truthy:
            return True
        if val in falsy:
            return False
    except TypeError:
        # Raised when "val" is not hashable (e.g., lists)
        pass
    raise ValueError("Cannot convert value to bool: {}".format(val))
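A brief sketch of these converters in use (the config fields are illustrative):

```python
import attr
from attr import converters

@attr.s
class Config(object):
    # "1"/"true"/"yes"/"on" (and friends) become real booleans.
    debug = attr.ib(converter=converters.to_bool, default="false")
    # None, e.g. from an unset option, falls back to the given default.
    retries = attr.ib(converter=converters.default_if_none(3), default=None)

cfg = Config(debug="on", retries=None)
assert cfg.debug is True and cfg.retries == 3
```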
13  lib/spack/external/_vendoring/attr/converters.pyi  (vendored, new file)
@@ -0,0 +1,13 @@
from typing import Callable, Optional, TypeVar, overload

from . import _ConverterType

_T = TypeVar("_T")

def pipe(*validators: _ConverterType) -> _ConverterType: ...
def optional(converter: _ConverterType) -> _ConverterType: ...
@overload
def default_if_none(default: _T) -> _ConverterType: ...
@overload
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
def to_bool(val: str) -> bool: ...
@@ -1,4 +1,4 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT


class FrozenError(AttributeError):

17  lib/spack/external/_vendoring/attr/exceptions.pyi  (vendored, new file)
@@ -0,0 +1,17 @@
from typing import Any

class FrozenError(AttributeError):
    msg: str = ...

class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...

class NotCallableError(TypeError):
    msg: str = ...
    value: Any = ...
    def __init__(self, msg: str, value: Any) -> None: ...
@@ -1,10 +1,9 @@
# SPDX-License-Identifier: MIT

"""
Commonly useful filters for `attr.asdict`.
"""

from __future__ import absolute_import, division, print_function

from ._compat import isclass
from ._make import Attribute


@@ -13,17 +12,17 @@ def _split_what(what):
    Returns a tuple of `frozenset`s of classes and attributes.
    """
    return (
        frozenset(cls for cls in what if isclass(cls)),
        frozenset(cls for cls in what if isinstance(cls, type)),
        frozenset(cls for cls in what if isinstance(cls, Attribute)),
    )


def include(*what):
    """
    Whitelist *what*.
    Include *what*.

    :param what: What to whitelist.
    :type what: `list` of `type` or `attr.Attribute`\\ s
    :param what: What to include.
    :type what: `list` of `type` or `attrs.Attribute`\\ s

    :rtype: `callable`
    """
@@ -37,10 +36,10 @@ def include_(attribute, value):

def exclude(*what):
    """
    Blacklist *what*.
    Exclude *what*.

    :param what: What to blacklist.
    :type what: `list` of classes or `attr.Attribute`\\ s.
    :param what: What to exclude.
    :type what: `list` of classes or `attrs.Attribute`\\ s.

    :rtype: `callable`
    """

6  lib/spack/external/_vendoring/attr/filters.pyi  (vendored, new file)
@@ -0,0 +1,6 @@
from typing import Any, Union

from . import Attribute, _FilterType

def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
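A small sketch of these filters in action (the class is illustrative):

```python
import attr
from attr import asdict, filters

@attr.s
class User(object):
    name = attr.ib(default="ada")
    password = attr.ib(default="hunter2")

# Drop the password attribute when serializing.
public = asdict(User(), filter=filters.exclude(attr.fields(User).password))
assert public == {"name": "ada"}
```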
@@ -1,8 +1,9 @@
# SPDX-License-Identifier: MIT

"""
Commonly used hooks for on_setattr.
"""

from __future__ import absolute_import, division, print_function

from . import _config
from .exceptions import FrozenAttributeError
@@ -67,11 +68,6 @@ def convert(instance, attrib, new_value):
    return new_value


# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
# autodata stopped working, so the docstring is inlined in the API docs.
NO_OP = object()
"""
Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.

Does not work in `pipe` or within lists.

.. versionadded:: 20.1.0
"""

19  lib/spack/external/_vendoring/attr/setters.pyi  (vendored, new file)
@@ -0,0 +1,19 @@
from typing import Any, NewType, NoReturn, TypeVar, cast

from . import Attribute, _OnSetAttrType

_T = TypeVar("_T")

def frozen(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

# convert is allowed to return Any, because they can be chained using pipe.
def convert(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...

_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType
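A sketch of these hooks and the ``NO_OP`` sentinel in use (class and field names are illustrative):

```python
import attr
from attr import setters
from attr.validators import instance_of

@attr.s(on_setattr=setters.validate)
class Point(object):
    x = attr.ib(validator=instance_of(int))
    # NO_OP opts this one attribute out of the class-wide hook.
    note = attr.ib(default="", on_setattr=setters.NO_OP)

p = Point(1)
p.note = 42  # accepted: the hook is disabled for this attribute
# p.x = "bad" would raise TypeError via the instance_of validator
```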
@@ -1,30 +1,98 @@
# SPDX-License-Identifier: MIT

"""
Commonly useful validators.
"""

from __future__ import absolute_import, division, print_function

import operator
import re

from contextlib import contextmanager

from ._config import get_run_validators, set_run_validators
from ._make import _AndValidator, and_, attrib, attrs
from .exceptions import NotCallableError


try:
    Pattern = re.Pattern
except AttributeError:  # Python <3.7 lacks a Pattern type.
    Pattern = type(re.compile(""))


__all__ = [
    "and_",
    "deep_iterable",
    "deep_mapping",
    "disabled",
    "ge",
    "get_disabled",
    "gt",
    "in_",
    "instance_of",
    "is_callable",
    "le",
    "lt",
    "matches_re",
    "max_len",
    "min_len",
    "optional",
    "provides",
    "set_disabled",
]


def set_disabled(disabled):
    """
    Globally disable or enable running validators.

    By default, they are run.

    :param disabled: If ``True``, disable running all validators.
    :type disabled: bool

    .. warning::

        This function is not thread-safe!

    .. versionadded:: 21.3.0
    """
    set_run_validators(not disabled)


def get_disabled():
    """
    Return a bool indicating whether validators are currently disabled or not.

    :return: ``True`` if validators are currently disabled.
    :rtype: bool

    .. versionadded:: 21.3.0
    """
    return not get_run_validators()


@contextmanager
def disabled():
    """
    Context manager that disables running validators within its context.

    .. warning::

        This context manager is not thread-safe!

    .. versionadded:: 21.3.0
    """
    set_run_validators(False)
    try:
        yield
    finally:
        set_run_validators(True)
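A usage sketch for the new context manager, useful e.g. when bulk-loading already-validated data (the class is illustrative):

```python
import attr
from attr import validators

@attr.s
class Sample(object):
    x = attr.ib(validator=validators.instance_of(int))

with validators.disabled():
    s = Sample("not an int")  # accepted: validation is off in this block
assert validators.get_disabled() is False  # switched back on at exit
```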
@attrs(repr=False, slots=True, hash=True)
|
||||
class _InstanceOfValidator(object):
|
||||
class _InstanceOfValidator:
|
||||
type = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
@@ -61,16 +129,15 @@ def instance_of(type):
|
||||
:type type: type or tuple of types
|
||||
|
||||
:raises TypeError: With a human readable error message, the attribute
|
||||
(of type `attr.Attribute`), the expected type, and the value it
|
||||
(of type `attrs.Attribute`), the expected type, and the value it
|
||||
got.
|
||||
"""
|
||||
return _InstanceOfValidator(type)
|
||||
|
||||
|
@attrs(repr=False, frozen=True, slots=True)
class _MatchesReValidator(object):
    regex = attrib()
    flags = attrib()
class _MatchesReValidator:
    pattern = attrib()
    match_func = attrib()

    def __call__(self, inst, attr, value):
@@ -79,18 +146,18 @@ def __call__(self, inst, attr, value):
        """
        if not self.match_func(value):
            raise ValueError(
                "'{name}' must match regex {regex!r}"
                "'{name}' must match regex {pattern!r}"
                " ({value!r} doesn't)".format(
                    name=attr.name, regex=self.regex.pattern, value=value
                    name=attr.name, pattern=self.pattern.pattern, value=value
                ),
                attr,
                self.regex,
                self.pattern,
                value,
            )

    def __repr__(self):
        return "<matches_re validator for pattern {regex!r}>".format(
            regex=self.regex
        return "<matches_re validator for pattern {pattern!r}>".format(
            pattern=self.pattern
        )
@@ -99,48 +166,51 @@ def matches_re(regex, flags=0, func=None):
    A validator that raises `ValueError` if the initializer is called
    with a string that doesn't match *regex*.

    :param str regex: a regex string to match against
    :param regex: a regex string or precompiled pattern to match against
    :param int flags: flags that will be passed to the underlying re function
        (default 0)
    :param callable func: which underlying `re` function to call (options
        are `re.fullmatch`, `re.search`, `re.match`, default
        is ``None`` which means either `re.fullmatch` or an emulation of
        it on Python 2). For performance reasons, they won't be used directly
        but on a pre-`re.compile`\ ed pattern.
    :param callable func: which underlying `re` function to call. Valid options
        are `re.fullmatch`, `re.search`, and `re.match`; the default ``None``
        means `re.fullmatch`. For performance reasons, the pattern is always
        precompiled using `re.compile`.

    .. versionadded:: 19.2.0
    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
    """
    fullmatch = getattr(re, "fullmatch", None)
    valid_funcs = (fullmatch, None, re.search, re.match)
    valid_funcs = (re.fullmatch, None, re.search, re.match)
    if func not in valid_funcs:
        raise ValueError(
            "'func' must be one of %s."
            % (
            "'func' must be one of {}.".format(
                ", ".join(
                    sorted(
                        e and e.__name__ or "None" for e in set(valid_funcs)
                    )
                ),
            )
        )
    )

    pattern = re.compile(regex, flags)
    if isinstance(regex, Pattern):
        if flags:
            raise TypeError(
                "'flags' can only be used with a string pattern; "
                "pass flags to re.compile() instead"
            )
        pattern = regex
    else:
        pattern = re.compile(regex, flags)

    if func is re.match:
        match_func = pattern.match
    elif func is re.search:
        match_func = pattern.search
    else:
        if fullmatch:
            match_func = pattern.fullmatch
        else:
            pattern = re.compile(r"(?:{})\Z".format(regex), flags)
            match_func = pattern.match
        match_func = pattern.fullmatch

    return _MatchesReValidator(pattern, flags, match_func)
    return _MatchesReValidator(pattern, match_func)
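A short sketch of the new pre-compiled pattern path; the class name is illustrative. Note that flags now belong in re.compile(), not in matches_re():

    import re
    import attr
    from attr import validators

    @attr.define
    class Release:
        tag: str = attr.field(
            validator=validators.matches_re(re.compile(r"v\d+\.\d+", re.IGNORECASE))
        )

    Release("V1.2")  # ok: IGNORECASE travels with the compiled pattern
    # validators.matches_re(re.compile(r"v\d+"), flags=re.I)  # would raise TypeError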
@attrs(repr=False, slots=True, hash=True)
class _ProvidesValidator(object):
class _ProvidesValidator:
    interface = attrib()

    def __call__(self, inst, attr, value):
@@ -175,14 +245,14 @@ def provides(interface):
    :type interface: ``zope.interface.Interface``

    :raises TypeError: With a human readable error message, the attribute
        (of type `attr.Attribute`), the expected interface, and the
        (of type `attrs.Attribute`), the expected interface, and the
        value it got.
    """
    return _ProvidesValidator(interface)


@attrs(repr=False, slots=True, hash=True)
class _OptionalValidator(object):
class _OptionalValidator:
    validator = attrib()

    def __call__(self, inst, attr, value):
@@ -216,7 +286,7 @@ def optional(validator):
@attrs(repr=False, slots=True, hash=True)
class _InValidator(object):
class _InValidator:
    options = attrib()

    def __call__(self, inst, attr, value):
@@ -229,7 +299,10 @@ def __call__(self, inst, attr, value):
            raise ValueError(
                "'{name}' must be in {options!r} (got {value!r})".format(
                    name=attr.name, options=self.options, value=value
                )
                ),
                attr,
                self.options,
                value,
            )

    def __repr__(self):
@@ -248,16 +321,20 @@ def in_(options):
    :type options: list, tuple, `enum.Enum`, ...

    :raises ValueError: With a human readable error message, the attribute (of
        type `attr.Attribute`), the expected options, and the value it
        type `attrs.Attribute`), the expected options, and the value it
        got.

    .. versionadded:: 17.1.0
    .. versionchanged:: 22.1.0
       The ValueError was incomplete until now and only contained the human
       readable error message. Now it contains all the information that has
       been promised since 17.1.0.
    """
    return _InValidator(options)
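The 22.1.0 change means the raised ValueError now carries the attribute, options, and value in addition to the message; a minimal sketch with illustrative names:

    import attr
    from attr import validators

    @attr.define
    class Pixel:
        depth: int = attr.field(validator=validators.in_([8, 16, 32]))

    try:
        Pixel(depth=12)
    except ValueError as exc:
        msg, attribute, options, value = exc.args  # all four since 22.1.0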
@attrs(repr=False, slots=False, hash=True)
class _IsCallableValidator(object):
class _IsCallableValidator:
    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
@@ -287,14 +364,14 @@ def is_callable():
    .. versionadded:: 19.1.0

    :raises `attr.exceptions.NotCallableError`: With a human readable error
        message containing the attribute (`attr.Attribute`) name,
        message containing the attribute (`attrs.Attribute`) name,
        and the value it got.
    """
    return _IsCallableValidator()
@attrs(repr=False, slots=True, hash=True)
class _DeepIterable(object):
class _DeepIterable:
    member_validator = attrib(validator=is_callable())
    iterable_validator = attrib(
        default=None, validator=optional(is_callable())
@@ -329,7 +406,7 @@ def deep_iterable(member_validator, iterable_validator=None):
    """
    A validator that performs deep validation of an iterable.

    :param member_validator: Validator to apply to iterable members
    :param member_validator: Validator(s) to apply to iterable members
    :param iterable_validator: Validator to apply to iterable itself
        (optional)

@@ -337,11 +414,13 @@ def deep_iterable(member_validator, iterable_validator=None):

    :raises TypeError: if any sub-validators fail
    """
    if isinstance(member_validator, (list, tuple)):
        member_validator = and_(*member_validator)
    return _DeepIterable(member_validator, iterable_validator)
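The new list/tuple handling lets several member validators be passed at once; they are combined with and_(). A hedged sketch with illustrative names (min_len is defined further down in this same file):

    import attr
    from attr import validators as v

    @attr.define
    class Batch:
        names: list = attr.field(
            validator=v.deep_iterable(
                member_validator=[v.instance_of(str), v.min_len(1)],
                iterable_validator=v.instance_of(list),
            )
        )

    Batch(["a", "bc"])  # ok
    # Batch([""])       # would raise ValueError from min_len(1)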
@attrs(repr=False, slots=True, hash=True)
class _DeepMapping(object):
class _DeepMapping:
    key_validator = attrib(validator=is_callable())
    value_validator = attrib(validator=is_callable())
    mapping_validator = attrib(default=None, validator=optional(is_callable()))
@@ -377,3 +456,139 @@ def deep_mapping(key_validator, value_validator, mapping_validator=None):
    :raises TypeError: if any sub-validators fail
    """
    return _DeepMapping(key_validator, value_validator, mapping_validator)
@attrs(repr=False, frozen=True, slots=True)
class _NumberValidator:
    bound = attrib()
    compare_op = attrib()
    compare_func = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.compare_func(value, self.bound):
            raise ValueError(
                "'{name}' must be {op} {bound}: {value}".format(
                    name=attr.name,
                    op=self.compare_op,
                    bound=self.bound,
                    value=value,
                )
            )

    def __repr__(self):
        return "<Validator for x {op} {bound}>".format(
            op=self.compare_op, bound=self.bound
        )


def lt(val):
    """
    A validator that raises `ValueError` if the initializer is called
    with a number larger or equal to *val*.

    :param val: Exclusive upper bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, "<", operator.lt)


def le(val):
    """
    A validator that raises `ValueError` if the initializer is called
    with a number greater than *val*.

    :param val: Inclusive upper bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, "<=", operator.le)


def ge(val):
    """
    A validator that raises `ValueError` if the initializer is called
    with a number smaller than *val*.

    :param val: Inclusive lower bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, ">=", operator.ge)


def gt(val):
    """
    A validator that raises `ValueError` if the initializer is called
    with a number smaller or equal to *val*.

    :param val: Exclusive lower bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, ">", operator.gt)
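The four bound validators compose naturally; a minimal sketch with illustrative names:

    import attr
    from attr import validators as v

    @attr.define
    class Probability:
        value: float = attr.field(validator=[v.ge(0.0), v.le(1.0)])

    Probability(0.5)    # ok
    # Probability(1.5)  # ValueError: 'value' must be <= 1.0: 1.5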
@attrs(repr=False, frozen=True, slots=True)
class _MaxLengthValidator:
    max_length = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if len(value) > self.max_length:
            raise ValueError(
                "Length of '{name}' must be <= {max}: {len}".format(
                    name=attr.name, max=self.max_length, len=len(value)
                )
            )

    def __repr__(self):
        return "<max_len validator for {max}>".format(max=self.max_length)


def max_len(length):
    """
    A validator that raises `ValueError` if the initializer is called
    with a string or iterable that is longer than *length*.

    :param int length: Maximum length of the string or iterable

    .. versionadded:: 21.3.0
    """
    return _MaxLengthValidator(length)
@attrs(repr=False, frozen=True, slots=True)
class _MinLengthValidator:
    min_length = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if len(value) < self.min_length:
            raise ValueError(
                "Length of '{name}' must be => {min}: {len}".format(
                    name=attr.name, min=self.min_length, len=len(value)
                )
            )

    def __repr__(self):
        return "<min_len validator for {min}>".format(min=self.min_length)


def min_len(length):
    """
    A validator that raises `ValueError` if the initializer is called
    with a string or iterable that is shorter than *length*.

    :param int length: Minimum length of the string or iterable

    .. versionadded:: 22.1.0
    """
    return _MinLengthValidator(length)
lib/spack/external/_vendoring/attr/validators.pyi (vendored, new file, 80 lines)
@@ -0,0 +1,80 @@
from typing import (
    Any,
    AnyStr,
    Callable,
    Container,
    ContextManager,
    Iterable,
    List,
    Mapping,
    Match,
    Optional,
    Pattern,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
)

from . import _ValidatorType
from . import _ValidatorArgType

_T = TypeVar("_T")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_I = TypeVar("_I", bound=Iterable)
_K = TypeVar("_K")
_V = TypeVar("_V")
_M = TypeVar("_M", bound=Mapping)

def set_disabled(run: bool) -> None: ...
def get_disabled() -> bool: ...
def disabled() -> ContextManager[None]: ...

# To be more precise on instance_of use some overloads.
# If there are more than 3 items in the tuple then we fall back to Any
@overload
def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
@overload
def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
@overload
def instance_of(
    type: Tuple[Type[_T1], Type[_T2]]
) -> _ValidatorType[Union[_T1, _T2]]: ...
@overload
def instance_of(
    type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
@overload
def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
def provides(interface: Any) -> _ValidatorType[Any]: ...
def optional(
    validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
) -> _ValidatorType[Optional[_T]]: ...
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
def matches_re(
    regex: Union[Pattern[AnyStr], AnyStr],
    flags: int = ...,
    func: Optional[
        Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
    ] = ...,
) -> _ValidatorType[AnyStr]: ...
def deep_iterable(
    member_validator: _ValidatorArgType[_T],
    iterable_validator: Optional[_ValidatorType[_I]] = ...,
) -> _ValidatorType[_I]: ...
def deep_mapping(
    key_validator: _ValidatorType[_K],
    value_validator: _ValidatorType[_V],
    mapping_validator: Optional[_ValidatorType[_M]] = ...,
) -> _ValidatorType[_M]: ...
def is_callable() -> _ValidatorType[_T]: ...
def lt(val: _T) -> _ValidatorType[_T]: ...
def le(val: _T) -> _ValidatorType[_T]: ...
def ge(val: _T) -> _ValidatorType[_T]: ...
def gt(val: _T) -> _ValidatorType[_T]: ...
def max_len(length: int) -> _ValidatorType[_T]: ...
def min_len(length: int) -> _ValidatorType[_T]: ...
@@ -1,6 +1,6 @@
The MIT License (MIT)

Copyright (c) 2015 Hynek Schlawack
Copyright (c) 2015 Hynek Schlawack and the attrs contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
lib/spack/external/_vendoring/attrs/__init__.py (vendored, new file, 70 lines)
@@ -0,0 +1,70 @@
# SPDX-License-Identifier: MIT

from attr import (
    NOTHING,
    Attribute,
    Factory,
    __author__,
    __copyright__,
    __description__,
    __doc__,
    __email__,
    __license__,
    __title__,
    __url__,
    __version__,
    __version_info__,
    assoc,
    cmp_using,
    define,
    evolve,
    field,
    fields,
    fields_dict,
    frozen,
    has,
    make_class,
    mutable,
    resolve_types,
    validate,
)
from attr._next_gen import asdict, astuple

from . import converters, exceptions, filters, setters, validators


__all__ = [
    "__author__",
    "__copyright__",
    "__description__",
    "__doc__",
    "__email__",
    "__license__",
    "__title__",
    "__url__",
    "__version__",
    "__version_info__",
    "asdict",
    "assoc",
    "astuple",
    "Attribute",
    "cmp_using",
    "converters",
    "define",
    "evolve",
    "exceptions",
    "Factory",
    "field",
    "fields_dict",
    "fields",
    "filters",
    "frozen",
    "has",
    "make_class",
    "mutable",
    "NOTHING",
    "resolve_types",
    "setters",
    "validate",
    "validators",
]
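The new attrs package is a thin re-export of attr with the modern API as the default surface; a minimal sketch of what vendored callers can now write (class and field names illustrative):

    import attrs

    @attrs.define
    class Spec:
        name: str
        variants: tuple = attrs.field(factory=tuple)

    s = attrs.evolve(Spec("zlib"), variants=("+shared",))
    attrs.asdict(s)  # e.g. {'name': 'zlib', 'variants': ('+shared',)}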
lib/spack/external/_vendoring/attrs/__init__.pyi (vendored, new file, 66 lines)
@@ -0,0 +1,66 @@
from typing import (
    Any,
    Callable,
    Dict,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Type,
)

# Because we need to type our own stuff, we have to make everything from
# attr explicitly public too.
from attr import __author__ as __author__
from attr import __copyright__ as __copyright__
from attr import __description__ as __description__
from attr import __email__ as __email__
from attr import __license__ as __license__
from attr import __title__ as __title__
from attr import __url__ as __url__
from attr import __version__ as __version__
from attr import __version_info__ as __version_info__
from attr import _FilterType
from attr import assoc as assoc
from attr import Attribute as Attribute
from attr import cmp_using as cmp_using
from attr import converters as converters
from attr import define as define
from attr import evolve as evolve
from attr import exceptions as exceptions
from attr import Factory as Factory
from attr import field as field
from attr import fields as fields
from attr import fields_dict as fields_dict
from attr import filters as filters
from attr import frozen as frozen
from attr import has as has
from attr import make_class as make_class
from attr import mutable as mutable
from attr import NOTHING as NOTHING
from attr import resolve_types as resolve_types
from attr import setters as setters
from attr import validate as validate
from attr import validators as validators

# TODO: see definition of attr.asdict/astuple
def asdict(
    inst: Any,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    dict_factory: Type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Optional[
        Callable[[type, Attribute[Any], Any], Any]
    ] = ...,
    tuple_keys: bool = ...,
) -> Dict[str, Any]: ...

# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: Any,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    tuple_factory: Type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...
lib/spack/external/_vendoring/attrs/converters.py (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
# SPDX-License-Identifier: MIT

from attr.converters import *  # noqa

lib/spack/external/_vendoring/attrs/exceptions.py (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
# SPDX-License-Identifier: MIT

from attr.exceptions import *  # noqa

lib/spack/external/_vendoring/attrs/filters.py (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
# SPDX-License-Identifier: MIT

from attr.filters import *  # noqa

lib/spack/external/_vendoring/attrs/py.typed (vendored, new empty file)

lib/spack/external/_vendoring/attrs/setters.py (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
# SPDX-License-Identifier: MIT

from attr.setters import *  # noqa

lib/spack/external/_vendoring/attrs/validators.py (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
# SPDX-License-Identifier: MIT

from attr.validators import *  # noqa
lib/spack/external/_vendoring/distro/LICENSE (vendored, new file, 202 lines)
@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright {yyyy} {name of copyright owner}

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
lib/spack/external/_vendoring/distro/__init__.py (vendored, new file, 54 lines)
@@ -0,0 +1,54 @@
from .distro import (
    NORMALIZED_DISTRO_ID,
    NORMALIZED_LSB_ID,
    NORMALIZED_OS_ID,
    LinuxDistribution,
    __version__,
    build_number,
    codename,
    distro_release_attr,
    distro_release_info,
    id,
    info,
    like,
    linux_distribution,
    lsb_release_attr,
    lsb_release_info,
    major_version,
    minor_version,
    name,
    os_release_attr,
    os_release_info,
    uname_attr,
    uname_info,
    version,
    version_parts,
)

__all__ = [
    "NORMALIZED_DISTRO_ID",
    "NORMALIZED_LSB_ID",
    "NORMALIZED_OS_ID",
    "LinuxDistribution",
    "build_number",
    "codename",
    "distro_release_attr",
    "distro_release_info",
    "id",
    "info",
    "like",
    "linux_distribution",
    "lsb_release_attr",
    "lsb_release_info",
    "major_version",
    "minor_version",
    "name",
    "os_release_attr",
    "os_release_info",
    "uname_attr",
    "uname_info",
    "version",
    "version_parts",
]

__version__ = __version__
lib/spack/external/_vendoring/distro/__main__.py (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
from .distro import main

if __name__ == "__main__":
    main()
@@ -1,3 +1,4 @@
#!/usr/bin/env python
# Copyright 2015,2016,2017 Nir Cohen
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -36,40 +37,39 @@
import subprocess
import sys
import warnings
from typing import (
    Any,
    Callable,
    Dict,
    Iterable,
    Optional,
    Sequence,
    TextIO,
    Tuple,
    Type,
)

__version__ = "1.6.0"
try:
    from typing import TypedDict
except ImportError:
    # Python 3.7
    TypedDict = dict

# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2
# support, can use typing.TYPE_CHECKING instead. See:
# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING
if False:  # pragma: nocover
    from typing import (
        Any,
        Callable,
        Dict,
        Iterable,
        Optional,
        Sequence,
        TextIO,
        Tuple,
        Type,
        TypedDict,
        Union,
    )
__version__ = "1.8.0"

VersionDict = TypedDict(
    "VersionDict", {"major": str, "minor": str, "build_number": str}
)
InfoDict = TypedDict(
    "InfoDict",
    {
        "id": str,
        "version": str,
        "version_parts": VersionDict,
        "like": str,
        "codename": str,
    },
)

class VersionDict(TypedDict):
    major: str
    minor: str
    build_number: str


class InfoDict(TypedDict):
    id: str
    version: str
    version_parts: VersionDict
    like: str
    codename: str


_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
@@ -85,6 +85,7 @@
#: * Value: Normalized value.
NORMALIZED_OS_ID = {
    "ol": "oracle",  # Oracle Linux
    "opensuse-leap": "opensuse",  # Newer versions of OpenSuSE report as opensuse-leap
}

#: Translation table for normalizing the "Distributor ID" attribute returned by
@@ -121,6 +122,26 @@
# Pattern for base file name of distro release file
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")

# Base file names to be looked up for if _UNIXCONFDIR is not readable.
_DISTRO_RELEASE_BASENAMES = [
    "SuSE-release",
    "arch-release",
    "base-release",
    "centos-release",
    "fedora-release",
    "gentoo-release",
    "mageia-release",
    "mandrake-release",
    "mandriva-release",
    "mandrivalinux-release",
    "manjaro-release",
    "oracle-release",
    "redhat-release",
    "rocky-release",
    "sl-release",
    "slackware-version",
]

# Base file names to be ignored when searching for distro release file
_DISTRO_RELEASE_IGNORE_BASENAMES = (
    "debian_version",
@@ -133,8 +154,7 @@
)

def linux_distribution(full_distribution_name=True):
    # type: (bool) -> Tuple[str, str, str]
def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]:
    """
    .. deprecated:: 1.6.0

@@ -151,7 +171,8 @@ def linux_distribution(full_distribution_name=True):

    * ``version``: The result of :func:`distro.version`.

    * ``codename``: The result of :func:`distro.codename`.
    * ``codename``: The extra item (usually in parentheses) after the
      os-release version number, or the result of :func:`distro.codename`.

    The interface of this function is compatible with the original
    :py:func:`platform.linux_distribution` function, supporting a subset of
@@ -176,8 +197,7 @@ def linux_distribution(full_distribution_name=True):
    return _distro.linux_distribution(full_distribution_name)


def id():
    # type: () -> str
def id() -> str:
    """
    Return the distro ID of the current distribution, as a
    machine-readable string.
@@ -198,8 +218,9 @@ def id():
    "fedora"        Fedora
    "sles"          SUSE Linux Enterprise Server
    "opensuse"      openSUSE
    "amazon"        Amazon Linux
    "amzn"          Amazon Linux
    "arch"          Arch Linux
    "buildroot"     Buildroot
    "cloudlinux"    CloudLinux OS
    "exherbo"       Exherbo Linux
    "gentoo"        Gentoo Linux
@@ -219,6 +240,9 @@ def id():
    "netbsd"        NetBSD
    "freebsd"       FreeBSD
    "midnightbsd"   MidnightBSD
    "rocky"         Rocky Linux
    "aix"           AIX
    "guix"          Guix System
    ============== =========================================

    If you have a need to get distros for reliable IDs added into this set,
@@ -256,8 +280,7 @@ def id():
    return _distro.id()
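For orientation, the module-level API these wrappers expose; output values are illustrative:

    import distro

    distro.id()                # e.g. "ubuntu"
    distro.name(pretty=True)   # e.g. "Ubuntu 22.04.1 LTS"
    distro.version(best=True)  # most precise version available
    major, minor, build = distro.version_parts(best=True)
    distro.info()              # InfoDict: id, version, version_parts, like, codename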
def name(pretty=False):
    # type: (bool) -> str
def name(pretty: bool = False) -> str:
    """
    Return the name of the current OS distribution, as a human-readable
    string.
@@ -296,8 +319,7 @@ def name(pretty=False):
    return _distro.name(pretty)


def version(pretty=False, best=False):
    # type: (bool, bool) -> str
def version(pretty: bool = False, best: bool = False) -> str:
    """
    Return the version of the current OS distribution, as a human-readable
    string.
@@ -313,6 +335,10 @@ def version(pretty=False, best=False):
    sources in a fixed priority order does not always yield the most precise
    version (e.g. for Debian 8.2, or CentOS 7.1).

    Some other distributions may not provide this kind of information. In
    these cases, an empty string would be returned. This behavior can be
    observed with rolling-release distributions (e.g. Arch Linux).

    The *best* parameter can be used to control the approach for the returned
    version:

@@ -341,8 +367,7 @@ def version(pretty=False, best=False):
    return _distro.version(pretty, best)
def version_parts(best=False):
    # type: (bool) -> Tuple[str, str, str]
def version_parts(best: bool = False) -> Tuple[str, str, str]:
    """
    Return the version of the current OS distribution as a tuple
    ``(major, minor, build_number)`` with items as follows:
@@ -359,8 +384,7 @@ def version_parts(best=False):
    return _distro.version_parts(best)


def major_version(best=False):
    # type: (bool) -> str
def major_version(best: bool = False) -> str:
    """
    Return the major version of the current OS distribution, as a string,
    if provided.
@@ -373,8 +397,7 @@ def major_version(best=False):
    return _distro.major_version(best)


def minor_version(best=False):
    # type: (bool) -> str
def minor_version(best: bool = False) -> str:
    """
    Return the minor version of the current OS distribution, as a string,
    if provided.
@@ -387,8 +410,7 @@ def minor_version(best=False):
    return _distro.minor_version(best)


def build_number(best=False):
    # type: (bool) -> str
def build_number(best: bool = False) -> str:
    """
    Return the build number of the current OS distribution, as a string,
    if provided.
@@ -401,8 +423,7 @@ def build_number(best=False):
    return _distro.build_number(best)


def like():
    # type: () -> str
def like() -> str:
    """
    Return a space-separated list of distro IDs of distributions that are
    closely related to the current OS distribution in regards to packaging
@@ -419,8 +440,7 @@ def like():
    return _distro.like()


def codename():
    # type: () -> str
def codename() -> str:
    """
    Return the codename for the release of the current OS distribution,
    as a string.
@@ -444,8 +464,7 @@ def codename():
    return _distro.codename()


def info(pretty=False, best=False):
    # type: (bool, bool) -> InfoDict
def info(pretty: bool = False, best: bool = False) -> InfoDict:
    """
    Return certain machine-readable information items about the current OS
    distribution in a dictionary, as shown in the following example:
@@ -489,8 +508,7 @@ def info(pretty=False, best=False):
    return _distro.info(pretty, best)


def os_release_info():
    # type: () -> Dict[str, str]
def os_release_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the os-release file data source of the current OS distribution.
@@ -500,8 +518,7 @@ def os_release_info():
    return _distro.os_release_info()


def lsb_release_info():
    # type: () -> Dict[str, str]
def lsb_release_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the lsb_release command data source of the current OS distribution.
@@ -512,8 +529,7 @@ def lsb_release_info():
    return _distro.lsb_release_info()


def distro_release_info():
    # type: () -> Dict[str, str]
def distro_release_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the distro release file data source of the current OS distribution.
@@ -523,8 +539,7 @@ def distro_release_info():
    return _distro.distro_release_info()


def uname_info():
    # type: () -> Dict[str, str]
def uname_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the distro release file data source of the current OS distribution.
@@ -532,8 +547,7 @@ def uname_info():
    return _distro.uname_info()


def os_release_attr(attribute):
    # type: (str) -> str
def os_release_attr(attribute: str) -> str:
    """
    Return a single named information item from the os-release file data source
    of the current OS distribution.
@@ -552,8 +566,7 @@ def os_release_attr(attribute):
    return _distro.os_release_attr(attribute)


def lsb_release_attr(attribute):
    # type: (str) -> str
def lsb_release_attr(attribute: str) -> str:
    """
    Return a single named information item from the lsb_release command output
    data source of the current OS distribution.
@@ -573,8 +586,7 @@ def lsb_release_attr(attribute):
    return _distro.lsb_release_attr(attribute)


def distro_release_attr(attribute):
    # type: (str) -> str
def distro_release_attr(attribute: str) -> str:
    """
    Return a single named information item from the distro release file
    data source of the current OS distribution.
@@ -593,8 +605,7 @@ def distro_release_attr(attribute):
    return _distro.distro_release_attr(attribute)


def uname_attr(attribute):
    # type: (str) -> str
def uname_attr(attribute: str) -> str:
    """
    Return a single named information item from the distro release file
    data source of the current OS distribution.
@@ -615,25 +626,23 @@ def uname_attr(attribute):
    from functools import cached_property
except ImportError:
    # Python < 3.8
    class cached_property(object):  # type: ignore
    class cached_property:  # type: ignore
        """A version of @property which caches the value. On access, it calls the
        underlying function and sets the value in `__dict__` so future accesses
        will not re-call the property.
        """

        def __init__(self, f):
            # type: (Callable[[Any], Any]) -> None
        def __init__(self, f: Callable[[Any], Any]) -> None:
            self._fname = f.__name__
            self._f = f

        def __get__(self, obj, owner):
            # type: (Any, Type[Any]) -> Any
            assert obj is not None, "call {} on an instance".format(self._fname)
        def __get__(self, obj: Any, owner: Type[Any]) -> Any:
            assert obj is not None, f"call {self._fname} on an instance"
            ret = obj.__dict__[self._fname] = self._f(obj)
            return ret
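The fallback works because it is a non-data descriptor: the first access writes the computed value into the instance __dict__, which then shadows the descriptor on later lookups. A minimal illustration, assuming the fallback class above:

    class Demo:
        @cached_property
        def answer(self):
            print("computed once")
            return 42

    d = Demo()
    d.answer  # prints "computed once"; stores 42 in d.__dict__["answer"]
    d.answer  # served from __dict__; __get__ is not called again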
class LinuxDistribution(object):
class LinuxDistribution:
    """
    Provides information about an OS distribution.

@@ -653,13 +662,13 @@ class LinuxDistribution(object):

    def __init__(
        self,
        include_lsb=True,
        os_release_file="",
        distro_release_file="",
        include_uname=True,
        root_dir=None,
    ):
        # type: (bool, str, str, bool, Optional[str]) -> None
        include_lsb: Optional[bool] = None,
        os_release_file: str = "",
        distro_release_file: str = "",
        include_uname: Optional[bool] = None,
        root_dir: Optional[str] = None,
        include_oslevel: Optional[bool] = None,
    ) -> None:
        """
        The initialization method of this class gathers information from the
        available data sources, and stores that in private instance attributes.
@@ -699,7 +708,13 @@ def __init__(
          be empty.

        * ``root_dir`` (string): The absolute path to the root directory to use
          to find distro-related information files.
          to find distro-related information files. Note that ``include_*``
          parameters must not be enabled in combination with ``root_dir``.

        * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command
          output is included as a data source. If the oslevel command is not
          available in the program execution path the data source will be
          empty.

        Public instance attributes:

@@ -718,14 +733,21 @@ def __init__(
          parameter. This controls whether the uname information will
          be loaded.

        * ``include_oslevel`` (bool): The result of the ``include_oslevel``
          parameter. This controls whether (AIX) oslevel information will be
          loaded.

        * ``root_dir`` (string): The result of the ``root_dir`` parameter.
          The absolute path to the root directory to use to find distro-related
          information files.

        Raises:

        * :py:exc:`IOError`: Some I/O issue with an os-release file or distro
          release file.
        * :py:exc:`ValueError`: Initialization parameters combination is not
          supported.

        * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
          some issue (other than not being available in the program execution
          path).
        * :py:exc:`OSError`: Some I/O issue with an os-release file or distro
          release file.

        * :py:exc:`UnicodeError`: A data source has unexpected characters or
          uses an unexpected encoding.
@@ -754,11 +776,24 @@ def __init__(
            self.os_release_file = usr_lib_os_release_file

        self.distro_release_file = distro_release_file or ""  # updated later
        self.include_lsb = include_lsb
        self.include_uname = include_uname

    def __repr__(self):
        # type: () -> str
        is_root_dir_defined = root_dir is not None
        if is_root_dir_defined and (include_lsb or include_uname or include_oslevel):
            raise ValueError(
                "Including subprocess data sources from specific root_dir is disallowed"
                " to prevent false information"
            )
        self.include_lsb = (
            include_lsb if include_lsb is not None else not is_root_dir_defined
        )
        self.include_uname = (
            include_uname if include_uname is not None else not is_root_dir_defined
        )
        self.include_oslevel = (
            include_oslevel if include_oslevel is not None else not is_root_dir_defined
        )

    def __repr__(self) -> str:
        """Return repr of all info"""
        return (
            "LinuxDistribution("
@@ -766,14 +801,18 @@ def __repr__(self):
            "distro_release_file={self.distro_release_file!r}, "
            "include_lsb={self.include_lsb!r}, "
            "include_uname={self.include_uname!r}, "
            "include_oslevel={self.include_oslevel!r}, "
            "root_dir={self.root_dir!r}, "
            "_os_release_info={self._os_release_info!r}, "
            "_lsb_release_info={self._lsb_release_info!r}, "
            "_distro_release_info={self._distro_release_info!r}, "
            "_uname_info={self._uname_info!r})".format(self=self)
            "_uname_info={self._uname_info!r}, "
            "_oslevel_info={self._oslevel_info!r})".format(self=self)
        )
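The new guard makes root_dir probing subprocess-free by construction; a hedged sketch (the path is illustrative):

    from distro import LinuxDistribution

    # include_lsb/include_uname/include_oslevel default to False when
    # root_dir is given, so no subprocesses can leak host information.
    dist = LinuxDistribution(root_dir="/mnt/image")
    dist.id(), dist.version()

    # LinuxDistribution(root_dir="/mnt/image", include_lsb=True)  # ValueError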
    def linux_distribution(self, full_distribution_name=True):
        # type: (bool) -> Tuple[str, str, str]
    def linux_distribution(
        self, full_distribution_name: bool = True
    ) -> Tuple[str, str, str]:
        """
        Return information about the OS distribution that is compatible
        with Python's :func:`platform.linux_distribution`, supporting a subset
@@ -784,18 +823,16 @@ def linux_distribution(self, full_distribution_name=True):
        return (
            self.name() if full_distribution_name else self.id(),
            self.version(),
            self.codename(),
            self._os_release_info.get("release_codename") or self.codename(),
        )

    def id(self):
        # type: () -> str
    def id(self) -> str:
        """Return the distro ID of the OS distribution, as a string.

        For details, see :func:`distro.id`.
        """

        def normalize(distro_id, table):
            # type: (str, Dict[str, str]) -> str
        def normalize(distro_id: str, table: Dict[str, str]) -> str:
            distro_id = distro_id.lower().replace(" ", "_")
            return table.get(distro_id, distro_id)
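The normalization step is small enough to trace by hand; table entries below are illustrative:

    table = {"redhatenterpriseserver": "rhel"}

    def normalize(distro_id: str, table: dict) -> str:
        distro_id = distro_id.lower().replace(" ", "_")
        return table.get(distro_id, distro_id)

    normalize("RedHatEnterpriseServer", table)  # -> "rhel"
    normalize("Arch Linux", {})                 # -> "arch_linux"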
@@ -817,8 +854,7 @@ def normalize(distro_id, table):

        return ""

    def name(self, pretty=False):
        # type: (bool) -> str
    def name(self, pretty: bool = False) -> str:
        """
        Return the name of the OS distribution, as a string.

@@ -838,11 +874,10 @@ def name(self, pretty=False):
            name = self.distro_release_attr("name") or self.uname_attr("name")
            version = self.version(pretty=True)
            if version:
                name = name + " " + version
                name = f"{name} {version}"
        return name or ""

    def version(self, pretty=False, best=False):
        # type: (bool, bool) -> str
    def version(self, pretty: bool = False, best: bool = False) -> str:
        """
        Return the version of the OS distribution, as a string.

@@ -860,6 +895,12 @@ def version(self, pretty=False, best=False):
            ).get("version_id", ""),
            self.uname_attr("release"),
        ]
        if self.uname_attr("id").startswith("aix"):
            # On AIX platforms, prefer oslevel command output.
            versions.insert(0, self.oslevel_info())
        elif self.id() == "debian" or "debian" in self.like().split():
            # On Debian-like, add debian_version file content to candidates list.
            versions.append(self._debian_version)
        version = ""
        if best:
            # This algorithm uses the last version in priority order that has
@@ -875,11 +916,10 @@ def version(self, pretty=False, best=False):
                    version = v
                    break
        if pretty and version and self.codename():
            version = "{0} ({1})".format(version, self.codename())
            version = f"{version} ({self.codename()})"
        return version
    def version_parts(self, best=False):
        # type: (bool) -> Tuple[str, str, str]
    def version_parts(self, best: bool = False) -> Tuple[str, str, str]:
        """
        Return the version of the OS distribution, as a tuple of version
        numbers.
@@ -895,8 +935,7 @@ def version_parts(self, best=False):
            return major, minor or "", build_number or ""
        return "", "", ""

    def major_version(self, best=False):
        # type: (bool) -> str
    def major_version(self, best: bool = False) -> str:
        """
        Return the major version number of the current distribution.

@@ -904,8 +943,7 @@ def major_version(self, best=False):
        """
        return self.version_parts(best)[0]

    def minor_version(self, best=False):
        # type: (bool) -> str
    def minor_version(self, best: bool = False) -> str:
        """
        Return the minor version number of the current distribution.

@@ -913,8 +951,7 @@ def minor_version(self, best=False):
        """
        return self.version_parts(best)[1]

    def build_number(self, best=False):
        # type: (bool) -> str
    def build_number(self, best: bool = False) -> str:
        """
        Return the build number of the current distribution.

@@ -922,8 +959,7 @@ def build_number(self, best=False):
        """
        return self.version_parts(best)[2]

    def like(self):
        # type: () -> str
    def like(self) -> str:
        """
        Return the IDs of distributions that are like the OS distribution.

@@ -931,8 +967,7 @@ def like(self):
        """
        return self.os_release_attr("id_like") or ""

    def codename(self):
        # type: () -> str
    def codename(self) -> str:
        """
        Return the codename of the OS distribution.

@@ -949,8 +984,7 @@ def codename(self):
            or ""
        )

    def info(self, pretty=False, best=False):
        # type: (bool, bool) -> InfoDict
    def info(self, pretty: bool = False, best: bool = False) -> InfoDict:
        """
        Return certain machine-readable information about the OS
        distribution.
@@ -969,8 +1003,7 @@ def info(self, pretty=False, best=False):
            codename=self.codename(),
        )
    def os_release_info(self):
        # type: () -> Dict[str, str]
    def os_release_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the os-release file data source of the OS distribution.
@@ -979,8 +1012,7 @@ def os_release_info(self):
        """
        return self._os_release_info

    def lsb_release_info(self):
        # type: () -> Dict[str, str]
    def lsb_release_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the lsb_release command data source of the OS
@@ -990,8 +1022,7 @@ def lsb_release_info(self):
        """
        return self._lsb_release_info

    def distro_release_info(self):
        # type: () -> Dict[str, str]
    def distro_release_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the distro release file data source of the OS
@@ -1001,8 +1032,7 @@ def distro_release_info(self):
        """
        return self._distro_release_info

    def uname_info(self):
        # type: () -> Dict[str, str]
    def uname_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the uname command data source of the OS distribution.
@@ -1011,8 +1041,13 @@ def uname_info(self):
        """
        return self._uname_info

    def os_release_attr(self, attribute):
        # type: (str) -> str
    def oslevel_info(self) -> str:
        """
        Return AIX' oslevel command output.
        """
        return self._oslevel_info
||||
def os_release_attr(self, attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the os-release file data
|
||||
source of the OS distribution.
|
||||
@@ -1021,8 +1056,7 @@ def os_release_attr(self, attribute):
|
||||
"""
|
||||
return self._os_release_info.get(attribute, "")
|
||||
|
||||
def lsb_release_attr(self, attribute):
|
||||
# type: (str) -> str
|
||||
def lsb_release_attr(self, attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the lsb_release command
|
||||
output data source of the OS distribution.
|
||||
@@ -1031,8 +1065,7 @@ def lsb_release_attr(self, attribute):
|
||||
"""
|
||||
return self._lsb_release_info.get(attribute, "")
|
||||
|
||||
def distro_release_attr(self, attribute):
|
||||
# type: (str) -> str
|
||||
def distro_release_attr(self, attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the distro release file
|
||||
data source of the OS distribution.
|
||||
@@ -1041,8 +1074,7 @@ def distro_release_attr(self, attribute):
|
||||
"""
|
||||
return self._distro_release_info.get(attribute, "")
|
||||
|
||||
def uname_attr(self, attribute):
|
||||
# type: (str) -> str
|
||||
def uname_attr(self, attribute: str) -> str:
|
||||
"""
|
||||
Return a single named information item from the uname command
|
||||
output data source of the OS distribution.
|
||||
@@ -1052,8 +1084,7 @@ def uname_attr(self, attribute):
|
||||
return self._uname_info.get(attribute, "")
|
||||
|
||||
@cached_property
|
||||
def _os_release_info(self):
|
||||
# type: () -> Dict[str, str]
|
||||
def _os_release_info(self) -> Dict[str, str]:
|
||||
"""
|
||||
Get the information items from the specified os-release file.
|
||||
|
||||
@@ -1061,13 +1092,12 @@ def _os_release_info(self):
|
||||
A dictionary containing all information items.
|
||||
"""
|
||||
if os.path.isfile(self.os_release_file):
|
||||
with open(self.os_release_file) as release_file:
|
||||
with open(self.os_release_file, encoding="utf-8") as release_file:
|
||||
return self._parse_os_release_content(release_file)
|
||||
return {}
|
||||
|
||||
@staticmethod
|
||||
def _parse_os_release_content(lines):
|
||||
# type: (TextIO) -> Dict[str, str]
|
||||
def _parse_os_release_content(lines: TextIO) -> Dict[str, str]:
|
||||
"""
|
||||
Parse the lines of an os-release file.
|
||||
|
||||
@@ -1084,16 +1114,6 @@ def _parse_os_release_content(lines):
|
||||
lexer = shlex.shlex(lines, posix=True)
|
||||
lexer.whitespace_split = True
|
||||
|
||||
# The shlex module defines its `wordchars` variable using literals,
|
||||
# making it dependent on the encoding of the Python source file.
|
||||
# In Python 2.6 and 2.7, the shlex source file is encoded in
|
||||
# 'iso-8859-1', and the `wordchars` variable is defined as a byte
|
||||
# string. This causes a UnicodeDecodeError to be raised when the
|
||||
# parsed content is a unicode object. The following fix resolves that
|
||||
# (... but it should be fixed in shlex...):
|
||||
if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
|
||||
lexer.wordchars = lexer.wordchars.decode("iso-8859-1")
|
||||
|
||||
tokens = list(lexer)
|
||||
for token in tokens:
|
||||
# At this point, all shell-like parsing has been done (i.e.
|
||||
@@ -1102,12 +1122,17 @@ def _parse_os_release_content(lines):
|
||||
# stripped, etc.), so the tokens are now either:
|
||||
# * variable assignments: var=value
|
||||
# * commands or their arguments (not allowed in os-release)
|
||||
# Ignore any tokens that are not variable assignments
|
||||
if "=" in token:
|
||||
k, v = token.split("=", 1)
|
||||
props[k.lower()] = v
|
||||
else:
|
||||
# Ignore any tokens that are not variable assignments
|
||||
pass
|
||||
|
||||
if "version" in props:
|
||||
# extract release codename (if any) from version attribute
|
||||
match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"])
|
||||
if match:
|
||||
release_codename = match.group(1) or match.group(2)
|
||||
props["codename"] = props["release_codename"] = release_codename
|
||||
|
||||
if "version_codename" in props:
|
||||
# os-release added a version_codename field. Use that in
|
||||
@@ -1118,22 +1143,11 @@ def _parse_os_release_content(lines):
|
||||
elif "ubuntu_codename" in props:
|
||||
# Same as above but a non-standard field name used on older Ubuntus
|
||||
props["codename"] = props["ubuntu_codename"]
|
||||
elif "version" in props:
|
||||
# If there is no version_codename, parse it from the version
|
||||
match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"])
|
||||
if match:
|
||||
codename = match.group()
|
||||
codename = codename.strip("()")
|
||||
codename = codename.strip(",")
|
||||
codename = codename.strip()
|
||||
# codename appears within parentheses.
props["codename"] = codename
|
||||
|
||||
return props
|
||||
|
||||
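The shlex-based parsing above is easy to exercise in isolation; a small self-contained sketch (the sample content is made up):

import shlex

sample = 'NAME="Ubuntu"\nVERSION="22.04.3 LTS (Jammy Jellyfish)"\n'
lexer = shlex.shlex(sample, posix=True)
lexer.whitespace_split = True
props = {}
for token in lexer:
    # Keep only var=value assignments, exactly as the parser above does.
    if "=" in token:
        k, v = token.split("=", 1)
        props[k.lower()] = v
print(props["version"])  # 22.04.3 LTS (Jammy Jellyfish)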
@cached_property
def _lsb_release_info(self):
# type: () -> Dict[str, str]
def _lsb_release_info(self) -> Dict[str, str]:
"""
Get the information items from the lsb_release command output.

@@ -1142,19 +1156,17 @@ def _lsb_release_info(self):
"""
if not self.include_lsb:
return {}
with open(os.devnull, "wb") as devnull:
try:
cmd = ("lsb_release", "-a")
stdout = subprocess.check_output(cmd, stderr=devnull)
# Command not found or lsb_release returned error
except (OSError, subprocess.CalledProcessError):
return {}
try:
cmd = ("lsb_release", "-a")
stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
# Command not found or lsb_release returned error
except (OSError, subprocess.CalledProcessError):
return {}
content = self._to_str(stdout).splitlines()
return self._parse_lsb_release_content(content)
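The rewrite drops the managed open(os.devnull) handle because Python 3 ships subprocess.DEVNULL; the same pattern in isolation:

import subprocess

try:
    stdout = subprocess.check_output(("lsb_release", "-a"), stderr=subprocess.DEVNULL)
except (OSError, subprocess.CalledProcessError):
    stdout = b""  # command missing or failing is treated as "no data"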
@staticmethod
def _parse_lsb_release_content(lines):
# type: (Iterable[str]) -> Dict[str, str]
def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]:
"""
Parse the output of the lsb_release command.

@@ -1178,20 +1190,41 @@ def _parse_lsb_release_content(lines):
return props

@cached_property
def _uname_info(self):
# type: () -> Dict[str, str]
with open(os.devnull, "wb") as devnull:
try:
cmd = ("uname", "-rs")
stdout = subprocess.check_output(cmd, stderr=devnull)
except OSError:
return {}
def _uname_info(self) -> Dict[str, str]:
if not self.include_uname:
return {}
try:
cmd = ("uname", "-rs")
stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
except OSError:
return {}
content = self._to_str(stdout).splitlines()
return self._parse_uname_content(content)

@cached_property
def _oslevel_info(self) -> str:
if not self.include_oslevel:
return ""
try:
stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL)
except (OSError, subprocess.CalledProcessError):
return ""
return self._to_str(stdout).strip()

@cached_property
def _debian_version(self) -> str:
try:
with open(
os.path.join(self.etc_dir, "debian_version"), encoding="ascii"
) as fp:
return fp.readline().rstrip()
except FileNotFoundError:
return ""

@staticmethod
def _parse_uname_content(lines):
# type: (Sequence[str]) -> Dict[str, str]
def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]:
if not lines:
return {}
props = {}
match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
if match:
@@ -1208,23 +1241,12 @@ def _parse_uname_content(lines):
return props

@staticmethod
def _to_str(text):
# type: (Union[bytes, str]) -> str
def _to_str(bytestring: bytes) -> str:
encoding = sys.getfilesystemencoding()
encoding = "utf-8" if encoding == "ascii" else encoding

if sys.version_info[0] >= 3:
if isinstance(text, bytes):
return text.decode(encoding)
else:
if isinstance(text, unicode): # noqa
return text.encode(encoding)

return text
return bytestring.decode(encoding)
@cached_property
def _distro_release_info(self):
# type: () -> Dict[str, str]
def _distro_release_info(self) -> Dict[str, str]:
"""
Get the information items from the specified distro release file.

@@ -1241,14 +1263,14 @@ def _distro_release_info(self):
# file), because we want to use what was specified as best as
# possible.
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
if "name" in distro_info and "cloudlinux" in distro_info["name"].lower():
distro_info["id"] = "cloudlinux"
elif match:
distro_info["id"] = match.group(1)
return distro_info
else:
try:
basenames = os.listdir(self.etc_dir)
basenames = [
basename
for basename in os.listdir(self.etc_dir)
if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
and os.path.isfile(os.path.join(self.etc_dir, basename))
]
# We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own.
@@ -1258,41 +1280,31 @@ def _distro_release_info(self):
# sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`.
basenames = [
"SuSE-release",
"arch-release",
"base-release",
"centos-release",
"fedora-release",
"gentoo-release",
"mageia-release",
"mandrake-release",
"mandriva-release",
"mandrivalinux-release",
"manjaro-release",
"oracle-release",
"redhat-release",
"sl-release",
"slackware-version",
]
basenames = _DISTRO_RELEASE_BASENAMES
for basename in basenames:
if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
continue
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
if match:
filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
if "name" in distro_info:
# The name is always present if the pattern matches
self.distro_release_file = filepath
distro_info["id"] = match.group(1)
if "cloudlinux" in distro_info["name"].lower():
distro_info["id"] = "cloudlinux"
return distro_info
return {}
if match is None:
continue
filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
# The name is always present if the pattern matches.
if "name" not in distro_info:
continue
self.distro_release_file = filepath
break
else: # the loop didn't "break": no candidate.
return {}

def _parse_distro_release_file(self, filepath):
# type: (str) -> Dict[str, str]
if match is not None:
distro_info["id"] = match.group(1)

# CloudLinux < 7: manually enrich info with proper id.
if "cloudlinux" in distro_info.get("name", "").lower():
distro_info["id"] = "cloudlinux"

return distro_info

def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]:
"""
Parse a distro release file.

@@ -1304,19 +1316,18 @@ def _parse_distro_release_file(self, filepath):
A dictionary containing all information items.
"""
try:
with open(filepath) as fp:
with open(filepath, encoding="utf-8") as fp:
# Only parse the first line. For instance, on SLES there
# are multiple lines. We don't want them...
return self._parse_distro_release_content(fp.readline())
except (OSError, IOError):
except OSError:
# Ignore not being able to read a specific, seemingly version
# related file.
# See https://github.com/python-distro/distro/issues/162
return {}

@staticmethod
def _parse_distro_release_content(line):
# type: (str) -> Dict[str, str]
def _parse_distro_release_content(line: str) -> Dict[str, str]:
"""
Parse a line from a distro release file.

@@ -1344,8 +1355,7 @@ def _parse_distro_release_content(line):
_distro = LinuxDistribution()


def main():
# type: () -> None
def main() -> None:
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(sys.stdout))
@@ -1367,7 +1377,10 @@ def main():

if args.root_dir:
dist = LinuxDistribution(
include_lsb=False, include_uname=False, root_dir=args.root_dir
include_lsb=False,
include_uname=False,
include_oslevel=False,
root_dir=args.root_dir,
)
else:
dist = _distro
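A hypothetical use of the new constructor arguments, e.g. probing an image tree mounted somewhere other than /:

# root_dir redirects the os-release/distro-release lookups to another tree;
# "/mnt/image" is an illustrative mount point, not something from the diff.
dist = LinuxDistribution(
    include_lsb=False,
    include_uname=False,
    include_oslevel=False,
    root_dir="/mnt/image",
)
print(dist.info(best=True))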
0  lib/spack/external/_vendoring/distro/py.typed  vendored  Normal file
45  lib/spack/external/_vendoring/jinja2/__init__.py  vendored  Normal file
@@ -0,0 +1,45 @@
"""Jinja is a template engine written in pure Python. It provides a
non-XML syntax that supports inline expressions and an optional
sandboxed environment.
"""
from .bccache import BytecodeCache as BytecodeCache
from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache
from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache
from .environment import Environment as Environment
from .environment import Template as Template
from .exceptions import TemplateAssertionError as TemplateAssertionError
from .exceptions import TemplateError as TemplateError
from .exceptions import TemplateNotFound as TemplateNotFound
from .exceptions import TemplateRuntimeError as TemplateRuntimeError
from .exceptions import TemplatesNotFound as TemplatesNotFound
from .exceptions import TemplateSyntaxError as TemplateSyntaxError
from .exceptions import UndefinedError as UndefinedError
from .filters import contextfilter
from .filters import environmentfilter
from .filters import evalcontextfilter
from .loaders import BaseLoader as BaseLoader
from .loaders import ChoiceLoader as ChoiceLoader
from .loaders import DictLoader as DictLoader
from .loaders import FileSystemLoader as FileSystemLoader
from .loaders import FunctionLoader as FunctionLoader
from .loaders import ModuleLoader as ModuleLoader
from .loaders import PackageLoader as PackageLoader
from .loaders import PrefixLoader as PrefixLoader
from .runtime import ChainableUndefined as ChainableUndefined
from .runtime import DebugUndefined as DebugUndefined
from .runtime import make_logging_undefined as make_logging_undefined
from .runtime import StrictUndefined as StrictUndefined
from .runtime import Undefined as Undefined
from .utils import clear_caches as clear_caches
from .utils import contextfunction
from .utils import environmentfunction
from .utils import escape
from .utils import evalcontextfunction
from .utils import is_undefined as is_undefined
from .utils import Markup
from .utils import pass_context as pass_context
from .utils import pass_environment as pass_environment
from .utils import pass_eval_context as pass_eval_context
from .utils import select_autoescape as select_autoescape

__version__ = "3.0.3"
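A minimal smoke test of the vendored package's public surface, using only names exported above:

from jinja2 import DictLoader, Environment

env = Environment(loader=DictLoader({"hello.txt": "Hello {{ name }}!"}))
print(env.get_template("hello.txt").render(name="Spack"))  # Hello Spack!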
75  lib/spack/external/_vendoring/jinja2/async_utils.py  vendored  Normal file
@@ -0,0 +1,75 @@
import inspect
import typing as t
from functools import wraps

from .utils import _PassArg
from .utils import pass_eval_context

V = t.TypeVar("V")


def async_variant(normal_func): # type: ignore
def decorator(async_func): # type: ignore
pass_arg = _PassArg.from_obj(normal_func)
need_eval_context = pass_arg is None

if pass_arg is _PassArg.environment:

def is_async(args: t.Any) -> bool:
return t.cast(bool, args[0].is_async)

else:

def is_async(args: t.Any) -> bool:
return t.cast(bool, args[0].environment.is_async)

@wraps(normal_func)
def wrapper(*args, **kwargs): # type: ignore
b = is_async(args)

if need_eval_context:
args = args[1:]

if b:
return async_func(*args, **kwargs)

return normal_func(*args, **kwargs)

if need_eval_context:
wrapper = pass_eval_context(wrapper)

wrapper.jinja_async_variant = True
return wrapper

return decorator


_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)}


async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V":
# Avoid a costly call to isawaitable
if type(value) in _common_primitives:
return t.cast("V", value)

if inspect.isawaitable(value):
return await t.cast("t.Awaitable[V]", value)

return t.cast("V", value)


async def auto_aiter(
iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
) -> "t.AsyncIterator[V]":
if hasattr(iterable, "__aiter__"):
async for item in t.cast("t.AsyncIterable[V]", iterable):
yield item
else:
for item in t.cast("t.Iterable[V]", iterable):
yield item


async def auto_to_list(
value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
) -> t.List["V"]:
return [x async for x in auto_aiter(value)]
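These helpers let a single filter body consume both sync and async iterables; a short sketch of how they behave (assuming the module is importable as jinja2.async_utils, as in Jinja 3.x):

import asyncio

from jinja2.async_utils import auto_to_list

async def numbers():
    yield 1
    yield 2

async def demo() -> None:
    print(await auto_to_list(numbers()))  # [1, 2] from an async generator
    print(await auto_to_list([3, 4]))     # plain iterables work too

asyncio.run(demo())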
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""The optional bytecode cache system. This is useful if you have very
complex template situations and the compilation of all those templates
slows down your application too much.
@@ -8,22 +7,30 @@
"""
import errno
import fnmatch
import marshal
import os
import pickle
import stat
import sys
import tempfile
import typing as t
from hashlib import sha1
from os import listdir
from os import path
from io import BytesIO
from types import CodeType

from ._compat import BytesIO
from ._compat import marshal_dump
from ._compat import marshal_load
from ._compat import pickle
from ._compat import text_type
from .utils import open_if_exists
if t.TYPE_CHECKING:
import typing_extensions as te
from .environment import Environment

bc_version = 4
class _MemcachedClient(te.Protocol):
def get(self, key: str) -> bytes:
...

def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None:
...


bc_version = 5
# Magic bytes to identify Jinja bytecode cache files. Contains the
# Python major and minor version to avoid loading incompatible bytecode
# if a project upgrades its Python version.
@@ -34,7 +41,7 @@
)


class Bucket(object):
class Bucket:
"""Buckets are used to store the bytecode for one template. It's created
and initialized by the bytecode cache and passed to the loading functions.

@@ -43,17 +50,17 @@ class Bucket(object):
cache subclasses don't have to care about cache invalidation.
"""

def __init__(self, environment, key, checksum):
def __init__(self, environment: "Environment", key: str, checksum: str) -> None:
self.environment = environment
self.key = key
self.checksum = checksum
self.reset()

def reset(self):
def reset(self) -> None:
"""Resets the bucket (unloads the bytecode)."""
self.code = None
self.code: t.Optional[CodeType] = None

def load_bytecode(self, f):
def load_bytecode(self, f: t.BinaryIO) -> None:
"""Loads bytecode from a file or file like object."""
# make sure the magic header is correct
magic = f.read(len(bc_magic))
@@ -67,31 +74,31 @@ def load_bytecode(self, f):
return
# if marshal_load fails then we need to reload
try:
self.code = marshal_load(f)
self.code = marshal.load(f)
except (EOFError, ValueError, TypeError):
self.reset()
return

def write_bytecode(self, f):
def write_bytecode(self, f: t.BinaryIO) -> None:
"""Dump the bytecode into the file or file like object passed."""
if self.code is None:
raise TypeError("can't write empty bucket")
f.write(bc_magic)
pickle.dump(self.checksum, f, 2)
marshal_dump(self.code, f)
marshal.dump(self.code, f)

def bytecode_from_string(self, string):
"""Load bytecode from a string."""
def bytecode_from_string(self, string: bytes) -> None:
"""Load bytecode from bytes."""
self.load_bytecode(BytesIO(string))

def bytecode_to_string(self):
"""Return the bytecode as string."""
def bytecode_to_string(self) -> bytes:
"""Return the bytecode as bytes."""
out = BytesIO()
self.write_bytecode(out)
return out.getvalue()


class BytecodeCache(object):
class BytecodeCache:
"""To implement your own bytecode cache you have to subclass this class
and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
these methods are passed a :class:`~jinja2.bccache.Bucket`.
@@ -120,41 +127,48 @@ def dump_bytecode(self, bucket):
Jinja.
"""

def load_bytecode(self, bucket):
def load_bytecode(self, bucket: Bucket) -> None:
"""Subclasses have to override this method to load bytecode into a
bucket. If they are not able to find code in the cache for the
bucket, it must not do anything.
"""
raise NotImplementedError()

def dump_bytecode(self, bucket):
def dump_bytecode(self, bucket: Bucket) -> None:
"""Subclasses have to override this method to write the bytecode
from a bucket back to the cache. If it is unable to do so it must not
fail silently but raise an exception.
"""
raise NotImplementedError()

def clear(self):
def clear(self) -> None:
"""Clears the cache. This method is not used by Jinja but should be
implemented to allow applications to clear the bytecode cache used
by a particular environment.
"""

def get_cache_key(self, name, filename=None):
def get_cache_key(
self, name: str, filename: t.Optional[t.Union[str]] = None
) -> str:
"""Returns the unique hash key for this template name."""
hash = sha1(name.encode("utf-8"))

if filename is not None:
filename = "|" + filename
if isinstance(filename, text_type):
filename = filename.encode("utf-8")
hash.update(filename)
hash.update(f"|{filename}".encode())

return hash.hexdigest()

def get_source_checksum(self, source):
def get_source_checksum(self, source: str) -> str:
"""Returns a checksum for the source."""
return sha1(source.encode("utf-8")).hexdigest()

def get_bucket(self, environment, name, filename, source):
def get_bucket(
self,
environment: "Environment",
name: str,
filename: t.Optional[str],
source: str,
) -> Bucket:
"""Return a cache bucket for the given template. All arguments are
mandatory but filename may be `None`.
"""
@@ -164,7 +178,7 @@ def get_bucket(self, environment, name, filename, source):
self.load_bytecode(bucket)
return bucket

def set_bucket(self, bucket):
def set_bucket(self, bucket: Bucket) -> None:
"""Put the bucket into the cache."""
self.dump_bytecode(bucket)

@@ -187,14 +201,16 @@ class FileSystemBytecodeCache(BytecodeCache):
This bytecode cache supports clearing of the cache using the clear method.
"""

def __init__(self, directory=None, pattern="__jinja2_%s.cache"):
def __init__(
self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache"
) -> None:
if directory is None:
directory = self._get_default_cache_dir()
self.directory = directory
self.pattern = pattern

def _get_default_cache_dir(self):
def _unsafe_dir():
def _get_default_cache_dir(self) -> str:
def _unsafe_dir() -> "te.NoReturn":
raise RuntimeError(
"Cannot determine safe temp directory. You "
"need to explicitly provide one."
@@ -209,7 +225,7 @@ def _unsafe_dir():
if not hasattr(os, "getuid"):
_unsafe_dir()

dirname = "_jinja2-cache-%d" % os.getuid()
dirname = f"_jinja2-cache-{os.getuid()}"
actual_dir = os.path.join(tmpdir, dirname)

try:
@@ -240,34 +256,30 @@ def _unsafe_dir():

return actual_dir

def _get_cache_filename(self, bucket):
return path.join(self.directory, self.pattern % bucket.key)
def _get_cache_filename(self, bucket: Bucket) -> str:
return os.path.join(self.directory, self.pattern % (bucket.key,))

def load_bytecode(self, bucket):
f = open_if_exists(self._get_cache_filename(bucket), "rb")
if f is not None:
try:
def load_bytecode(self, bucket: Bucket) -> None:
filename = self._get_cache_filename(bucket)

if os.path.exists(filename):
with open(filename, "rb") as f:
bucket.load_bytecode(f)
finally:
f.close()

def dump_bytecode(self, bucket):
f = open(self._get_cache_filename(bucket), "wb")
try:
def dump_bytecode(self, bucket: Bucket) -> None:
with open(self._get_cache_filename(bucket), "wb") as f:
bucket.write_bytecode(f)
finally:
f.close()

def clear(self):
def clear(self) -> None:
# imported lazily here because google app-engine doesn't support
# write access on the file system and the function does not exist
# normally.
from os import remove

files = fnmatch.filter(listdir(self.directory), self.pattern % "*")
files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",))
for filename in files:
try:
remove(path.join(self.directory, filename))
remove(os.path.join(self.directory, filename))
except OSError:
pass
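Wiring the cache into an environment is a one-liner; a sketch (the cache directory is illustrative):

from jinja2 import Environment, FileSystemBytecodeCache

# Compiled template bytecode is now persisted across processes.
env = Environment(bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache"))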
@@ -284,7 +296,7 @@ class MemcachedBytecodeCache(BytecodeCache):
- `python-memcached <https://pypi.org/project/python-memcached/>`_

(Unfortunately the django cache interface is not compatible because it
does not support storing binary data, only unicode. You can however pass
does not support storing binary data, only text. You can however pass
the underlying cache client to the bytecode cache which is available
as `django.core.cache.cache._client`.)

@@ -319,32 +331,34 @@ class MemcachedBytecodeCache(BytecodeCache):

def __init__(
self,
client,
prefix="jinja2/bytecode/",
timeout=None,
ignore_memcache_errors=True,
client: "_MemcachedClient",
prefix: str = "jinja2/bytecode/",
timeout: t.Optional[int] = None,
ignore_memcache_errors: bool = True,
):
self.client = client
self.prefix = prefix
self.timeout = timeout
self.ignore_memcache_errors = ignore_memcache_errors

def load_bytecode(self, bucket):
def load_bytecode(self, bucket: Bucket) -> None:
try:
code = self.client.get(self.prefix + bucket.key)
except Exception:
if not self.ignore_memcache_errors:
raise
code = None
if code is not None:
else:
bucket.bytecode_from_string(code)

def dump_bytecode(self, bucket):
args = (self.prefix + bucket.key, bucket.bytecode_to_string())
if self.timeout is not None:
args += (self.timeout,)
def dump_bytecode(self, bucket: Bucket) -> None:
key = self.prefix + bucket.key
value = bucket.bytecode_to_string()

try:
self.client.set(*args)
if self.timeout is not None:
self.client.set(key, value, self.timeout)
else:
self.client.set(key, value)
except Exception:
if not self.ignore_memcache_errors:
raise
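Anything that satisfies the _MemcachedClient protocol above will do; a toy in-memory stand-in (illustrative only, not a real memcached client):

class DictClient:
    # Minimal get/set pair matching the protocol's shape.
    def __init__(self) -> None:
        self._store = {}

    def get(self, key):
        return self._store.get(key)

    def set(self, key, value, timeout=None):
        self._store[key] = value

cache = MemcachedBytecodeCache(DictClient(), prefix="jinja2/bytecode/")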
File diff suppressed because it is too large
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
#: list of lorem ipsum words used by the lipsum() helper function
LOREM_IPSUM_WORDS = u"""\
LOREM_IPSUM_WORDS = """\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
@@ -1,38 +1,38 @@
import platform
import sys
import typing as t
from types import CodeType
from types import TracebackType

from . import TemplateSyntaxError
from ._compat import PYPY
from .exceptions import TemplateSyntaxError
from .utils import internal_code
from .utils import missing

if t.TYPE_CHECKING:
from .runtime import Context

def rewrite_traceback_stack(source=None):

def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException:
"""Rewrite the current exception to replace any tracebacks from
within compiled template code with tracebacks that look like they
came from the template source.

This must be called within an ``except`` block.

:param exc_info: A :meth:`sys.exc_info` tuple. If not provided,
the current ``exc_info`` is used.
:param source: For ``TemplateSyntaxError``, the original source if
known.
:return: A :meth:`sys.exc_info` tuple that can be re-raised.
:return: The original exception with the rewritten traceback.
"""
exc_type, exc_value, tb = sys.exc_info()
_, exc_value, tb = sys.exc_info()
exc_value = t.cast(BaseException, exc_value)
tb = t.cast(TracebackType, tb)

if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated:
exc_value.translated = True
exc_value.source = source

try:
# Remove the old traceback on Python 3, otherwise the frames
# from the compiler still show up.
exc_value.with_traceback(None)
except AttributeError:
pass

# Remove the old traceback, otherwise the frames from the
# compiler still show up.
exc_value.with_traceback(None)
# Outside of runtime, so the frame isn't executing template
# code, but it still needs to point at the template.
tb = fake_traceback(
@@ -70,10 +70,12 @@ def rewrite_traceback_stack(source=None):
for tb in reversed(stack):
tb_next = tb_set_next(tb, tb_next)

return exc_type, exc_value, tb_next
return exc_value.with_traceback(tb_next)

def fake_traceback(exc_value, tb, filename, lineno):
def fake_traceback( # type: ignore
exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int
) -> TracebackType:
"""Produce a new traceback object that looks like it came from the
template source instead of the compiled code. The filename, line
number, and location name will point to the template, and the local
@@ -100,79 +102,60 @@ def fake_traceback(exc_value, tb, filename, lineno):
"__jinja_exception__": exc_value,
}
# Raise an exception at the correct line number.
code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")
code: CodeType = compile(
"\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec"
)

# Build a new code object that points to the template file and
# replaces the location with a block name.
try:
location = "template"
location = "template"

if tb is not None:
function = tb.tb_frame.f_code.co_name
if tb is not None:
function = tb.tb_frame.f_code.co_name

if function == "root":
location = "top-level template code"
elif function.startswith("block_"):
location = 'block "%s"' % function[6:]
if function == "root":
location = "top-level template code"
elif function.startswith("block_"):
location = f"block {function[6:]!r}"

# Collect arguments for the new code object. CodeType only
# accepts positional arguments, and arguments were inserted in
# new Python versions.
code_args = []

for attr in (
"argcount",
"posonlyargcount", # Python 3.8
"kwonlyargcount", # Python 3
"nlocals",
"stacksize",
"flags",
"code", # codestring
"consts", # constants
"names",
"varnames",
("filename", filename),
("name", location),
"firstlineno",
"lnotab",
"freevars",
"cellvars",
):
if isinstance(attr, tuple):
# Replace with given value.
code_args.append(attr[1])
continue

try:
# Copy original value if it exists.
code_args.append(getattr(code, "co_" + attr))
except AttributeError:
# Some arguments were added later.
continue

code = CodeType(*code_args)
except Exception:
# Some environments such as Google App Engine don't support
# modifying code objects.
pass
if sys.version_info >= (3, 8):
code = code.replace(co_name=location)
else:
code = CodeType(
code.co_argcount,
code.co_kwonlyargcount,
code.co_nlocals,
code.co_stacksize,
code.co_flags,
code.co_code,
code.co_consts,
code.co_names,
code.co_varnames,
code.co_filename,
location,
code.co_firstlineno,
code.co_lnotab,
code.co_freevars,
code.co_cellvars,
)

# Execute the new code, which is guaranteed to raise, and return
# the new traceback without this frame.
try:
exec(code, globals, locals)
except BaseException:
return sys.exc_info()[2].tb_next
return sys.exc_info()[2].tb_next # type: ignore

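The compile()-with-padded-newlines trick above is easy to see in isolation: the raise lands on the requested line of the fake filename.

lineno = 5
code = compile("\n" * (lineno - 1) + "raise ValueError('boom')", "fake.html", "exec")
try:
    exec(code, {})
except ValueError as exc:
    # The frame for the exec'd code reports line 5 in "fake.html".
    print(exc.__traceback__.tb_next.tb_lineno)  # 5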
def get_template_locals(real_locals):
def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]:
"""Based on the runtime locals, get the context that would be
available at that point in the template.
"""
# Start with the current template context.
ctx = real_locals.get("context")
ctx: "t.Optional[Context]" = real_locals.get("context")

if ctx:
data = ctx.get_all().copy()
if ctx is not None:
data: t.Dict[str, t.Any] = ctx.get_all().copy()
else:
data = {}

@@ -180,7 +163,7 @@ def get_template_locals(real_locals):
# rather than pushing a context. Local variables follow the scheme
# l_depth_name. Find the highest-depth local that has a value for
# each name.
local_overrides = {}
local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {}

for name, value in real_locals.items():
if not name.startswith("l_") or value is missing:
@@ -188,8 +171,8 @@ def get_template_locals(real_locals):
continue

try:
_, depth, name = name.split("_", 2)
depth = int(depth)
_, depth_str, name = name.split("_", 2)
depth = int(depth_str)
except ValueError:
continue

@@ -210,31 +193,37 @@ def get_template_locals(real_locals):

if sys.version_info >= (3, 7):
# tb_next is directly assignable as of Python 3.7
def tb_set_next(tb, tb_next):
def tb_set_next(
tb: TracebackType, tb_next: t.Optional[TracebackType]
) -> TracebackType:
tb.tb_next = tb_next
return tb


elif PYPY:
elif platform.python_implementation() == "PyPy":
# PyPy might have special support, and won't work with ctypes.
try:
import tputil
import tputil # type: ignore
except ImportError:
# Without tproxy support, use the original traceback.
def tb_set_next(tb, tb_next):
def tb_set_next(
tb: TracebackType, tb_next: t.Optional[TracebackType]
) -> TracebackType:
return tb

else:
# With tproxy support, create a proxy around the traceback that
# returns the new tb_next.
def tb_set_next(tb, tb_next):
def controller(op):
def tb_set_next(
tb: TracebackType, tb_next: t.Optional[TracebackType]
) -> TracebackType:
def controller(op): # type: ignore
if op.opname == "__getattribute__" and op.args[0] == "tb_next":
return tb_next

return op.delegate()

return tputil.make_proxy(controller, obj=tb)
return tputil.make_proxy(controller, obj=tb) # type: ignore


else:
@@ -250,7 +239,9 @@ class _CTraceback(ctypes.Structure):
("tb_next", ctypes.py_object),
]

def tb_set_next(tb, tb_next):
def tb_set_next(
tb: TracebackType, tb_next: t.Optional[TracebackType]
) -> TracebackType:
c_tb = _CTraceback.from_address(id(tb))

# Clear out the old tb_next.
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
from ._compat import range_type
import typing as t

from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401
from .tests import TESTS as DEFAULT_TESTS # noqa: F401
from .utils import Cycler
@@ -7,6 +9,9 @@
from .utils import Joiner
from .utils import Namespace

if t.TYPE_CHECKING:
import typing_extensions as te

# defaults for the parser / lexer
BLOCK_START_STRING = "{%"
BLOCK_END_STRING = "%}"
@@ -14,17 +17,17 @@
VARIABLE_END_STRING = "}}"
COMMENT_START_STRING = "{#"
COMMENT_END_STRING = "#}"
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
LINE_STATEMENT_PREFIX: t.Optional[str] = None
LINE_COMMENT_PREFIX: t.Optional[str] = None
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = "\n"
NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n"
KEEP_TRAILING_NEWLINE = False

# default filters, tests and namespace

DEFAULT_NAMESPACE = {
"range": range_type,
"range": range,
"dict": dict,
"lipsum": generate_lorem_ipsum,
"cycler": Cycler,
@@ -33,10 +36,11 @@
}

# default policies
DEFAULT_POLICIES = {
DEFAULT_POLICIES: t.Dict[str, t.Any] = {
"compiler.ascii_str": True,
"urlize.rel": "noopener",
"urlize.target": None,
"urlize.extra_schemes": None,
"truncate.leeway": 5,
"json.dumps_function": None,
"json.dumps_kwargs": {"sort_keys": True},
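Policies remain plain dictionary entries on the environment, so callers can override them per instance; for example:

from jinja2 import Environment

env = Environment()
# Extends the default above; per-environment tweaks are the intended use.
env.policies["json.dumps_kwargs"] = {"sort_keys": True, "indent": 2}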
File diff suppressed because it is too large
@@ -1,44 +1,20 @@
# -*- coding: utf-8 -*-
from ._compat import imap
from ._compat import implements_to_string
from ._compat import PY2
from ._compat import text_type
import typing as t

if t.TYPE_CHECKING:
from .runtime import Undefined


class TemplateError(Exception):
"""Baseclass for all template errors."""

if PY2:
def __init__(self, message: t.Optional[str] = None) -> None:
super().__init__(message)

def __init__(self, message=None):
if message is not None:
message = text_type(message).encode("utf-8")
Exception.__init__(self, message)

@property
def message(self):
if self.args:
message = self.args[0]
if message is not None:
return message.decode("utf-8", "replace")

def __unicode__(self):
return self.message or u""

else:

def __init__(self, message=None):
Exception.__init__(self, message)

@property
def message(self):
if self.args:
message = self.args[0]
if message is not None:
return message
@property
def message(self) -> t.Optional[str]:
return self.args[0] if self.args else None


@implements_to_string
class TemplateNotFound(IOError, LookupError, TemplateError):
"""Raised if a template does not exist.

@@ -47,11 +23,15 @@ class TemplateNotFound(IOError, LookupError, TemplateError):
provided, an :exc:`UndefinedError` is raised.
"""

# looks weird, but removes the warning descriptor that just
# bogusly warns us about message being deprecated
message = None
# Silence the Python warning about message being deprecated since
# it's not valid here.
message: t.Optional[str] = None

def __init__(self, name, message=None):
def __init__(
self,
name: t.Optional[t.Union[str, "Undefined"]],
message: t.Optional[str] = None,
) -> None:
IOError.__init__(self, name)

if message is None:
@@ -66,8 +46,8 @@ def __init__(self, name, message=None):
self.name = name
self.templates = [name]

def __str__(self):
return self.message
def __str__(self) -> str:
return str(self.message)


class TemplatesNotFound(TemplateNotFound):
@@ -82,7 +62,11 @@ class TemplatesNotFound(TemplateNotFound):
.. versionadded:: 2.2
"""
def __init__(self, names=(), message=None):
def __init__(
self,
names: t.Sequence[t.Union[str, "Undefined"]] = (),
message: t.Optional[str] = None,
) -> None:
if message is None:
from .runtime import Undefined

@@ -94,52 +78,57 @@ def __init__(self, names=(), message=None):
else:
parts.append(name)

message = u"none of the templates given were found: " + u", ".join(
imap(text_type, parts)
)
TemplateNotFound.__init__(self, names and names[-1] or None, message)
parts_str = ", ".join(map(str, parts))
message = f"none of the templates given were found: {parts_str}"

super().__init__(names[-1] if names else None, message)
self.templates = list(names)


@implements_to_string
class TemplateSyntaxError(TemplateError):
"""Raised to tell the user that there is a problem with the template."""

def __init__(self, message, lineno, name=None, filename=None):
TemplateError.__init__(self, message)
def __init__(
self,
message: str,
lineno: int,
name: t.Optional[str] = None,
filename: t.Optional[str] = None,
) -> None:
super().__init__(message)
self.lineno = lineno
self.name = name
self.filename = filename
self.source = None
self.source: t.Optional[str] = None

# this is set to True if the debug.translate_syntax_error
# function translated the syntax error into a new traceback
self.translated = False

def __str__(self):
def __str__(self) -> str:
# for translated errors we only return the message
if self.translated:
return self.message
return t.cast(str, self.message)

# otherwise attach some stuff
location = "line %d" % self.lineno
location = f"line {self.lineno}"
name = self.filename or self.name
if name:
location = 'File "%s", %s' % (name, location)
lines = [self.message, " " + location]
location = f'File "{name}", {location}'
lines = [t.cast(str, self.message), " " + location]

# if the source is set, add the line to the output
if self.source is not None:
try:
line = self.source.splitlines()[self.lineno - 1]
except IndexError:
line = None
if line:
pass
else:
lines.append(" " + line.strip())

return u"\n".join(lines)
return "\n".join(lines)

def __reduce__(self):
def __reduce__(self): # type: ignore
# https://bugs.python.org/issue1692335 Exceptions that take
# multiple required arguments have problems with pickling.
# Without this, raises TypeError: __init__() missing 1 required
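A short sketch of catching the error with the attributes set above:

from jinja2 import Environment, TemplateSyntaxError

try:
    Environment().from_string("{% if %}")
except TemplateSyntaxError as e:
    print(e.lineno, e.message)  # 1, followed by the parser's description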
@@ -1,53 +1,58 @@
# -*- coding: utf-8 -*-
"""Extension API for adding custom tags and behavior."""
import pprint
import re
from sys import version_info
import typing as t
import warnings

from markupsafe import Markup

from . import defaults
from . import nodes
from ._compat import iteritems
from ._compat import string_types
from ._compat import with_metaclass
from .defaults import BLOCK_END_STRING
from .defaults import BLOCK_START_STRING
from .defaults import COMMENT_END_STRING
from .defaults import COMMENT_START_STRING
from .defaults import KEEP_TRAILING_NEWLINE
from .defaults import LINE_COMMENT_PREFIX
from .defaults import LINE_STATEMENT_PREFIX
from .defaults import LSTRIP_BLOCKS
from .defaults import NEWLINE_SEQUENCE
from .defaults import TRIM_BLOCKS
from .defaults import VARIABLE_END_STRING
from .defaults import VARIABLE_START_STRING
from .environment import Environment
from .exceptions import TemplateAssertionError
from .exceptions import TemplateSyntaxError
from .nodes import ContextReference
from .runtime import concat
from .utils import contextfunction
from .runtime import concat # type: ignore
from .runtime import Context
from .runtime import Undefined
from .utils import import_string
from .utils import pass_context

# the only real useful gettext functions for a Jinja template. Note
# that ugettext must be assigned to gettext as Jinja doesn't support
# non unicode strings.
GETTEXT_FUNCTIONS = ("_", "gettext", "ngettext")
if t.TYPE_CHECKING:
import typing_extensions as te
from .lexer import Token
from .lexer import TokenStream
from .parser import Parser

class _TranslationsBasic(te.Protocol):
def gettext(self, message: str) -> str:
...

def ngettext(self, singular: str, plural: str, n: int) -> str:
pass

class _TranslationsContext(_TranslationsBasic):
def pgettext(self, context: str, message: str) -> str:
...

def npgettext(self, context: str, singular: str, plural: str, n: int) -> str:
...

_SupportedTranslations = t.Union[_TranslationsBasic, _TranslationsContext]


# I18N functions available in Jinja templates. If the I18N library
# provides ugettext, it will be assigned to gettext.
GETTEXT_FUNCTIONS: t.Tuple[str, ...] = (
"_",
"gettext",
"ngettext",
"pgettext",
"npgettext",
)
_ws_re = re.compile(r"\s*\n\s*")


class ExtensionRegistry(type):
"""Gives the extension an unique identifier."""
|
||||
|
||||
def __new__(mcs, name, bases, d):
rv = type.__new__(mcs, name, bases, d)
rv.identifier = rv.__module__ + "." + rv.__name__
return rv


class Extension(with_metaclass(ExtensionRegistry, object)):
class Extension:
"""Extensions can be used to add extra functionality to the Jinja template
system at the parser level. Custom extensions are bound to an environment
but may not store environment specific data on `self`. The reason for
@@ -66,8 +71,13 @@ class Extension(with_metaclass(ExtensionRegistry, object)):
name as includes the name of the extension (fragment cache).
"""

identifier: t.ClassVar[str]

def __init_subclass__(cls) -> None:
cls.identifier = f"{cls.__module__}.{cls.__name__}"

#: if this extension parses this is the list of tags it's listening to.
tags = set()
tags: t.Set[str] = set()

#: the priority of that extension. This is especially useful for
#: extensions that preprocess values. A lower value means higher
@@ -76,24 +86,28 @@ class Extension(with_metaclass(ExtensionRegistry, object)):
#: .. versionadded:: 2.4
priority = 100

def __init__(self, environment):
def __init__(self, environment: Environment) -> None:
self.environment = environment

def bind(self, environment):
def bind(self, environment: Environment) -> "Extension":
"""Create a copy of this extension bound to another environment."""
rv = object.__new__(self.__class__)
rv = t.cast(Extension, object.__new__(self.__class__))
rv.__dict__.update(self.__dict__)
rv.environment = environment
return rv

def preprocess(self, source, name, filename=None):
def preprocess(
self, source: str, name: t.Optional[str], filename: t.Optional[str] = None
) -> str:
"""This method is called before the actual lexing and can be used to
preprocess the source. The `filename` is optional. The return value
must be the preprocessed source.
"""
return source

def filter_stream(self, stream):
def filter_stream(
self, stream: "TokenStream"
) -> t.Union["TokenStream", t.Iterable["Token"]]:
"""It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
to filter tokens returned. This method has to return an iterable of
:class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
@@ -101,7 +115,7 @@ def filter_stream(self, stream):
"""
return stream

def parse(self, parser):
def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
"""If any of the :attr:`tags` matched this method is called with the
parser as first argument. The token the parser stream is pointing at
is the name token that matched. This method has to return one or a
@@ -109,7 +123,9 @@ def parse(self, parser):
"""
raise NotImplementedError()

def attr(self, name, lineno=None):
def attr(
self, name: str, lineno: t.Optional[int] = None
) -> nodes.ExtensionAttribute:
"""Return an attribute node for the current extension. This is useful
to pass constants on extensions to generated template code.

@@ -120,8 +136,14 @@ def attr(self, name, lineno=None):
return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)

def call_method(
self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None
):
self,
name: str,
args: t.Optional[t.List[nodes.Expr]] = None,
kwargs: t.Optional[t.List[nodes.Keyword]] = None,
dyn_args: t.Optional[nodes.Expr] = None,
dyn_kwargs: t.Optional[nodes.Expr] = None,
lineno: t.Optional[int] = None,
) -> nodes.Call:
"""Call a method of the extension. This is a shortcut for
:meth:`attr` + :class:`jinja2.nodes.Call`.
"""
@@ -139,38 +161,88 @@ def call_method(
)

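A toy subclass showing the hook documented above (preprocess runs before lexing; illustrative only):

from jinja2 import Environment
from jinja2.ext import Extension

class ShoutExtension(Extension):
    # Upper-cases the template source before it is lexed.
    def preprocess(self, source, name, filename=None):
        return source.upper()

env = Environment(extensions=[ShoutExtension])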
@contextfunction
def _gettext_alias(__context, *args, **kwargs):
@pass_context
def _gettext_alias(
__context: Context, *args: t.Any, **kwargs: t.Any
) -> t.Union[t.Any, Undefined]:
return __context.call(__context.resolve("gettext"), *args, **kwargs)


def _make_new_gettext(func):
@contextfunction
def gettext(__context, __string, **variables):
def _make_new_gettext(func: t.Callable[[str], str]) -> t.Callable[..., str]:
@pass_context
def gettext(__context: Context, __string: str, **variables: t.Any) -> str:
rv = __context.call(func, __string)
if __context.eval_ctx.autoescape:
rv = Markup(rv)
# Always treat as a format string, even if there are no
# variables. This makes translation strings more consistent
# and predictable. This requires escaping
return rv % variables
return rv % variables # type: ignore

return gettext


def _make_new_ngettext(func):
@contextfunction
def ngettext(__context, __singular, __plural, __num, **variables):
def _make_new_ngettext(func: t.Callable[[str, str, int], str]) -> t.Callable[..., str]:
@pass_context
def ngettext(
__context: Context,
__singular: str,
__plural: str,
__num: int,
**variables: t.Any,
) -> str:
variables.setdefault("num", __num)
rv = __context.call(func, __singular, __plural, __num)
if __context.eval_ctx.autoescape:
rv = Markup(rv)
# Always treat as a format string, see gettext comment above.
return rv % variables
return rv % variables # type: ignore

return ngettext


def _make_new_pgettext(func: t.Callable[[str, str], str]) -> t.Callable[..., str]:
@pass_context
def pgettext(
__context: Context, __string_ctx: str, __string: str, **variables: t.Any
) -> str:
variables.setdefault("context", __string_ctx)
rv = __context.call(func, __string_ctx, __string)

if __context.eval_ctx.autoescape:
rv = Markup(rv)

# Always treat as a format string, see gettext comment above.
return rv % variables # type: ignore

return pgettext


def _make_new_npgettext(
func: t.Callable[[str, str, str, int], str]
) -> t.Callable[..., str]:
@pass_context
def npgettext(
__context: Context,
__string_ctx: str,
__singular: str,
__plural: str,
__num: int,
**variables: t.Any,
) -> str:
variables.setdefault("context", __string_ctx)
variables.setdefault("num", __num)
rv = __context.call(func, __string_ctx, __singular, __plural, __num)

if __context.eval_ctx.autoescape:
rv = Markup(rv)

# Always treat as a format string, see gettext comment above.
return rv % variables # type: ignore

return npgettext

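Tying the new gettext plumbing together, a null-translation smoke test (newstyle gettext %-formats its keyword variables; install_null_translations is wired up by the extension below):

from jinja2 import Environment

env = Environment(extensions=["jinja2.ext.i18n"])
env.install_null_translations(newstyle=True)
print(env.from_string("{{ _('Hello %(name)s', name='Spack') }}").render())
# -> Hello Spack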
class InternationalizationExtension(Extension):
"""This extension adds gettext support to Jinja."""

@@ -183,8 +255,8 @@ class InternationalizationExtension(Extension):
# something is called twice here. One time for the gettext value and
# the other time for the n-parameter of the ngettext function.

def __init__(self, environment):
Extension.__init__(self, environment)
def __init__(self, environment: Environment) -> None:
super().__init__(environment)
environment.globals["_"] = _gettext_alias
environment.extend(
install_gettext_translations=self._install,
@@ -195,38 +267,92 @@ def __init__(self, environment):
newstyle_gettext=False,
)

def _install(self, translations, newstyle=None):
def _install(
self, translations: "_SupportedTranslations", newstyle: t.Optional[bool] = None
) -> None:
# ugettext and ungettext are preferred in case the I18N library
# is providing compatibility with older Python versions.
gettext = getattr(translations, "ugettext", None)
if gettext is None:
gettext = translations.gettext
ngettext = getattr(translations, "ungettext", None)
if ngettext is None:
ngettext = translations.ngettext
self._install_callables(gettext, ngettext, newstyle)

def _install_null(self, newstyle=None):
pgettext = getattr(translations, "pgettext", None)
npgettext = getattr(translations, "npgettext", None)
self._install_callables(
lambda x: x, lambda s, p, n: (n != 1 and (p,) or (s,))[0], newstyle
gettext, ngettext, newstyle=newstyle, pgettext=pgettext, npgettext=npgettext
)

def _install_callables(self, gettext, ngettext, newstyle=None):
def _install_null(self, newstyle: t.Optional[bool] = None) -> None:
import gettext

translations = gettext.NullTranslations()

if hasattr(translations, "pgettext"):
# Python < 3.8
pgettext = translations.pgettext # type: ignore
else:

def pgettext(c: str, s: str) -> str:
return s

if hasattr(translations, "npgettext"):
npgettext = translations.npgettext # type: ignore
else:

def npgettext(c: str, s: str, p: str, n: int) -> str:
return s if n == 1 else p

self._install_callables(
gettext=translations.gettext,
ngettext=translations.ngettext,
newstyle=newstyle,
pgettext=pgettext,
npgettext=npgettext,
)

def _install_callables(
self,
gettext: t.Callable[[str], str],
ngettext: t.Callable[[str, str, int], str],
newstyle: t.Optional[bool] = None,
pgettext: t.Optional[t.Callable[[str, str], str]] = None,
npgettext: t.Optional[t.Callable[[str, str, str, int], str]] = None,
) -> None:
if newstyle is not None:
self.environment.newstyle_gettext = newstyle
if self.environment.newstyle_gettext:
self.environment.newstyle_gettext = newstyle # type: ignore
if self.environment.newstyle_gettext: # type: ignore
gettext = _make_new_gettext(gettext)
ngettext = _make_new_ngettext(ngettext)
self.environment.globals.update(gettext=gettext, ngettext=ngettext)

def _uninstall(self, translations):
for key in "gettext", "ngettext":
if pgettext is not None:
pgettext = _make_new_pgettext(pgettext)

if npgettext is not None:
||||
npgettext = _make_new_npgettext(npgettext)
|
||||
|
||||
self.environment.globals.update(
|
||||
gettext=gettext, ngettext=ngettext, pgettext=pgettext, npgettext=npgettext
|
||||
)
|
||||
|
||||
def _uninstall(self, translations: "_SupportedTranslations") -> None:
|
||||
for key in ("gettext", "ngettext", "pgettext", "npgettext"):
|
||||
self.environment.globals.pop(key, None)
|
||||
|
||||
def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
|
||||
if isinstance(source, string_types):
|
||||
def _extract(
|
||||
self,
|
||||
source: t.Union[str, nodes.Template],
|
||||
gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
|
||||
) -> t.Iterator[
|
||||
t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
|
||||
]:
|
||||
if isinstance(source, str):
|
||||
source = self.environment.parse(source)
|
||||
return extract_from_ast(source, gettext_functions)
|
||||
|
||||
def parse(self, parser):
|
||||
def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]:
|
||||
"""Parse a translatable tag."""
|
||||
lineno = next(parser.stream).lineno
|
||||
num_called_num = False
|
||||
@@ -234,9 +360,9 @@ def parse(self, parser):
|
||||
# find all the variables referenced. Additionally a variable can be
|
||||
# defined in the body of the trans block too, but this is checked at
|
||||
# a later state.
|
||||
plural_expr = None
|
||||
plural_expr_assignment = None
|
||||
variables = {}
|
||||
plural_expr: t.Optional[nodes.Expr] = None
|
||||
plural_expr_assignment: t.Optional[nodes.Assign] = None
|
||||
variables: t.Dict[str, nodes.Expr] = {}
|
||||
trimmed = None
|
||||
while parser.stream.current.type != "block_end":
|
||||
if variables:
|
||||
@@ -246,34 +372,34 @@ def parse(self, parser):
|
||||
if parser.stream.skip_if("colon"):
|
||||
break
|
||||
|
||||
name = parser.stream.expect("name")
|
||||
if name.value in variables:
|
||||
token = parser.stream.expect("name")
|
||||
if token.value in variables:
|
||||
parser.fail(
|
||||
"translatable variable %r defined twice." % name.value,
|
||||
name.lineno,
|
||||
f"translatable variable {token.value!r} defined twice.",
|
||||
token.lineno,
|
||||
exc=TemplateAssertionError,
|
||||
)
|
||||
|
||||
# expressions
|
||||
if parser.stream.current.type == "assign":
|
||||
next(parser.stream)
|
||||
variables[name.value] = var = parser.parse_expression()
|
||||
elif trimmed is None and name.value in ("trimmed", "notrimmed"):
|
||||
trimmed = name.value == "trimmed"
|
||||
variables[token.value] = var = parser.parse_expression()
|
||||
elif trimmed is None and token.value in ("trimmed", "notrimmed"):
|
||||
trimmed = token.value == "trimmed"
|
||||
continue
|
||||
else:
|
||||
variables[name.value] = var = nodes.Name(name.value, "load")
|
||||
variables[token.value] = var = nodes.Name(token.value, "load")
|
||||
|
||||
if plural_expr is None:
|
||||
if isinstance(var, nodes.Call):
|
||||
plural_expr = nodes.Name("_trans", "load")
|
||||
variables[name.value] = plural_expr
|
||||
variables[token.value] = plural_expr
|
||||
plural_expr_assignment = nodes.Assign(
|
||||
nodes.Name("_trans", "store"), var
|
||||
)
|
||||
else:
|
||||
plural_expr = var
|
||||
num_called_num = name.value == "num"
|
||||
num_called_num = token.value == "num"
|
||||
|
||||
parser.stream.expect("block_end")
|
||||
|
||||
@@ -294,15 +420,15 @@ def parse(self, parser):
|
||||
have_plural = True
|
||||
next(parser.stream)
|
||||
if parser.stream.current.type != "block_end":
|
||||
name = parser.stream.expect("name")
|
||||
if name.value not in variables:
|
||||
token = parser.stream.expect("name")
|
||||
if token.value not in variables:
|
||||
parser.fail(
|
||||
"unknown variable %r for pluralization" % name.value,
|
||||
name.lineno,
|
||||
f"unknown variable {token.value!r} for pluralization",
|
||||
token.lineno,
|
||||
exc=TemplateAssertionError,
|
||||
)
|
||||
plural_expr = variables[name.value]
|
||||
num_called_num = name.value == "num"
|
||||
plural_expr = variables[token.value]
|
||||
num_called_num = token.value == "num"
|
||||
parser.stream.expect("block_end")
|
||||
plural_names, plural = self._parse_block(parser, False)
|
||||
next(parser.stream)
|
||||
@@ -311,9 +437,9 @@ def parse(self, parser):
|
||||
next(parser.stream)
|
||||
|
||||
# register free names as simple name expressions
|
||||
for var in referenced:
|
||||
if var not in variables:
|
||||
variables[var] = nodes.Name(var, "load")
|
||||
for name in referenced:
|
||||
if name not in variables:
|
||||
variables[name] = nodes.Name(name, "load")
|
||||
|
||||
if not have_plural:
|
||||
plural_expr = None
|
||||
@@ -341,14 +467,17 @@ def parse(self, parser):
|
||||
else:
|
||||
return node
|
||||
|
||||
def _trim_whitespace(self, string, _ws_re=_ws_re):
|
||||
def _trim_whitespace(self, string: str, _ws_re: t.Pattern[str] = _ws_re) -> str:
|
||||
return _ws_re.sub(" ", string.strip())
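A minimal illustration of the trimming behaviour, assuming `_ws_re` matches whitespace runs around newlines (the pattern itself is defined outside this hunk):

```python
import re

_ws_re = re.compile(r"\s*\n\s*")  # assumed; the real pattern is not shown here

print(_ws_re.sub(" ", "  Hello\n        World!  ".strip()))  # Hello World!
```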

-    def _parse_block(self, parser, allow_pluralize):
+    def _parse_block(
+        self, parser: "Parser", allow_pluralize: bool
+    ) -> t.Tuple[t.List[str], str]:
         """Parse until the next block tag with a given name."""
         referenced = []
         buf = []
-        while 1:
+        while True:
             if parser.stream.current.type == "data":
                 buf.append(parser.stream.current.value.replace("%", "%%"))
                 next(parser.stream)
@@ -356,7 +485,7 @@ def _parse_block(self, parser, allow_pluralize):
                 next(parser.stream)
                 name = parser.stream.expect("name").value
                 referenced.append(name)
-                buf.append("%%(%s)s" % name)
+                buf.append(f"%({name})s")
                 parser.stream.expect("variable_end")
             elif parser.stream.current.type == "block_begin":
                 next(parser.stream)
@@ -379,12 +508,21 @@ def _parse_block(self, parser, allow_pluralize):
         return referenced, concat(buf)

     def _make_node(
-        self, singular, plural, variables, plural_expr, vars_referenced, num_called_num
-    ):
+        self,
+        singular: str,
+        plural: t.Optional[str],
+        variables: t.Dict[str, nodes.Expr],
+        plural_expr: t.Optional[nodes.Expr],
+        vars_referenced: bool,
+        num_called_num: bool,
+    ) -> nodes.Output:
         """Generates a useful node from the data provided."""
+        newstyle = self.environment.newstyle_gettext  # type: ignore
+        node: nodes.Expr
+
         # no variables referenced? no need to escape for old style
         # gettext invocations only if there are vars.
-        if not vars_referenced and not self.environment.newstyle_gettext:
+        if not vars_referenced and not newstyle:
             singular = singular.replace("%%", "%")
             if plural:
                 plural = plural.replace("%%", "%")
@@ -408,8 +546,8 @@ def _make_node(
         # in case newstyle gettext is used, the method is powerful
         # enough to handle the variable expansion and autoescape
         # handling itself
-        if self.environment.newstyle_gettext:
-            for key, value in iteritems(variables):
+        if newstyle:
+            for key, value in variables.items():
                 # the function adds that later anyways in case num was
                 # called num, so just skip it.
                 if num_called_num and key == "num":
@@ -439,9 +577,9 @@ class ExprStmtExtension(Extension):
     that it doesn't print the return value.
     """

-    tags = set(["do"])
+    tags = {"do"}

-    def parse(self, parser):
+    def parse(self, parser: "Parser") -> nodes.ExprStmt:
         node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
         node.node = parser.parse_tuple()
         return node
@@ -450,9 +588,9 @@ def parse(self, parser):
 class LoopControlExtension(Extension):
     """Adds break and continue to the template engine."""

-    tags = set(["break", "continue"])
+    tags = {"break", "continue"}

-    def parse(self, parser):
+    def parse(self, parser: "Parser") -> t.Union[nodes.Break, nodes.Continue]:
         token = next(parser.stream)
         if token.value == "break":
             return nodes.Break(lineno=token.lineno)
@@ -460,11 +598,25 @@ def parse(self, parser):


 class WithExtension(Extension):
-    pass
+    def __init__(self, environment: Environment) -> None:
+        super().__init__(environment)
+        warnings.warn(
+            "The 'with' extension is deprecated and will be removed in"
+            " Jinja 3.1. This is built in now.",
+            DeprecationWarning,
+            stacklevel=3,
+        )


 class AutoEscapeExtension(Extension):
-    pass
+    def __init__(self, environment: Environment) -> None:
+        super().__init__(environment)
+        warnings.warn(
+            "The 'autoescape' extension is deprecated and will be"
+            " removed in Jinja 3.1. This is built in now.",
+            DeprecationWarning,
+            stacklevel=3,
+        )


 class DebugExtension(Extension):
@@ -490,13 +642,13 @@ class DebugExtension(Extension):

     tags = {"debug"}

-    def parse(self, parser):
+    def parse(self, parser: "Parser") -> nodes.Output:
         lineno = parser.stream.expect("name:debug").lineno
-        context = ContextReference()
+        context = nodes.ContextReference()
         result = self.call_method("_render", [context], lineno=lineno)
         return nodes.Output([result], lineno=lineno)

-    def _render(self, context):
+    def _render(self, context: Context) -> str:
         result = {
             "context": context.get_all(),
             "filters": sorted(self.environment.filters.keys()),
@@ -504,13 +656,16 @@ def _render(self, context):
         }

         # Set the depth since the intent is to show the top few names.
-        if version_info[:2] >= (3, 4):
-            return pprint.pformat(result, depth=3, compact=True)
-        else:
-            return pprint.pformat(result, depth=3)
+        return pprint.pformat(result, depth=3, compact=True)


-def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True):
+def extract_from_ast(
+    ast: nodes.Template,
+    gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS,
+    babel_style: bool = True,
+) -> t.Iterator[
+    t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]]
+]:
     """Extract localizable strings from the given template node. Per
     default this function returns matches in babel style that means non string
     parameters as well as keyword arguments are returned as `None`. This
@@ -538,23 +693,26 @@ def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True
     * ``lineno`` is the number of the line on which the string was found,
     * ``function`` is the name of the ``gettext`` function used (if the
       string was extracted from embedded Python code), and
-    * ``message`` is the string itself (a ``unicode`` object, or a tuple
-      of ``unicode`` objects for functions with multiple string arguments).
+    * ``message`` is the string, or a tuple of strings for functions
+      with multiple string arguments.

     This extraction function operates on the AST and is because of that unable
     to extract any comments. For comment support you have to use the babel
     extraction interface or extract comments yourself.
     """
-    for node in node.find_all(nodes.Call):
+    out: t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]
+
+    for node in ast.find_all(nodes.Call):
         if (
             not isinstance(node.node, nodes.Name)
             or node.node.name not in gettext_functions
         ):
             continue

-        strings = []
+        strings: t.List[t.Optional[str]] = []
+
         for arg in node.args:
-            if isinstance(arg, nodes.Const) and isinstance(arg.value, string_types):
+            if isinstance(arg, nodes.Const) and isinstance(arg.value, str):
                 strings.append(arg.value)
             else:
                 strings.append(None)
@@ -567,31 +725,35 @@ def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True
             strings.append(None)

         if not babel_style:
-            strings = tuple(x for x in strings if x is not None)
-            if not strings:
+            out = tuple(x for x in strings if x is not None)
+
+            if not out:
                 continue
         else:
             if len(strings) == 1:
-                strings = strings[0]
+                out = strings[0]
             else:
-                strings = tuple(strings)
-        yield node.lineno, node.node.name, strings
+                out = tuple(strings)
+
+        yield node.lineno, node.node.name, out


-class _CommentFinder(object):
+class _CommentFinder:
     """Helper class to find comments in a token stream. Can only
     find comments for gettext calls forwards. Once the comment
     from line 4 is found, a comment for line 1 will not return a
     usable value.
     """

-    def __init__(self, tokens, comment_tags):
+    def __init__(
+        self, tokens: t.Sequence[t.Tuple[int, str, str]], comment_tags: t.Sequence[str]
+    ) -> None:
         self.tokens = tokens
         self.comment_tags = comment_tags
         self.offset = 0
         self.last_lineno = 0

-    def find_backwards(self, offset):
+    def find_backwards(self, offset: int) -> t.List[str]:
         try:
             for _, token_type, token_value in reversed(
                 self.tokens[self.offset : offset]
@@ -607,7 +769,7 @@ def find_backwards(self, offset):
         finally:
             self.offset = offset

-    def find_comments(self, lineno):
+    def find_comments(self, lineno: int) -> t.List[str]:
         if not self.comment_tags or self.last_lineno > lineno:
             return []
         for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):
@@ -616,7 +778,16 @@ def find_comments(self, lineno):
         return self.find_backwards(len(self.tokens))


-def babel_extract(fileobj, keywords, comment_tags, options):
+def babel_extract(
+    fileobj: t.BinaryIO,
+    keywords: t.Sequence[str],
+    comment_tags: t.Sequence[str],
+    options: t.Dict[str, t.Any],
+) -> t.Iterator[
+    t.Tuple[
+        int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]], t.List[str]
+    ]
+]:
     """Babel extraction method for Jinja templates.

     .. versionchanged:: 2.3
@@ -644,33 +815,37 @@ def babel_extract(fileobj, keywords, comment_tags, options):
     :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
              (comments will be empty currently)
     """
-    extensions = set()
-    for extension in options.get("extensions", "").split(","):
-        extension = extension.strip()
-        if not extension:
-            continue
-        extensions.add(import_string(extension))
-    if InternationalizationExtension not in extensions:
-        extensions.add(InternationalizationExtension)
+    extensions: t.Dict[t.Type[Extension], None] = {}

-    def getbool(options, key, default=False):
-        return options.get(key, str(default)).lower() in ("1", "on", "yes", "true")
+    for extension_name in options.get("extensions", "").split(","):
+        extension_name = extension_name.strip()
+
+        if not extension_name:
+            continue
+
+        extensions[import_string(extension_name)] = None
+
+    if InternationalizationExtension not in extensions:
+        extensions[InternationalizationExtension] = None
+
+    def getbool(options: t.Mapping[str, str], key: str, default: bool = False) -> bool:
+        return options.get(key, str(default)).lower() in {"1", "on", "yes", "true"}

     silent = getbool(options, "silent", True)
     environment = Environment(
-        options.get("block_start_string", BLOCK_START_STRING),
-        options.get("block_end_string", BLOCK_END_STRING),
-        options.get("variable_start_string", VARIABLE_START_STRING),
-        options.get("variable_end_string", VARIABLE_END_STRING),
-        options.get("comment_start_string", COMMENT_START_STRING),
-        options.get("comment_end_string", COMMENT_END_STRING),
-        options.get("line_statement_prefix") or LINE_STATEMENT_PREFIX,
-        options.get("line_comment_prefix") or LINE_COMMENT_PREFIX,
-        getbool(options, "trim_blocks", TRIM_BLOCKS),
-        getbool(options, "lstrip_blocks", LSTRIP_BLOCKS),
-        NEWLINE_SEQUENCE,
-        getbool(options, "keep_trailing_newline", KEEP_TRAILING_NEWLINE),
-        frozenset(extensions),
+        options.get("block_start_string", defaults.BLOCK_START_STRING),
+        options.get("block_end_string", defaults.BLOCK_END_STRING),
+        options.get("variable_start_string", defaults.VARIABLE_START_STRING),
+        options.get("variable_end_string", defaults.VARIABLE_END_STRING),
+        options.get("comment_start_string", defaults.COMMENT_START_STRING),
+        options.get("comment_end_string", defaults.COMMENT_END_STRING),
+        options.get("line_statement_prefix") or defaults.LINE_STATEMENT_PREFIX,
+        options.get("line_comment_prefix") or defaults.LINE_COMMENT_PREFIX,
+        getbool(options, "trim_blocks", defaults.TRIM_BLOCKS),
+        getbool(options, "lstrip_blocks", defaults.LSTRIP_BLOCKS),
+        defaults.NEWLINE_SEQUENCE,
+        getbool(options, "keep_trailing_newline", defaults.KEEP_TRAILING_NEWLINE),
+        tuple(extensions),
         cache_size=0,
         auto_reload=False,
    )
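Babel normally drives this through its extraction entry point, but calling it directly shows the shape of the results; note that a `{% trans %}` body surfaces as a `gettext` call with `%(name)s` placeholders (a sketch, not the official API usage):

```python
import io

from jinja2.ext import babel_extract

source = io.BytesIO(b"{% trans %}Hello {{ name }}!{% endtrans %}")
print(list(babel_extract(source, ("gettext", "ngettext", "_"), (), {})))
# [(1, 'gettext', 'Hello %(name)s!', [])]
```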
@@ -678,7 +853,7 @@ def getbool(options, key, default=False):
     if getbool(options, "trimmed"):
         environment.policies["ext.i18n.trimmed"] = True
     if getbool(options, "newstyle_gettext"):
-        environment.newstyle_gettext = True
+        environment.newstyle_gettext = True  # type: ignore

     source = fileobj.read().decode(options.get("encoding", "utf-8"))
     try:
File diff suppressed because it is too large
@@ -1,4 +1,6 @@
-from ._compat import iteritems
+import typing as t
+
+from . import nodes
 from .visitor import NodeVisitor

 VAR_LOAD_PARAMETER = "param"
@@ -7,7 +9,9 @@
 VAR_LOAD_UNDEFINED = "undefined"


-def find_symbols(nodes, parent_symbols=None):
+def find_symbols(
+    nodes: t.Iterable[nodes.Node], parent_symbols: t.Optional["Symbols"] = None
+) -> "Symbols":
     sym = Symbols(parent=parent_symbols)
     visitor = FrameSymbolVisitor(sym)
     for node in nodes:
@@ -15,66 +19,79 @@ def find_symbols(nodes, parent_symbols=None):
     return sym


-def symbols_for_node(node, parent_symbols=None):
+def symbols_for_node(
+    node: nodes.Node, parent_symbols: t.Optional["Symbols"] = None
+) -> "Symbols":
     sym = Symbols(parent=parent_symbols)
     sym.analyze_node(node)
     return sym
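A sketch of what the tracker reports for a tiny template on this branch (the `l_<level>_<name>` identifiers come from `_define_ref` below; dict ordering aside):

```python
from jinja2 import Environment
from jinja2.idtracking import symbols_for_node

env = Environment()
sym = symbols_for_node(env.parse("{% set x = 1 %}{{ x }}{{ y }}"))

print(sym.refs)    # {'x': 'l_0_x', 'y': 'l_0_y'}
print(sym.stores)  # {'x'}
print(sym.loads)   # {'l_0_x': ('undefined', None), 'l_0_y': ('resolve', 'y')}
```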


-class Symbols(object):
-    def __init__(self, parent=None, level=None):
+class Symbols:
+    def __init__(
+        self, parent: t.Optional["Symbols"] = None, level: t.Optional[int] = None
+    ) -> None:
         if level is None:
             if parent is None:
                 level = 0
             else:
                 level = parent.level + 1
-        self.level = level
-        self.parent = parent
-        self.refs = {}
-        self.loads = {}
-        self.stores = set()
-
-    def analyze_node(self, node, **kwargs):
+        self.level: int = level
+        self.parent = parent
+        self.refs: t.Dict[str, str] = {}
+        self.loads: t.Dict[str, t.Any] = {}
+        self.stores: t.Set[str] = set()
+
+    def analyze_node(self, node: nodes.Node, **kwargs: t.Any) -> None:
         visitor = RootVisitor(self)
         visitor.visit(node, **kwargs)

-    def _define_ref(self, name, load=None):
-        ident = "l_%d_%s" % (self.level, name)
+    def _define_ref(
+        self, name: str, load: t.Optional[t.Tuple[str, t.Optional[str]]] = None
+    ) -> str:
+        ident = f"l_{self.level}_{name}"
         self.refs[name] = ident
         if load is not None:
             self.loads[ident] = load
         return ident

-    def find_load(self, target):
+    def find_load(self, target: str) -> t.Optional[t.Any]:
         if target in self.loads:
             return self.loads[target]
+
         if self.parent is not None:
             return self.parent.find_load(target)

-    def find_ref(self, name):
+        return None
+
+    def find_ref(self, name: str) -> t.Optional[str]:
         if name in self.refs:
             return self.refs[name]
+
         if self.parent is not None:
             return self.parent.find_ref(name)

-    def ref(self, name):
+        return None
+
+    def ref(self, name: str) -> str:
         rv = self.find_ref(name)
         if rv is None:
             raise AssertionError(
-                "Tried to resolve a name to a reference that "
-                "was unknown to the frame (%r)" % name
+                "Tried to resolve a name to a reference that was"
+                f" unknown to the frame ({name!r})"
             )
         return rv

-    def copy(self):
-        rv = object.__new__(self.__class__)
+    def copy(self) -> "Symbols":
+        rv = t.cast(Symbols, object.__new__(self.__class__))
         rv.__dict__.update(self.__dict__)
         rv.refs = self.refs.copy()
         rv.loads = self.loads.copy()
         rv.stores = self.stores.copy()
         return rv

-    def store(self, name):
+    def store(self, name: str) -> None:
         self.stores.add(name)

         # If we have not seen the name referenced yet, we need to figure
@@ -92,17 +109,16 @@ def store(self, name):
         # Otherwise we can just set it to undefined.
         self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))

-    def declare_parameter(self, name):
+    def declare_parameter(self, name: str) -> str:
         self.stores.add(name)
         return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))

-    def load(self, name):
-        target = self.find_ref(name)
-        if target is None:
+    def load(self, name: str) -> None:
+        if self.find_ref(name) is None:
             self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))

-    def branch_update(self, branch_symbols):
-        stores = {}
+    def branch_update(self, branch_symbols: t.Sequence["Symbols"]) -> None:
+        stores: t.Dict[str, int] = {}
         for branch in branch_symbols:
             for target in branch.stores:
                 if target in self.stores:
@@ -114,10 +130,11 @@ def branch_update(self, branch_symbols):
             self.loads.update(sym.loads)
             self.stores.update(sym.stores)

-        for name, branch_count in iteritems(stores):
+        for name, branch_count in stores.items():
             if branch_count == len(branch_symbols):
                 continue
-            target = self.find_ref(name)
+
+            target = self.find_ref(name)  # type: ignore
             assert target is not None, "should not happen"

             if self.parent is not None:
@@ -127,56 +144,64 @@ def branch_update(self, branch_symbols):
                 continue
             self.loads[target] = (VAR_LOAD_RESOLVE, name)

-    def dump_stores(self):
-        rv = {}
-        node = self
+    def dump_stores(self) -> t.Dict[str, str]:
+        rv: t.Dict[str, str] = {}
+        node: t.Optional["Symbols"] = self

         while node is not None:
-            for name in node.stores:
+            for name in sorted(node.stores):
                 if name not in rv:
-                    rv[name] = self.find_ref(name)
+                    rv[name] = self.find_ref(name)  # type: ignore

             node = node.parent

         return rv

-    def dump_param_targets(self):
+    def dump_param_targets(self) -> t.Set[str]:
         rv = set()
-        node = self
+        node: t.Optional["Symbols"] = self

         while node is not None:
-            for target, (instr, _) in iteritems(self.loads):
+            for target, (instr, _) in self.loads.items():
                 if instr == VAR_LOAD_PARAMETER:
                     rv.add(target)

             node = node.parent

         return rv


 class RootVisitor(NodeVisitor):
-    def __init__(self, symbols):
+    def __init__(self, symbols: "Symbols") -> None:
         self.sym_visitor = FrameSymbolVisitor(symbols)

-    def _simple_visit(self, node, **kwargs):
+    def _simple_visit(self, node: nodes.Node, **kwargs: t.Any) -> None:
         for child in node.iter_child_nodes():
             self.sym_visitor.visit(child)

-    visit_Template = (
-        visit_Block
-    ) = (
-        visit_Macro
-    ) = (
-        visit_FilterBlock
-    ) = visit_Scope = visit_If = visit_ScopedEvalContextModifier = _simple_visit
+    visit_Template = _simple_visit
+    visit_Block = _simple_visit
+    visit_Macro = _simple_visit
+    visit_FilterBlock = _simple_visit
+    visit_Scope = _simple_visit
+    visit_If = _simple_visit
+    visit_ScopedEvalContextModifier = _simple_visit

-    def visit_AssignBlock(self, node, **kwargs):
+    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
         for child in node.body:
             self.sym_visitor.visit(child)

-    def visit_CallBlock(self, node, **kwargs):
+    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
         for child in node.iter_child_nodes(exclude=("call",)):
             self.sym_visitor.visit(child)

-    def visit_OverlayScope(self, node, **kwargs):
+    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
         for child in node.body:
             self.sym_visitor.visit(child)

-    def visit_For(self, node, for_branch="body", **kwargs):
+    def visit_For(
+        self, node: nodes.For, for_branch: str = "body", **kwargs: t.Any
+    ) -> None:
         if for_branch == "body":
             self.sym_visitor.visit(node.target, store_as_param=True)
             branch = node.body
@@ -189,28 +214,30 @@ def visit_For(self, node, for_branch="body", **kwargs):
             return
         else:
             raise RuntimeError("Unknown for branch")
-        for item in branch or ():
-            self.sym_visitor.visit(item)

-    def visit_With(self, node, **kwargs):
+        if branch:
+            for item in branch:
+                self.sym_visitor.visit(item)
+
+    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
         for target in node.targets:
             self.sym_visitor.visit(target)
         for child in node.body:
             self.sym_visitor.visit(child)

-    def generic_visit(self, node, *args, **kwargs):
-        raise NotImplementedError(
-            "Cannot find symbols for %r" % node.__class__.__name__
-        )
+    def generic_visit(self, node: nodes.Node, *args: t.Any, **kwargs: t.Any) -> None:
+        raise NotImplementedError(f"Cannot find symbols for {type(node).__name__!r}")


 class FrameSymbolVisitor(NodeVisitor):
     """A visitor for `Frame.inspect`."""

-    def __init__(self, symbols):
+    def __init__(self, symbols: "Symbols") -> None:
         self.symbols = symbols

-    def visit_Name(self, node, store_as_param=False, **kwargs):
+    def visit_Name(
+        self, node: nodes.Name, store_as_param: bool = False, **kwargs: t.Any
+    ) -> None:
         """All assignments to names go through this function."""
         if store_as_param or node.ctx == "param":
             self.symbols.declare_parameter(node.name)
@@ -219,72 +246,73 @@ def visit_Name(self, node, store_as_param=False, **kwargs):
         elif node.ctx == "load":
             self.symbols.load(node.name)

-    def visit_NSRef(self, node, **kwargs):
+    def visit_NSRef(self, node: nodes.NSRef, **kwargs: t.Any) -> None:
         self.symbols.load(node.name)

-    def visit_If(self, node, **kwargs):
+    def visit_If(self, node: nodes.If, **kwargs: t.Any) -> None:
         self.visit(node.test, **kwargs)

         original_symbols = self.symbols

-        def inner_visit(nodes):
+        def inner_visit(nodes: t.Iterable[nodes.Node]) -> "Symbols":
             self.symbols = rv = original_symbols.copy()
+
             for subnode in nodes:
                 self.visit(subnode, **kwargs)
+
             self.symbols = original_symbols
             return rv

         body_symbols = inner_visit(node.body)
         elif_symbols = inner_visit(node.elif_)
         else_symbols = inner_visit(node.else_ or ())

         self.symbols.branch_update([body_symbols, elif_symbols, else_symbols])

-    def visit_Macro(self, node, **kwargs):
+    def visit_Macro(self, node: nodes.Macro, **kwargs: t.Any) -> None:
         self.symbols.store(node.name)

-    def visit_Import(self, node, **kwargs):
+    def visit_Import(self, node: nodes.Import, **kwargs: t.Any) -> None:
         self.generic_visit(node, **kwargs)
         self.symbols.store(node.target)

-    def visit_FromImport(self, node, **kwargs):
+    def visit_FromImport(self, node: nodes.FromImport, **kwargs: t.Any) -> None:
         self.generic_visit(node, **kwargs)

         for name in node.names:
             if isinstance(name, tuple):
                 self.symbols.store(name[1])
             else:
                 self.symbols.store(name)

-    def visit_Assign(self, node, **kwargs):
+    def visit_Assign(self, node: nodes.Assign, **kwargs: t.Any) -> None:
         """Visit assignments in the correct order."""
         self.visit(node.node, **kwargs)
         self.visit(node.target, **kwargs)

-    def visit_For(self, node, **kwargs):
+    def visit_For(self, node: nodes.For, **kwargs: t.Any) -> None:
         """Visiting stops at for blocks. However the block sequence
         is visited as part of the outer scope.
         """
         self.visit(node.iter, **kwargs)

-    def visit_CallBlock(self, node, **kwargs):
+    def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None:
         self.visit(node.call, **kwargs)

-    def visit_FilterBlock(self, node, **kwargs):
+    def visit_FilterBlock(self, node: nodes.FilterBlock, **kwargs: t.Any) -> None:
         self.visit(node.filter, **kwargs)

-    def visit_With(self, node, **kwargs):
+    def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None:
         for target in node.values:
             self.visit(target)

-    def visit_AssignBlock(self, node, **kwargs):
+    def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None:
         """Stop visiting at block assigns."""
         self.visit(node.target, **kwargs)

-    def visit_Scope(self, node, **kwargs):
+    def visit_Scope(self, node: nodes.Scope, **kwargs: t.Any) -> None:
         """Stop visiting at scopes."""

-    def visit_Block(self, node, **kwargs):
+    def visit_Block(self, node: nodes.Block, **kwargs: t.Any) -> None:
         """Stop visiting at blocks."""

-    def visit_OverlayScope(self, node, **kwargs):
+    def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None:
         """Do not visit into overlay scopes."""
@@ -1,32 +1,48 @@
-# -*- coding: utf-8 -*-
 """Implements a Jinja / Python combination lexer. The ``Lexer`` class
 is used to do some preprocessing. It filters out invalid operators like
 the bitshift operators we don't allow in templates. It separates
 template code and python code in expressions.
 """
 import re
+import typing as t
+from ast import literal_eval
 from collections import deque
-from operator import itemgetter
+from sys import intern

-from ._compat import implements_iterator
-from ._compat import intern
-from ._compat import iteritems
-from ._compat import text_type
+from ._identifier import pattern as name_re
 from .exceptions import TemplateSyntaxError
 from .utils import LRUCache

+if t.TYPE_CHECKING:
+    import typing_extensions as te
+    from .environment import Environment
+
 # cache for the lexers. Exists in order to be able to have multiple
 # environments with the same lexer
-_lexer_cache = LRUCache(50)
+_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50)  # type: ignore

 # static regular expressions
-whitespace_re = re.compile(r"\s+", re.U)
+whitespace_re = re.compile(r"\s+")
 newline_re = re.compile(r"(\r\n|\r|\n)")
 string_re = re.compile(
     r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
 )
-integer_re = re.compile(r"(\d+_)*\d+")
+integer_re = re.compile(
+    r"""
+    (
+        0b(_?[0-1])+ # binary
+    |
+        0o(_?[0-7])+ # octal
+    |
+        0x(_?[\da-f])+ # hex
+    |
+        [1-9](_?\d)* # decimal
+    |
+        0(_?0)* # decimal zero
+    )
+    """,
+    re.IGNORECASE | re.VERBOSE,
+)
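Combined with the base-0 `int()` conversion added in `wrap()` further down, the widened pattern lets templates on this branch use prefixed and underscore-grouped integer literals, e.g.:

```python
from jinja2 import Environment

env = Environment()
print(env.from_string("{{ 0x1f }} {{ 0b101 }} {{ 12_000 }}").render())
# 31 5 12000
```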
 float_re = re.compile(
     r"""
     (?<!\.) # doesn't start with a .
@@ -41,20 +57,6 @@
     re.IGNORECASE | re.VERBOSE,
 )

-try:
-    # check if this Python supports Unicode identifiers
-    compile("föö", "<unknown>", "eval")
-except SyntaxError:
-    # Python 2, no Unicode support, use ASCII identifiers
-    name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*")
-    check_ident = False
-else:
-    # Unicode support, import generated re pattern and set flag to use
-    # str.isidentifier to validate during lexing.
-    from ._identifier import pattern as name_re
-
-    check_ident = True
-
 # intern the tokens and keep references to them
 TOKEN_ADD = intern("add")
 TOKEN_ASSIGN = intern("assign")
@@ -136,10 +138,10 @@
     ";": TOKEN_SEMICOLON,
 }

-reverse_operators = dict([(v, k) for k, v in iteritems(operators)])
+reverse_operators = {v: k for k, v in operators.items()}
 assert len(operators) == len(reverse_operators), "operators dropped"
 operator_re = re.compile(
-    "(%s)" % "|".join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))
+    f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})"
 )

 ignored_tokens = frozenset(
@@ -158,9 +160,10 @@
 )


-def _describe_token_type(token_type):
+def _describe_token_type(token_type: str) -> str:
     if token_type in reverse_operators:
         return reverse_operators[token_type]
+
     return {
         TOKEN_COMMENT_BEGIN: "begin of comment",
         TOKEN_COMMENT_END: "end of comment",
@@ -177,32 +180,35 @@ def _describe_token_type(token_type):
     }.get(token_type, token_type)


-def describe_token(token):
+def describe_token(token: "Token") -> str:
     """Returns a description of the token."""
     if token.type == TOKEN_NAME:
         return token.value
+
     return _describe_token_type(token.type)


-def describe_token_expr(expr):
+def describe_token_expr(expr: str) -> str:
     """Like `describe_token` but for token expressions."""
     if ":" in expr:
         type, value = expr.split(":", 1)
+
         if type == TOKEN_NAME:
             return value
     else:
         type = expr
+
     return _describe_token_type(type)


-def count_newlines(value):
+def count_newlines(value: str) -> int:
     """Count the number of newline characters in the string. This is
     useful for extensions that filter a stream.
     """
     return len(newline_re.findall(value))


-def compile_rules(environment):
+def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]:
     """Compiles all the rules from the environment into a list of rules."""
     e = re.escape
     rules = [
@@ -243,36 +249,30 @@ def compile_rules(environment):
     return [x[1:] for x in sorted(rules, reverse=True)]


-class Failure(object):
+class Failure:
     """Class that raises a `TemplateSyntaxError` if called.
     Used by the `Lexer` to specify known errors.
     """

-    def __init__(self, message, cls=TemplateSyntaxError):
+    def __init__(
+        self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError
+    ) -> None:
         self.message = message
         self.error_class = cls

-    def __call__(self, lineno, filename):
+    def __call__(self, lineno: int, filename: str) -> "te.NoReturn":
         raise self.error_class(self.message, lineno, filename)


-class Token(tuple):
-    """Token class."""
+class Token(t.NamedTuple):
+    lineno: int
+    type: str
+    value: str

-    __slots__ = ()
-    lineno, type, value = (property(itemgetter(x)) for x in range(3))
+    def __str__(self) -> str:
+        return describe_token(self)

-    def __new__(cls, lineno, type, value):
-        return tuple.__new__(cls, (lineno, intern(str(type)), value))
-
-    def __str__(self):
-        if self.type in reverse_operators:
-            return reverse_operators[self.type]
-        elif self.type == "name":
-            return self.value
-        return self.type
-
-    def test(self, expr):
+    def test(self, expr: str) -> bool:
         """Test a token against a token expression. This can either be a
         token type or ``'token_type:token_value'``. This can only test
         against string values and types.
@@ -281,76 +281,75 @@ def test(self, expr):
         # passed an iterable of not interned strings.
         if self.type == expr:
             return True
-        elif ":" in expr:
+
+        if ":" in expr:
             return expr.split(":", 1) == [self.type, self.value]
+
         return False

-    def test_any(self, *iterable):
+    def test_any(self, *iterable: str) -> bool:
         """Test against multiple token expressions."""
-        for expr in iterable:
-            if self.test(expr):
-                return True
-        return False
-
-    def __repr__(self):
-        return "Token(%r, %r, %r)" % (self.lineno, self.type, self.value)
+        return any(self.test(expr) for expr in iterable)
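The NamedTuple keeps the old tuple behaviour while naming the fields; `test` accepts a bare token type or a `type:value` expression:

```python
from jinja2.lexer import Token

tok = Token(1, "name", "foo")
print(tok.lineno, tok.type, tok.value)         # 1 name foo
print(tok.test("name"), tok.test("name:bar"))  # True False
print(tok.test_any("integer", "name:foo"))     # True
print(str(tok))  # foo -- describe_token() returns the value for name tokens
```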


-@implements_iterator
-class TokenStreamIterator(object):
+class TokenStreamIterator:
     """The iterator for tokenstreams. Iterate over the stream
     until the eof token is reached.
     """

-    def __init__(self, stream):
+    def __init__(self, stream: "TokenStream") -> None:
         self.stream = stream

-    def __iter__(self):
+    def __iter__(self) -> "TokenStreamIterator":
         return self

-    def __next__(self):
+    def __next__(self) -> Token:
         token = self.stream.current
+
         if token.type is TOKEN_EOF:
             self.stream.close()
-            raise StopIteration()
+            raise StopIteration
+
         next(self.stream)
         return token


-@implements_iterator
-class TokenStream(object):
+class TokenStream:
     """A token stream is an iterable that yields :class:`Token`\\s. The
     parser however does not iterate over it but calls :meth:`next` to go
     one token ahead. The current active token is stored as :attr:`current`.
     """

-    def __init__(self, generator, name, filename):
+    def __init__(
+        self,
+        generator: t.Iterable[Token],
+        name: t.Optional[str],
+        filename: t.Optional[str],
+    ):
         self._iter = iter(generator)
-        self._pushed = deque()
+        self._pushed: "te.Deque[Token]" = deque()
         self.name = name
         self.filename = filename
         self.closed = False
         self.current = Token(1, TOKEN_INITIAL, "")
         next(self)

-    def __iter__(self):
+    def __iter__(self) -> TokenStreamIterator:
         return TokenStreamIterator(self)

-    def __bool__(self):
+    def __bool__(self) -> bool:
         return bool(self._pushed) or self.current.type is not TOKEN_EOF

-    __nonzero__ = __bool__  # py2
-
     @property
-    def eos(self):
+    def eos(self) -> bool:
         """Are we at the end of the stream?"""
         return not self

-    def push(self, token):
+    def push(self, token: Token) -> None:
         """Push a token back to the stream."""
         self._pushed.append(token)

-    def look(self):
+    def look(self) -> Token:
         """Look at the next token."""
         old_token = next(self)
         result = self.current
@@ -358,28 +357,31 @@ def look(self):
         self.current = old_token
         return result

-    def skip(self, n=1):
+    def skip(self, n: int = 1) -> None:
         """Go n tokens ahead."""
         for _ in range(n):
             next(self)

-    def next_if(self, expr):
+    def next_if(self, expr: str) -> t.Optional[Token]:
         """Perform the token test and return the token if it matched.
         Otherwise the return value is `None`.
         """
         if self.current.test(expr):
             return next(self)

-    def skip_if(self, expr):
+        return None
+
+    def skip_if(self, expr: str) -> bool:
         """Like :meth:`next_if` but only returns `True` or `False`."""
         return self.next_if(expr) is not None
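A quick walk through the stream API (a sketch; `get_lexer` and `tokenize` are defined further down in this file):

```python
from jinja2 import Environment
from jinja2.lexer import get_lexer

env = Environment()
stream = get_lexer(env).tokenize("{{ foo + 1 }}")

stream.expect("variable_begin")
print(stream.expect("name").value)     # foo
print(stream.skip_if("add"))           # True -- "+" arrives as the "add" token
print(stream.expect("integer").value)  # 1
stream.expect("variable_end")
print(stream.eos)                      # True
```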

-    def __next__(self):
+    def __next__(self) -> Token:
         """Go one token ahead and return the old one.

         Use the built-in :func:`next` instead of calling this directly.
         """
         rv = self.current
+
         if self._pushed:
             self.current = self._pushed.popleft()
         elif self.current.type is not TOKEN_EOF:
@@ -387,40 +389,41 @@ def __next__(self):
                 self.current = next(self._iter)
             except StopIteration:
                 self.close()
+
         return rv

-    def close(self):
+    def close(self) -> None:
         """Close the stream."""
         self.current = Token(self.current.lineno, TOKEN_EOF, "")
-        self._iter = None
+        self._iter = iter(())
         self.closed = True

-    def expect(self, expr):
+    def expect(self, expr: str) -> Token:
         """Expect a given token type and return it. This accepts the same
         argument as :meth:`jinja2.lexer.Token.test`.
         """
         if not self.current.test(expr):
             expr = describe_token_expr(expr)
+
             if self.current.type is TOKEN_EOF:
                 raise TemplateSyntaxError(
-                    "unexpected end of template, expected %r." % expr,
+                    f"unexpected end of template, expected {expr!r}.",
                     self.current.lineno,
                     self.name,
                     self.filename,
                 )
+
             raise TemplateSyntaxError(
-                "expected token %r, got %r" % (expr, describe_token(self.current)),
+                f"expected token {expr!r}, got {describe_token(self.current)!r}",
                 self.current.lineno,
                 self.name,
                 self.filename,
             )
-        try:
-            return self.current
-        finally:
-            next(self)
+
+        return next(self)


-def get_lexer(environment):
+def get_lexer(environment: "Environment") -> "Lexer":
     """Return a lexer which is probably cached."""
     key = (
         environment.block_start_string,
@@ -437,9 +440,10 @@ def get_lexer(environment):
         environment.keep_trailing_newline,
     )
     lexer = _lexer_cache.get(key)
+
     if lexer is None:
-        lexer = Lexer(environment)
-        _lexer_cache[key] = lexer
+        _lexer_cache[key] = lexer = Lexer(environment)
+
     return lexer
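Because the cache key is built purely from the delimiter and whitespace settings, identically configured environments share one `Lexer`:

```python
from jinja2 import Environment
from jinja2.lexer import get_lexer

assert get_lexer(Environment()) is get_lexer(Environment())
assert get_lexer(Environment()) is not get_lexer(Environment(trim_blocks=True))
```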


@@ -452,11 +456,17 @@ class OptionalLStrip(tuple):

     # Even though it looks like a no-op, creating instances fails
     # without this.
-    def __new__(cls, *members, **kwargs):
-        return super(OptionalLStrip, cls).__new__(cls, members)
+    def __new__(cls, *members, **kwargs):  # type: ignore
+        return super().__new__(cls, members)


-class Lexer(object):
+class _Rule(t.NamedTuple):
+    pattern: t.Pattern[str]
+    tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]]
+    command: t.Optional[str]
+
+
+class Lexer:
     """Class that implements a lexer for a given environment. Automatically
     created by the environment class, usually you don't have to do that.

@@ -464,21 +474,21 @@ class Lexer(object):
     Multiple environments can share the same lexer.
     """

-    def __init__(self, environment):
+    def __init__(self, environment: "Environment") -> None:
         # shortcuts
         e = re.escape

-        def c(x):
+        def c(x: str) -> t.Pattern[str]:
             return re.compile(x, re.M | re.S)

         # lexing rules for tags
-        tag_rules = [
-            (whitespace_re, TOKEN_WHITESPACE, None),
-            (float_re, TOKEN_FLOAT, None),
-            (integer_re, TOKEN_INTEGER, None),
-            (name_re, TOKEN_NAME, None),
-            (string_re, TOKEN_STRING, None),
-            (operator_re, TOKEN_OPERATOR, None),
+        tag_rules: t.List[_Rule] = [
+            _Rule(whitespace_re, TOKEN_WHITESPACE, None),
+            _Rule(float_re, TOKEN_FLOAT, None),
+            _Rule(integer_re, TOKEN_INTEGER, None),
+            _Rule(name_re, TOKEN_NAME, None),
+            _Rule(string_re, TOKEN_STRING, None),
+            _Rule(operator_re, TOKEN_OPERATOR, None),
         ]

         # assemble the root lexing rule. because "|" is ungreedy
@@ -489,8 +499,13 @@ def c(x):
         # is required.
         root_tag_rules = compile_rules(environment)

+        block_start_re = e(environment.block_start_string)
+        block_end_re = e(environment.block_end_string)
+        comment_end_re = e(environment.comment_end_string)
+        variable_end_re = e(environment.variable_end_string)
+
         # block suffix if trimming is enabled
-        block_suffix_re = environment.trim_blocks and "\\n?" or ""
+        block_suffix_re = "\\n?" if environment.trim_blocks else ""

         # If lstrip is enabled, it should not be applied if there is any
         # non-whitespace between the newline and block.
@@ -499,60 +514,44 @@ def c(x):
         self.newline_sequence = environment.newline_sequence
         self.keep_trailing_newline = environment.keep_trailing_newline

+        root_raw_re = (
+            fr"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*"
+            fr"(?:\-{block_end_re}\s*|{block_end_re}))"
+        )
+        root_parts_re = "|".join(
+            [root_raw_re] + [fr"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules]
+        )
+
         # global lexing rules
-        self.rules = {
+        self.rules: t.Dict[str, t.List[_Rule]] = {
             "root": [
                 # directives
-                (
-                    c(
-                        "(.*?)(?:%s)"
-                        % "|".join(
-                            [
-                                r"(?P<raw_begin>%s(\-|\+|)\s*raw\s*(?:\-%s\s*|%s))"
-                                % (
-                                    e(environment.block_start_string),
-                                    e(environment.block_end_string),
-                                    e(environment.block_end_string),
-                                )
-                            ]
-                            + [
-                                r"(?P<%s>%s(\-|\+|))" % (n, r)
-                                for n, r in root_tag_rules
-                            ]
-                        )
-                    ),
-                    OptionalLStrip(TOKEN_DATA, "#bygroup"),
+                _Rule(
+                    c(fr"(.*?)(?:{root_parts_re})"),
+                    OptionalLStrip(TOKEN_DATA, "#bygroup"),  # type: ignore
                     "#bygroup",
                 ),
                 # data
-                (c(".+"), TOKEN_DATA, None),
+                _Rule(c(".+"), TOKEN_DATA, None),
             ],
             # comments
             TOKEN_COMMENT_BEGIN: [
-                (
+                _Rule(
                     c(
-                        r"(.*?)((?:\-%s\s*|%s)%s)"
-                        % (
-                            e(environment.comment_end_string),
-                            e(environment.comment_end_string),
-                            block_suffix_re,
-                        )
+                        fr"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*"
+                        fr"|{comment_end_re}{block_suffix_re}))"
                     ),
                     (TOKEN_COMMENT, TOKEN_COMMENT_END),
                     "#pop",
                 ),
-                (c("(.)"), (Failure("Missing end of comment tag"),), None),
+                _Rule(c(r"(.)"), (Failure("Missing end of comment tag"),), None),
             ],
             # blocks
             TOKEN_BLOCK_BEGIN: [
-                (
+                _Rule(
                     c(
-                        r"(?:\-%s\s*|%s)%s"
-                        % (
-                            e(environment.block_end_string),
-                            e(environment.block_end_string),
-                            block_suffix_re,
-                        )
+                        fr"(?:\+{block_end_re}|\-{block_end_re}\s*"
+                        fr"|{block_end_re}{block_suffix_re})"
                     ),
                     TOKEN_BLOCK_END,
                     "#pop",
@@ -561,14 +560,8 @@ def c(x):
             + tag_rules,
             # variables
             TOKEN_VARIABLE_BEGIN: [
-                (
-                    c(
-                        r"\-%s\s*|%s"
-                        % (
-                            e(environment.variable_end_string),
-                            e(environment.variable_end_string),
-                        )
-                    ),
+                _Rule(
+                    c(fr"\-{variable_end_re}\s*|{variable_end_re}"),
                     TOKEN_VARIABLE_END,
                     "#pop",
                 )
@@ -576,29 +569,25 @@ def c(x):
             + tag_rules,
             # raw block
             TOKEN_RAW_BEGIN: [
-                (
+                _Rule(
                     c(
-                        r"(.*?)((?:%s(\-|\+|))\s*endraw\s*(?:\-%s\s*|%s%s))"
-                        % (
-                            e(environment.block_start_string),
-                            e(environment.block_end_string),
-                            e(environment.block_end_string),
-                            block_suffix_re,
-                        )
+                        fr"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*"
+                        fr"(?:\+{block_end_re}|\-{block_end_re}\s*"
+                        fr"|{block_end_re}{block_suffix_re}))"
                     ),
-                    OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),
+                    OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END),  # type: ignore
                     "#pop",
                 ),
-                (c("(.)"), (Failure("Missing end of raw directive"),), None),
+                _Rule(c(r"(.)"), (Failure("Missing end of raw directive"),), None),
             ],
             # line statements
             TOKEN_LINESTATEMENT_BEGIN: [
-                (c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
+                _Rule(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop")
             ]
             + tag_rules,
             # line comments
             TOKEN_LINECOMMENT_BEGIN: [
-                (
+                _Rule(
                     c(r"(.*?)()(?=\n|$)"),
                     (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END),
                     "#pop",
@@ -606,23 +595,39 @@ def c(x):
             ],
         }

-    def _normalize_newlines(self, value):
-        """Called for strings and template data to normalize it to unicode."""
+    def _normalize_newlines(self, value: str) -> str:
+        """Replace all newlines with the configured sequence in strings
+        and template data.
+        """
         return newline_re.sub(self.newline_sequence, value)

-    def tokenize(self, source, name=None, filename=None, state=None):
+    def tokenize(
+        self,
+        source: str,
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> TokenStream:
         """Calls tokeniter + tokenize and wraps it in a token stream."""
         stream = self.tokeniter(source, name, filename, state)
         return TokenStream(self.wrap(stream, name, filename), name, filename)

-    def wrap(self, stream, name=None, filename=None):
+    def wrap(
+        self,
+        stream: t.Iterable[t.Tuple[int, str, str]],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+    ) -> t.Iterator[Token]:
         """This is called with the stream as returned by `tokenize` and wraps
         every token in a :class:`Token` and converts the value.
         """
-        for lineno, token, value in stream:
+        for lineno, token, value_str in stream:
             if token in ignored_tokens:
                 continue
-            elif token == TOKEN_LINESTATEMENT_BEGIN:
+
+            value: t.Any = value_str
+
+            if token == TOKEN_LINESTATEMENT_BEGIN:
                 token = TOKEN_BLOCK_BEGIN
             elif token == TOKEN_LINESTATEMENT_END:
                 token = TOKEN_BLOCK_END
@@ -630,12 +635,13 @@ def wrap(self, stream, name=None, filename=None):
             elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END):
                 continue
             elif token == TOKEN_DATA:
-                value = self._normalize_newlines(value)
+                value = self._normalize_newlines(value_str)
             elif token == "keyword":
-                token = value
+                token = value_str
             elif token == TOKEN_NAME:
-                value = str(value)
-                if check_ident and not value.isidentifier():
+                value = value_str
+
+                if not value.isidentifier():
                     raise TemplateSyntaxError(
                         "Invalid character in identifier", lineno, name, filename
                     )
@@ -643,51 +649,63 @@ def wrap(self, stream, name=None, filename=None):
                 # try to unescape string
                 try:
                     value = (
-                        self._normalize_newlines(value[1:-1])
+                        self._normalize_newlines(value_str[1:-1])
                         .encode("ascii", "backslashreplace")
                         .decode("unicode-escape")
                     )
                 except Exception as e:
                     msg = str(e).split(":")[-1].strip()
-                    raise TemplateSyntaxError(msg, lineno, name, filename)
+                    raise TemplateSyntaxError(msg, lineno, name, filename) from e
             elif token == TOKEN_INTEGER:
-                value = int(value.replace("_", ""))
+                value = int(value_str.replace("_", ""), 0)
             elif token == TOKEN_FLOAT:
                 # remove all "_" first to support more Python versions
-                value = literal_eval(value.replace("_", ""))
+                value = literal_eval(value_str.replace("_", ""))
             elif token == TOKEN_OPERATOR:
-                token = operators[value]
+                token = operators[value_str]

             yield Token(lineno, token, value)

-    def tokeniter(self, source, name, filename=None, state=None):
+    def tokeniter(
+        self,
+        source: str,
+        name: t.Optional[str],
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> t.Iterator[t.Tuple[int, str, str]]:
         """This method tokenizes the text and returns the tokens in a
-        generator.  Use this method if you just want to tokenize a template.
+        generator. Use this method if you just want to tokenize a template.
+
+        .. versionchanged:: 3.0
+            Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line
+            breaks.
         """
-        source = text_type(source)
-        lines = source.splitlines()
-        if self.keep_trailing_newline and source:
-            for newline in ("\r\n", "\r", "\n"):
-                if source.endswith(newline):
-                    lines.append("")
-                    break
+        lines = newline_re.split(source)[::2]
+
+        if not self.keep_trailing_newline and lines[-1] == "":
+            del lines[-1]
+
         source = "\n".join(lines)
|
||||
pos = 0
|
||||
lineno = 1
|
||||
stack = ["root"]
|
||||
|
||||
if state is not None and state != "root":
|
||||
assert state in ("variable", "block"), "invalid state"
|
||||
stack.append(state + "_begin")
|
||||
|
||||
statetokens = self.rules[stack[-1]]
|
||||
source_length = len(source)
|
||||
balancing_stack = []
|
||||
balancing_stack: t.List[str] = []
|
||||
lstrip_unless_re = self.lstrip_unless_re
|
||||
newlines_stripped = 0
|
||||
line_starting = True
|
||||
|
||||
while 1:
|
||||
while True:
|
||||
# tokenizer loop
|
||||
for regex, tokens, new_state in statetokens:
|
||||
m = regex.match(source, pos)
|
||||
|
||||
# if no match we try again with the next rule
|
||||
if m is None:
|
||||
continue
|
||||
@@ -711,7 +729,6 @@ def tokeniter(self, source, name, filename=None, state=None):
|
||||
# Rule supports lstrip. Match will look like
|
||||
# text, block type, whitespace control, type, control, ...
|
||||
text = groups[0]
|
||||
|
||||
# Skipping the text and first type, every other group is the
|
||||
# whitespace control for each type. One of the groups will be
|
||||
# -, +, or empty string instead of None.
|
||||
@@ -721,7 +738,7 @@ def tokeniter(self, source, name, filename=None, state=None):
|
||||
# Strip all whitespace between the text and the tag.
|
||||
stripped = text.rstrip()
|
||||
newlines_stripped = text[len(stripped) :].count("\n")
|
||||
groups = (stripped,) + groups[1:]
|
||||
groups = [stripped, *groups[1:]]
|
||||
elif (
|
||||
# Not marked for preserving whitespace.
|
||||
strip_sign != "+"
|
||||
@@ -732,11 +749,12 @@ def tokeniter(self, source, name, filename=None, state=None):
|
||||
):
|
||||
# The start of text between the last newline and the tag.
|
||||
l_pos = text.rfind("\n") + 1
|
||||
|
||||
if l_pos > 0 or line_starting:
|
||||
# If there's only whitespace between the newline and the
|
||||
# tag, strip it.
|
||||
if not lstrip_unless_re.search(text, l_pos):
|
||||
groups = (text[:l_pos],) + groups[1:]
|
||||
groups = [text[:l_pos], *groups[1:]]
|
||||
|
||||
for idx, token in enumerate(tokens):
|
||||
# failure group
|
||||
@@ -746,28 +764,30 @@ def tokeniter(self, source, name, filename=None, state=None):
|
||||
# yield for the current token the first named
|
||||
# group that matched
|
||||
elif token == "#bygroup":
|
||||
for key, value in iteritems(m.groupdict()):
|
||||
for key, value in m.groupdict().items():
|
||||
if value is not None:
|
||||
yield lineno, key, value
|
||||
lineno += value.count("\n")
|
||||
break
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"%r wanted to resolve "
|
||||
"the token dynamically"
|
||||
" but no group matched" % regex
|
||||
f"{regex!r} wanted to resolve the token dynamically"
|
||||
" but no group matched"
|
||||
)
|
||||
# normal group
|
||||
else:
|
||||
data = groups[idx]
|
||||
|
||||
if data or token not in ignore_if_empty:
|
||||
yield lineno, token, data
|
||||
|
||||
lineno += data.count("\n") + newlines_stripped
|
||||
newlines_stripped = 0
|
||||
|
||||
# strings as token just are yielded as it.
|
||||
else:
|
||||
data = m.group()
|
||||
|
||||
# update brace/parentheses balance
|
||||
if tokens == TOKEN_OPERATOR:
|
||||
if data == "{":
|
||||
@@ -779,24 +799,26 @@ def tokeniter(self, source, name, filename=None, state=None):
|
||||
elif data in ("}", ")", "]"):
|
||||
if not balancing_stack:
|
||||
raise TemplateSyntaxError(
|
||||
"unexpected '%s'" % data, lineno, name, filename
|
||||
f"unexpected '{data}'", lineno, name, filename
|
||||
)
|
||||
|
||||
expected_op = balancing_stack.pop()
|
||||
|
||||
if expected_op != data:
|
||||
raise TemplateSyntaxError(
|
||||
"unexpected '%s', "
|
||||
"expected '%s'" % (data, expected_op),
|
||||
f"unexpected '{data}', expected '{expected_op}'",
|
||||
lineno,
|
||||
name,
|
||||
filename,
|
||||
)
|
||||
|
||||
# yield items
|
||||
if data or tokens not in ignore_if_empty:
|
||||
yield lineno, tokens, data
|
||||
|
||||
lineno += data.count("\n")
|
||||
|
||||
line_starting = m.group()[-1:] == "\n"
|
||||
|
||||
# fetch new position into new variable so that we can check
|
||||
# if there is a internal parsing error which would result
|
||||
# in an infinite loop
|
||||
@@ -809,27 +831,28 @@ def tokeniter(self, source, name, filename=None, state=None):
|
||||
stack.pop()
|
||||
# resolve the new state by group checking
|
||||
elif new_state == "#bygroup":
|
||||
for key, value in iteritems(m.groupdict()):
|
||||
for key, value in m.groupdict().items():
|
||||
if value is not None:
|
||||
stack.append(key)
|
||||
break
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"%r wanted to resolve the "
|
||||
"new state dynamically but"
|
||||
" no group matched" % regex
|
||||
f"{regex!r} wanted to resolve the new state dynamically"
|
||||
f" but no group matched"
|
||||
)
|
||||
# direct state name given
|
||||
else:
|
||||
stack.append(new_state)
|
||||
|
||||
statetokens = self.rules[stack[-1]]
|
||||
# we are still at the same position and no stack change.
|
||||
# this means a loop without break condition, avoid that and
|
||||
# raise error
|
||||
elif pos2 == pos:
|
||||
raise RuntimeError(
|
||||
"%r yielded empty string without stack change" % regex
|
||||
f"{regex!r} yielded empty string without stack change"
|
||||
)
|
||||
|
||||
# publish new function and start again
|
||||
pos = pos2
|
||||
break
|
||||
@@ -839,10 +862,8 @@ def tokeniter(self, source, name, filename=None, state=None):
|
||||
# end of text
|
||||
if pos >= source_length:
|
||||
return
|
||||
|
||||
# something went wrong
|
||||
raise TemplateSyntaxError(
|
||||
"unexpected char %r at %d" % (source[pos], pos),
|
||||
lineno,
|
||||
name,
|
||||
filename,
|
||||
f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename
|
||||
)
|
||||
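As a usage sketch for the retyped lexer above (the template string is illustrative): `Environment.lex` drives `tokeniter` and yields the same `(lineno, token_type, value)` tuples that `wrap` later turns into `Token` objects.

from jinja2 import Environment

env = Environment()
# lex() exposes the raw tokeniter output through the public API.
for lineno, token_type, value in env.lex("Hello {{ name }}!"):
    print(lineno, token_type, value)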
@@ -1,33 +1,36 @@
# -*- coding: utf-8 -*-
"""API and implementations for loading templates from different data
sources.
"""
import importlib.util
import os
import sys
import typing as t
import weakref
import zipimport
from collections import abc
from hashlib import sha1
from os import path
from importlib import import_module
from types import ModuleType

from ._compat import abc
from ._compat import fspath
from ._compat import iteritems
from ._compat import string_types
from .exceptions import TemplateNotFound
from .utils import internalcode
from .utils import open_if_exists

if t.TYPE_CHECKING:
from .environment import Environment
from .environment import Template

def split_template_path(template):

def split_template_path(template: str) -> t.List[str]:
"""Split a path into segments and perform a sanity check. If it detects
'..' in the path it will raise a `TemplateNotFound` error.
"""
pieces = []
for piece in template.split("/"):
if (
path.sep in piece
or (path.altsep and path.altsep in piece)
or piece == path.pardir
os.path.sep in piece
or (os.path.altsep and os.path.altsep in piece)
or piece == os.path.pardir
):
raise TemplateNotFound(template)
elif piece and piece != ".":
@@ -35,7 +38,7 @@ def split_template_path(template):
return pieces
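A quick sketch of the sanity check above (template names illustrative): traversal segments raise `TemplateNotFound` instead of escaping the search path.

from jinja2 import TemplateNotFound
from jinja2.loaders import split_template_path

print(split_template_path("users/./detail.html"))  # ['users', 'detail.html']
try:
    split_template_path("../secrets.txt")
except TemplateNotFound:
    print("rejected")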


class BaseLoader(object):
class BaseLoader:
"""Baseclass for all loaders. Subclass this and override `get_source` to
implement a custom loading mechanism. The environment provides a
`get_template` method that calls the loader's `load` method to get the
@@ -57,8 +60,8 @@ def get_source(self, environment, template):
if not exists(path):
raise TemplateNotFound(template)
mtime = getmtime(path)
with file(path) as f:
source = f.read().decode('utf-8')
with open(path) as f:
source = f.read()
return source, path, lambda: mtime == getmtime(path)
"""

@@ -68,16 +71,18 @@ def get_source(self, environment, template):
#: .. versionadded:: 2.4
has_source_access = True

def get_source(self, environment, template):
def get_source(
self, environment: "Environment", template: str
) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
"""Get the template source, filename and reload helper for a template.
It's passed the environment and template name and has to return a
tuple in the form ``(source, filename, uptodate)`` or raise a
`TemplateNotFound` error if it can't locate the template.

The source part of the returned tuple must be the source of the
template as unicode string or a ASCII bytestring. The filename should
be the name of the file on the filesystem if it was loaded from there,
otherwise `None`. The filename is used by python for the tracebacks
template as a string. The filename should be the name of the
file on the filesystem if it was loaded from there, otherwise
``None``. The filename is used by Python for the tracebacks
if no loader extension is used.

The last item in the tuple is the `uptodate` function. If auto
@@ -88,18 +93,23 @@ def get_source(self, environment, template):
"""
if not self.has_source_access:
raise RuntimeError(
"%s cannot provide access to the source" % self.__class__.__name__
f"{type(self).__name__} cannot provide access to the source"
)
raise TemplateNotFound(template)

def list_templates(self):
def list_templates(self) -> t.List[str]:
"""Iterates over all templates. If the loader does not support that
it should raise a :exc:`TypeError` which is the default behavior.
"""
raise TypeError("this loader cannot iterate over all templates")

@internalcode
def load(self, environment, name, globals=None):
def load(
self,
environment: "Environment",
name: str,
globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
) -> "Template":
"""Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
override this method as loaders working on collections of other
@@ -139,44 +149,51 @@ def load(self, environment, name, globals=None):


class FileSystemLoader(BaseLoader):
"""Loads templates from the file system. This loader can find templates
in folders on the file system and is the preferred way to load them.
"""Load templates from a directory in the file system.

The loader takes the path to the templates as string, or if multiple
locations are wanted a list of them which is then looked up in the
given order::
The path can be relative or absolute. Relative paths are relative to
the current working directory.

>>> loader = FileSystemLoader('/path/to/templates')
>>> loader = FileSystemLoader(['/path/to/templates', '/other/path'])
.. code-block:: python

Per default the template encoding is ``'utf-8'`` which can be changed
by setting the `encoding` parameter to something else.
loader = FileSystemLoader("templates")

To follow symbolic links, set the *followlinks* parameter to ``True``::
A list of paths can be given. The directories will be searched in
order, stopping at the first matching template.

>>> loader = FileSystemLoader('/path/to/templates', followlinks=True)
.. code-block:: python

loader = FileSystemLoader(["/override/templates", "/default/templates"])

:param searchpath: A path, or list of paths, to the directory that
contains the templates.
:param encoding: Use this encoding to read the text from template
files.
:param followlinks: Follow symbolic links in the path.

.. versionchanged:: 2.8
The ``followlinks`` parameter was added.
Added the ``followlinks`` parameter.
"""

def __init__(self, searchpath, encoding="utf-8", followlinks=False):
if not isinstance(searchpath, abc.Iterable) or isinstance(
searchpath, string_types
):
def __init__(
self,
searchpath: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]],
encoding: str = "utf-8",
followlinks: bool = False,
) -> None:
if not isinstance(searchpath, abc.Iterable) or isinstance(searchpath, str):
searchpath = [searchpath]

# In Python 3.5, os.path.join doesn't support Path. This can be
# simplified to list(searchpath) when Python 3.5 is dropped.
self.searchpath = [fspath(p) for p in searchpath]

self.searchpath = [os.fspath(p) for p in searchpath]
self.encoding = encoding
self.followlinks = followlinks

def get_source(self, environment, template):
def get_source(
self, environment: "Environment", template: str
) -> t.Tuple[str, str, t.Callable[[], bool]]:
pieces = split_template_path(template)
for searchpath in self.searchpath:
filename = path.join(searchpath, *pieces)
filename = os.path.join(searchpath, *pieces)
f = open_if_exists(filename)
if f is None:
continue
@@ -185,18 +202,18 @@ def get_source(self, environment, template):
finally:
f.close()

mtime = path.getmtime(filename)
mtime = os.path.getmtime(filename)

def uptodate():
def uptodate() -> bool:
try:
return path.getmtime(filename) == mtime
return os.path.getmtime(filename) == mtime
except OSError:
return False

return contents, filename, uptodate
raise TemplateNotFound(template)

def list_templates(self):
def list_templates(self) -> t.List[str]:
found = set()
for searchpath in self.searchpath:
walk_dir = os.walk(searchpath, followlinks=self.followlinks)
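As a usage sketch for the new FileSystemLoader signature (paths and template names are illustrative):

from jinja2 import Environment, FileSystemLoader

# Search two directories in order; the first match wins.
env = Environment(
    loader=FileSystemLoader(["/override/templates", "/default/templates"])
)
template = env.get_template("index.html")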
@@ -215,105 +232,194 @@ def list_templates(self):


class PackageLoader(BaseLoader):
"""Load templates from python eggs or packages. It is constructed with
the name of the python package and the path to the templates in that
package::
"""Load templates from a directory in a Python package.

loader = PackageLoader('mypackage', 'views')
:param package_name: Import name of the package that contains the
template directory.
:param package_path: Directory within the imported package that
contains the templates.
:param encoding: Encoding of template files.

If the package path is not given, ``'templates'`` is assumed.
The following example looks up templates in the ``pages`` directory
within the ``project.ui`` package.

Per default the template encoding is ``'utf-8'`` which can be changed
by setting the `encoding` parameter to something else. Due to the nature
of eggs it's only possible to reload templates if the package was loaded
from the file system and not a zip file.
.. code-block:: python

loader = PackageLoader("project.ui", "pages")

Only packages installed as directories (standard pip behavior) or
zip/egg files (less common) are supported. The Python API for
introspecting data in packages is too limited to support other
installation methods the way this loader requires.

There is limited support for :pep:`420` namespace packages. The
template directory is assumed to only be in one namespace
contributor. Zip files contributing to a namespace are not
supported.

.. versionchanged:: 3.0
No longer uses ``setuptools`` as a dependency.

.. versionchanged:: 3.0
Limited PEP 420 namespace package support.
"""

def __init__(self, package_name, package_path="templates", encoding="utf-8"):
from pkg_resources import DefaultProvider
from pkg_resources import get_provider
from pkg_resources import ResourceManager
def __init__(
self,
package_name: str,
package_path: "str" = "templates",
encoding: str = "utf-8",
) -> None:
package_path = os.path.normpath(package_path).rstrip(os.path.sep)

# normpath preserves ".", which isn't valid in zip paths.
if package_path == os.path.curdir:
package_path = ""
elif package_path[:2] == os.path.curdir + os.path.sep:
package_path = package_path[2:]

provider = get_provider(package_name)
self.encoding = encoding
self.manager = ResourceManager()
self.filesystem_bound = isinstance(provider, DefaultProvider)
self.provider = provider
self.package_path = package_path
self.package_name = package_name
self.encoding = encoding

def get_source(self, environment, template):
pieces = split_template_path(template)
p = "/".join((self.package_path,) + tuple(pieces))
# Make sure the package exists. This also makes namespace
# packages work, otherwise get_loader returns None.
import_module(package_name)
spec = importlib.util.find_spec(package_name)
assert spec is not None, "An import spec was not found for the package."
loader = spec.loader
assert loader is not None, "A loader was not found for the package."
self._loader = loader
self._archive = None
template_root = None

if not self.provider.has_resource(p):
raise TemplateNotFound(template)
if isinstance(loader, zipimport.zipimporter):
self._archive = loader.archive
pkgdir = next(iter(spec.submodule_search_locations))  # type: ignore
template_root = os.path.join(pkgdir, package_path)
else:
roots: t.List[str] = []

filename = uptodate = None
# One element for regular packages, multiple for namespace
# packages, or None for single module file.
if spec.submodule_search_locations:
roots.extend(spec.submodule_search_locations)
# A single module file, use the parent directory instead.
elif spec.origin is not None:
roots.append(os.path.dirname(spec.origin))

if self.filesystem_bound:
filename = self.provider.get_resource_filename(self.manager, p)
mtime = path.getmtime(filename)
for root in roots:
root = os.path.join(root, package_path)

def uptodate():
try:
return path.getmtime(filename) == mtime
except OSError:
return False
if os.path.isdir(root):
template_root = root
break

source = self.provider.get_resource_string(self.manager, p)
return source.decode(self.encoding), filename, uptodate
if template_root is None:
raise ValueError(
f"The {package_name!r} package was not installed in a"
" way that PackageLoader understands."
)

def list_templates(self):
path = self.package_path
self._template_root = template_root

if path[:2] == "./":
path = path[2:]
elif path == ".":
path = ""
def get_source(
self, environment: "Environment", template: str
) -> t.Tuple[str, str, t.Optional[t.Callable[[], bool]]]:
p = os.path.join(self._template_root, *split_template_path(template))
up_to_date: t.Optional[t.Callable[[], bool]]

offset = len(path)
results = []
if self._archive is None:
# Package is a directory.
if not os.path.isfile(p):
raise TemplateNotFound(template)

def _walk(path):
for filename in self.provider.resource_listdir(path):
fullname = path + "/" + filename
with open(p, "rb") as f:
source = f.read()

if self.provider.resource_isdir(fullname):
_walk(fullname)
else:
results.append(fullname[offset:].lstrip("/"))
mtime = os.path.getmtime(p)

def up_to_date() -> bool:
return os.path.isfile(p) and os.path.getmtime(p) == mtime

else:
# Package is a zip file.
try:
source = self._loader.get_data(p)  # type: ignore
except OSError as e:
raise TemplateNotFound(template) from e

# Could use the zip's mtime for all template mtimes, but
# would need to safely reload the module if it's out of
# date, so just report it as always current.
up_to_date = None

return source.decode(self.encoding), p, up_to_date

def list_templates(self) -> t.List[str]:
results: t.List[str] = []

if self._archive is None:
# Package is a directory.
offset = len(self._template_root)

for dirpath, _, filenames in os.walk(self._template_root):
dirpath = dirpath[offset:].lstrip(os.path.sep)
results.extend(
os.path.join(dirpath, name).replace(os.path.sep, "/")
for name in filenames
)
else:
if not hasattr(self._loader, "_files"):
raise TypeError(
"This zip import does not have the required"
" metadata to list templates."
)

# Package is a zip file.
prefix = (
self._template_root[len(self._archive) :].lstrip(os.path.sep)
+ os.path.sep
)
offset = len(prefix)

for name in self._loader._files.keys():  # type: ignore
# Find names under the templates directory that aren't directories.
if name.startswith(prefix) and name[-1] != os.path.sep:
results.append(name[offset:].replace(os.path.sep, "/"))

_walk(path)
results.sort()
return results
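A minimal sketch of the rewritten PackageLoader (the package and directory names come from the docstring above and assume such a package is installed):

from jinja2 import Environment, PackageLoader

# The template root is resolved through importlib.util.find_spec,
# whether the package is installed as a directory or inside a zip.
env = Environment(loader=PackageLoader("project.ui", "pages"))
print(env.loader.list_templates())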


class DictLoader(BaseLoader):
"""Loads a template from a python dict. It's passed a dict of unicode
strings bound to template names. This loader is useful for unittesting:
"""Loads a template from a Python dict mapping template names to
template source. This loader is useful for unittesting:

>>> loader = DictLoader({'index.html': 'source here'})

Because auto reloading is rarely useful this is disabled per default.
"""

def __init__(self, mapping):
def __init__(self, mapping: t.Mapping[str, str]) -> None:
self.mapping = mapping

def get_source(self, environment, template):
def get_source(
self, environment: "Environment", template: str
) -> t.Tuple[str, None, t.Callable[[], bool]]:
if template in self.mapping:
source = self.mapping[template]
return source, None, lambda: source == self.mapping.get(template)
raise TemplateNotFound(template)

def list_templates(self):
def list_templates(self) -> t.List[str]:
return sorted(self.mapping)
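A usage sketch for DictLoader (template name and source illustrative):

from jinja2 import DictLoader, Environment

env = Environment(loader=DictLoader({"index.html": "Hello {{ name }}!"}))
print(env.get_template("index.html").render(name="World"))  # Hello World!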


class FunctionLoader(BaseLoader):
"""A loader that is passed a function which does the loading. The
function receives the name of the template and has to return either
an unicode string with the template source, a tuple in the form ``(source,
a string with the template source, a tuple in the form ``(source,
filename, uptodatefunc)`` or `None` if the template does not exist.

>>> def load_template(name):
@@ -328,15 +434,30 @@ class FunctionLoader(BaseLoader):
return value.
"""

def __init__(self, load_func):
def __init__(
self,
load_func: t.Callable[
[str],
t.Optional[
t.Union[
str, t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]
]
],
],
) -> None:
self.load_func = load_func

def get_source(self, environment, template):
def get_source(
self, environment: "Environment", template: str
) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
rv = self.load_func(template)

if rv is None:
raise TemplateNotFound(template)
elif isinstance(rv, string_types):

if isinstance(rv, str):
return rv, None, None

return rv
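A sketch matching the load_func contract above (the dict-backed lookup is illustrative):

from jinja2 import Environment, FunctionLoader

templates = {"index.html": "Hello {{ name }}!"}

def load_template(name):
    # Returning None signals TemplateNotFound; a plain string means
    # "no filename, never reload" per the (source, filename, uptodate) contract.
    return templates.get(name)

env = Environment(loader=FunctionLoader(load_template))
print(env.get_template("index.html").render(name="World"))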


@@ -355,40 +476,49 @@ class PrefixLoader(BaseLoader):
by loading ``'app2/index.html'`` the file from the second.
"""

def __init__(self, mapping, delimiter="/"):
def __init__(
self, mapping: t.Mapping[str, BaseLoader], delimiter: str = "/"
) -> None:
self.mapping = mapping
self.delimiter = delimiter

def get_loader(self, template):
def get_loader(self, template: str) -> t.Tuple[BaseLoader, str]:
try:
prefix, name = template.split(self.delimiter, 1)
loader = self.mapping[prefix]
except (ValueError, KeyError):
raise TemplateNotFound(template)
except (ValueError, KeyError) as e:
raise TemplateNotFound(template) from e
return loader, name

def get_source(self, environment, template):
def get_source(
self, environment: "Environment", template: str
) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
loader, name = self.get_loader(template)
try:
return loader.get_source(environment, name)
except TemplateNotFound:
except TemplateNotFound as e:
# re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(template)
raise TemplateNotFound(template) from e

@internalcode
def load(self, environment, name, globals=None):
def load(
self,
environment: "Environment",
name: str,
globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
) -> "Template":
loader, local_name = self.get_loader(name)
try:
return loader.load(environment, local_name, globals)
except TemplateNotFound:
except TemplateNotFound as e:
# re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(name)
raise TemplateNotFound(name) from e

def list_templates(self):
def list_templates(self) -> t.List[str]:
result = []
for prefix, loader in iteritems(self.mapping):
for prefix, loader in self.mapping.items():
for template in loader.list_templates():
result.append(prefix + self.delimiter + template)
return result
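A sketch of the prefix dispatch described above (loaders and names illustrative):

from jinja2 import DictLoader, Environment, PrefixLoader

env = Environment(
    loader=PrefixLoader(
        {
            "app1": DictLoader({"index.html": "app1 index"}),
            "app2": DictLoader({"index.html": "app2 index"}),
        }
    )
)
# The prefix before the delimiter picks the loader; the rest is the name.
print(env.get_template("app2/index.html").render())  # app2 index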
@@ -408,10 +538,12 @@ class ChoiceLoader(BaseLoader):
from a different location.
"""

def __init__(self, loaders):
def __init__(self, loaders: t.Sequence[BaseLoader]) -> None:
self.loaders = loaders

def get_source(self, environment, template):
def get_source(
self, environment: "Environment", template: str
) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]:
for loader in self.loaders:
try:
return loader.get_source(environment, template)
@@ -420,7 +552,12 @@ def get_source(self, environment, template):
raise TemplateNotFound(template)

@internalcode
def load(self, environment, name, globals=None):
def load(
self,
environment: "Environment",
name: str,
globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
) -> "Template":
for loader in self.loaders:
try:
return loader.load(environment, name, globals)
@@ -428,7 +565,7 @@ def load(self, environment, name, globals=None):
pass
raise TemplateNotFound(name)

def list_templates(self):
def list_templates(self) -> t.List[str]:
found = set()
for loader in self.loaders:
found.update(loader.list_templates())
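A sketch of first-match-wins lookup across loaders (contents illustrative):

from jinja2 import ChoiceLoader, DictLoader, Environment

env = Environment(
    loader=ChoiceLoader(
        [
            DictLoader({"special.html": "override"}),
            DictLoader({"special.html": "default", "base.html": "base"}),
        ]
    )
)
print(env.get_template("special.html").render())  # override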
@@ -454,17 +591,19 @@ class ModuleLoader(BaseLoader):

has_source_access = False

def __init__(self, path):
package_name = "_jinja2_module_templates_%x" % id(self)
def __init__(
self, path: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]]
) -> None:
package_name = f"_jinja2_module_templates_{id(self):x}"

# create a fake module that looks for the templates in the
# path given.
mod = _TemplateModule(package_name)

if not isinstance(path, abc.Iterable) or isinstance(path, string_types):
if not isinstance(path, abc.Iterable) or isinstance(path, str):
path = [path]

mod.__path__ = [fspath(p) for p in path]
mod.__path__ = [os.fspath(p) for p in path]  # type: ignore

sys.modules[package_name] = weakref.proxy(
mod, lambda x: sys.modules.pop(package_name, None)
@@ -477,28 +616,37 @@ def __init__(self, path):
self.package_name = package_name

@staticmethod
def get_template_key(name):
def get_template_key(name: str) -> str:
return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()

@staticmethod
def get_module_filename(name):
def get_module_filename(name: str) -> str:
return ModuleLoader.get_template_key(name) + ".py"

@internalcode
def load(self, environment, name, globals=None):
def load(
self,
environment: "Environment",
name: str,
globals: t.Optional[t.MutableMapping[str, t.Any]] = None,
) -> "Template":
key = self.get_template_key(name)
module = "%s.%s" % (self.package_name, key)
module = f"{self.package_name}.{key}"
mod = getattr(self.module, module, None)

if mod is None:
try:
mod = __import__(module, None, None, ["root"])
except ImportError:
raise TemplateNotFound(name)
except ImportError as e:
raise TemplateNotFound(name) from e

# remove the entry from sys.modules, we only want the attribute
# on the module object we have stored on the loader.
sys.modules.pop(module, None)

if globals is None:
globals = {}

return environment.template_class.from_module_dict(
environment, mod.__dict__, globals
)
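A sketch of the ahead-of-time workflow ModuleLoader serves (the target directory name is illustrative): compile templates once, then load them without source access.

from jinja2 import DictLoader, Environment, ModuleLoader

src_env = Environment(loader=DictLoader({"index.html": "Hello {{ name }}!"}))
# zip=None writes plain .py modules into the target directory.
src_env.compile_templates("compiled_templates", zip=None)

env = Environment(loader=ModuleLoader("compiled_templates"))
print(env.get_template("index.html").render(name="World"))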
@@ -1,32 +1,36 @@
# -*- coding: utf-8 -*-
"""Functions that expose information about templates that might be
interesting for introspection.
"""
import typing as t

from . import nodes
from ._compat import iteritems
from ._compat import string_types
from .compiler import CodeGenerator
from .compiler import Frame

if t.TYPE_CHECKING:
from .environment import Environment


class TrackingCodeGenerator(CodeGenerator):
"""We abuse the code generator for introspection."""

def __init__(self, environment):
CodeGenerator.__init__(self, environment, "<introspection>", "<introspection>")
self.undeclared_identifiers = set()
def __init__(self, environment: "Environment") -> None:
super().__init__(environment, "<introspection>", "<introspection>")
self.undeclared_identifiers: t.Set[str] = set()

def write(self, x):
def write(self, x: str) -> None:
"""Don't write."""

def enter_frame(self, frame):
def enter_frame(self, frame: Frame) -> None:
"""Remember all undeclared identifiers."""
CodeGenerator.enter_frame(self, frame)
for _, (action, param) in iteritems(frame.symbols.loads):
super().enter_frame(frame)

for _, (action, param) in frame.symbols.loads.items():
if action == "resolve" and param not in self.environment.globals:
self.undeclared_identifiers.add(param)


def find_undeclared_variables(ast):
def find_undeclared_variables(ast: nodes.Template) -> t.Set[str]:
"""Returns a set of all variables in the AST that will be looked up from
the context at runtime. Because at compile time it's not known which
variables will be used depending on the path the execution takes at
@@ -35,7 +39,7 @@ def find_undeclared_variables(ast):
>>> from jinja2 import Environment, meta
>>> env = Environment()
>>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
>>> meta.find_undeclared_variables(ast) == set(['bar'])
>>> meta.find_undeclared_variables(ast) == {'bar'}
True

.. admonition:: Implementation
@@ -45,12 +49,16 @@ def find_undeclared_variables(ast):
:exc:`TemplateAssertionError` during compilation and as a matter of
fact this function can currently raise that exception as well.
"""
codegen = TrackingCodeGenerator(ast.environment)
codegen = TrackingCodeGenerator(ast.environment)  # type: ignore
codegen.visit(ast)
return codegen.undeclared_identifiers


def find_referenced_templates(ast):
_ref_types = (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
_RefType = t.Union[nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include]


def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]]:
"""Finds all the referenced templates from the AST. This will return an
iterator over all the hardcoded template extensions, inclusions and
imports. If dynamic inheritance or inclusion is used, `None` will be
@@ -65,17 +73,19 @@ def find_referenced_templates(ast):
This function is useful for dependency tracking. For example if you want
to rebuild parts of the website after a layout template has changed.
"""
for node in ast.find_all(
(nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)
):
if not isinstance(node.template, nodes.Const):
template_name: t.Any

for node in ast.find_all(_ref_types):
template: nodes.Expr = node.template  # type: ignore

if not isinstance(template, nodes.Const):
# a tuple with some non consts in there
if isinstance(node.template, (nodes.Tuple, nodes.List)):
for template_name in node.template.items:
if isinstance(template, (nodes.Tuple, nodes.List)):
for template_name in template.items:
# something const, only yield the strings and ignore
# non-string consts that really just make no sense
if isinstance(template_name, nodes.Const):
if isinstance(template_name.value, string_types):
if isinstance(template_name.value, str):
yield template_name.value
# something dynamic in there
else:
@@ -85,16 +95,16 @@ def find_referenced_templates(ast):
yield None
continue
# constant is a basestring, direct template name
if isinstance(node.template.value, string_types):
yield node.template.value
if isinstance(template.value, str):
yield template.value
# a tuple or list (latter *should* not happen) made of consts,
# yield the consts that are strings. We could warn here for
# non string values
elif isinstance(node, nodes.Include) and isinstance(
node.template.value, (tuple, list)
template.value, (tuple, list)
):
for template_name in node.template.value:
if isinstance(template_name, string_types):
for template_name in template.value:
if isinstance(template_name, str):
yield template_name
# something else we don't care about, we could warn here
else:
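A quick sketch of both meta helpers together (the template source is illustrative):

from jinja2 import Environment, meta

env = Environment()
ast = env.parse("{% extends 'base.html' %}{{ title }}")
print(list(meta.find_referenced_templates(ast)))  # ['base.html']
print(meta.find_undeclared_variables(ast))        # {'title'}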
124  lib/spack/external/_vendoring/jinja2/nativetypes.py  vendored  Normal file
@@ -0,0 +1,124 @@
import typing as t
from ast import literal_eval
from ast import parse
from itertools import chain
from itertools import islice

from . import nodes
from .compiler import CodeGenerator
from .compiler import Frame
from .compiler import has_safe_repr
from .environment import Environment
from .environment import Template


def native_concat(values: t.Iterable[t.Any]) -> t.Optional[t.Any]:
"""Return a native Python type from the list of compiled nodes. If
the result is a single node, its value is returned. Otherwise, the
nodes are concatenated as strings. If the result can be parsed with
:func:`ast.literal_eval`, the parsed value is returned. Otherwise,
the string is returned.

:param values: Iterable of outputs to concatenate.
"""
head = list(islice(values, 2))

if not head:
return None

if len(head) == 1:
raw = head[0]
if not isinstance(raw, str):
return raw
else:
raw = "".join([str(v) for v in chain(head, values)])

try:
return literal_eval(
# In Python 3.10+ ast.literal_eval removes leading spaces/tabs
# from the given string. For backwards compatibility we need to
# parse the string ourselves without removing leading spaces/tabs.
parse(raw, mode="eval")
)
except (ValueError, SyntaxError, MemoryError):
return raw


class NativeCodeGenerator(CodeGenerator):
"""A code generator which renders Python types by not adding
``str()`` around output nodes.
"""

@staticmethod
def _default_finalize(value: t.Any) -> t.Any:
return value

def _output_const_repr(self, group: t.Iterable[t.Any]) -> str:
return repr("".join([str(v) for v in group]))

def _output_child_to_const(
self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
) -> t.Any:
const = node.as_const(frame.eval_ctx)

if not has_safe_repr(const):
raise nodes.Impossible()

if isinstance(node, nodes.TemplateData):
return const

return finalize.const(const)  # type: ignore

def _output_child_pre(
self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
) -> None:
if finalize.src is not None:
self.write(finalize.src)

def _output_child_post(
self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
) -> None:
if finalize.src is not None:
self.write(")")


class NativeEnvironment(Environment):
"""An environment that renders templates to native Python types."""

code_generator_class = NativeCodeGenerator


class NativeTemplate(Template):
environment_class = NativeEnvironment

def render(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
"""Render the template to produce a native Python type. If the
result is a single node, its value is returned. Otherwise, the
nodes are concatenated as strings. If the result can be parsed
with :func:`ast.literal_eval`, the parsed value is returned.
Otherwise, the string is returned.
"""
ctx = self.new_context(dict(*args, **kwargs))

try:
return native_concat(self.root_render_func(ctx))  # type: ignore
except Exception:
return self.environment.handle_exception()

async def render_async(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
if not self.environment.is_async:
raise RuntimeError(
"The environment was not created with async mode enabled."
)

ctx = self.new_context(dict(*args, **kwargs))

try:
return native_concat(
[n async for n in self.root_render_func(ctx)]  # type: ignore
)
except Exception:
return self.environment.handle_exception()


NativeEnvironment.template_class = NativeTemplate
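A sketch of the native rendering behavior documented above (expression illustrative): the rendered output is literal_eval'd back into a Python value instead of staying a string.

from jinja2.nativetypes import NativeEnvironment

env = NativeEnvironment()
result = env.from_string("{{ x + y }}").render(x=4, y=2)
print(result, type(result))  # 6 <class 'int'>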
File diff suppressed because it is too large
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""The optimizer tries to constant fold expressions and modify the AST
in place so that it should be faster to evaluate.

@@ -8,23 +7,30 @@
would have a different scope. The solution would be a second syntax tree
that stored the scoping rules.
"""
import typing as t

from . import nodes
from .visitor import NodeTransformer

if t.TYPE_CHECKING:
from .environment import Environment

def optimize(node, environment):

def optimize(node: nodes.Node, environment: "Environment") -> nodes.Node:
"""The context hint can be used to perform an static optimization
based on the context given."""
optimizer = Optimizer(environment)
return optimizer.visit(node)
return t.cast(nodes.Node, optimizer.visit(node))


class Optimizer(NodeTransformer):
def __init__(self, environment):
def __init__(self, environment: "t.Optional[Environment]") -> None:
self.environment = environment

def generic_visit(self, node, *args, **kwargs):
node = super(Optimizer, self).generic_visit(node, *args, **kwargs)
def generic_visit(
self, node: nodes.Node, *args: t.Any, **kwargs: t.Any
) -> nodes.Node:
node = super().generic_visit(node, *args, **kwargs)

# Do constant folding. Some other nodes besides Expr have
# as_const, but folding them causes errors later on.
@@ -1,12 +1,20 @@
# -*- coding: utf-8 -*-
"""Parse tokens from the lexer into nodes for the compiler."""
import typing
import typing as t

from . import nodes
from ._compat import imap
from .exceptions import TemplateAssertionError
from .exceptions import TemplateSyntaxError
from .lexer import describe_token
from .lexer import describe_token_expr

if t.TYPE_CHECKING:
import typing_extensions as te
from .environment import Environment

_ImportInclude = t.TypeVar("_ImportInclude", nodes.Import, nodes.Include)
_MacroCall = t.TypeVar("_MacroCall", nodes.Macro, nodes.CallBlock)

_statement_keywords = frozenset(
[
"for",
@@ -25,7 +33,7 @@
)
_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"])

_math_nodes = {
_math_nodes: t.Dict[str, t.Type[nodes.Expr]] = {
"add": nodes.Add,
"sub": nodes.Sub,
"mul": nodes.Mul,
@@ -35,26 +43,40 @@
}


class Parser(object):
class Parser:
"""This is the central parsing class Jinja uses. It's passed to
extensions and can be used to parse expressions or statements.
"""

def __init__(self, environment, source, name=None, filename=None, state=None):
def __init__(
self,
environment: "Environment",
source: str,
name: t.Optional[str] = None,
filename: t.Optional[str] = None,
state: t.Optional[str] = None,
) -> None:
self.environment = environment
self.stream = environment._tokenize(source, name, filename, state)
self.name = name
self.filename = filename
self.closed = False
self.extensions = {}
self.extensions: t.Dict[
str, t.Callable[["Parser"], t.Union[nodes.Node, t.List[nodes.Node]]]
] = {}
for extension in environment.iter_extensions():
for tag in extension.tags:
self.extensions[tag] = extension.parse
self._last_identifier = 0
self._tag_stack = []
self._end_token_stack = []
self._tag_stack: t.List[str] = []
self._end_token_stack: t.List[t.Tuple[str, ...]] = []

def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
def fail(
self,
msg: str,
lineno: t.Optional[int] = None,
exc: t.Type[TemplateSyntaxError] = TemplateSyntaxError,
) -> "te.NoReturn":
"""Convenience method that raises `exc` with the message, passed
line number or last line number as well as the current name and
filename.
@@ -63,13 +85,18 @@ def fail(self, msg, lineno=None, exc=TemplateSyntaxError):
lineno = self.stream.current.lineno
raise exc(msg, lineno, self.name, self.filename)

def _fail_ut_eof(self, name, end_token_stack, lineno):
expected = []
def _fail_ut_eof(
self,
name: t.Optional[str],
end_token_stack: t.List[t.Tuple[str, ...]],
lineno: t.Optional[int],
) -> "te.NoReturn":
expected: t.Set[str] = set()
for exprs in end_token_stack:
expected.extend(imap(describe_token_expr, exprs))
expected.update(map(describe_token_expr, exprs))
if end_token_stack:
currently_looking = " or ".join(
"'%s'" % describe_token_expr(expr) for expr in end_token_stack[-1]
currently_looking: t.Optional[str] = " or ".join(
map(repr, map(describe_token_expr, end_token_stack[-1]))
)
else:
currently_looking = None
@@ -77,59 +104,65 @@ def _fail_ut_eof(self, name, end_token_stack, lineno):
if name is None:
message = ["Unexpected end of template."]
else:
message = ["Encountered unknown tag '%s'." % name]
message = [f"Encountered unknown tag {name!r}."]

if currently_looking:
if name is not None and name in expected:
message.append(
"You probably made a nesting mistake. Jinja "
"is expecting this tag, but currently looking "
"for %s." % currently_looking
"You probably made a nesting mistake. Jinja is expecting this tag,"
f" but currently looking for {currently_looking}."
)
else:
message.append(
"Jinja was looking for the following tags: "
"%s." % currently_looking
f"Jinja was looking for the following tags: {currently_looking}."
)

if self._tag_stack:
message.append(
"The innermost block that needs to be "
"closed is '%s'." % self._tag_stack[-1]
"The innermost block that needs to be closed is"
f" {self._tag_stack[-1]!r}."
)

self.fail(" ".join(message), lineno)

def fail_unknown_tag(self, name, lineno=None):
def fail_unknown_tag(
self, name: str, lineno: t.Optional[int] = None
) -> "te.NoReturn":
"""Called if the parser encounters an unknown tag. Tries to fail
with a human readable error message that could help to identify
the problem.
"""
return self._fail_ut_eof(name, self._end_token_stack, lineno)
self._fail_ut_eof(name, self._end_token_stack, lineno)

def fail_eof(self, end_tokens=None, lineno=None):
def fail_eof(
self,
end_tokens: t.Optional[t.Tuple[str, ...]] = None,
lineno: t.Optional[int] = None,
) -> "te.NoReturn":
"""Like fail_unknown_tag but for end of template situations."""
stack = list(self._end_token_stack)
if end_tokens is not None:
stack.append(end_tokens)
return self._fail_ut_eof(None, stack, lineno)
self._fail_ut_eof(None, stack, lineno)

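A sketch of driving this parser through the public API (the template source is illustrative): `Environment.parse` constructs a `Parser` internally and returns the root AST node.

from jinja2 import Environment, nodes

env = Environment()
# parse() returns the root nodes.Template node of the AST.
ast = env.parse("{% for item in seq %}{{ item }}{% endfor %}")
print(isinstance(ast, nodes.Template))  # True
print(ast.find(nodes.For) is not None)  # True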
def is_tuple_end(self, extra_end_rules=None):
def is_tuple_end(
self, extra_end_rules: t.Optional[t.Tuple[str, ...]] = None
) -> bool:
"""Are we at the end of a tuple?"""
if self.stream.current.type in ("variable_end", "block_end", "rparen"):
return True
elif extra_end_rules is not None:
return self.stream.current.test_any(extra_end_rules)
return self.stream.current.test_any(extra_end_rules)  # type: ignore
return False

def free_identifier(self, lineno=None):
def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName:
"""Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
self._last_identifier += 1
rv = object.__new__(nodes.InternalName)
nodes.Node.__init__(rv, "fi%d" % self._last_identifier, lineno=lineno)
return rv
nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno)
return rv  # type: ignore

def parse_statement(self):
def parse_statement(self) -> t.Union[nodes.Node, t.List[nodes.Node]]:
"""Parse a single statement."""
token = self.stream.current
if token.type != "name":
@@ -138,7 +171,8 @@ def parse_statement(self):
pop_tag = True
try:
if token.value in _statement_keywords:
return getattr(self, "parse_" + self.stream.current.value)()
f = getattr(self, f"parse_{self.stream.current.value}")
return f()  # type: ignore
if token.value == "call":
return self.parse_call_block()
if token.value == "filter":
@@ -157,7 +191,9 @@ def parse_statement(self):
if pop_tag:
self._tag_stack.pop()

def parse_statements(self, end_tokens, drop_needle=False):
def parse_statements(
self, end_tokens: t.Tuple[str, ...], drop_needle: bool = False
) -> t.List[nodes.Node]:
"""Parse multiple statements into a list until one of the end tokens
is reached. This is used to parse the body of statements as it also
parses template data if appropriate. The parser checks first if the
@@ -184,7 +220,7 @@ def parse_statements(self, end_tokens, drop_needle=False):
next(self.stream)
return result

def parse_set(self):
def parse_set(self) -> t.Union[nodes.Assign, nodes.AssignBlock]:
"""Parse an assign statement."""
lineno = next(self.stream).lineno
target = self.parse_assign_target(with_namespace=True)
@@ -195,7 +231,7 @@ def parse_set(self):
body = self.parse_statements(("name:endset",), drop_needle=True)
return nodes.AssignBlock(target, filter_node, body, lineno=lineno)

def parse_for(self):
def parse_for(self) -> nodes.For:
"""Parse a for loop."""
lineno = self.stream.expect("name:for").lineno
target = self.parse_assign_target(extra_end_rules=("name:in",))
@@ -214,10 +250,10 @@ def parse_for(self):
else_ = self.parse_statements(("name:endfor",), drop_needle=True)
return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno)

def parse_if(self):
def parse_if(self) -> nodes.If:
"""Parse an if construct."""
node = result = nodes.If(lineno=self.stream.expect("name:if").lineno)
while 1:
while True:
node.test = self.parse_tuple(with_condexpr=False)
node.body = self.parse_statements(("name:elif", "name:else", "name:endif"))
node.elif_ = []
@@ -232,10 +268,10 @@ def parse_if(self):
break
return result

def parse_with(self):
def parse_with(self) -> nodes.With:
node = nodes.With(lineno=next(self.stream).lineno)
targets = []
values = []
targets: t.List[nodes.Expr] = []
values: t.List[nodes.Expr] = []
while self.stream.current.type != "block_end":
if targets:
self.stream.expect("comma")
@@ -249,37 +285,50 @@ def parse_with(self):
node.body = self.parse_statements(("name:endwith",), drop_needle=True)
return node

def parse_autoescape(self):
def parse_autoescape(self) -> nodes.Scope:
node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno)
node.options = [nodes.Keyword("autoescape", self.parse_expression())]
node.body = self.parse_statements(("name:endautoescape",), drop_needle=True)
return nodes.Scope([node])

def parse_block(self):
def parse_block(self) -> nodes.Block:
node = nodes.Block(lineno=next(self.stream).lineno)
node.name = self.stream.expect("name").value
node.scoped = self.stream.skip_if("name:scoped")
node.required = self.stream.skip_if("name:required")

# common problem people encounter when switching from django
# to jinja. we do not support hyphens in block names, so let's
# raise a nicer error message in that case.
if self.stream.current.type == "sub":
self.fail(
"Block names in Jinja have to be valid Python "
"identifiers and may not contain hyphens, use an "
"underscore instead."
"Block names in Jinja have to be valid Python identifiers and may not"
" contain hyphens, use an underscore instead."
)

node.body = self.parse_statements(("name:endblock",), drop_needle=True)

# enforce that required blocks only contain whitespace or comments
# by asserting that the body, if not empty, is just TemplateData nodes
# with whitespace data
if node.required and not all(
isinstance(child, nodes.TemplateData) and child.data.isspace()
for body in node.body
for child in body.nodes  # type: ignore
):
self.fail("Required blocks can only contain comments or whitespace")

self.stream.skip_if("name:" + node.name)
return node

def parse_extends(self):
def parse_extends(self) -> nodes.Extends:
node = nodes.Extends(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
return node

def parse_import_context(self, node, default):
def parse_import_context(
self, node: _ImportInclude, default: bool
) -> _ImportInclude:
if self.stream.current.test_any(
"name:with", "name:without"
) and self.stream.look().test("name:context"):
@@ -289,7 +338,7 @@ def parse_import_context(self, node, default):
node.with_context = default
return node

def parse_include(self):
def parse_include(self) -> nodes.Include:
node = nodes.Include(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
if self.stream.current.test("name:ignore") and self.stream.look().test(
@@ -301,30 +350,34 @@ def parse_include(self):
node.ignore_missing = False
return self.parse_import_context(node, True)

def parse_import(self):
def parse_import(self) -> nodes.Import:
node = nodes.Import(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
self.stream.expect("name:as")
node.target = self.parse_assign_target(name_only=True).name
return self.parse_import_context(node, False)

def parse_from(self):
def parse_from(self) -> nodes.FromImport:
node = nodes.FromImport(lineno=next(self.stream).lineno)
node.template = self.parse_expression()
self.stream.expect("name:import")
node.names = []

def parse_context():
if self.stream.current.value in (
"with",
"without",
) and self.stream.look().test("name:context"):
def parse_context() -> bool:
if (
self.stream.current.value
in {
"with",
"without",
}
and self.stream.look().test("name:context")
):
node.with_context = next(self.stream).value == "with"
self.stream.skip()
return True
return False

while 1:
while True:
if node.names:
self.stream.expect("comma")
if self.stream.current.type == "name":
@@ -350,9 +403,9 @@ def parse_context():
node.with_context = False
return node

def parse_signature(self, node):
node.args = args = []
node.defaults = defaults = []
def parse_signature(self, node: _MacroCall) -> None:
args = node.args = []
defaults = node.defaults = []
self.stream.expect("lparen")
while self.stream.current.type != "rparen":
if args:
@@ -366,7 +419,7 @@ def parse_signature(self, node):
args.append(arg)
self.stream.expect("rparen")

def parse_call_block(self):
def parse_call_block(self) -> nodes.CallBlock:
node = nodes.CallBlock(lineno=next(self.stream).lineno)
if self.stream.current.type == "lparen":
self.parse_signature(node)
@@ -374,26 +427,27 @@ def parse_call_block(self):
node.args = []
node.defaults = []

node.call = self.parse_expression()
if not isinstance(node.call, nodes.Call):
call_node = self.parse_expression()
if not isinstance(call_node, nodes.Call):
self.fail("expected call", node.lineno)
node.call = call_node
node.body = self.parse_statements(("name:endcall",), drop_needle=True)
return node

def parse_filter_block(self):
def parse_filter_block(self) -> nodes.FilterBlock:
node = nodes.FilterBlock(lineno=next(self.stream).lineno)
node.filter = self.parse_filter(None, start_inline=True)
node.filter = self.parse_filter(None, start_inline=True)  # type: ignore
node.body = self.parse_statements(("name:endfilter",), drop_needle=True)
return node

def parse_macro(self):
def parse_macro(self) -> nodes.Macro:
node = nodes.Macro(lineno=next(self.stream).lineno)
node.name = self.parse_assign_target(name_only=True).name
self.parse_signature(node)
node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
return node

def parse_print(self):
def parse_print(self) -> nodes.Output:
node = nodes.Output(lineno=next(self.stream).lineno)
node.nodes = []
while self.stream.current.type != "block_end":
@@ -402,13 +456,29 @@ def parse_print(self):
node.nodes.append(self.parse_expression())
return node

@typing.overload
def parse_assign_target(
self, with_tuple: bool = ..., name_only: "te.Literal[True]" = ...
) -> nodes.Name:
...

@typing.overload
def parse_assign_target(
self,
with_tuple=True,
name_only=False,
extra_end_rules=None,
with_namespace=False,
):
with_tuple: bool = True,
name_only: bool = False,
extra_end_rules: t.Optional[t.Tuple[str, ...]] = None,
with_namespace: bool = False,
) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]:
...

def parse_assign_target(
self,
with_tuple: bool = True,
name_only: bool = False,
extra_end_rules: t.Optional[t.Tuple[str, ...]] = None,
with_namespace: bool = False,
) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]:
|
||||
"""Parse an assignment target. As Jinja allows assignments to
|
||||
tuples, this function can parse all allowed assignment targets. Per
|
||||
default assignments to tuples are parsed, that can be disable however
|
||||
@@ -417,6 +487,8 @@ def parse_assign_target(
|
||||
parameter is forwarded to the tuple parsing function. If
|
||||
`with_namespace` is enabled, a namespace assignment may be parsed.
|
||||
"""
|
||||
target: nodes.Expr
|
||||
|
||||
if with_namespace and self.stream.look().type == "dot":
|
||||
token = self.stream.expect("name")
|
||||
next(self.stream) # dot
|
||||
@@ -432,14 +504,17 @@ def parse_assign_target(
|
||||
)
|
||||
else:
|
||||
target = self.parse_primary()
|
||||
|
||||
target.set_ctx("store")
|
||||
|
||||
if not target.can_assign():
|
||||
self.fail(
|
||||
"can't assign to %r" % target.__class__.__name__.lower(), target.lineno
|
||||
f"can't assign to {type(target).__name__.lower()!r}", target.lineno
|
||||
)
|
||||
return target
|
||||
|
||||
def parse_expression(self, with_condexpr=True):
|
||||
return target # type: ignore
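# A sketch of what the ``with_namespace`` branch accepts, the standard
# namespace() pattern (available since Jinja 2.10):
#
#   {% set ns = namespace(found=false) %}
#   {% for item in items %}
#     {% if item.check %}{% set ns.found = true %}{% endif %}
#   {% endfor %}
#
# ``ns.found`` on the left of ``set`` parses as nodes.NSRef rather than
# a plain nodes.Name, which is how assignments inside the loop reach
# the enclosing namespace object.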
|
||||
|
||||
def parse_expression(self, with_condexpr: bool = True) -> nodes.Expr:
|
||||
"""Parse an expression. Per default all expressions are parsed, if
|
||||
the optional `with_condexpr` parameter is set to `False` conditional
|
||||
expressions are not parsed.
|
||||
@@ -448,9 +523,11 @@ def parse_expression(self, with_condexpr=True):
|
||||
return self.parse_condexpr()
|
||||
return self.parse_or()
|
||||
|
||||
def parse_condexpr(self):
|
||||
def parse_condexpr(self) -> nodes.Expr:
|
||||
lineno = self.stream.current.lineno
|
||||
expr1 = self.parse_or()
|
||||
expr3: t.Optional[nodes.Expr]
|
||||
|
||||
while self.stream.skip_if("name:if"):
|
||||
expr2 = self.parse_or()
|
||||
if self.stream.skip_if("name:else"):
|
||||
@@ -461,7 +538,7 @@ def parse_condexpr(self):
|
||||
lineno = self.stream.current.lineno
|
||||
return expr1
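# The loop above implements the inline conditional, e.g.
#   {{ "even" if n % 2 == 0 else "odd" }}
# If the ``else`` part is omitted, expr3 stays None and evaluating the
# false branch at runtime yields an undefined error.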
|
||||
|
||||
def parse_or(self):
|
||||
def parse_or(self) -> nodes.Expr:
|
||||
lineno = self.stream.current.lineno
|
||||
left = self.parse_and()
|
||||
while self.stream.skip_if("name:or"):
|
||||
@@ -470,7 +547,7 @@ def parse_or(self):
|
||||
lineno = self.stream.current.lineno
|
||||
return left
|
||||
|
||||
def parse_and(self):
|
||||
def parse_and(self) -> nodes.Expr:
|
||||
lineno = self.stream.current.lineno
|
||||
left = self.parse_not()
|
||||
while self.stream.skip_if("name:and"):
|
||||
@@ -479,17 +556,17 @@ def parse_and(self):
|
||||
lineno = self.stream.current.lineno
|
||||
return left
|
||||
|
||||
def parse_not(self):
|
||||
def parse_not(self) -> nodes.Expr:
|
||||
if self.stream.current.test("name:not"):
|
||||
lineno = next(self.stream).lineno
|
||||
return nodes.Not(self.parse_not(), lineno=lineno)
|
||||
return self.parse_compare()
|
||||
|
||||
def parse_compare(self):
|
||||
def parse_compare(self) -> nodes.Expr:
|
||||
lineno = self.stream.current.lineno
|
||||
expr = self.parse_math1()
|
||||
ops = []
|
||||
while 1:
|
||||
while True:
|
||||
token_type = self.stream.current.type
|
||||
if token_type in _compare_operators:
|
||||
next(self.stream)
|
||||
@@ -508,7 +585,7 @@ def parse_compare(self):
|
||||
return expr
|
||||
return nodes.Compare(expr, ops, lineno=lineno)
|
||||
|
||||
def parse_math1(self):
|
||||
def parse_math1(self) -> nodes.Expr:
|
||||
lineno = self.stream.current.lineno
|
||||
left = self.parse_concat()
|
||||
while self.stream.current.type in ("add", "sub"):
|
||||
@@ -519,7 +596,7 @@ def parse_math1(self):
|
||||
lineno = self.stream.current.lineno
|
||||
return left
|
||||
|
||||
def parse_concat(self):
|
||||
def parse_concat(self) -> nodes.Expr:
|
||||
lineno = self.stream.current.lineno
|
||||
args = [self.parse_math2()]
|
||||
while self.stream.current.type == "tilde":
|
||||
@@ -529,7 +606,7 @@ def parse_concat(self):
|
||||
return args[0]
|
||||
return nodes.Concat(args, lineno=lineno)
|
||||
|
||||
def parse_math2(self):
|
||||
def parse_math2(self) -> nodes.Expr:
|
||||
lineno = self.stream.current.lineno
|
||||
left = self.parse_pow()
|
||||
while self.stream.current.type in ("mul", "div", "floordiv", "mod"):
|
||||
@@ -540,7 +617,7 @@ def parse_math2(self):
|
||||
lineno = self.stream.current.lineno
|
||||
return left
|
||||
|
||||
def parse_pow(self):
|
||||
def parse_pow(self) -> nodes.Expr:
|
||||
lineno = self.stream.current.lineno
|
||||
left = self.parse_unary()
|
||||
while self.stream.current.type == "pow":
|
||||
@@ -550,9 +627,11 @@ def parse_pow(self):
|
||||
lineno = self.stream.current.lineno
|
||||
return left
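# Taken together, parse_or through parse_unary form the expression
# precedence ladder, loosest binding first:
#   or < and < not < comparisons < + - < ~ (concat) < * / // % < ** < unary + -
# so ``{{ not a or b ~ c * 2 }}`` parses as ``(not a) or (b ~ (c * 2))``.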
|
||||
|
||||
def parse_unary(self, with_filter=True):
|
||||
def parse_unary(self, with_filter: bool = True) -> nodes.Expr:
|
||||
token_type = self.stream.current.type
|
||||
lineno = self.stream.current.lineno
|
||||
node: nodes.Expr
|
||||
|
||||
if token_type == "sub":
|
||||
next(self.stream)
|
||||
node = nodes.Neg(self.parse_unary(False), lineno=lineno)
|
||||
@@ -566,8 +645,9 @@ def parse_unary(self, with_filter=True):
|
||||
node = self.parse_filter_expr(node)
|
||||
return node
|
||||
|
||||
def parse_primary(self):
|
||||
def parse_primary(self) -> nodes.Expr:
|
||||
token = self.stream.current
|
||||
node: nodes.Expr
|
||||
if token.type == "name":
|
||||
if token.value in ("true", "false", "True", "False"):
|
||||
node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno)
|
||||
@@ -596,16 +676,16 @@ def parse_primary(self):
|
||||
elif token.type == "lbrace":
|
||||
node = self.parse_dict()
|
||||
else:
|
||||
self.fail("unexpected '%s'" % describe_token(token), token.lineno)
|
||||
self.fail(f"unexpected {describe_token(token)!r}", token.lineno)
|
||||
return node
|
||||
|
||||
def parse_tuple(
|
||||
self,
|
||||
simplified=False,
|
||||
with_condexpr=True,
|
||||
extra_end_rules=None,
|
||||
explicit_parentheses=False,
|
||||
):
|
||||
simplified: bool = False,
|
||||
with_condexpr: bool = True,
|
||||
extra_end_rules: t.Optional[t.Tuple[str, ...]] = None,
|
||||
explicit_parentheses: bool = False,
|
||||
) -> t.Union[nodes.Tuple, nodes.Expr]:
|
||||
"""Works like `parse_expression` but if multiple expressions are
|
||||
delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
|
||||
This method could also return a regular expression instead of a tuple
|
||||
@@ -631,12 +711,13 @@ def parse_tuple(
|
||||
parse = self.parse_expression
|
||||
else:
|
||||
|
||||
def parse():
|
||||
def parse() -> nodes.Expr:
|
||||
return self.parse_expression(with_condexpr=False)
|
||||
|
||||
args = []
|
||||
args: t.List[nodes.Expr] = []
|
||||
is_tuple = False
|
||||
while 1:
|
||||
|
||||
while True:
|
||||
if args:
|
||||
self.stream.expect("comma")
|
||||
if self.is_tuple_end(extra_end_rules):
|
||||
@@ -658,15 +739,15 @@ def parse():
|
||||
# tuple.
|
||||
if not explicit_parentheses:
|
||||
self.fail(
|
||||
"Expected an expression, got '%s'"
|
||||
% describe_token(self.stream.current)
|
||||
"Expected an expression,"
|
||||
f" got {describe_token(self.stream.current)!r}"
|
||||
)
|
||||
|
||||
return nodes.Tuple(args, "load", lineno=lineno)
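# A short illustration of the tuple rules implemented above:
#   {{ 1, 2 }}  -> nodes.Tuple of two constants
#   {{ (1,) }}  -> one-element tuple; explicit_parentheses permits the
#                  trailing comma with nothing after it
#   {{ (1) }}   -> plain Const(1); parentheses alone never build a tuple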
|
||||
|
||||
def parse_list(self):
|
||||
def parse_list(self) -> nodes.List:
|
||||
token = self.stream.expect("lbracket")
|
||||
items = []
|
||||
items: t.List[nodes.Expr] = []
|
||||
while self.stream.current.type != "rbracket":
|
||||
if items:
|
||||
self.stream.expect("comma")
|
||||
@@ -676,9 +757,9 @@ def parse_list(self):
|
||||
self.stream.expect("rbracket")
|
||||
return nodes.List(items, lineno=token.lineno)
|
||||
|
||||
def parse_dict(self):
|
||||
def parse_dict(self) -> nodes.Dict:
|
||||
token = self.stream.expect("lbrace")
|
||||
items = []
|
||||
items: t.List[nodes.Pair] = []
|
||||
while self.stream.current.type != "rbrace":
|
||||
if items:
|
||||
self.stream.expect("comma")
|
||||
@@ -691,8 +772,8 @@ def parse_dict(self):
|
||||
self.stream.expect("rbrace")
|
||||
return nodes.Dict(items, lineno=token.lineno)
|
||||
|
||||
def parse_postfix(self, node):
|
||||
while 1:
|
||||
def parse_postfix(self, node: nodes.Expr) -> nodes.Expr:
|
||||
while True:
|
||||
token_type = self.stream.current.type
|
||||
if token_type == "dot" or token_type == "lbracket":
|
||||
node = self.parse_subscript(node)
|
||||
@@ -704,11 +785,11 @@ def parse_postfix(self, node):
|
||||
break
|
||||
return node
|
||||
|
||||
def parse_filter_expr(self, node):
|
||||
while 1:
|
||||
def parse_filter_expr(self, node: nodes.Expr) -> nodes.Expr:
|
||||
while True:
|
||||
token_type = self.stream.current.type
|
||||
if token_type == "pipe":
|
||||
node = self.parse_filter(node)
|
||||
node = self.parse_filter(node) # type: ignore
|
||||
elif token_type == "name" and self.stream.current.value == "is":
|
||||
node = self.parse_test(node)
|
||||
# calls are valid both after postfix expressions (getattr
|
||||
@@ -719,8 +800,12 @@ def parse_filter_expr(self, node):
|
||||
break
|
||||
return node
|
||||
|
||||
def parse_subscript(self, node):
|
||||
def parse_subscript(
|
||||
self, node: nodes.Expr
|
||||
) -> t.Union[nodes.Getattr, nodes.Getitem]:
|
||||
token = next(self.stream)
|
||||
arg: nodes.Expr
|
||||
|
||||
if token.type == "dot":
|
||||
attr_token = self.stream.current
|
||||
next(self.stream)
|
||||
@@ -733,7 +818,7 @@ def parse_subscript(self, node):
|
||||
arg = nodes.Const(attr_token.value, lineno=attr_token.lineno)
|
||||
return nodes.Getitem(node, arg, "load", lineno=token.lineno)
|
||||
if token.type == "lbracket":
|
||||
args = []
|
||||
args: t.List[nodes.Expr] = []
|
||||
while self.stream.current.type != "rbracket":
|
||||
if args:
|
||||
self.stream.expect("comma")
|
||||
@@ -746,8 +831,9 @@ def parse_subscript(self, node):
|
||||
return nodes.Getitem(node, arg, "load", lineno=token.lineno)
|
||||
self.fail("expected subscript expression", token.lineno)
|
||||
|
||||
def parse_subscribed(self):
|
||||
def parse_subscribed(self) -> nodes.Expr:
|
||||
lineno = self.stream.current.lineno
|
||||
args: t.List[t.Optional[nodes.Expr]]
|
||||
|
||||
if self.stream.current.type == "colon":
|
||||
next(self.stream)
|
||||
@@ -777,23 +863,26 @@ def parse_subscribed(self):
|
||||
|
||||
return nodes.Slice(lineno=lineno, *args)
|
||||
|
||||
def parse_call(self, node):
|
||||
def parse_call_args(self) -> t.Tuple:
|
||||
token = self.stream.expect("lparen")
|
||||
args = []
|
||||
kwargs = []
|
||||
dyn_args = dyn_kwargs = None
|
||||
dyn_args = None
|
||||
dyn_kwargs = None
|
||||
require_comma = False
|
||||
|
||||
def ensure(expr):
|
||||
def ensure(expr: bool) -> None:
|
||||
if not expr:
|
||||
self.fail("invalid syntax for function call expression", token.lineno)
|
||||
|
||||
while self.stream.current.type != "rparen":
|
||||
if require_comma:
|
||||
self.stream.expect("comma")
|
||||
|
||||
# support for trailing comma
|
||||
if self.stream.current.type == "rparen":
|
||||
break
|
||||
|
||||
if self.stream.current.type == "mul":
|
||||
ensure(dyn_args is None and dyn_kwargs is None)
|
||||
next(self.stream)
|
||||
@@ -819,13 +908,20 @@ def ensure(expr):
|
||||
args.append(self.parse_expression())
|
||||
|
||||
require_comma = True
|
||||
self.stream.expect("rparen")
|
||||
|
||||
if node is None:
|
||||
return args, kwargs, dyn_args, dyn_kwargs
|
||||
self.stream.expect("rparen")
|
||||
return args, kwargs, dyn_args, dyn_kwargs
|
||||
|
||||
def parse_call(self, node: nodes.Expr) -> nodes.Call:
|
||||
# The lparen will be expected in parse_call_args, but the lineno
|
||||
# needs to be recorded before the stream is advanced.
|
||||
token = self.stream.current
|
||||
args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args()
|
||||
return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno)
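# Splitting parse_call_args out of parse_call (the refactor shown here)
# lets filters and tests reuse argument parsing without building a
# nodes.Call first; previously they called parse_call(None) and relied
# on the ``node is None`` special case removed above.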
|
||||
|
||||
def parse_filter(self, node, start_inline=False):
|
||||
def parse_filter(
|
||||
self, node: t.Optional[nodes.Expr], start_inline: bool = False
|
||||
) -> t.Optional[nodes.Expr]:
|
||||
while self.stream.current.type == "pipe" or start_inline:
|
||||
if not start_inline:
|
||||
next(self.stream)
|
||||
@@ -835,7 +931,7 @@ def parse_filter(self, node, start_inline=False):
|
||||
next(self.stream)
|
||||
name += "." + self.stream.expect("name").value
|
||||
if self.stream.current.type == "lparen":
|
||||
args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
|
||||
args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args()
|
||||
else:
|
||||
args = []
|
||||
kwargs = []
|
||||
@@ -846,7 +942,7 @@ def parse_filter(self, node, start_inline=False):
|
||||
start_inline = False
|
||||
return node
|
||||
|
||||
def parse_test(self, node):
|
||||
def parse_test(self, node: nodes.Expr) -> nodes.Expr:
|
||||
token = next(self.stream)
|
||||
if self.stream.current.test("name:not"):
|
||||
next(self.stream)
|
||||
@@ -860,16 +956,20 @@ def parse_test(self, node):
|
||||
dyn_args = dyn_kwargs = None
|
||||
kwargs = []
|
||||
if self.stream.current.type == "lparen":
|
||||
args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None)
|
||||
elif self.stream.current.type in (
|
||||
"name",
|
||||
"string",
|
||||
"integer",
|
||||
"float",
|
||||
"lparen",
|
||||
"lbracket",
|
||||
"lbrace",
|
||||
) and not self.stream.current.test_any("name:else", "name:or", "name:and"):
|
||||
args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args()
|
||||
elif (
|
||||
self.stream.current.type
|
||||
in {
|
||||
"name",
|
||||
"string",
|
||||
"integer",
|
||||
"float",
|
||||
"lparen",
|
||||
"lbracket",
|
||||
"lbrace",
|
||||
}
|
||||
and not self.stream.current.test_any("name:else", "name:or", "name:and")
|
||||
):
|
||||
if self.stream.current.test("name:is"):
|
||||
self.fail("You cannot chain multiple tests with is")
|
||||
arg_node = self.parse_primary()
|
||||
@@ -884,15 +984,17 @@ def parse_test(self, node):
|
||||
node = nodes.Not(node, lineno=token.lineno)
|
||||
return node
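# The argument branch above is what lets a test take one bare argument
# without parentheses, e.g. ``{{ n is divisibleby 3 }}``; excluding
# ``else``/``or``/``and`` keeps ``{{ x is defined or y }}`` from
# swallowing the keyword as a test argument.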
|
||||
|
||||
def subparse(self, end_tokens=None):
|
||||
body = []
|
||||
data_buffer = []
|
||||
def subparse(
|
||||
self, end_tokens: t.Optional[t.Tuple[str, ...]] = None
|
||||
) -> t.List[nodes.Node]:
|
||||
body: t.List[nodes.Node] = []
|
||||
data_buffer: t.List[nodes.Node] = []
|
||||
add_data = data_buffer.append
|
||||
|
||||
if end_tokens is not None:
|
||||
self._end_token_stack.append(end_tokens)
|
||||
|
||||
def flush_data():
|
||||
def flush_data() -> None:
|
||||
if data_buffer:
|
||||
lineno = data_buffer[0].lineno
|
||||
body.append(nodes.Output(data_buffer[:], lineno=lineno))
|
||||
@@ -929,10 +1031,9 @@ def flush_data():
|
||||
finally:
|
||||
if end_tokens is not None:
|
||||
self._end_token_stack.pop()
|
||||
|
||||
return body
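# flush_data batches adjacent literal text and expressions into a
# single nodes.Output, so a template body like ``a {{ b }} c`` becomes
# one Output([TemplateData, Name, TemplateData]) rather than three
# separate output nodes.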
|
||||
|
||||
def parse(self):
|
||||
def parse(self) -> nodes.Template:
|
||||
"""Parse the whole template into a `Template` node."""
|
||||
result = nodes.Template(self.subparse(), lineno=1)
|
||||
result.set_environment(self.environment)
|
||||
0  lib/spack/external/_vendoring/jinja2/py.typed  (vendored, new file)
(File diff suppressed because it is too large)
@@ -1,42 +1,32 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""A sandbox layer that ensures unsafe operations cannot be performed.
|
||||
Useful when the template itself comes from an untrusted source.
|
||||
"""
|
||||
import operator
|
||||
import types
|
||||
import warnings
|
||||
import typing as t
|
||||
from _string import formatter_field_name_split # type: ignore
|
||||
from collections import abc
|
||||
from collections import deque
|
||||
from string import Formatter
|
||||
|
||||
from markupsafe import EscapeFormatter
|
||||
from markupsafe import Markup
|
||||
|
||||
from ._compat import abc
|
||||
from ._compat import PY2
|
||||
from ._compat import range_type
|
||||
from ._compat import string_types
|
||||
from .environment import Environment
|
||||
from .exceptions import SecurityError
|
||||
from .runtime import Context
|
||||
from .runtime import Undefined
|
||||
|
||||
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
||||
|
||||
#: maximum number of items a range may produce
|
||||
MAX_RANGE = 100000
|
||||
|
||||
#: attributes of function objects that are considered unsafe.
|
||||
if PY2:
|
||||
UNSAFE_FUNCTION_ATTRIBUTES = {
|
||||
"func_closure",
|
||||
"func_code",
|
||||
"func_dict",
|
||||
"func_defaults",
|
||||
"func_globals",
|
||||
}
|
||||
else:
|
||||
# On versions > python 2 the special attributes on functions are gone,
|
||||
# but they remain on methods and generators for whatever reason.
|
||||
UNSAFE_FUNCTION_ATTRIBUTES = set()
|
||||
#: Unsafe function attributes.
|
||||
UNSAFE_FUNCTION_ATTRIBUTES: t.Set[str] = set()
|
||||
|
||||
#: unsafe method attributes. function attributes are unsafe for methods too
|
||||
UNSAFE_METHOD_ATTRIBUTES = {"im_class", "im_func", "im_self"}
|
||||
#: Unsafe method attributes. Function attributes are unsafe for methods too.
|
||||
UNSAFE_METHOD_ATTRIBUTES: t.Set[str] = set()
|
||||
|
||||
#: unsafe generator attributes.
|
||||
UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"}
|
||||
@@ -47,41 +37,9 @@
|
||||
#: unsafe attributes on async generators
|
||||
UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"}
|
||||
|
||||
# make sure we don't warn in python 2.6 about stuff we don't care about
|
||||
warnings.filterwarnings(
|
||||
"ignore", "the sets module", DeprecationWarning, module=__name__
|
||||
)
|
||||
|
||||
_mutable_set_types = (set,)
|
||||
_mutable_mapping_types = (dict,)
|
||||
_mutable_sequence_types = (list,)
|
||||
|
||||
# on python 2.x we can register the user collection types
|
||||
try:
|
||||
from UserDict import UserDict, DictMixin
|
||||
from UserList import UserList
|
||||
|
||||
_mutable_mapping_types += (UserDict, DictMixin)
|
||||
_mutable_set_types += (UserList,)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# if sets is still available, register the mutable set from there as well
|
||||
try:
|
||||
from sets import Set
|
||||
|
||||
_mutable_set_types += (Set,)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
#: register Python 2.6 abstract base classes
|
||||
_mutable_set_types += (abc.MutableSet,)
|
||||
_mutable_mapping_types += (abc.MutableMapping,)
|
||||
_mutable_sequence_types += (abc.MutableSequence,)
|
||||
|
||||
_mutable_spec = (
|
||||
_mutable_spec: t.Tuple[t.Tuple[t.Type, t.FrozenSet[str]], ...] = (
|
||||
(
|
||||
_mutable_set_types,
|
||||
abc.MutableSet,
|
||||
frozenset(
|
||||
[
|
||||
"add",
|
||||
@@ -96,11 +54,11 @@
|
||||
),
|
||||
),
|
||||
(
|
||||
_mutable_mapping_types,
|
||||
abc.MutableMapping,
|
||||
frozenset(["clear", "pop", "popitem", "setdefault", "update"]),
|
||||
),
|
||||
(
|
||||
_mutable_sequence_types,
|
||||
abc.MutableSequence,
|
||||
frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]),
|
||||
),
|
||||
(
|
||||
@@ -122,76 +80,49 @@
|
||||
)
|
||||
|
||||
|
||||
class _MagicFormatMapping(abc.Mapping):
|
||||
"""This class implements a dummy wrapper to fix a bug in the Python
|
||||
standard library for string formatting.
|
||||
|
||||
See https://bugs.python.org/issue13598 for information about why
|
||||
this is necessary.
|
||||
"""
|
||||
|
||||
def __init__(self, args, kwargs):
|
||||
self._args = args
|
||||
self._kwargs = kwargs
|
||||
self._last_index = 0
|
||||
|
||||
def __getitem__(self, key):
|
||||
if key == "":
|
||||
idx = self._last_index
|
||||
self._last_index += 1
|
||||
try:
|
||||
return self._args[idx]
|
||||
except LookupError:
|
||||
pass
|
||||
key = str(idx)
|
||||
return self._kwargs[key]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._kwargs)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._kwargs)
|
||||
|
||||
|
||||
def inspect_format_method(callable):
|
||||
def inspect_format_method(callable: t.Callable) -> t.Optional[str]:
|
||||
if not isinstance(
|
||||
callable, (types.MethodType, types.BuiltinMethodType)
|
||||
) or callable.__name__ not in ("format", "format_map"):
|
||||
return None
|
||||
|
||||
obj = callable.__self__
|
||||
if isinstance(obj, string_types):
|
||||
|
||||
if isinstance(obj, str):
|
||||
return obj
|
||||
|
||||
return None
|
||||
|
||||
def safe_range(*args):
|
||||
|
||||
def safe_range(*args: int) -> range:
|
||||
"""A range that can't generate ranges with a length of more than
|
||||
MAX_RANGE items.
|
||||
"""
|
||||
rng = range_type(*args)
|
||||
rng = range(*args)
|
||||
|
||||
if len(rng) > MAX_RANGE:
|
||||
raise OverflowError(
|
||||
"Range too big. The sandbox blocks ranges larger than"
|
||||
" MAX_RANGE (%d)." % MAX_RANGE
|
||||
f" MAX_RANGE ({MAX_RANGE})."
|
||||
)
|
||||
|
||||
return rng
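# Behavior sketch (MAX_RANGE is the module constant defined above):
#   safe_range(5)        -> range(0, 5)
#   safe_range(10 ** 9)  -> raises OverflowError up front, because
#                           len(range(10 ** 9)) exceeds MAX_RANGE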
|
||||
|
||||
|
||||
def unsafe(f):
|
||||
def unsafe(f: F) -> F:
|
||||
"""Marks a function or method as unsafe.
|
||||
|
||||
::
|
||||
.. code-block:: python
|
||||
|
||||
@unsafe
|
||||
def delete(self):
|
||||
pass
|
||||
"""
|
||||
f.unsafe_callable = True
|
||||
f.unsafe_callable = True # type: ignore
|
||||
return f
|
||||
|
||||
|
||||
def is_internal_attribute(obj, attr):
|
||||
def is_internal_attribute(obj: t.Any, attr: str) -> bool:
|
||||
"""Test if the attribute given is an internal python attribute. For
|
||||
example this function returns `True` for the `func_code` attribute of
|
||||
python objects. This is useful if the environment method
|
||||
@@ -228,12 +159,10 @@ def is_internal_attribute(obj, attr):
|
||||
return attr.startswith("__")
|
||||
|
||||
|
||||
def modifies_known_mutable(obj, attr):
|
||||
def modifies_known_mutable(obj: t.Any, attr: str) -> bool:
|
||||
"""This function checks if an attribute on a builtin mutable object
|
||||
(list, dict, set or deque) would modify it if called. It also supports
|
||||
the "user"-versions of the objects (`sets.Set`, `UserDict.*` etc.) and
|
||||
with Python 2.6 onwards the abstract base classes `MutableSet`,
|
||||
`MutableMapping`, and `MutableSequence`.
|
||||
(list, dict, set or deque) or the corresponding ABCs would modify it
|
||||
if called.
|
||||
|
||||
>>> modifies_known_mutable({}, "clear")
|
||||
True
|
||||
@@ -244,8 +173,7 @@ def modifies_known_mutable(obj, attr):
|
||||
>>> modifies_known_mutable([], "index")
|
||||
False
|
||||
|
||||
If called with an unsupported object (such as unicode) `False` is
|
||||
returned.
|
||||
If called with an unsupported object, ``False`` is returned.
|
||||
|
||||
>>> modifies_known_mutable("foo", "upper")
|
||||
False
|
||||
@@ -272,7 +200,7 @@ class SandboxedEnvironment(Environment):
|
||||
#: default callback table for the binary operators. A copy of this is
|
||||
#: available on each instance of a sandboxed environment as
|
||||
#: :attr:`binop_table`
|
||||
default_binop_table = {
|
||||
default_binop_table: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
|
||||
"+": operator.add,
|
||||
"-": operator.sub,
|
||||
"*": operator.mul,
|
||||
@@ -285,7 +213,10 @@ class SandboxedEnvironment(Environment):
|
||||
#: default callback table for the unary operators. A copy of this is
|
||||
#: available on each instance of a sandboxed environment as
|
||||
#: :attr:`unop_table`
|
||||
default_unop_table = {"+": operator.pos, "-": operator.neg}
|
||||
default_unop_table: t.Dict[str, t.Callable[[t.Any], t.Any]] = {
|
||||
"+": operator.pos,
|
||||
"-": operator.neg,
|
||||
}
|
||||
|
||||
#: a set of binary operators that should be intercepted. Each operator
|
||||
#: that is added to this set (empty by default) is delegated to the
|
||||
@@ -301,7 +232,7 @@ class SandboxedEnvironment(Environment):
|
||||
#: interested in.
|
||||
#:
|
||||
#: .. versionadded:: 2.6
|
||||
intercepted_binops = frozenset()
|
||||
intercepted_binops: t.FrozenSet[str] = frozenset()
|
||||
|
||||
#: a set of unary operators that should be intercepted. Each operator
|
||||
#: that is added to this set (empty by default) is delegated to the
|
||||
@@ -316,32 +247,15 @@ class SandboxedEnvironment(Environment):
|
||||
#: interested in.
|
||||
#:
|
||||
#: .. versionadded:: 2.6
|
||||
intercepted_unops = frozenset()
|
||||
intercepted_unops: t.FrozenSet[str] = frozenset()
|
||||
|
||||
def intercept_unop(self, operator):
|
||||
"""Called during template compilation with the name of a unary
|
||||
operator to check if it should be intercepted at runtime. If this
|
||||
method returns `True`, :meth:`call_unop` is executed for this unary
|
||||
operator. The default implementation of :meth:`call_unop` will use
|
||||
the :attr:`unop_table` dictionary to perform the operator with the
|
||||
same logic as the builtin one.
|
||||
|
||||
The following unary operators are interceptable: ``+`` and ``-``
|
||||
|
||||
Intercepted calls are always slower than the native operator call,
|
||||
so make sure only to intercept the ones you are interested in.
|
||||
|
||||
.. versionadded:: 2.6
|
||||
"""
|
||||
return False
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
Environment.__init__(self, *args, **kwargs)
|
||||
def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.globals["range"] = safe_range
|
||||
self.binop_table = self.default_binop_table.copy()
|
||||
self.unop_table = self.default_unop_table.copy()
|
||||
|
||||
def is_safe_attribute(self, obj, attr, value):
|
||||
def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool:
|
||||
"""The sandboxed environment will call this method to check if the
|
||||
attribute of an object is safe to access. Per default all attributes
|
||||
starting with an underscore are considered private as well as the
|
||||
@@ -350,17 +264,20 @@ def is_safe_attribute(self, obj, attr, value):
|
||||
"""
|
||||
return not (attr.startswith("_") or is_internal_attribute(obj, attr))
|
||||
|
||||
def is_safe_callable(self, obj):
|
||||
"""Check if an object is safely callable. Per default a function is
|
||||
considered safe unless the `unsafe_callable` attribute exists and is
|
||||
True. Override this method to alter the behavior, but this won't
|
||||
affect the `unsafe` decorator from this module.
|
||||
def is_safe_callable(self, obj: t.Any) -> bool:
|
||||
"""Check if an object is safely callable. By default callables
|
||||
are considered safe unless decorated with :func:`unsafe`.
|
||||
|
||||
This also recognizes the Django convention of setting
|
||||
``func.alters_data = True``.
|
||||
"""
|
||||
return not (
|
||||
getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False)
|
||||
)
|
||||
|
||||
def call_binop(self, context, operator, left, right):
|
||||
def call_binop(
|
||||
self, context: Context, operator: str, left: t.Any, right: t.Any
|
||||
) -> t.Any:
|
||||
"""For intercepted binary operator calls (:meth:`intercepted_binops`)
|
||||
this function is executed instead of the builtin operator. This can
|
||||
be used to fine tune the behavior of certain operators.
|
||||
@@ -369,7 +286,7 @@ def call_binop(self, context, operator, left, right):
|
||||
"""
|
||||
return self.binop_table[operator](left, right)
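# A sketch of the interception hooks, following the pattern from the
# Jinja sandbox documentation (disabling the power operator):
#
#   class PowerlessEnvironment(SandboxedEnvironment):
#       intercepted_binops = frozenset(["**"])
#
#       def call_binop(self, context, operator, left, right):
#           if operator == "**":
#               raise SecurityError("the power operator is unavailable")
#           return super().call_binop(context, operator, left, right)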
|
||||
|
||||
def call_unop(self, context, operator, arg):
|
||||
def call_unop(self, context: Context, operator: str, arg: t.Any) -> t.Any:
|
||||
"""For intercepted unary operator calls (:meth:`intercepted_unops`)
|
||||
this function is executed instead of the builtin operator. This can
|
||||
be used to fine tune the behavior of certain operators.
|
||||
@@ -378,12 +295,14 @@ def call_unop(self, context, operator, arg):
|
||||
"""
|
||||
return self.unop_table[operator](arg)
|
||||
|
||||
def getitem(self, obj, argument):
|
||||
def getitem(
|
||||
self, obj: t.Any, argument: t.Union[str, t.Any]
|
||||
) -> t.Union[t.Any, Undefined]:
|
||||
"""Subscribe an object from sandboxed code."""
|
||||
try:
|
||||
return obj[argument]
|
||||
except (TypeError, LookupError):
|
||||
if isinstance(argument, string_types):
|
||||
if isinstance(argument, str):
|
||||
try:
|
||||
attr = str(argument)
|
||||
except Exception:
|
||||
@@ -399,7 +318,7 @@ def getitem(self, obj, argument):
|
||||
return self.unsafe_undefined(obj, argument)
|
||||
return self.undefined(obj=obj, name=argument)
|
||||
|
||||
def getattr(self, obj, attribute):
|
||||
def getattr(self, obj: t.Any, attribute: str) -> t.Union[t.Any, Undefined]:
|
||||
"""Subscribe an object from sandboxed code and prefer the
|
||||
attribute. The attribute passed *must* be a bytestring.
|
||||
"""
|
||||
@@ -416,40 +335,52 @@ def getattr(self, obj, attribute):
|
||||
return self.unsafe_undefined(obj, attribute)
|
||||
return self.undefined(obj=obj, name=attribute)
|
||||
|
||||
def unsafe_undefined(self, obj, attribute):
|
||||
def unsafe_undefined(self, obj: t.Any, attribute: str) -> Undefined:
|
||||
"""Return an undefined object for unsafe attributes."""
|
||||
return self.undefined(
|
||||
"access to attribute %r of %r "
|
||||
"object is unsafe." % (attribute, obj.__class__.__name__),
|
||||
f"access to attribute {attribute!r} of"
|
||||
f" {type(obj).__name__!r} object is unsafe.",
|
||||
name=attribute,
|
||||
obj=obj,
|
||||
exc=SecurityError,
|
||||
)
|
||||
|
||||
def format_string(self, s, args, kwargs, format_func=None):
|
||||
def format_string(
|
||||
self,
|
||||
s: str,
|
||||
args: t.Tuple[t.Any, ...],
|
||||
kwargs: t.Dict[str, t.Any],
|
||||
format_func: t.Optional[t.Callable] = None,
|
||||
) -> str:
|
||||
"""If a format call is detected, then this is routed through this
|
||||
method so that our safety sandbox can be used for it.
|
||||
"""
|
||||
formatter: SandboxedFormatter
|
||||
if isinstance(s, Markup):
|
||||
formatter = SandboxedEscapeFormatter(self, s.escape)
|
||||
formatter = SandboxedEscapeFormatter(self, escape=s.escape)
|
||||
else:
|
||||
formatter = SandboxedFormatter(self)
|
||||
|
||||
if format_func is not None and format_func.__name__ == "format_map":
|
||||
if len(args) != 1 or kwargs:
|
||||
raise TypeError(
|
||||
"format_map() takes exactly one argument %d given"
|
||||
% (len(args) + (kwargs is not None))
|
||||
"format_map() takes exactly one argument"
|
||||
f" {len(args) + (kwargs is not None)} given"
|
||||
)
|
||||
|
||||
kwargs = args[0]
|
||||
args = None
|
||||
args = ()
|
||||
|
||||
kwargs = _MagicFormatMapping(args, kwargs)
|
||||
rv = formatter.vformat(s, args, kwargs)
|
||||
return type(s)(rv)
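# What this routing guards: in a sandbox, a template such as
#   {{ "{0.__class__.__mro__}".format(x) }}
# goes through format_string, and SandboxedFormatter.get_field (below)
# resolves each attribute via the environment's safe getattr, so dunder
# access produces an undefined value / SecurityError instead of leaking
# the object graph.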
|
||||
|
||||
def call(__self, __context, __obj, *args, **kwargs): # noqa: B902
|
||||
def call(
|
||||
__self, # noqa: B902
|
||||
__context: Context,
|
||||
__obj: t.Any,
|
||||
*args: t.Any,
|
||||
**kwargs: t.Any,
|
||||
) -> t.Any:
|
||||
"""Call an object from sandboxed code."""
|
||||
fmt = inspect_format_method(__obj)
|
||||
if fmt is not None:
|
||||
@@ -458,7 +389,7 @@ def call(__self, __context, __obj, *args, **kwargs): # noqa: B902
|
||||
# the double prefixes are to avoid double keyword argument
|
||||
# errors when proxying the call.
|
||||
if not __self.is_safe_callable(__obj):
|
||||
raise SecurityError("%r is not safely callable" % (__obj,))
|
||||
raise SecurityError(f"{__obj!r} is not safely callable")
|
||||
return __context.call(__obj, *args, **kwargs)
|
||||
|
||||
|
||||
@@ -468,26 +399,21 @@ class ImmutableSandboxedEnvironment(SandboxedEnvironment):
|
||||
`dict` by using the :func:`modifies_known_mutable` function.
|
||||
"""
|
||||
|
||||
def is_safe_attribute(self, obj, attr, value):
|
||||
if not SandboxedEnvironment.is_safe_attribute(self, obj, attr, value):
|
||||
def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool:
|
||||
if not super().is_safe_attribute(obj, attr, value):
|
||||
return False
|
||||
|
||||
return not modifies_known_mutable(obj, attr)
|
||||
|
||||
|
||||
# This really is not a public API apparently.
|
||||
try:
|
||||
from _string import formatter_field_name_split
|
||||
except ImportError:
|
||||
|
||||
def formatter_field_name_split(field_name):
|
||||
return field_name._formatter_field_name_split()
|
||||
|
||||
|
||||
class SandboxedFormatterMixin(object):
|
||||
def __init__(self, env):
|
||||
class SandboxedFormatter(Formatter):
|
||||
def __init__(self, env: Environment, **kwargs: t.Any) -> None:
|
||||
self._env = env
|
||||
super().__init__(**kwargs) # type: ignore
|
||||
|
||||
def get_field(self, field_name, args, kwargs):
|
||||
def get_field(
|
||||
self, field_name: str, args: t.Sequence[t.Any], kwargs: t.Mapping[str, t.Any]
|
||||
) -> t.Tuple[t.Any, str]:
|
||||
first, rest = formatter_field_name_split(field_name)
|
||||
obj = self.get_value(first, args, kwargs)
|
||||
for is_attr, i in rest:
|
||||
@@ -498,13 +424,5 @@ def get_field(self, field_name, args, kwargs):
|
||||
return obj, first
|
||||
|
||||
|
||||
class SandboxedFormatter(SandboxedFormatterMixin, Formatter):
|
||||
def __init__(self, env):
|
||||
SandboxedFormatterMixin.__init__(self, env)
|
||||
Formatter.__init__(self)
|
||||
|
||||
|
||||
class SandboxedEscapeFormatter(SandboxedFormatterMixin, EscapeFormatter):
|
||||
def __init__(self, env, escape):
|
||||
SandboxedFormatterMixin.__init__(self, env)
|
||||
EscapeFormatter.__init__(self, escape)
|
||||
class SandboxedEscapeFormatter(SandboxedFormatter, EscapeFormatter):
|
||||
pass
|
||||
@@ -1,36 +1,32 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Built-in template tests used with the ``is`` operator."""
|
||||
import decimal
|
||||
import operator
|
||||
import re
|
||||
import typing as t
|
||||
from collections import abc
|
||||
from numbers import Number
|
||||
|
||||
from ._compat import abc
|
||||
from ._compat import integer_types
|
||||
from ._compat import string_types
|
||||
from ._compat import text_type
|
||||
from .runtime import Undefined
|
||||
from .utils import pass_environment
|
||||
|
||||
number_re = re.compile(r"^-?\d+(\.\d+)?$")
|
||||
regex_type = type(number_re)
|
||||
test_callable = callable
|
||||
if t.TYPE_CHECKING:
|
||||
from .environment import Environment
|
||||
|
||||
|
||||
def test_odd(value):
|
||||
def test_odd(value: int) -> bool:
|
||||
"""Return true if the variable is odd."""
|
||||
return value % 2 == 1
|
||||
|
||||
|
||||
def test_even(value):
|
||||
def test_even(value: int) -> bool:
|
||||
"""Return true if the variable is even."""
|
||||
return value % 2 == 0
|
||||
|
||||
|
||||
def test_divisibleby(value, num):
|
||||
def test_divisibleby(value: int, num: int) -> bool:
|
||||
"""Check if a variable is divisible by a number."""
|
||||
return value % num == 0
|
||||
|
||||
|
||||
def test_defined(value):
|
||||
def test_defined(value: t.Any) -> bool:
|
||||
"""Return true if the variable is defined:
|
||||
|
||||
.. sourcecode:: jinja
|
||||
@@ -47,17 +43,57 @@ def test_defined(value):
|
||||
return not isinstance(value, Undefined)
|
||||
|
||||
|
||||
def test_undefined(value):
|
||||
def test_undefined(value: t.Any) -> bool:
|
||||
"""Like :func:`defined` but the other way round."""
|
||||
return isinstance(value, Undefined)
|
||||
|
||||
|
||||
def test_none(value):
|
||||
@pass_environment
|
||||
def test_filter(env: "Environment", value: str) -> bool:
|
||||
"""Check if a filter exists by name. Useful if a filter may be
|
||||
optionally available.
|
||||
|
||||
.. code-block:: jinja
|
||||
|
||||
{% if 'markdown' is filter %}
|
||||
{{ value | markdown }}
|
||||
{% else %}
|
||||
{{ value }}
|
||||
{% endif %}
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
return value in env.filters
|
||||
|
||||
|
||||
@pass_environment
|
||||
def test_test(env: "Environment", value: str) -> bool:
|
||||
"""Check if a test exists by name. Useful if a test may be
|
||||
optionally available.
|
||||
|
||||
.. code-block:: jinja
|
||||
|
||||
{% if 'loud' is test %}
|
||||
{% if value is loud %}
|
||||
{{ value|upper }}
|
||||
{% else %}
|
||||
{{ value|lower }}
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{{ value }}
|
||||
{% endif %}
|
||||
|
||||
.. versionadded:: 3.0
|
||||
"""
|
||||
return value in env.tests
|
||||
|
||||
|
||||
def test_none(value: t.Any) -> bool:
|
||||
"""Return true if the variable is none."""
|
||||
return value is None
|
||||
|
||||
|
||||
def test_boolean(value):
|
||||
def test_boolean(value: t.Any) -> bool:
|
||||
"""Return true if the object is a boolean value.
|
||||
|
||||
.. versionadded:: 2.11
|
||||
@@ -65,7 +101,7 @@ def test_boolean(value):
|
||||
return value is True or value is False
|
||||
|
||||
|
||||
def test_false(value):
|
||||
def test_false(value: t.Any) -> bool:
|
||||
"""Return true if the object is False.
|
||||
|
||||
.. versionadded:: 2.11
|
||||
@@ -73,7 +109,7 @@ def test_false(value):
|
||||
return value is False
|
||||
|
||||
|
||||
def test_true(value):
|
||||
def test_true(value: t.Any) -> bool:
|
||||
"""Return true if the object is True.
|
||||
|
||||
.. versionadded:: 2.11
|
||||
@@ -82,16 +118,16 @@ def test_true(value):
|
||||
|
||||
|
||||
# NOTE: The existing 'number' test matches booleans and floats
|
||||
def test_integer(value):
|
||||
def test_integer(value: t.Any) -> bool:
|
||||
"""Return true if the object is an integer.
|
||||
|
||||
.. versionadded:: 2.11
|
||||
"""
|
||||
return isinstance(value, integer_types) and value is not True and value is not False
|
||||
return isinstance(value, int) and value is not True and value is not False
|
||||
|
||||
|
||||
# NOTE: The existing 'number' test matches booleans and integers
|
||||
def test_float(value):
|
||||
def test_float(value: t.Any) -> bool:
|
||||
"""Return true if the object is a float.
|
||||
|
||||
.. versionadded:: 2.11
|
||||
@@ -99,22 +135,22 @@ def test_float(value):
|
||||
return isinstance(value, float)
|
||||
|
||||
|
||||
def test_lower(value):
|
||||
def test_lower(value: str) -> bool:
|
||||
"""Return true if the variable is lowercased."""
|
||||
return text_type(value).islower()
|
||||
return str(value).islower()
|
||||
|
||||
|
||||
def test_upper(value):
|
||||
def test_upper(value: str) -> bool:
|
||||
"""Return true if the variable is uppercased."""
|
||||
return text_type(value).isupper()
|
||||
return str(value).isupper()
|
||||
|
||||
|
||||
def test_string(value):
|
||||
def test_string(value: t.Any) -> bool:
|
||||
"""Return true if the object is a string."""
|
||||
return isinstance(value, string_types)
|
||||
return isinstance(value, str)
|
||||
|
||||
|
||||
def test_mapping(value):
|
||||
def test_mapping(value: t.Any) -> bool:
|
||||
"""Return true if the object is a mapping (dict etc.).
|
||||
|
||||
.. versionadded:: 2.6
|
||||
@@ -122,12 +158,12 @@ def test_mapping(value):
|
||||
return isinstance(value, abc.Mapping)
|
||||
|
||||
|
||||
def test_number(value):
|
||||
def test_number(value: t.Any) -> bool:
|
||||
"""Return true if the variable is a number."""
|
||||
return isinstance(value, integer_types + (float, complex, decimal.Decimal))
|
||||
return isinstance(value, Number)
|
||||
|
||||
|
||||
def test_sequence(value):
|
||||
def test_sequence(value: t.Any) -> bool:
|
||||
"""Return true if the variable is a sequence. Sequences are variables
|
||||
that are iterable.
|
||||
"""
|
||||
@@ -136,10 +172,11 @@ def test_sequence(value):
|
||||
value.__getitem__
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
return True
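# Note how loose this check is: anything exposing __len__ and
# __getitem__ passes, so both of these count as "sequences":
#   test_sequence("abc")     -> True
#   test_sequence({"a": 1})  -> True
# while a bare iterator does not:
#   test_sequence(iter([]))  -> False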
|
||||
|
||||
|
||||
def test_sameas(value, other):
|
||||
def test_sameas(value: t.Any, other: t.Any) -> bool:
|
||||
"""Check if an object points to the same memory address than another
|
||||
object:
|
||||
|
||||
@@ -152,21 +189,22 @@ def test_sameas(value, other):
|
||||
return value is other
|
||||
|
||||
|
||||
def test_iterable(value):
|
||||
def test_iterable(value: t.Any) -> bool:
|
||||
"""Check if it's possible to iterate over an object."""
|
||||
try:
|
||||
iter(value)
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def test_escaped(value):
|
||||
def test_escaped(value: t.Any) -> bool:
|
||||
"""Check if the value is escaped."""
|
||||
return hasattr(value, "__html__")
|
||||
|
||||
|
||||
def test_in(value, seq):
|
||||
def test_in(value: t.Any, seq: t.Container) -> bool:
|
||||
"""Check if value is in seq.
|
||||
|
||||
.. versionadded:: 2.10
|
||||
@@ -180,6 +218,8 @@ def test_in(value, seq):
|
||||
"divisibleby": test_divisibleby,
|
||||
"defined": test_defined,
|
||||
"undefined": test_undefined,
|
||||
"filter": test_filter,
|
||||
"test": test_test,
|
||||
"none": test_none,
|
||||
"boolean": test_boolean,
|
||||
"false": test_false,
|
||||
@@ -193,7 +233,7 @@ def test_in(value, seq):
|
||||
"number": test_number,
|
||||
"sequence": test_sequence,
|
||||
"iterable": test_iterable,
|
||||
"callable": test_callable,
|
||||
"callable": callable,
|
||||
"sameas": test_sameas,
|
||||
"escaped": test_escaped,
|
||||
"in": test_in,
|
||||
854  lib/spack/external/_vendoring/jinja2/utils.py  (vendored, new file)
@@ -0,0 +1,854 @@
|
||||
import enum
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import typing as t
|
||||
import warnings
|
||||
from collections import abc
|
||||
from collections import deque
|
||||
from random import choice
|
||||
from random import randrange
|
||||
from threading import Lock
|
||||
from types import CodeType
|
||||
from urllib.parse import quote_from_bytes
|
||||
|
||||
import markupsafe
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
|
||||
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
||||
|
||||
# special singleton representing missing values for the runtime
|
||||
missing: t.Any = type("MissingType", (), {"__repr__": lambda x: "missing"})()
|
||||
|
||||
internal_code: t.MutableSet[CodeType] = set()
|
||||
|
||||
concat = "".join
|
||||
|
||||
|
||||
def pass_context(f: F) -> F:
|
||||
"""Pass the :class:`~jinja2.runtime.Context` as the first argument
|
||||
to the decorated function when called while rendering a template.
|
||||
|
||||
Can be used on functions, filters, and tests.
|
||||
|
||||
If only ``Context.eval_context`` is needed, use
|
||||
:func:`pass_eval_context`. If only ``Context.environment`` is
|
||||
needed, use :func:`pass_environment`.
|
||||
|
||||
.. versionadded:: 3.0.0
|
||||
Replaces ``contextfunction`` and ``contextfilter``.
|
||||
"""
|
||||
f.jinja_pass_arg = _PassArg.context # type: ignore
|
||||
return f
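# Minimal usage sketch (the filter name here is illustrative):
#
#   @pass_context
#   def resolve_name(context, name):
#       # look a variable up in the current template context
#       return context.resolve(name)
#
#   env.filters["resolve_name"] = resolve_name  # 'env' assumed to exist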
|
||||
|
||||
|
||||
def pass_eval_context(f: F) -> F:
|
||||
"""Pass the :class:`~jinja2.nodes.EvalContext` as the first argument
|
||||
to the decorated function when called while rendering a template.
|
||||
See :ref:`eval-context`.
|
||||
|
||||
Can be used on functions, filters, and tests.
|
||||
|
||||
If only ``EvalContext.environment`` is needed, use
|
||||
:func:`pass_environment`.
|
||||
|
||||
.. versionadded:: 3.0.0
|
||||
Replaces ``evalcontextfunction`` and ``evalcontextfilter``.
|
||||
"""
|
||||
f.jinja_pass_arg = _PassArg.eval_context # type: ignore
|
||||
return f
|
||||
|
||||
|
||||
def pass_environment(f: F) -> F:
|
||||
"""Pass the :class:`~jinja2.Environment` as the first argument to
|
||||
the decorated function when called while rendering a template.
|
||||
|
||||
Can be used on functions, filters, and tests.
|
||||
|
||||
.. versionadded:: 3.0.0
|
||||
Replaces ``environmentfunction`` and ``environmentfilter``.
|
||||
"""
|
||||
f.jinja_pass_arg = _PassArg.environment # type: ignore
|
||||
return f
|
||||
|
||||
|
||||
class _PassArg(enum.Enum):
|
||||
context = enum.auto()
|
||||
eval_context = enum.auto()
|
||||
environment = enum.auto()
|
||||
|
||||
@classmethod
|
||||
def from_obj(cls, obj: F) -> t.Optional["_PassArg"]:
|
||||
if hasattr(obj, "jinja_pass_arg"):
|
||||
return obj.jinja_pass_arg # type: ignore
|
||||
|
||||
for prefix in "context", "eval_context", "environment":
|
||||
squashed = prefix.replace("_", "")
|
||||
|
||||
for name in f"{squashed}function", f"{squashed}filter":
|
||||
if getattr(obj, name, False) is True:
|
||||
warnings.warn(
|
||||
f"{name!r} is deprecated and will stop working"
|
||||
f" in Jinja 3.1. Use 'pass_{prefix}' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return cls[prefix]
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def contextfunction(f: F) -> F:
|
||||
"""Pass the context as the first argument to the decorated function.
|
||||
|
||||
.. deprecated:: 3.0
|
||||
Will be removed in Jinja 3.1. Use :func:`~jinja2.pass_context`
|
||||
instead.
|
||||
"""
|
||||
warnings.warn(
|
||||
"'contextfunction' is renamed to 'pass_context', the old name"
|
||||
" will be removed in Jinja 3.1.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return pass_context(f)
|
||||
|
||||
|
||||
def evalcontextfunction(f: F) -> F:
|
||||
"""Pass the eval context as the first argument to the decorated
|
||||
function.
|
||||
|
||||
.. deprecated:: 3.0
|
||||
Will be removed in Jinja 3.1. Use
|
||||
:func:`~jinja2.pass_eval_context` instead.
|
||||
|
||||
.. versionadded:: 2.4
|
||||
"""
|
||||
warnings.warn(
|
||||
"'evalcontextfunction' is renamed to 'pass_eval_context', the"
|
||||
" old name will be removed in Jinja 3.1.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return pass_eval_context(f)
|
||||
|
||||
|
||||
def environmentfunction(f: F) -> F:
|
||||
"""Pass the environment as the first argument to the decorated
|
||||
function.
|
||||
|
||||
.. deprecated:: 3.0
|
||||
Will be removed in Jinja 3.1. Use
|
||||
:func:`~jinja2.pass_environment` instead.
|
||||
"""
|
||||
warnings.warn(
|
||||
"'environmentfunction' is renamed to 'pass_environment', the"
|
||||
" old name will be removed in Jinja 3.1.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return pass_environment(f)
|
||||
|
||||
|
||||
def internalcode(f: F) -> F:
|
||||
"""Marks the function as internally used"""
|
||||
internal_code.add(f.__code__)
|
||||
return f
|
||||
|
||||
|
||||
def is_undefined(obj: t.Any) -> bool:
|
||||
"""Check if the object passed is undefined. This does nothing more than
|
||||
performing an instance check against :class:`Undefined` but looks nicer.
|
||||
This can be used for custom filters or tests that want to react to
|
||||
undefined variables. For example a custom default filter can look like
|
||||
this::
|
||||
|
||||
def default(var, default=''):
|
||||
if is_undefined(var):
|
||||
return default
|
||||
return var
|
||||
"""
|
||||
from .runtime import Undefined
|
||||
|
||||
return isinstance(obj, Undefined)
|
||||
|
||||
|
||||
def consume(iterable: t.Iterable[t.Any]) -> None:
|
||||
"""Consumes an iterable without doing anything with it."""
|
||||
for _ in iterable:
|
||||
pass
|
||||
|
||||
|
||||
def clear_caches() -> None:
|
||||
"""Jinja keeps internal caches for environments and lexers. These are
|
||||
used so that Jinja doesn't have to recreate environments and lexers all
|
||||
the time. Normally you don't have to care about that but if you are
|
||||
measuring memory consumption you may want to clean the caches.
|
||||
"""
|
||||
from .environment import get_spontaneous_environment
|
||||
from .lexer import _lexer_cache
|
||||
|
||||
get_spontaneous_environment.cache_clear()
|
||||
_lexer_cache.clear()
|
||||
|
||||
|
||||
def import_string(import_name: str, silent: bool = False) -> t.Any:
|
||||
"""Imports an object based on a string. This is useful if you want to
|
||||
use import paths as endpoints or something similar. An import path can
|
||||
be specified either in dotted notation (``xml.sax.saxutils.escape``)
|
||||
or with a colon as object delimiter (``xml.sax.saxutils:escape``).
|
||||
|
||||
If `silent` is True the return value will be `None` if the import
|
||||
fails.
|
||||
|
||||
:return: imported object
|
||||
"""
|
||||
try:
|
||||
if ":" in import_name:
|
||||
module, obj = import_name.split(":", 1)
|
||||
elif "." in import_name:
|
||||
module, _, obj = import_name.rpartition(".")
|
||||
else:
|
||||
return __import__(import_name)
|
||||
return getattr(__import__(module, None, None, [obj]), obj)
|
||||
except (ImportError, AttributeError):
|
||||
if not silent:
|
||||
raise
|
||||
|
||||
|
||||
def open_if_exists(filename: str, mode: str = "rb") -> t.Optional[t.IO]:
|
||||
"""Returns a file descriptor for the filename if that file exists,
|
||||
otherwise ``None``.
|
||||
"""
|
||||
if not os.path.isfile(filename):
|
||||
return None
|
||||
|
||||
return open(filename, mode)
|
||||
|
||||
|
||||
def object_type_repr(obj: t.Any) -> str:
|
||||
"""Returns the name of the object's type. For some recognized
|
||||
singletons the name of the object is returned instead. (For
|
||||
example for `None` and `Ellipsis`).
|
||||
"""
|
||||
if obj is None:
|
||||
return "None"
|
||||
elif obj is Ellipsis:
|
||||
return "Ellipsis"
|
||||
|
||||
cls = type(obj)
|
||||
|
||||
if cls.__module__ == "builtins":
|
||||
return f"{cls.__name__} object"
|
||||
|
||||
return f"{cls.__module__}.{cls.__name__} object"
|
||||
|
||||
|
||||
def pformat(obj: t.Any) -> str:
|
||||
"""Format an object using :func:`pprint.pformat`."""
|
||||
from pprint import pformat # type: ignore
|
||||
|
||||
return pformat(obj)
|
||||
|
||||
|
||||
_http_re = re.compile(
|
||||
r"""
|
||||
^
|
||||
(
|
||||
(https?://|www\.) # scheme or www
|
||||
(([\w%-]+\.)+)? # subdomain
|
||||
(
|
||||
[a-z]{2,63} # basic tld
|
||||
|
|
||||
xn--[\w%]{2,59} # idna tld
|
||||
)
|
||||
|
|
||||
([\w%-]{2,63}\.)+ # basic domain
|
||||
(com|net|int|edu|gov|org|info|mil) # basic tld
|
||||
|
|
||||
(https?://) # scheme
|
||||
(
|
||||
(([\d]{1,3})(\.[\d]{1,3}){3}) # IPv4
|
||||
|
|
||||
(\[([\da-f]{0,4}:){2}([\da-f]{0,4}:?){1,6}]) # IPv6
|
||||
)
|
||||
)
|
||||
(?::[\d]{1,5})? # port
|
||||
(?:[/?#]\S*)? # path, query, and fragment
|
||||
$
|
||||
""",
|
||||
re.IGNORECASE | re.VERBOSE,
|
||||
)
|
||||
_email_re = re.compile(r"^\S+@\w[\w.-]*\.\w+$")
|
||||
|
||||
|
||||
def urlize(
|
||||
text: str,
|
||||
trim_url_limit: t.Optional[int] = None,
|
||||
rel: t.Optional[str] = None,
|
||||
target: t.Optional[str] = None,
|
||||
extra_schemes: t.Optional[t.Iterable[str]] = None,
|
||||
) -> str:
|
||||
"""Convert URLs in text into clickable links.
|
||||
|
||||
This may not recognize links in some situations. Usually, a more
|
||||
comprehensive formatter, such as a Markdown library, is a better
|
||||
choice.
|
||||
|
||||
Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email
|
||||
addresses. Links with trailing punctuation (periods, commas, closing
|
||||
parentheses) and leading punctuation (opening parentheses) are
|
||||
recognized excluding the punctuation. Email addresses that include
|
||||
header fields are not recognized (for example,
|
||||
``mailto:address@example.com?cc=copy@example.com``).
|
||||
|
||||
:param text: Original text containing URLs to link.
|
||||
:param trim_url_limit: Shorten displayed URL values to this length.
|
||||
:param target: Add the ``target`` attribute to links.
|
||||
:param rel: Add the ``rel`` attribute to links.
|
||||
:param extra_schemes: Recognize URLs that start with these schemes
|
||||
in addition to the default behavior.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The ``extra_schemes`` parameter was added.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Generate ``https://`` links for URLs without a scheme.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The parsing rules were updated. Recognize email addresses with
|
||||
or without the ``mailto:`` scheme. Validate IP addresses. Ignore
|
||||
parentheses and brackets in more cases.
|
||||
"""
|
||||
if trim_url_limit is not None:
|
||||
|
||||
def trim_url(x: str) -> str:
|
||||
if len(x) > trim_url_limit: # type: ignore
|
||||
return f"{x[:trim_url_limit]}..."
|
||||
|
||||
return x
|
||||
|
||||
else:
|
||||
|
||||
def trim_url(x: str) -> str:
|
||||
return x
|
||||
|
||||
words = re.split(r"(\s+)", str(markupsafe.escape(text)))
|
||||
rel_attr = f' rel="{markupsafe.escape(rel)}"' if rel else ""
|
||||
target_attr = f' target="{markupsafe.escape(target)}"' if target else ""
|
||||
|
||||
for i, word in enumerate(words):
|
||||
head, middle, tail = "", word, ""
|
||||
match = re.match(r"^([(<]|&lt;)+", middle)
|
||||
|
||||
if match:
|
||||
head = match.group()
|
||||
middle = middle[match.end() :]
|
||||
|
||||
# Unlike lead, which is anchored to the start of the string,
|
||||
# need to check that the string ends with any of the characters
|
||||
# before trying to match all of them, to avoid backtracking.
|
||||
if middle.endswith((")", ">", ".", ",", "\n", "&gt;")):
|
||||
match = re.search(r"([)>.,\n]|&gt;)+$", middle)
|
||||
|
||||
if match:
|
||||
tail = match.group()
|
||||
middle = middle[: match.start()]
|
||||
|
||||
# Prefer balancing parentheses in URLs instead of ignoring a
|
||||
# trailing character.
|
||||
for start_char, end_char in ("(", ")"), ("<", ">"), ("&lt;", "&gt;"):
|
||||
start_count = middle.count(start_char)
|
||||
|
||||
if start_count <= middle.count(end_char):
|
||||
# Balanced, or lighter on the left
|
||||
continue
|
||||
|
||||
# Move as many as possible from the tail to balance
|
||||
for _ in range(min(start_count, tail.count(end_char))):
|
||||
end_index = tail.index(end_char) + len(end_char)
|
||||
# Move anything in the tail before the end char too
|
||||
middle += tail[:end_index]
|
||||
tail = tail[end_index:]
|
||||
|
||||
if _http_re.match(middle):
|
||||
if middle.startswith("https://") or middle.startswith("http://"):
|
||||
middle = (
|
||||
f'<a href="{middle}"{rel_attr}{target_attr}>{trim_url(middle)}</a>'
|
||||
)
|
||||
else:
|
||||
middle = (
|
||||
f'<a href="https://{middle}"{rel_attr}{target_attr}>'
|
||||
f"{trim_url(middle)}</a>"
|
||||
)
|
||||
|
||||
elif middle.startswith("mailto:") and _email_re.match(middle[7:]):
|
||||
middle = f'<a href="{middle}">{middle[7:]}</a>'
|
||||
|
||||
elif (
|
||||
"@" in middle
|
||||
and not middle.startswith("www.")
|
||||
and ":" not in middle
|
||||
and _email_re.match(middle)
|
||||
):
|
||||
middle = f'<a href="mailto:{middle}">{middle}</a>'
|
||||
|
||||
elif extra_schemes is not None:
|
||||
for scheme in extra_schemes:
|
||||
if middle != scheme and middle.startswith(scheme):
|
||||
middle = f'<a href="{middle}"{rel_attr}{target_attr}>{middle}</a>'
|
||||
|
||||
words[i] = f"{head}{middle}{tail}"
|
||||
|
||||
return "".join(words)


def generate_lorem_ipsum(
    n: int = 5, html: bool = True, min: int = 20, max: int = 100
) -> str:
    """Generate some lorem ipsum for the template."""
    from .constants import LOREM_IPSUM_WORDS

    words = LOREM_IPSUM_WORDS.split()
    result = []

    for _ in range(n):
        next_capitalized = True
        last_comma = last_fullstop = 0
        word = None
        last = None
        p = []

        # each paragraph contains 20 to 100 words.
        for idx, _ in enumerate(range(randrange(min, max))):
            while True:
                word = choice(words)
                if word != last:
                    last = word
                    break
            if next_capitalized:
                word = word.capitalize()
                next_capitalized = False
            # add commas
            if idx - randrange(3, 8) > last_comma:
                last_comma = idx
                last_fullstop += 2
                word += ","
            # add end of sentences
            if idx - randrange(10, 20) > last_fullstop:
                last_comma = last_fullstop = idx
                word += "."
                next_capitalized = True
            p.append(word)

        # ensure that the paragraph ends with a dot.
        p_str = " ".join(p)

        if p_str.endswith(","):
            p_str = p_str[:-1] + "."
        elif not p_str.endswith("."):
            p_str += "."

        result.append(p_str)

    if not html:
        return "\n\n".join(result)
    return markupsafe.Markup(
        "\n".join(f"<p>{markupsafe.escape(x)}</p>" for x in result)
    )
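

# Illustrative behavior (editor's note, not part of the upstream module):
#   generate_lorem_ipsum(n=2, html=False) returns two plain-text paragraphs
#   separated by a blank line; with the default html=True each paragraph is
#   escaped, wrapped in <p> tags, and returned as markupsafe.Markup.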


def url_quote(obj: t.Any, charset: str = "utf-8", for_qs: bool = False) -> str:
    """Quote a string for use in a URL using the given charset.

    :param obj: String or bytes to quote. Other types are converted to
        string then encoded to bytes using the given charset.
    :param charset: Encode text to bytes using this charset.
    :param for_qs: Quote "/" and use "+" for spaces.
    """
    if not isinstance(obj, bytes):
        if not isinstance(obj, str):
            obj = str(obj)

        obj = obj.encode(charset)

    safe = b"" if for_qs else b"/"
    rv = quote_from_bytes(obj, safe)

    if for_qs:
        rv = rv.replace("%20", "+")

    return rv
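

# Illustrative behavior of ``for_qs`` (editor's example, not part of the
# upstream module):
#
#   url_quote("a b/c")               # -> 'a%20b/c'  ("/" is kept safe)
#   url_quote("a b/c", for_qs=True)  # -> 'a+b%2Fc'  ("/" quoted, "+" for spaces)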


def unicode_urlencode(obj: t.Any, charset: str = "utf-8", for_qs: bool = False) -> str:
    import warnings

    warnings.warn(
        "'unicode_urlencode' has been renamed to 'url_quote'. The old"
        " name will be removed in Jinja 3.1.",
        DeprecationWarning,
        stacklevel=2,
    )
    return url_quote(obj, charset=charset, for_qs=for_qs)


@abc.MutableMapping.register
class LRUCache:
    """A simple LRU Cache implementation."""

    # this is fast for small capacities (something below 1000) but doesn't
    # scale. But as long as it's only used as storage for templates this
    # won't do any harm.

    def __init__(self, capacity: int) -> None:
        self.capacity = capacity
        self._mapping: t.Dict[t.Any, t.Any] = {}
        self._queue: "te.Deque[t.Any]" = deque()
        self._postinit()

    def _postinit(self) -> None:
        # alias all queue methods for faster lookup
        self._popleft = self._queue.popleft
        self._pop = self._queue.pop
        self._remove = self._queue.remove
        self._wlock = Lock()
        self._append = self._queue.append

    def __getstate__(self) -> t.Mapping[str, t.Any]:
        return {
            "capacity": self.capacity,
            "_mapping": self._mapping,
            "_queue": self._queue,
        }

    def __setstate__(self, d: t.Mapping[str, t.Any]) -> None:
        self.__dict__.update(d)
        self._postinit()

    def __getnewargs__(self) -> t.Tuple:
        return (self.capacity,)

    def copy(self) -> "LRUCache":
        """Return a shallow copy of the instance."""
        rv = self.__class__(self.capacity)
        rv._mapping.update(self._mapping)
        rv._queue.extend(self._queue)
        return rv

    def get(self, key: t.Any, default: t.Any = None) -> t.Any:
        """Return an item from the cache dict or `default`"""
        try:
            return self[key]
        except KeyError:
            return default

    def setdefault(self, key: t.Any, default: t.Any = None) -> t.Any:
        """Set `default` if the key is not in the cache, otherwise
        leave it unchanged. Return the value of this key.
        """
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default

    def clear(self) -> None:
        """Clear the cache."""
        with self._wlock:
            self._mapping.clear()
            self._queue.clear()

    def __contains__(self, key: t.Any) -> bool:
        """Check if a key exists in this cache."""
        return key in self._mapping

    def __len__(self) -> int:
        """Return the current size of the cache."""
        return len(self._mapping)

    def __repr__(self) -> str:
        return f"<{type(self).__name__} {self._mapping!r}>"

    def __getitem__(self, key: t.Any) -> t.Any:
        """Get an item from the cache. Moves the item up so that it has
        the highest priority.

        Raise a `KeyError` if it does not exist.
        """
        with self._wlock:
            rv = self._mapping[key]

            if self._queue[-1] != key:
                try:
                    self._remove(key)
                except ValueError:
                    # if something removed the key from the container
                    # while we read, ignore the ValueError that we would
                    # get otherwise.
                    pass

                self._append(key)

            return rv

    def __setitem__(self, key: t.Any, value: t.Any) -> None:
        """Set the value for an item. Moves the item up so that it
        has the highest priority.
        """
        with self._wlock:
            if key in self._mapping:
                self._remove(key)
            elif len(self._mapping) == self.capacity:
                del self._mapping[self._popleft()]

            self._append(key)
            self._mapping[key] = value

    def __delitem__(self, key: t.Any) -> None:
        """Remove an item from the cache dict.
        Raise a `KeyError` if it does not exist.
        """
        with self._wlock:
            del self._mapping[key]

            try:
                self._remove(key)
            except ValueError:
                pass

    def items(self) -> t.Iterable[t.Tuple[t.Any, t.Any]]:
        """Return a list of items."""
        result = [(key, self._mapping[key]) for key in list(self._queue)]
        result.reverse()
        return result

    def values(self) -> t.Iterable[t.Any]:
        """Return a list of all values."""
        return [x[1] for x in self.items()]

    def keys(self) -> t.Iterable[t.Any]:
        """Return a list of all keys ordered by most recent usage."""
        return list(self)

    def __iter__(self) -> t.Iterator[t.Any]:
        return reversed(tuple(self._queue))

    def __reversed__(self) -> t.Iterator[t.Any]:
        """Iterate over the keys in the cache dict, oldest items
        coming first.
        """
        return iter(tuple(self._queue))

    __copy__ = copy
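

# Illustrative use of LRUCache (editor's example, not part of the upstream
# module). The deque tracks recency; the dict stores values:
#
#   cache = LRUCache(capacity=2)
#   cache["a"] = 1
#   cache["b"] = 2
#   cache["a"]      # touching "a" makes it the most recently used key
#   cache["c"] = 3  # evicts "b", the least recently used key
#   assert "b" not in cache and cache.keys() == ["c", "a"]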


def select_autoescape(
    enabled_extensions: t.Collection[str] = ("html", "htm", "xml"),
    disabled_extensions: t.Collection[str] = (),
    default_for_string: bool = True,
    default: bool = False,
) -> t.Callable[[t.Optional[str]], bool]:
    """Intelligently sets the initial value of autoescaping based on the
    filename of the template. This is the recommended way to configure
    autoescaping if you do not want to write a custom function yourself.

    If you want to enable it for all templates created from strings or
    for all templates with `.html` and `.xml` extensions::

        from jinja2 import Environment, select_autoescape
        env = Environment(autoescape=select_autoescape(
            enabled_extensions=('html', 'xml'),
            default_for_string=True,
        ))

    Example configuration to turn it on at all times except if the template
    ends with `.txt`::

        from jinja2 import Environment, select_autoescape
        env = Environment(autoescape=select_autoescape(
            disabled_extensions=('txt',),
            default_for_string=True,
            default=True,
        ))

    The `enabled_extensions` is an iterable of all the extensions that
    autoescaping should be enabled for. Likewise `disabled_extensions` is
    a list of all templates it should be disabled for. If a template is
    loaded from a string then the default from `default_for_string` is used.
    If nothing matches then the initial value of autoescaping is set to the
    value of `default`.

    For security reasons this function operates case insensitively.

    .. versionadded:: 2.9
    """
    enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions)
    disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions)

    def autoescape(template_name: t.Optional[str]) -> bool:
        if template_name is None:
            return default_for_string
        template_name = template_name.lower()
        if template_name.endswith(enabled_patterns):
            return True
        if template_name.endswith(disabled_patterns):
            return False
        return default

    return autoescape
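

# With the defaults above (editor's example, not part of the upstream
# module):
#
#   guess = select_autoescape()
#   guess("index.html")  # True  (matches enabled_extensions)
#   guess("robots.txt")  # False (no match, falls back to ``default``)
#   guess(None)          # True  (string template, ``default_for_string``)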


def htmlsafe_json_dumps(
    obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any
) -> markupsafe.Markup:
    """Serialize an object to a string of JSON with :func:`json.dumps`,
    then replace HTML-unsafe characters with Unicode escapes and mark
    the result safe with :class:`~markupsafe.Markup`.

    This is available in templates as the ``|tojson`` filter.

    The following characters are escaped: ``<``, ``>``, ``&``, ``'``.

    The returned string is safe to render in HTML documents and
    ``<script>`` tags. The exception is in HTML attributes that are
    double quoted; either use single quotes or the ``|forceescape``
    filter.

    :param obj: The object to serialize to JSON.
    :param dumps: The ``dumps`` function to use. Defaults to
        ``env.policies["json.dumps_function"]``, which defaults to
        :func:`json.dumps`.
    :param kwargs: Extra arguments to pass to ``dumps``. Merged onto
        ``env.policies["json.dumps_kwargs"]``.

    .. versionchanged:: 3.0
        The ``dumper`` parameter is renamed to ``dumps``.

    .. versionadded:: 2.9
    """
    if dumps is None:
        dumps = json.dumps

    return markupsafe.Markup(
        dumps(obj, **kwargs)
        .replace("<", "\\u003c")
        .replace(">", "\\u003e")
        .replace("&", "\\u0026")
        .replace("'", "\\u0027")
    )
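

# Illustrative output (editor's example, not part of the upstream module):
#
#   htmlsafe_json_dumps({"msg": "<b>&'"})
#   # -> Markup('{"msg": "\\u003cb\\u003e\\u0026\\u0027"}')
#
# The Unicode escapes survive JSON parsing unchanged, so the payload is
# identical after ``JSON.parse`` in the browser.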


class Cycler:
    """Cycle through values by yielding them one at a time, then restarting
    once the end is reached. Available as ``cycler`` in templates.

    Similar to ``loop.cycle``, but can be used outside loops or across
    multiple loops. For example, render a list of folders and files in a
    list, alternately giving them "odd" and "even" classes.

    .. code-block:: html+jinja

        {% set row_class = cycler("odd", "even") %}
        <ul class="browser">
            {% for folder in folders %}
              <li class="folder {{ row_class.next() }}">{{ folder }}
            {% endfor %}
            {% for file in files %}
              <li class="file {{ row_class.next() }}">{{ file }}
            {% endfor %}
        </ul>

    :param items: Each positional argument will be yielded in the order
        given for each cycle.

    .. versionadded:: 2.1
    """

    def __init__(self, *items: t.Any) -> None:
        if not items:
            raise RuntimeError("at least one item has to be provided")
        self.items = items
        self.pos = 0

    def reset(self) -> None:
        """Resets the current item to the first item."""
        self.pos = 0

    @property
    def current(self) -> t.Any:
        """Return the current item. Equivalent to the item that will be
        returned next time :meth:`next` is called.
        """
        return self.items[self.pos]

    def next(self) -> t.Any:
        """Return the current item, then advance :attr:`current` to the
        next item.
        """
        rv = self.current
        self.pos = (self.pos + 1) % len(self.items)
        return rv

    __next__ = next
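

# Outside of templates (editor's example, not part of the upstream module):
#
#   c = Cycler("odd", "even")
#   c.next(), c.next(), c.next()  # -> ('odd', 'even', 'odd')
#   c.reset(); c.current          # -> 'odd'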


class Joiner:
    """A joining helper for templates."""

    def __init__(self, sep: str = ", ") -> None:
        self.sep = sep
        self.used = False

    def __call__(self) -> str:
        if not self.used:
            self.used = True
            return ""
        return self.sep
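

# Typical template use (editor's example, not part of the upstream module):
#
#   {% set pipe = joiner("|") %}
#   {% for tag in tags %}{{ pipe() }}{{ tag }}{% endfor %}
#
# The first call returns "" and every later call returns the separator,
# so no leading "|" is emitted.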


class Namespace:
    """A namespace object that can hold arbitrary attributes. It may be
    initialized from a dictionary or with keyword arguments."""

    def __init__(*args: t.Any, **kwargs: t.Any) -> None:  # noqa: B902
        self, args = args[0], args[1:]
        self.__attrs = dict(*args, **kwargs)

    def __getattribute__(self, name: str) -> t.Any:
        # __class__ is needed for the awaitable check in async mode
        if name in {"_Namespace__attrs", "__class__"}:
            return object.__getattribute__(self, name)
        try:
            return self.__attrs[name]
        except KeyError:
            raise AttributeError(name) from None

    def __setitem__(self, name: str, value: t.Any) -> None:
        self.__attrs[name] = value

    def __repr__(self) -> str:
        return f"<Namespace {self.__attrs!r}>"


class Markup(markupsafe.Markup):
    def __new__(cls, base="", encoding=None, errors="strict"):  # type: ignore
        warnings.warn(
            "'jinja2.Markup' is deprecated and will be removed in Jinja"
            " 3.1. Import 'markupsafe.Markup' instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return super().__new__(cls, base, encoding, errors)


def escape(s: t.Any) -> str:
    warnings.warn(
        "'jinja2.escape' is deprecated and will be removed in Jinja"
        " 3.1. Import 'markupsafe.escape' instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return markupsafe.escape(s)
@@ -1,11 +1,19 @@
-# -*- coding: utf-8 -*-
 """API for traversing the AST nodes. Implemented by the compiler and
 meta introspection.
 """
+import typing as t
+
 from .nodes import Node
 
+if t.TYPE_CHECKING:
+    import typing_extensions as te
 
-class NodeVisitor(object):
+    class VisitCallable(te.Protocol):
+        def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:
+            ...
+
+
+class NodeVisitor:
     """Walks the abstract syntax tree and call visitor functions for every
     node found. The visitor functions may return values which will be
     forwarded by the `visit` method.
@@ -17,22 +25,23 @@ class name of the node. So a `TryFinally` node visit function would
     (return value `None`) the `generic_visit` visitor is used instead.
     """
 
-    def get_visitor(self, node):
+    def get_visitor(self, node: Node) -> "t.Optional[VisitCallable]":
         """Return the visitor function for this node or `None` if no visitor
         exists for this node. In that case the generic visit function is
         used instead.
         """
-        method = "visit_" + node.__class__.__name__
-        return getattr(self, method, None)
+        return getattr(self, f"visit_{type(node).__name__}", None)  # type: ignore
 
-    def visit(self, node, *args, **kwargs):
+    def visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:
         """Visit a node."""
         f = self.get_visitor(node)
+
         if f is not None:
             return f(node, *args, **kwargs)
+
         return self.generic_visit(node, *args, **kwargs)
 
-    def generic_visit(self, node, *args, **kwargs):
+    def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:
         """Called if no explicit visitor function exists for a node."""
         for node in node.iter_child_nodes():
             self.visit(node, *args, **kwargs)
@@ -49,7 +58,7 @@ class NodeTransformer(NodeVisitor):
     replacement takes place.
     """
 
-    def generic_visit(self, node, *args, **kwargs):
+    def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> Node:
         for field, old_value in node.iter_fields():
             if isinstance(old_value, list):
                 new_values = []
@@ -71,11 +80,13 @@ def generic_visit(self, node, *args, **kwargs):
                     setattr(node, field, new_node)
         return node
 
-    def visit_list(self, node, *args, **kwargs):
+    def visit_list(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.List[Node]:
         """As transformers may return lists in some places this method
         can be used to enforce a list as return value.
         """
         rv = self.visit(node, *args, **kwargs)
+
         if not isinstance(rv, list):
-            rv = [rv]
+            return [rv]
+
         return rv
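
# A minimal sketch of the typed visitor API above (editor's example, not part
# of the diff; assumes jinja2's ``Name`` node type, which has a ``name``
# field):
#
#   class NameCollector(NodeVisitor):
#       def __init__(self) -> None:
#           self.names: t.List[str] = []
#
#       def visit_Name(self, node: Node, *args: t.Any, **kwargs: t.Any) -> None:
#           self.names.append(node.name)
#           self.generic_visit(node, *args, **kwargs)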

1 lib/spack/external/_vendoring/jsonschema.pyi vendored Normal file
@@ -0,0 +1 @@
+from jsonschema import *
@@ -27,11 +27,5 @@
     RefResolver,
     validate,
 )
-# try:
-#     from importlib import metadata
-# except ImportError:  # for Python<3.8
-#     import importlib_metadata as metadata
-# __version__ = metadata.version("jsonschema")
-# set the version manually here, as we don't install dist-info or egg-info
-# files for vendored spack externals.
-__version__ = '3.2.0'
+
+__version__ = "3.2.0"
Some files were not shown because too many files have changed in this diff.