Compare commits
590 Commits
staging-fa
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
50276fb9a2 | ||
|
|
84c03f504e | ||
|
|
553846537b | ||
|
|
c6213b4d1a | ||
|
|
ecd1d51f4d | ||
|
|
fd1952d205 | ||
|
|
19926ce9c5 | ||
|
|
33a052a413 | ||
|
|
59740a3f2e | ||
|
|
4704486d49 | ||
|
|
dc632d9934 | ||
|
|
3687fa3a0b | ||
|
|
42b02362e1 | ||
|
|
05abf4e953 | ||
|
|
a3bae5a40c | ||
|
|
456089ba22 | ||
|
|
d15bda6f83 | ||
|
|
de38d16edd | ||
|
|
fd77359a5a | ||
|
|
3581de3619 | ||
|
|
84d3306857 | ||
|
|
5bbfb7bb18 | ||
|
|
69ce064955 | ||
|
|
ab3c2437e8 | ||
|
|
70dec0bd33 | ||
|
|
54197e6af4 | ||
|
|
c237bc163a | ||
|
|
5f5d7aafa2 | ||
|
|
5c2504702e | ||
|
|
776a26e021 | ||
|
|
999e4e9327 | ||
|
|
3a5fc4c416 | ||
|
|
663b7acdc1 | ||
|
|
e8d10d7e9e | ||
|
|
caa6a73b53 | ||
|
|
839b8befc3 | ||
|
|
a2fb8db0e7 | ||
|
|
311b4cd295 | ||
|
|
0d471ea5b7 | ||
|
|
8d9cbcb54c | ||
|
|
23845c53e0 | ||
|
|
e9e823c2e7 | ||
|
|
fe4bcd08a9 | ||
|
|
47cc05c39f | ||
|
|
a2ebb72da0 | ||
|
|
4413e41389 | ||
|
|
d45f6720f4 | ||
|
|
e2042058f1 | ||
|
|
bb54adc298 | ||
|
|
1ef3f8f5f5 | ||
|
|
ca6dd299ee | ||
|
|
5f9bed8409 | ||
|
|
625631832e | ||
|
|
7ec5a79b46 | ||
|
|
a187346d08 | ||
|
|
b442168127 | ||
|
|
bdd38ec73c | ||
|
|
98f6ea2875 | ||
|
|
a977f9deb9 | ||
|
|
4f60392b3e | ||
|
|
35ff0b71b6 | ||
|
|
1575848e81 | ||
|
|
657fd89531 | ||
|
|
0f20d6bfa8 | ||
|
|
c553f33cf0 | ||
|
|
c5b593a34c | ||
|
|
1bc0a28e35 | ||
|
|
0f71ee73cf | ||
|
|
4deb3111d3 | ||
|
|
5bf958fd1b | ||
|
|
7eed3d8bc1 | ||
|
|
7c78444174 | ||
|
|
19c8f00aba | ||
|
|
a6b3079ae3 | ||
|
|
acafab9b5f | ||
|
|
ef67d84096 | ||
|
|
7c56a1d764 | ||
|
|
cfdccd0a03 | ||
|
|
22f1e5ad0b | ||
|
|
05a051162d | ||
|
|
f968d2f0ca | ||
|
|
67d510ec4b | ||
|
|
f44d95e7c2 | ||
|
|
00495cb263 | ||
|
|
40ab937954 | ||
|
|
59bb7f6f18 | ||
|
|
bf3cf80161 | ||
|
|
63ea6585cd | ||
|
|
ea149103be | ||
|
|
9c7e8d3135 | ||
|
|
8d08599c2e | ||
|
|
3e617554bb | ||
|
|
9a0b6a31c9 | ||
|
|
8d7df1587a | ||
|
|
dad6671556 | ||
|
|
bf9a4e2862 | ||
|
|
719254cb89 | ||
|
|
281ad3e16e | ||
|
|
d4515ba2fe | ||
|
|
b78070c653 | ||
|
|
aa0637d87b | ||
|
|
d5779cd65d | ||
|
|
1c45f2ab2e | ||
|
|
0eca067dd7 | ||
|
|
1ed7c15c97 | ||
|
|
dae5e305db | ||
|
|
1d8af300c4 | ||
|
|
1f4971c293 | ||
|
|
fd1d5cc9ef | ||
|
|
e6700d2089 | ||
|
|
d1048413f8 | ||
|
|
3e0391eb26 | ||
|
|
049635cd1a | ||
|
|
ca30448b74 | ||
|
|
a79a8c4c7a | ||
|
|
3f5f60aa62 | ||
|
|
79a45bf6c7 | ||
|
|
0852421d17 | ||
|
|
f368fb4951 | ||
|
|
0af6deb998 | ||
|
|
c9fb5c61ab | ||
|
|
a3a5c1da40 | ||
|
|
1f1fabade2 | ||
|
|
4db4e22d09 | ||
|
|
5b61a937a1 | ||
|
|
edc9089808 | ||
|
|
a1540964f5 | ||
|
|
95ab14dfe7 | ||
|
|
d68ad18de2 | ||
|
|
305e7b4c68 | ||
|
|
79096b753c | ||
|
|
6d6d83ea3e | ||
|
|
f78c47f441 | ||
|
|
13b6b27fea | ||
|
|
dfc132af9d | ||
|
|
1a8a881082 | ||
|
|
aa61bfdedd | ||
|
|
b51cfd02af | ||
|
|
76a815c145 | ||
|
|
236220ef8e | ||
|
|
60a287290d | ||
|
|
cc301e1e62 | ||
|
|
f3bf1b80b0 | ||
|
|
3ef72c5eed | ||
|
|
ccaad93b62 | ||
|
|
fb16cedabf | ||
|
|
3116e844be | ||
|
|
e3bb36afac | ||
|
|
30fbfffb96 | ||
|
|
8cb3c65f55 | ||
|
|
7417496d9e | ||
|
|
c27f9a7c40 | ||
|
|
19857e3fa0 | ||
|
|
956ac59246 | ||
|
|
ba632451ef | ||
|
|
606b782bb0 | ||
|
|
e2ca121180 | ||
|
|
b453032363 | ||
|
|
98697683a5 | ||
|
|
6f93057f83 | ||
|
|
88871c4cb0 | ||
|
|
ad34ca14b6 | ||
|
|
e0e19de2c1 | ||
|
|
5085f864b3 | ||
|
|
6f40845ba3 | ||
|
|
6788467670 | ||
|
|
87f0603c87 | ||
|
|
0d25d51a04 | ||
|
|
d0ddf60737 | ||
|
|
91ab34350f | ||
|
|
38e6bc6e47 | ||
|
|
6ccea00bba | ||
|
|
c152a6dbbf | ||
|
|
b7df29ff56 | ||
|
|
77c2d02a4d | ||
|
|
aea263ec2c | ||
|
|
1666f6d3b0 | ||
|
|
63a1b6020e | ||
|
|
77a21114a8 | ||
|
|
6e2c5b47d4 | ||
|
|
e2f4f874a2 | ||
|
|
63f65d30bc | ||
|
|
307799fd27 | ||
|
|
bf82f9fff2 | ||
|
|
a9862b27c1 | ||
|
|
2ae439cd00 | ||
|
|
21a84297d8 | ||
|
|
e22cc1cc17 | ||
|
|
eee25ec33f | ||
|
|
59e8e1af5d | ||
|
|
1fd41adbfa | ||
|
|
e94b92f73e | ||
|
|
5d06444a57 | ||
|
|
9d582e19b1 | ||
|
|
bf34129d62 | ||
|
|
c9dd9afe5e | ||
|
|
b40229daa6 | ||
|
|
253f8b8c4c | ||
|
|
c03c642719 | ||
|
|
f581199930 | ||
|
|
3637fea711 | ||
|
|
e23492ee15 | ||
|
|
9f4f288d00 | ||
|
|
b36a4da1da | ||
|
|
5ecdbc97a7 | ||
|
|
0d4680c01f | ||
|
|
9374b1d9b4 | ||
|
|
22ea970beb | ||
|
|
45c6b89ec7 | ||
|
|
70c6903191 | ||
|
|
f33be41b0f | ||
|
|
5f53dab6de | ||
|
|
4dbd2618fb | ||
|
|
a43bdf9479 | ||
|
|
9360a8eebe | ||
|
|
7d84a7e086 | ||
|
|
ea46306e71 | ||
|
|
97faefb5b3 | ||
|
|
745e5e2c40 | ||
|
|
84315e8e56 | ||
|
|
0a302154b0 | ||
|
|
4a5355f5dc | ||
|
|
88b49f2c6a | ||
|
|
1684dff8bc | ||
|
|
8870b3a342 | ||
|
|
258256e739 | ||
|
|
133ce88284 | ||
|
|
099438dc3f | ||
|
|
94d87dac25 | ||
|
|
4384e62d01 | ||
|
|
cb4434c72b | ||
|
|
892e1a3206 | ||
|
|
5f5d895cb1 | ||
|
|
e4c2d446d2 | ||
|
|
cfe519fbb0 | ||
|
|
1b14e2ed4d | ||
|
|
8c060f3142 | ||
|
|
c2f77181d7 | ||
|
|
cd67c3a66a | ||
|
|
5e037680d6 | ||
|
|
5f628fb63a | ||
|
|
777bbb73af | ||
|
|
c1807c19a7 | ||
|
|
20ba53752d | ||
|
|
81dd6cabad | ||
|
|
5c6f39a511 | ||
|
|
e7fdb6dab2 | ||
|
|
d423d68901 | ||
|
|
4cc7908a95 | ||
|
|
0cf933cc9b | ||
|
|
93dd330288 | ||
|
|
548cd95a30 | ||
|
|
a8d63e2852 | ||
|
|
1e254ee060 | ||
|
|
154402b355 | ||
|
|
0ed108506a | ||
|
|
be6f6f2a28 | ||
|
|
8144f50ad5 | ||
|
|
51e8ee9f76 | ||
|
|
7e96898a06 | ||
|
|
451dca0a27 | ||
|
|
a316cb96c2 | ||
|
|
9ff6282fd6 | ||
|
|
8b9b746cfa | ||
|
|
9b01428dde | ||
|
|
3e28396ab0 | ||
|
|
6c461095e0 | ||
|
|
3a63bf21e1 | ||
|
|
e93758b39c | ||
|
|
7c422261fc | ||
|
|
58fac3caaf | ||
|
|
6a436ac4e9 | ||
|
|
698f9f514f | ||
|
|
f765fe5ea7 | ||
|
|
f49b8afb91 | ||
|
|
456f0a1e57 | ||
|
|
1aaafbed38 | ||
|
|
91bb7945cb | ||
|
|
f712bbd622 | ||
|
|
8ecac52817 | ||
|
|
310a1fa4e4 | ||
|
|
3ae4835f34 | ||
|
|
a0a0b3f47b | ||
|
|
46400708d0 | ||
|
|
51e711deb1 | ||
|
|
6e469e2c56 | ||
|
|
a4ab5bc26b | ||
|
|
067f5c1ecd | ||
|
|
0cfe547446 | ||
|
|
87dbe7aebc | ||
|
|
4cc7035246 | ||
|
|
aea50264e2 | ||
|
|
41742850ce | ||
|
|
5f728e1eb2 | ||
|
|
8c7ef11f69 | ||
|
|
d74753f0e5 | ||
|
|
c23ca57bed | ||
|
|
9c67f65b7c | ||
|
|
7f3417441c | ||
|
|
6add89827b | ||
|
|
a0713fbf40 | ||
|
|
ed2cdb03c6 | ||
|
|
2707697dc4 | ||
|
|
c861ee86d0 | ||
|
|
5b58993660 | ||
|
|
c2c24e50e3 | ||
|
|
c5688a7839 | ||
|
|
09475aabfd | ||
|
|
79d4cbda1a | ||
|
|
b738d09014 | ||
|
|
10764cd166 | ||
|
|
640d714345 | ||
|
|
916fd75c11 | ||
|
|
8d057042c4 | ||
|
|
10fdc3ec57 | ||
|
|
64a3c0b94e | ||
|
|
842072fe55 | ||
|
|
d37effda22 | ||
|
|
975e479034 | ||
|
|
b4c259019b | ||
|
|
d028d3acbd | ||
|
|
edf78a3862 | ||
|
|
51666786b6 | ||
|
|
be1f0f71e0 | ||
|
|
7f481fdb1a | ||
|
|
71e061429e | ||
|
|
e2279e671a | ||
|
|
41f566d371 | ||
|
|
082e6ba927 | ||
|
|
8618aa349c | ||
|
|
ae604e8cac | ||
|
|
e45ceae224 | ||
|
|
32d83dcdad | ||
|
|
788340e1bb | ||
|
|
235648ed1b | ||
|
|
44337498b1 | ||
|
|
5eb08f94cd | ||
|
|
37d15cdc42 | ||
|
|
d3fa21febc | ||
|
|
246029f842 | ||
|
|
eef874b68e | ||
|
|
f54ad41f4e | ||
|
|
de1083d03e | ||
|
|
78b3da8727 | ||
|
|
a55b4f0bfd | ||
|
|
58b4e7747c | ||
|
|
7a1e258101 | ||
|
|
219634574f | ||
|
|
7ca0610513 | ||
|
|
d8c110587c | ||
|
|
9e3bd5306d | ||
|
|
3c86ba12f9 | ||
|
|
a7489361f5 | ||
|
|
1690ff155c | ||
|
|
4a2b2ad7b0 | ||
|
|
d20d37a631 | ||
|
|
676af4c2d7 | ||
|
|
16f20bf2ec | ||
|
|
cf48da40ba | ||
|
|
d4e9a3a4b1 | ||
|
|
b7465338f6 | ||
|
|
f6d048f250 | ||
|
|
a3fb4f818f | ||
|
|
30bf4b08b4 | ||
|
|
d50c183681 | ||
|
|
ea7ae5138e | ||
|
|
c0cea6bff9 | ||
|
|
e13dda0bb9 | ||
|
|
4d99bee231 | ||
|
|
4ba4930f7e | ||
|
|
99330be9d6 | ||
|
|
fba4a9e16e | ||
|
|
478184ad37 | ||
|
|
cbff3ed167 | ||
|
|
41e990ace1 | ||
|
|
6938d8b09d | ||
|
|
dad24d4aef | ||
|
|
e779c2595f | ||
|
|
41ce6dcf6f | ||
|
|
12703c6cd3 | ||
|
|
a609a9d398 | ||
|
|
8ebdca930c | ||
|
|
67d78714f7 | ||
|
|
3f115655b3 | ||
|
|
177fa71ff2 | ||
|
|
79ab25aa7d | ||
|
|
251a70c6d0 | ||
|
|
0ba23c7f0a | ||
|
|
79a11d4c1d | ||
|
|
ffebe82072 | ||
|
|
4e74dcd802 | ||
|
|
3b01ae2d7a | ||
|
|
1be021c37c | ||
|
|
99ee497121 | ||
|
|
5e435f16c5 | ||
|
|
1cf6bcd510 | ||
|
|
535a1cb670 | ||
|
|
e45ba0dde5 | ||
|
|
0d948c349d | ||
|
|
94e8b7f8d3 | ||
|
|
533127815b | ||
|
|
3e10bb5f32 | ||
|
|
1d87fa448c | ||
|
|
2e47253d7c | ||
|
|
f16726abdc | ||
|
|
ed06d345cd | ||
|
|
18d1bd382d | ||
|
|
571bdbec92 | ||
|
|
73b6385940 | ||
|
|
9ae54273b1 | ||
|
|
231d8b7214 | ||
|
|
a6df7c30a3 | ||
|
|
efdb55d12f | ||
|
|
ef7d26c3d9 | ||
|
|
b489f676fe | ||
|
|
a149e02742 | ||
|
|
770dca45d1 | ||
|
|
171962d948 | ||
|
|
cf51c634ca | ||
|
|
d131e8417c | ||
|
|
693c8fb667 | ||
|
|
63e2681784 | ||
|
|
f6c0aa9068 | ||
|
|
0b88e29e87 | ||
|
|
dd5df302c9 | ||
|
|
279cabaa98 | ||
|
|
04735f8c7f | ||
|
|
6df6f9b7ee | ||
|
|
c0ece6b0ee | ||
|
|
bd3d4d38c4 | ||
|
|
2ce31a81f6 | ||
|
|
4eaa5d3941 | ||
|
|
87d7916308 | ||
|
|
4daf82d87f | ||
|
|
4ccc958f85 | ||
|
|
b6111084a4 | ||
|
|
26d91ee4c2 | ||
|
|
d428412330 | ||
|
|
3a2c7dbccf | ||
|
|
8360a63dea | ||
|
|
294a27a650 | ||
|
|
2c024794af | ||
|
|
584cea762b | ||
|
|
d986bcb86c | ||
|
|
8b41f3590b | ||
|
|
9a992986d6 | ||
|
|
ad3d603999 | ||
|
|
c76cb3ece8 | ||
|
|
d01b2edd55 | ||
|
|
6afc517edd | ||
|
|
048bbaa936 | ||
|
|
fed410c65f | ||
|
|
d4ec2ca9d0 | ||
|
|
9e2e1814a2 | ||
|
|
491f3f9924 | ||
|
|
5541c688e7 | ||
|
|
dae62d58bf | ||
|
|
b9b725c2ef | ||
|
|
f36ff3d808 | ||
|
|
de1805c9f0 | ||
|
|
828d85c167 | ||
|
|
e3869cacf0 | ||
|
|
37e9074bb9 | ||
|
|
8d6d192a50 | ||
|
|
bd99faf213 | ||
|
|
6dcba93e99 | ||
|
|
2972d604ae | ||
|
|
57d6718655 | ||
|
|
4911712d46 | ||
|
|
cdd07772e7 | ||
|
|
8e3ed38f5c | ||
|
|
03194d7f8b | ||
|
|
ca2a75da4c | ||
|
|
dfb80bed37 | ||
|
|
58febc1140 | ||
|
|
378bff8652 | ||
|
|
c220de72ed | ||
|
|
00f7657c4d | ||
|
|
0c0d36fbdc | ||
|
|
68a4ac86cc | ||
|
|
599a5db844 | ||
|
|
9ae4de6fcb | ||
|
|
6e7d6a99ce | ||
|
|
1a8409ab8a | ||
|
|
b3385361de | ||
|
|
90ba8ee8e3 | ||
|
|
e85e1c383b | ||
|
|
3960ab29d1 | ||
|
|
11ecdfa68e | ||
|
|
dcf3c2cf21 | ||
|
|
f264829cb4 | ||
|
|
78a2861ae8 | ||
|
|
e53b431e6b | ||
|
|
32dd33e5ac | ||
|
|
94c6dcfce8 | ||
|
|
9feb10b304 | ||
|
|
ba2839f601 | ||
|
|
d9be2c79fb | ||
|
|
fe669e0bf1 | ||
|
|
8b1d0ec0ba | ||
|
|
df912d09e8 | ||
|
|
0ef0f21ff0 | ||
|
|
34e56d5e5d | ||
|
|
c7f6e58483 | ||
|
|
ef81fcc381 | ||
|
|
63063cbc8e | ||
|
|
a0c4802fdb | ||
|
|
d8498e2e7f | ||
|
|
0b94083866 | ||
|
|
94a242b27f | ||
|
|
48bde21983 | ||
|
|
2aaaaafffd | ||
|
|
425762d6b3 | ||
|
|
3e33c2a21a | ||
|
|
94c51cb2c0 | ||
|
|
02de992f1c | ||
|
|
9d53e3dd7c | ||
|
|
2ca668b8e6 | ||
|
|
e5e14388a1 | ||
|
|
07e7281b85 | ||
|
|
ceaf9e0a5b | ||
|
|
5e81213948 | ||
|
|
92fa76e95f | ||
|
|
98987f25e6 | ||
|
|
aff6f5490e | ||
|
|
bd4cc8c954 | ||
|
|
e9f1aba7aa | ||
|
|
ac60160159 | ||
|
|
8a5ea088b4 | ||
|
|
84f94c8303 | ||
|
|
845862f073 | ||
|
|
8a197f6bc2 | ||
|
|
d0ec1b098d | ||
|
|
26f5daf764 | ||
|
|
588aa31fa4 | ||
|
|
3b2c29c70e | ||
|
|
1169857ba2 | ||
|
|
dc36546f78 | ||
|
|
37fc4ffd3f | ||
|
|
c13a6a7ced | ||
|
|
a4c1f3f48e | ||
|
|
1a3ae785a0 | ||
|
|
11198cf8af | ||
|
|
0178d2121b | ||
|
|
dcde566840 | ||
|
|
c82ac85098 | ||
|
|
d1574a2035 | ||
|
|
65dc92fad1 | ||
|
|
6f7845601b | ||
|
|
810fc73ec4 | ||
|
|
d3265c4af9 | ||
|
|
7d41eefb8a | ||
|
|
d43b438c7c | ||
|
|
c6dc768310 | ||
|
|
fd49c4f18b | ||
|
|
d4bfdbed60 | ||
|
|
0b97ca09bd | ||
|
|
a2da1e3bfd | ||
|
|
3251094c12 | ||
|
|
fd5dd8a7bb | ||
|
|
ce78ce9453 | ||
|
|
12f4438c98 | ||
|
|
515816533f | ||
|
|
042444026c | ||
|
|
d57e41758e | ||
|
|
e4b03832d7 | ||
|
|
8d30e2cfd2 | ||
|
|
2d178562ac | ||
|
|
811dce8d40 | ||
|
|
9c8729eb4e | ||
|
|
b09ce45381 | ||
|
|
64b4cd1026 | ||
|
|
eccb3973e6 | ||
|
|
4cb59bf355 | ||
|
|
3e575731dc | ||
|
|
f01f2c9409 | ||
|
|
638b5b88ed | ||
|
|
b6155f7d81 | ||
|
|
2197698461 | ||
|
|
a040e1ec26 | ||
|
|
1fd796360b | ||
|
|
a1336ee193 | ||
|
|
2fea36536f | ||
|
|
5a92f9f10b | ||
|
|
31581f258e | ||
|
|
1e8c984620 | ||
|
|
e835e76959 | ||
|
|
d76edd60c5 | ||
|
|
40db05a5f8 | ||
|
|
84b48faa56 |
244
.github/workflows/build.yml
vendored
244
.github/workflows/build.yml
vendored
@ -2,86 +2,206 @@ name: 'build'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
repository_dispatch:
|
||||
types: [manual-build]
|
||||
inputs:
|
||||
optional_deps:
|
||||
description: 'optional_deps=pkg-A:optional-dep-B,pkg-C:optional-dep-D'
|
||||
default: ''
|
||||
required: false
|
||||
type: string
|
||||
context:
|
||||
description: 'Extra information from invoker'
|
||||
default: ''
|
||||
required: false
|
||||
type: string
|
||||
schedule:
|
||||
- cron: '0 0/4 * * *'
|
||||
- cron: '0 0/3 * * *'
|
||||
|
||||
env:
|
||||
CI: true
|
||||
PY_COLORS: 1
|
||||
PYTHONUNBUFFERED: 1
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
|
||||
build:
|
||||
runs-on: windows-latest
|
||||
continue-on-error: true
|
||||
defaults:
|
||||
run:
|
||||
shell: msys2 {0}
|
||||
schedule:
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
concurrency: autobuild-maint
|
||||
|
||||
outputs:
|
||||
build-plan: ${{ steps.check.outputs.build-plan }}
|
||||
|
||||
steps:
|
||||
|
||||
- uses: actions/checkout@v2
|
||||
- name: Dump inputs
|
||||
if: ${{ github.event_name == 'workflow_dispatch' }}
|
||||
env:
|
||||
CONTEXT: '${{ toJSON(github.event.inputs) }}'
|
||||
run: |
|
||||
echo "$CONTEXT"
|
||||
|
||||
- uses: msys2/setup-msys2@v2
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.13'
|
||||
cache: 'pip'
|
||||
cache-dependency-path: 'requirements.txt'
|
||||
|
||||
- name: Install deps
|
||||
env:
|
||||
PIP_DISABLE_PIP_VERSION_CHECK: 1
|
||||
run: |
|
||||
python -m venv .venv
|
||||
source .venv/bin/activate
|
||||
python -m pip install -r requirements.txt
|
||||
echo "$VIRTUAL_ENV/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: autobuild cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/.autobuild_cache
|
||||
key: autobuild_cache-${{ github.job }}-${{ github.run_id }}-${{ github.run_attempt }}
|
||||
restore-keys: autobuild_cache-
|
||||
|
||||
- name: Check what we should run
|
||||
id: check
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
|
||||
OPTIONAL_DEPS: ${{ github.event.inputs.optional_deps }}
|
||||
run: |
|
||||
python -m msys2_autobuild write-build-plan --optional-deps "$OPTIONAL_DEPS" build_plan.json
|
||||
buildPlan="$(cat build_plan.json)"
|
||||
echo "build-plan=$buildPlan" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Clean up assets
|
||||
if: steps.check.outputs.build-plan != '[]'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
|
||||
run: |
|
||||
python -m msys2_autobuild clean-assets
|
||||
|
||||
- name: Show build queue
|
||||
if: steps.check.outputs.build-plan != '[]'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
|
||||
OPTIONAL_DEPS: ${{ github.event.inputs.optional_deps }}
|
||||
run: |
|
||||
python -m msys2_autobuild show --optional-deps "$OPTIONAL_DEPS"
|
||||
|
||||
build:
|
||||
timeout-minutes: 4320
|
||||
needs: schedule
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
concurrency: autobuild-build-${{ matrix.name }}
|
||||
|
||||
if: ${{ needs.schedule.outputs.build-plan != '[]' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.schedule.outputs.build-plan) }}
|
||||
name: ${{ matrix.name }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
|
||||
steps:
|
||||
|
||||
- name: Configure Pagefile
|
||||
if: ${{ matrix.hosted }}
|
||||
# https://github.com/al-cheb/configure-pagefile-action/issues/16
|
||||
continue-on-error: true
|
||||
uses: al-cheb/configure-pagefile-action@a3b6ebd6b634da88790d9c58d4b37a7f4a7b8708
|
||||
with:
|
||||
minimum-size: 4GB
|
||||
maximum-size: 16GB
|
||||
disk-root: "C:"
|
||||
|
||||
- name: Runner details
|
||||
run: |
|
||||
Get-PSDrive -PSProvider FileSystem
|
||||
Get-CIMInstance -Class Win32_Processor | Select-Object -Property Name
|
||||
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
id: python
|
||||
with:
|
||||
python-version: '3.13'
|
||||
# Avoid it setting CMake/pkg-config variables
|
||||
# https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#environment-variables
|
||||
update-environment: false
|
||||
|
||||
# Work around https://github.com/actions/setup-python/issues/1050
|
||||
- name: Cache pip dependencies
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~\AppData\Local\pip\Cache
|
||||
key: ${{ runner.os }}-${{ runner.arch }}-pip-${{ hashFiles('requirements.txt') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-${{ runner.arch }}-pip-
|
||||
|
||||
- name: Install deps
|
||||
env:
|
||||
PIP_DISABLE_PIP_VERSION_CHECK: 1
|
||||
PYTHON_PATH: ${{ steps.python.outputs.python-path }}
|
||||
run: |
|
||||
& "$env:PYTHON_PATH" -m venv .venv
|
||||
.\.venv\Scripts\activate
|
||||
python -m pip install -r requirements.txt
|
||||
echo "$env:VIRTUAL_ENV\Scripts" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
|
||||
- name: autobuild cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/.autobuild_cache
|
||||
key: autobuild_cache-${{ github.job }}-${{ github.run_id }}-${{ github.run_attempt }}
|
||||
restore-keys: autobuild_cache-
|
||||
|
||||
# Note that ARM64 prior to Win11 requires x86 msys, but this will install x64
|
||||
- uses: msys2/setup-msys2@v2 # zizmor: ignore[unpinned-uses]
|
||||
id: msys2
|
||||
with:
|
||||
msystem: MSYS
|
||||
update: true
|
||||
install: base-devel msys2-devel mingw-w64-x86_64-toolchain mingw-w64-i686-toolchain git python python-pip
|
||||
install: ${{ matrix.packages }}
|
||||
location: '\M'
|
||||
release: ${{ matrix.hosted }}
|
||||
cache: ${{ matrix.hosted }}
|
||||
|
||||
- name: Install deps
|
||||
- name: Switch to the main mirror
|
||||
shell: msys2 {0}
|
||||
run: |
|
||||
python -m pip install --user -r requirements.txt
|
||||
echo 'Server = https://repo.msys2.org/mingw/$repo/' > /etc/pacman.d/mirrorlist.mingw
|
||||
echo 'Server = https://repo.msys2.org/msys/$arch/' > /etc/pacman.d/mirrorlist.msys
|
||||
pacman-conf.exe
|
||||
|
||||
- name: Show build queue
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Update using the main mirror & Check install
|
||||
run: |
|
||||
python autobuild.py show
|
||||
|
||||
- name: Clean up assets
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
python autobuild.py clean-assets
|
||||
msys2 -c 'pacman --noconfirm -Suuy'
|
||||
msys2 -c 'pacman --noconfirm -Suu'
|
||||
msys2 -c 'pacman -Qkq'
|
||||
|
||||
- name: Process build queue
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
|
||||
# https://github.com/actions/runner/issues/324#issuecomment-3324382354
|
||||
# https://github.com/actions/runner/pull/4053
|
||||
JOB_CHECK_RUN_ID: ${{ job.check_run_id }}
|
||||
MSYS2_ROOT: ${{ steps.msys2.outputs.msys2-location }}
|
||||
run: |
|
||||
export PACKAGER="CI (msys2-devtools/${GITHUB_SHA::8}/${GITHUB_RUN_ID})"
|
||||
mkdir -p _tmp/assets
|
||||
python autobuild.py build _tmp
|
||||
|
||||
- if: ${{ always() }}
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
path: _tmp/assets/*
|
||||
|
||||
staging:
|
||||
needs: [ build ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- uses: actions/download-artifact@v2
|
||||
|
||||
- uses: eine/tip@master
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
tag: 'staging-msys'
|
||||
files: artifact/msys/*.tar.*
|
||||
|
||||
- uses: eine/tip@master
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
tag: 'staging-mingw'
|
||||
files: artifact/mingw/*.tar.*
|
||||
|
||||
- uses: eine/tip@master
|
||||
continue-on-error: true
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
tag: 'staging-failed'
|
||||
files: artifact/failed/*.failed
|
||||
echo "JOB_CHECK_RUN_ID=$env:JOB_CHECK_RUN_ID"
|
||||
$BUILD_ROOT=Join-Path (Split-Path $env:GITHUB_WORKSPACE -Qualifier) "\"
|
||||
python -m msys2_autobuild build ${{ matrix.build-args }} "$env:MSYS2_ROOT" "$BUILD_ROOT"
|
||||
|
||||
80
.github/workflows/maint.yml
vendored
Normal file
80
.github/workflows/maint.yml
vendored
Normal file
@ -0,0 +1,80 @@
|
||||
name: 'maint'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
clear_failed_packages:
|
||||
description: 'clear_failed_packages=mingw-w64-foo,mingw-w64-bar'
|
||||
default: ''
|
||||
required: false
|
||||
type: string
|
||||
clear_failed_build_types:
|
||||
description: 'clear_failed_build_types=mingw64,clang64'
|
||||
default: ''
|
||||
required: false
|
||||
type: string
|
||||
context:
|
||||
description: 'Extra information from invoker'
|
||||
default: ''
|
||||
required: false
|
||||
type: string
|
||||
|
||||
permissions: {}
|
||||
|
||||
concurrency: autobuild-maint
|
||||
|
||||
jobs:
|
||||
|
||||
schedule:
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
|
||||
- name: Dump inputs
|
||||
if: ${{ github.event_name == 'workflow_dispatch' }}
|
||||
env:
|
||||
CONTEXT: '${{ toJSON(github.event.inputs) }}'
|
||||
run: |
|
||||
echo "$CONTEXT"
|
||||
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.13'
|
||||
cache: 'pip'
|
||||
cache-dependency-path: 'requirements.txt'
|
||||
|
||||
- name: Install deps
|
||||
env:
|
||||
PIP_DISABLE_PIP_VERSION_CHECK: 1
|
||||
run: |
|
||||
python -m venv .venv
|
||||
source .venv/bin/activate
|
||||
python -m pip install -r requirements.txt
|
||||
echo "$VIRTUAL_ENV/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Clear failed build types
|
||||
if: ${{ github.event.inputs.clear_failed_build_types != '' }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
|
||||
CLEAR_FAILED_BUILD_TYPES: ${{ github.event.inputs.clear_failed_build_types }}
|
||||
run: |
|
||||
python -m msys2_autobuild clear-failed --build-types "$CLEAR_FAILED_BUILD_TYPES"
|
||||
python -m msys2_autobuild update-status
|
||||
|
||||
- name: Clear failed packages
|
||||
if: ${{ github.event.inputs.clear_failed_packages != '' }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
|
||||
CLEAR_FAILED_PACKAGES: ${{ github.event.inputs.clear_failed_packages }}
|
||||
run: |
|
||||
python -m msys2_autobuild clear-failed --packages "$CLEAR_FAILED_PACKAGES"
|
||||
python -m msys2_autobuild update-status
|
||||
61
.github/workflows/test.yml
vendored
Normal file
61
.github/workflows/test.yml
vendored
Normal file
@ -0,0 +1,61 @@
|
||||
name: test
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
|
||||
test:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-24.04, windows-2022, windows-11-arm]
|
||||
python-version: ['3.12', '3.13']
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install poetry
|
||||
python -m poetry install
|
||||
|
||||
- name: Run mypy
|
||||
run: |
|
||||
python -m poetry run mypy .
|
||||
|
||||
- name: Run flake8
|
||||
run: |
|
||||
python -m poetry run flake8 .
|
||||
|
||||
- name: Run tests
|
||||
run: |
|
||||
python -m poetry run pytest
|
||||
|
||||
zizmor:
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Run zizmor
|
||||
run: pipx run zizmor .
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@ -1 +1,4 @@
|
||||
*.pyc
|
||||
*.pyc
|
||||
.vscode/
|
||||
.mypy_cache/
|
||||
.autobuild_cache/
|
||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2020 Christoph Reiter
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
46
README.md
46
README.md
@ -1,36 +1,50 @@
|
||||
# msys2-devtools
|
||||
# msys2-autobuild
|
||||
|
||||
## autobuild.py
|
||||
msys2-autobuild is a Python tool for
|
||||
|
||||
* automatically building MSYS2 packages in GitHub Actions
|
||||
* manually uploading packages, or retrying builds
|
||||
* retrieving the built packages for upload to the pacman repo
|
||||
|
||||
## Installation
|
||||
|
||||
```console
|
||||
$ pacman -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-pygithub mingw-w64-x86_64-python-requests
|
||||
# or
|
||||
$ poetry install
|
||||
# or
|
||||
$ python -m pip install --user -r requirements.txt
|
||||
# or
|
||||
$ pipx install git+https://github.com/msys2/msys2-autobuild
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```console
|
||||
$ python autobuild.py --help
|
||||
usage: autobuild.py [-h] {build,show,show-assets,fetch-assets,trigger,clean-assets} ...
|
||||
$ msys2-autobuild --help
|
||||
usage: msys2-autobuild [-h]
|
||||
{build,show,write-build-plan,update-status,fetch-assets,upload-assets,clear-failed,clean-assets}
|
||||
...
|
||||
|
||||
Build packages
|
||||
|
||||
optional arguments:
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
|
||||
subcommands:
|
||||
{build,show,show-assets,fetch-assets,trigger,clean-assets}
|
||||
{build,show,write-build-plan,update-status,fetch-assets,upload-assets,clear-failed,clean-assets}
|
||||
build Build all packages
|
||||
show Show all packages to be built
|
||||
show-assets Show all staging packages
|
||||
write-build-plan Write a GHA build matrix setup
|
||||
update-status Update the status file
|
||||
fetch-assets Download all staging packages
|
||||
trigger Trigger a GHA build
|
||||
upload-assets Upload packages
|
||||
clear-failed Clear the failed state for packages
|
||||
clean-assets Clean up GHA assets
|
||||
```
|
||||
|
||||
## Build Process
|
||||
## Configuration
|
||||
|
||||
The following graph shows what happens between a PKGBUILD getting changed in git and the built package being available in the pacman repo.
|
||||
|
||||

|
||||
|
||||
Security considerations:
|
||||
|
||||
TODO
|
||||
* `GITHUB_TOKEN` (required) - a GitHub token with write access to the current repo.
|
||||
* `GITHUB_TOKEN_READONLY` (optional) - a GitHub token with read access to the current repo. This is used for read operations to not get limited by the API access limits.
|
||||
* `GITHUB_REPOSITORY` (optional) - the path to the GitHub repo this is uploading to. Used for deciding which things can be built and where to upload them to. Defaults to `msys2/msys2-autobuild`.
|
||||
|
||||
362
autobuild.py
362
autobuild.py
@ -1,362 +0,0 @@
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
import shutil
|
||||
from os import environ
|
||||
from github import Github
|
||||
from json import loads
|
||||
from pathlib import Path
|
||||
from subprocess import check_call
|
||||
import subprocess
|
||||
from sys import stdout
|
||||
import fnmatch
|
||||
import traceback
|
||||
from tabulate import tabulate
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from contextlib import contextmanager
|
||||
import requests
|
||||
import time
|
||||
|
||||
# After which overall time it should stop building (in seconds)
|
||||
BUILD_TIMEOUT = 18000
|
||||
|
||||
# Packages that take too long to build, and should be handled manually
|
||||
SKIP = [
|
||||
'mingw-w64-clang',
|
||||
]
|
||||
|
||||
|
||||
def timeoutgen(timeout):
|
||||
end = time.time() + timeout
|
||||
def new():
|
||||
return max(end - time.time(), 0)
|
||||
return new
|
||||
|
||||
get_timeout = timeoutgen(BUILD_TIMEOUT)
|
||||
|
||||
|
||||
def get_repo_checkout_dir(repo):
|
||||
# some tools can't handle long paths, so try to have the build root near the disk root
|
||||
nick = ""
|
||||
if repo == "MINGW-packages":
|
||||
nick = "_MINGW"
|
||||
elif repo == "MSYS2-packages":
|
||||
nick = "_MSYS"
|
||||
else:
|
||||
raise ValueError("unknown repo: " + repo)
|
||||
|
||||
if sys.platform == "msys":
|
||||
# root dir on the same drive
|
||||
win_path = subprocess.check_output(["cygpath", "-m", "/"], text=True).strip()
|
||||
posix_drive = subprocess.check_output(["cygpath", "-u", win_path[:3]], text=True).strip()
|
||||
return os.path.join(posix_drive, nick)
|
||||
else:
|
||||
raise NotImplementedError("fixme")
|
||||
|
||||
|
||||
def ensure_git_repo(url, path):
|
||||
if not os.path.exists(path):
|
||||
check_call(["git", "clone", url, path])
|
||||
|
||||
|
||||
@contextmanager
|
||||
def gha_group(title):
|
||||
print(f'\n::group::{title}')
|
||||
stdout.flush()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
print('::endgroup::')
|
||||
stdout.flush()
|
||||
|
||||
|
||||
class BuildError(Exception):
|
||||
pass
|
||||
|
||||
class BuildTimeoutError(BuildError):
|
||||
pass
|
||||
|
||||
|
||||
def build_package(pkg, builddir):
    """Build one queued package and sort the results into asset directories.

    Creates the asset dirs under `builddir`, checks out the package's git
    repo, runs makepkg (binary then source build), and moves the produced
    *.pkg.tar.* / *.src.tar.* files into the msys or mingw asset dir.

    On build failure a small marker file is written to the "failed" asset
    dir (GitHub doesn't allow empty assets) and BuildError is raised; a
    makepkg timeout raises BuildTimeoutError instead.
    """
    # local import so this fix is self-contained; shlex is stdlib
    import shlex

    assert os.path.isabs(builddir)
    os.makedirs(builddir, exist_ok=True)
    assetdir_msys = os.path.join(builddir, "assets", "msys")
    os.makedirs(assetdir_msys, exist_ok=True)
    assetdir_mingw = os.path.join(builddir, "assets", "mingw")
    os.makedirs(assetdir_mingw, exist_ok=True)
    assetdir_failed = os.path.join(builddir, "assets", "failed")
    os.makedirs(assetdir_failed, exist_ok=True)

    isMSYS = pkg['repo'].startswith('MSYS2')
    assetdir = assetdir_msys if isMSYS else assetdir_mingw
    repo_dir = get_repo_checkout_dir(pkg['repo'])
    pkg_dir = os.path.join(repo_dir, pkg['repo_path'])
    ensure_git_repo(pkg['repo_url'], repo_dir)

    def run_cmd(args):
        # shlex.join quotes each argument; for the plain flags used below it
        # produces the same string as ' '.join, but won't break if an
        # argument ever contains spaces or shell metacharacters
        check_call(['bash', '-c', shlex.join(args)], cwd=pkg_dir, timeout=get_timeout())

    try:
        run_cmd([
            'makepkg' if isMSYS else 'makepkg-mingw',
            '--noconfirm',
            '--noprogressbar',
            '--skippgpcheck',
            '--nocheck',
            '--syncdeps',
            '--rmdeps',
            '--cleanbuild'
        ])

        run_cmd([
            'makepkg',
            '--noconfirm',
            '--noprogressbar',
            '--skippgpcheck',
            '--allsource'
        ] + ([] if isMSYS else ['--config', '/etc/makepkg_mingw64.conf']))
    except subprocess.TimeoutExpired as e:
        raise BuildTimeoutError(e)
    except subprocess.CalledProcessError as e:

        for item in pkg['packages']:
            with open(os.path.join(assetdir_failed, f"{item}-{pkg['version']}.failed"), 'wb') as h:
                # github doesn't allow empty assets
                h.write(b'oh no')

        raise BuildError(e)
    else:
        for entry in os.listdir(pkg_dir):
            if fnmatch.fnmatch(entry, '*.pkg.tar.*') or fnmatch.fnmatch(entry, '*.src.tar.*'):
                shutil.move(os.path.join(pkg_dir, entry), assetdir)
|
||||
|
||||
|
||||
def run_build(args):
    """Build every package still on the TODO list; stop early on timeout."""
    builddir = os.path.abspath(args.builddir)

    todo = get_packages_to_build()[2]
    for pkg in todo:
        try:
            with gha_group(f"[{ pkg['repo'] }] { pkg['repo_path'] }..."):
                build_package(pkg, builddir)
        except BuildTimeoutError:
            # the job is out of time; don't start another build
            print("timeout")
            break
        except BuildError:
            # log and carry on with the next package
            print("failed")
            traceback.print_exc()
|
||||
|
||||
|
||||
def get_buildqueue():
    """Fetch the package build queue from packages.msys2.org.

    Each entry gets a derived 'repo' key: the last path component of its
    'repo_url'.
    """
    r = requests.get("https://packages.msys2.org/api/buildqueue")
    r.raise_for_status()
    queue = r.json()
    for entry in queue:
        entry['repo'] = entry['repo_url'].rsplit('/', 1)[-1]
    return queue
|
||||
|
||||
|
||||
def get_packages_to_build():
    """Classify the build queue into (done, skipped, todo) lists.

    A package is "done" when every binary it provides already exists as a
    staging asset, "skipped" when a failed marker exists or it is on the
    SKIP list, and "todo" otherwise.
    """
    gh = Github(*get_credentials())

    repo = gh.get_repo('msys2/msys2-devtools')
    assets = []
    for name in ["msys", "mingw"]:
        for a in repo.get_release('staging-' + name).get_assets():
            assets.append(a.name)
    assets_failed = [a.name for a in repo.get_release('staging-failed').get_assets()]

    def pkg_is_done(pkg):
        # every sub-package needs a matching staging asset
        return all(
            fnmatch.filter(assets, f"{item}-{pkg['version']}-*.pkg.tar.*")
            for item in pkg['packages'])

    def pkg_has_failed(pkg):
        # one failed marker is enough to skip the whole package
        return any(
            f"{item}-{pkg['version']}.failed" in assets_failed
            for item in pkg['packages'])

    done, skipped, todo = [], [], []
    for pkg in get_buildqueue():
        if pkg_is_done(pkg):
            done.append(pkg)
        elif pkg_has_failed(pkg) or pkg['repo_path'] in SKIP:
            skipped.append(pkg)
        else:
            todo.append(pkg)

    return done, skipped, todo
|
||||
|
||||
|
||||
def show_build(args):
    """Print TODO/SKIPPED/DONE tables for the current build queue."""
    done, skipped, todo = get_packages_to_build()

    def print_packages(title, pkgs):
        rows = [(p["repo_path"], p["version"]) for p in pkgs]
        print()
        print(title)
        print(tabulate(rows, headers=["Package", "Version"]))

    for title, pkgs in [("TODO:", todo), ("SKIPPED:", skipped), ("DONE:", done)]:
        print_packages(title, pkgs)
|
||||
|
||||
|
||||
def show_assets(args):
    """Print a table of all assets on the staging releases."""
    gh = Github(*get_credentials())

    for name in ["msys", "mingw"]:
        release = gh.get_repo('msys2/msys2-devtools').get_release('staging-' + name)
        rows = [
            [
                asset.name,
                asset.size,
                asset.created_at,
                asset.updated_at,
                #asset.browser_download_url
            ]
            for asset in release.get_assets()
        ]
        print(tabulate(rows, headers=["name", "size", "created", "updated"]))  #, "url"]
|
||||
|
||||
|
||||
def fetch_assets(args):
    """Download all staging assets into args.targetdir.

    Files already present locally are skipped; a warning is printed when a
    local file exists with a different size than the remote asset.
    """
    gh = Github(*get_credentials())
    repo = gh.get_repo('msys2/msys2-devtools')

    def get_subdir(type_, entry):
        # map an asset name to the pacman repo subdirectory it belongs in
        if type_ == "msys":
            if fnmatch.fnmatch(entry, '*.pkg.tar.*'):
                return "x86_64"
            if fnmatch.fnmatch(entry, '*.src.tar.*'):
                return "sources"
            raise Exception("unknown file type")
        elif type_ == "mingw":
            if fnmatch.fnmatch(entry, '*.src.tar.*'):
                return "sources"
            if entry.startswith("mingw-w64-x86_64-"):
                return "x86_64"
            if entry.startswith("mingw-w64-i686-"):
                return "i686"
            raise Exception("unknown file type")

    todo = []
    skipped = []
    for name in ["msys", "mingw"]:
        base = Path(args.targetdir) / name
        for asset in repo.get_release('staging-' + name).get_assets():
            asset_dir = base / get_subdir(name, asset.name)
            asset_dir.mkdir(parents=True, exist_ok=True)
            asset_path = asset_dir / asset.name
            if asset_path.exists():
                if asset_path.stat().st_size != asset.size:
                    print(f"Warning: {asset_path} already exists but has a different size")
                skipped.append(asset)
                continue
            todo.append((asset, asset_path))

    print(f"downloading: {len(todo)}, skipped: {len(skipped)}")

    def fetch_item(item):
        r = requests.get(item[0].browser_download_url, timeout=10)
        r.raise_for_status()
        return (item, r.content)

    with ThreadPoolExecutor(4) as executor:
        for i, (item, data) in enumerate(executor.map(fetch_item, todo)):
            print(f"[{i + 1}/{len(todo)}] {item[0].name}")
            with open(item[1], "wb") as h:
                h.write(data)

    print("done")
|
||||
|
||||
|
||||
def trigger_gha_build(args):
    """Send a repository_dispatch event to start a CI build."""
    gh = Github(*get_credentials())
    repo = gh.get_repo('msys2/msys2-devtools')
    triggered = repo.create_repository_dispatch('manual-build')
    if not triggered:
        raise Exception("trigger failed")
    print("Build triggered")
|
||||
|
||||
|
||||
def clean_gha_assets(args):
    """Delete staging assets no queued package still matches.

    With --dry-run only prints what would be deleted.
    """
    gh = Github(*get_credentials())

    print("Fetching packages to build...")
    patterns = []
    for pkg in get_buildqueue():
        patterns.append(f"{pkg['name']}-{pkg['version']}*")
        patterns.extend(f"{item}-{pkg['version']}*" for item in pkg['packages'])

    print("Fetching assets...")
    assets = {}
    repo = gh.get_repo('msys2/msys2-devtools')
    for release in ['staging-msys', 'staging-mingw', 'staging-failed']:
        for asset in repo.get_release(release).get_assets():
            assets.setdefault(asset.name, []).append(asset)

    # keep anything a queued package still matches
    for pattern in patterns:
        for key in fnmatch.filter(assets.keys(), pattern):
            del assets[key]

    for items in assets.values():
        for asset in items:
            print(f"Deleting {asset.name}...")
            if not args.dry_run:
                asset.delete_asset()

    if not assets:
        print("Nothing to delete")
|
||||
|
||||
|
||||
def get_credentials():
    """Return GitHub credentials read from the environment.

    Prefers GITHUB_TOKEN; falls back to GITHUB_USER/GITHUB_PASS.
    Raises when neither form is configured.
    """
    if "GITHUB_TOKEN" in environ:
        return [environ["GITHUB_TOKEN"]]
    user, password = environ.get("GITHUB_USER"), environ.get("GITHUB_PASS")
    if user is not None and password is not None:
        return [user, password]
    raise Exception("'GITHUB_TOKEN' or 'GITHUB_USER'/'GITHUB_PASS' env vars not set")
|
||||
|
||||
|
||||
def main(argv):
    """Parse the command line and dispatch to the selected subcommand."""
    parser = argparse.ArgumentParser(description="Build packages", allow_abbrev=False)
    parser.set_defaults(func=lambda *x: parser.print_help())
    subparser = parser.add_subparsers(title="subcommands")

    def add_command(name, func, help_, **kwargs):
        # register one subcommand and return its parser for extra arguments
        sub = subparser.add_parser(name, help=help_, **kwargs)
        sub.set_defaults(func=func)
        return sub

    sub = add_command("build", run_build, "Build all packages")
    sub.add_argument("builddir")

    add_command("show", show_build, "Show all packages to be built", allow_abbrev=False)

    add_command("show-assets", show_assets, "Show all staging packages", allow_abbrev=False)

    sub = add_command("fetch-assets", fetch_assets, "Download all staging packages", allow_abbrev=False)
    sub.add_argument("targetdir")

    add_command("trigger", trigger_gha_build, "Trigger a GHA build", allow_abbrev=False)

    sub = add_command("clean-assets", clean_gha_assets, "Clean up GHA assets", allow_abbrev=False)
    sub.add_argument("--dry-run", action="store_true", help="Only show what is going to be deleted")

    # fail fast when credentials are missing
    get_credentials()

    args = parser.parse_args(argv[1:])
    return args.func(args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # allow running the module directly as a script
    main(sys.argv)
|
||||
2
build.bat
Normal file
2
build.bat
Normal file
@ -0,0 +1,2 @@
|
||||
@echo off
rem Run build.sh inside the MSYS shell of the local C:\msys64 install,
rem starting in the current directory (-here) without opening a new window
C:\msys64\msys2_shell.cmd -here -msys -no-start -defterm -c "./build.sh"
|
||||
5
build.sh
Normal file
5
build.sh
Normal file
@ -0,0 +1,5 @@
|
||||
# Install the Python dependencies msys2_autobuild needs
pacman --needed --noconfirm -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-requests-cache
# NOTE(review): ACLOCAL_PATH is hidden for the duration of the build and
# restored afterwards — presumably it interferes with autotools-based
# package builds; confirm before changing
OLD_ACLOCAL_PATH="${ACLOCAL_PATH}"
unset ACLOCAL_PATH
python -m msys2_autobuild build / ~/build-temp -t msys,msys-src,mingw64,mingw32,mingw-src
ACLOCAL_PATH="${OLD_ACLOCAL_PATH}"
|
||||
40
docs/docs.md
40
docs/docs.md
@ -1,40 +0,0 @@
|
||||
https://mermaid-js.github.io
|
||||
|
||||
```
|
||||
sequenceDiagram
|
||||
participant GIT as MSYS2/MINGW-packages
|
||||
participant APPVEYOR as Appveyor
|
||||
participant API as packages.msys2.org
|
||||
participant GHA as GitHub Actions
|
||||
participant DT as devtools
|
||||
participant DEV as Developer
|
||||
participant REPO as Pacman Repo
|
||||
|
||||
GIT->>API: webhook trigger on push
|
||||
API->>APPVEYOR: trigger PKGBUILD parse
|
||||
APPVEYOR->>GIT: fetch PKGBUILDS
|
||||
GIT-->>APPVEYOR:
|
||||
APPVEYOR->>APPVEYOR: parse PKGBUILDS
|
||||
APPVEYOR-->>API: upload parsed PKGBUILDS
|
||||
|
||||
DT->>GHA: cron trigger
|
||||
GHA->>API: fetch TODO list
|
||||
API-->>GHA:
|
||||
GHA->>GIT: fetch PKGBUILDs
|
||||
GIT-->>GHA:
|
||||
GHA->>DT: fetch staging
|
||||
DT-->>GHA:
|
||||
GHA->>GHA: build packages
|
||||
GHA-->>DT: upload packages
|
||||
|
||||
DEV->>DT: fetch packages
|
||||
DT-->>DEV:
|
||||
DEV->>DEV: sign packages
|
||||
DEV->>REPO: push to repo
|
||||
```
|
||||
|
||||
```
|
||||
{
|
||||
"theme": "forest"
|
||||
}
|
||||
```
|
||||
File diff suppressed because one or more lines are too long
|
Before Width: | Height: | Size: 24 KiB |
0
msys2_autobuild/__init__.py
Executable file
0
msys2_autobuild/__init__.py
Executable file
3
msys2_autobuild/__main__.py
Normal file
3
msys2_autobuild/__main__.py
Normal file
@ -0,0 +1,3 @@
|
||||
from .main import run
|
||||
|
||||
run()
|
||||
421
msys2_autobuild/build.py
Normal file
421
msys2_autobuild/build.py
Normal file
@ -0,0 +1,421 @@
|
||||
import fnmatch
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import shlex
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import tempfile
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path, PurePath, PurePosixPath
|
||||
from subprocess import check_call
|
||||
from typing import Any, TypeVar
|
||||
from collections.abc import Generator, Sequence
|
||||
|
||||
from gitea import Attachment
|
||||
|
||||
from .config import ArchType, BuildType, Config
|
||||
from .gh import (CachedAssets, download_asset, get_asset_filename,
|
||||
get_release, get_repo_for_build_type, upload_asset)
|
||||
from .queue import Package
|
||||
from .utils import SCRIPT_DIR, PathLike
|
||||
|
||||
|
||||
class BuildError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def get_python_path(msys2_root: PathLike, msys2_path: PathLike) -> Path:
    """Map an MSYS2-internal path onto the host filesystem.

    Concatenates the install root with the in-tree path and normalizes the
    result, e.g. ("C:\\msys64", "/etc/pacman.conf") -> C:\\msys64\\etc\\pacman.conf.
    """
    joined = str(msys2_root) + str(msys2_path)
    return Path(os.path.normpath(joined))
|
||||
|
||||
|
||||
def to_pure_posix_path(path: PathLike) -> PurePath:
    """Convert a Windows path to its MSYS2 POSIX form.

    "C:\\msys64\\etc" becomes PurePosixPath("/C/msys64/etc"): only the
    first drive colon is stripped, backslashes become forward slashes.
    """
    text = str(path).replace(":", "", 1).replace("\\", "/")
    return PurePosixPath("/" + text)
|
||||
|
||||
|
||||
def get_build_environ(build_type: BuildType) -> dict[str, str]:
    """Return a copy of os.environ with PACKAGER set for makepkg.

    The PACKAGER string names the runner repo and, when running on GitHub
    Actions, the short commit SHA and run id that produced the package.
    """
    environ = os.environ.copy()

    # Set PACKAGER for makepkg
    packager_ref = Config.RUNNER_CONFIG[build_type]["repo"]
    sha = environ.get("GITHUB_SHA")
    run_id = environ.get("GITHUB_RUN_ID")
    if sha is not None and run_id is not None:
        packager_ref = f"{packager_ref}/{sha[:8]}/{run_id}"
    environ["PACKAGER"] = f"CI ({packager_ref})"

    return environ
|
||||
|
||||
|
||||
@contextmanager
def temp_pacman_script(pacman_config: PathLike) -> Generator[PathLike, None, None]:
    """Yield a temporary wrapper script that runs pacman with `pacman_config`.

    makepkg doesn't allow setting the pacman conf path, but it honors the
    'PACMAN' env var for the pacman executable, so callers point that at
    this wrapper.  The script is removed when the context exits.
    """
    fd, filename = tempfile.mkstemp("pacman")
    os.close(fd)

    try:
        command = shlex.join(
            ['/usr/bin/pacman', '--config', str(to_pure_posix_path(pacman_config))])
        script = f"""\
#!/bin/bash
set -e
exec {command} "$@"
"""
        with open(filename, "w", encoding="utf-8") as h:
            h.write(script)
        yield filename
    finally:
        try:
            os.unlink(filename)
        except OSError:
            pass
|
||||
|
||||
|
||||
@contextmanager
def temp_pacman_conf(msys2_root: PathLike) -> Generator[Path, None, None]:
    """Yield a unix path to a temporary copy of the install's pacman.conf.

    The copy can be modified freely (e.g. to register staging repos)
    without touching the real configuration; it is deleted on exit.
    """
    fd, filename = tempfile.mkstemp("pacman.conf")
    os.close(fd)
    try:
        source_conf = get_python_path(msys2_root, "/etc/pacman.conf")
        with open(source_conf, "rb") as src, open(filename, "wb") as dest:
            shutil.copyfileobj(src, dest)

        yield Path(filename)
    finally:
        try:
            os.unlink(filename)
        except OSError:
            pass
|
||||
|
||||
|
||||
@contextmanager
def temp_makepkg_confd(msys2_root: PathLike, config_name: str) -> Generator[Path, None, None]:
    """Yield a temporary drop-in file under /etc/<config_name>.d.

    Creates the .d directory if needed; on exit the file is removed and the
    directory is removed too if it is then empty.
    """
    conf_dir = get_python_path(msys2_root, f"/etc/{config_name}.d")
    os.makedirs(conf_dir, exist_ok=True)
    conf_file = conf_dir / "msys2_autobuild.conf"
    try:
        # create/truncate the drop-in file
        open(conf_file, "wb").close()
        yield conf_file
    finally:
        # best-effort cleanup: the rmdir fails harmlessly when the
        # directory still holds other files
        for cleanup in (lambda: os.unlink(conf_file), lambda: os.rmdir(conf_dir)):
            try:
                cleanup()
            except OSError:
                pass
|
||||
|
||||
|
||||
def clean_environ(environ: dict[str, str]) -> dict[str, str]:
    """Return a copy of `environ` without any CI related variables.

    Package build scripts we invoke shouldn't see GitHub Actions secrets;
    while in theory we trust them, dropping the variables can't hurt.
    """
    return {
        key: value for key, value in environ.items()
        if not key.startswith(("GITHUB_", "RUNNER_"))
    }
|
||||
|
||||
|
||||
def run_cmd(msys2_root: PathLike, args: Sequence[PathLike], **kwargs: Any) -> None:
    """Run a command through the login bash of the MSYS2 install.

    The arguments are shell-quoted into a single command line executed via
    'bash -lc'.  CI variables are stripped from the environment and the
    MSYS environment is selected.  Extra kwargs go to subprocess.check_call.
    """
    bash = os.path.join(msys2_root, 'usr', 'bin', 'bash.exe')
    env = clean_environ(kwargs.pop("env", os.environ.copy()))
    # start in cwd instead of $HOME; keep PATH minimal
    env["CHERE_INVOKING"] = "1"
    env["MSYSTEM"] = "MSYS"
    env["MSYS2_PATH_TYPE"] = "minimal"

    command = shlex.join([str(a) for a in args])
    check_call([bash, '-lc', command], env=env, **kwargs)
|
||||
|
||||
|
||||
def make_tree_writable(topdir: PathLike) -> None:
    # Ensure all files and directories under topdir are writable
    # (and readable) by owner.
    # Taken from meson, and adjusted

    def chmod(p: PathLike) -> None:
        # add owner read+write on top of the existing mode bits
        os.chmod(p, os.stat(p).st_mode | stat.S_IWRITE | stat.S_IREAD)

    chmod(topdir)
    for root, dirs, files in os.walk(topdir):
        # chmod directories first, then prune junctions so the walk
        # doesn't descend into them
        for d in dirs:
            chmod(os.path.join(root, d))
        # Work around Python bug following junctions
        # https://github.com/python/cpython/issues/67596#issuecomment-1918112817
        dirs[:] = [d for d in dirs if not os.path.isjunction(os.path.join(root, d))]
        for fname in files:
            fpath = os.path.join(root, fname)
            if os.path.isfile(fpath):
                chmod(fpath)
|
||||
|
||||
|
||||
def remove_junctions(topdir: PathLike) -> None:
    """Delete every NTFS junction below `topdir` and prune them from the walk.

    Works around git not being able to handle junctions:
    https://github.com/git-for-windows/git/issues/5320
    """
    for root, dirs, _ in os.walk(topdir):
        kept = []
        for name in dirs:
            full = os.path.join(root, name)
            if os.path.isjunction(full):
                os.remove(full)
            else:
                kept.append(name)
        # only descend into the non-junction directories
        dirs[:] = kept
|
||||
|
||||
|
||||
def reset_git_repo(path: PathLike):
    """Bring the checkout at `path` back to a pristine HEAD state.

    Retries git clean/reset up to 10 times with growing sleeps; after the
    first failure it tries once to make the whole tree writable (read-only
    files commonly break git clean on Windows).
    """

    def clean():
        assert os.path.exists(path)

        # Try to avoid git hanging in a junction loop, by removing them
        # before running git clean/reset
        # https://github.com/msys2/msys2-autobuild/issues/108#issuecomment-2776420879
        try:
            remove_junctions(path)
        except OSError as e:
            print("Removing junctions failed", e)

        check_call(["git", "clean", "-xfdf"], cwd=path)
        check_call(["git", "reset", "--hard", "HEAD"], cwd=path)

    made_writable = False
    for i in range(10):
        try:
            clean()
        except subprocess.CalledProcessError:
            try:
                if not made_writable:
                    print("Trying to make files writable")
                    make_tree_writable(path)
                    remove_junctions(path)
                    made_writable = True
            except OSError as e:
                print("Making files writable failed", e)
            print(f"git clean/reset failed, sleeping for {i} seconds")
            time.sleep(i)
        else:
            break
    else:
        # all retries exhausted:
        # run it one more time to raise
        clean()
|
||||
|
||||
|
||||
@contextmanager
def fresh_git_repo(url: str, path: PathLike) -> Generator:
    """Context manager giving a clean checkout of origin/master at `path`.

    Clones on first use; otherwise resets the existing checkout and moves
    it to the latest origin/master.  The tree is reset again on exit so
    build artifacts don't accumulate between builds.
    """
    if not os.path.exists(path):
        check_call(["git", "clone", url, path])
        # enable long path support for deeply nested package sources
        check_call(["git", "config", "core.longpaths", "true"], cwd=path)
    else:
        reset_git_repo(path)
        check_call(["git", "fetch", "origin"], cwd=path)
        check_call(["git", "reset", "--hard", "origin/master"], cwd=path)
    try:
        yield
    finally:
        assert os.path.exists(path)
        reset_git_repo(path)
|
||||
|
||||
|
||||
@contextmanager
def staging_dependencies(
        build_type: BuildType, pkg: Package, msys2_root: PathLike,
        builddir: PathLike) -> Generator[PathLike, None, None]:
    """Make the staging dependencies of `pkg` installable while building.

    Downloads the needed staging assets into local pacman repos under
    <builddir>/_REPO, registers those repos in a temporary pacman.conf,
    upgrades the install, and yields a pacman wrapper script bound to that
    config.  On exit the local repos are removed and the install is
    downgraded back to the regular repos.
    """

    def add_to_repo(repo_root: PathLike, pacman_config: PathLike, repo_name: str,
                    assets: list[Attachment]) -> None:
        # download the assets into a local repo dir, register the dir in the
        # pacman config, and index the packages with repo-add
        repo_dir = Path(repo_root) / repo_name
        os.makedirs(repo_dir, exist_ok=True)

        todo = []
        for asset in assets:
            asset_path = os.path.join(repo_dir, get_asset_filename(asset))
            todo.append((asset_path, asset))

        def fetch_item(item: tuple[str, Attachment]) -> tuple[str, Attachment]:
            asset_path, asset = item
            download_asset(asset, asset_path)
            return item

        package_paths = []
        with ThreadPoolExecutor(8) as executor:
            for i, item in enumerate(executor.map(fetch_item, todo)):
                asset_path, asset = item
                print(f"[{i + 1}/{len(todo)}] {get_asset_filename(asset)}")
                package_paths.append(asset_path)

        repo_name = f"autobuild-{repo_name}"
        repo_db_path = os.path.join(repo_dir, f"{repo_name}.db.tar.gz")

        with open(pacman_config, encoding="utf-8") as h:
            text = h.read()
            uri = to_pure_posix_path(repo_dir).as_uri()
            # prepend the local repo section once (idempotent across calls)
            if uri not in text:
                with open(pacman_config, "w", encoding="utf-8") as h2:
                    h2.write(f"""[{repo_name}]
Server={uri}
SigLevel=Never
""")
                    h2.write(text)

        # repo-add 15 packages at a time so we don't hit the size limit for CLI arguments
        ChunkItem = TypeVar("ChunkItem")

        def chunks(lst: list[ChunkItem], n: int) -> Generator[list[ChunkItem], None, None]:
            for i in range(0, len(lst), n):
                yield lst[i:i + n]

        base_args: list[PathLike] = ["repo-add", to_pure_posix_path(repo_db_path)]
        posix_paths: list[PathLike] = [to_pure_posix_path(p) for p in package_paths]
        for chunk in chunks(posix_paths, 15):
            args = base_args + chunk
            run_cmd(msys2_root, args, cwd=repo_dir)

    cached_assets = CachedAssets()
    repo_root = os.path.join(builddir, "_REPO")
    try:
        shutil.rmtree(repo_root, ignore_errors=True)
        os.makedirs(repo_root, exist_ok=True)
        with temp_pacman_conf(msys2_root) as pacman_config:
            # NOTE(review): annotation previously said list[GitReleaseAsset],
            # a name this module never imports; the values are gitea
            # Attachment objects (see add_to_repo above)
            to_add: dict[ArchType, list[Attachment]] = {}
            for dep_type, deps in pkg.get_depends(build_type).items():
                assets = cached_assets.get_assets(dep_type)
                for dep in deps:
                    for pattern in dep.get_build_patterns(dep_type):
                        for asset in assets:
                            if fnmatch.fnmatch(get_asset_filename(asset), pattern):
                                to_add.setdefault(dep_type, []).append(asset)
                                break
                        else:
                            if pkg.is_optional_dep(dep, dep_type):
                                # If it's there, good, if not we ignore it since it's part of a cycle
                                pass
                            else:
                                raise SystemExit(f"asset for {pattern} in {dep_type} not found")

            for dep_type, assets in to_add.items():
                add_to_repo(repo_root, pacman_config, dep_type, assets)

            with temp_pacman_script(pacman_config) as temp_pacman:
                # in case they are already installed we need to upgrade
                run_cmd(msys2_root, [to_pure_posix_path(temp_pacman), "--noconfirm", "-Suy"])
                run_cmd(msys2_root, [to_pure_posix_path(temp_pacman), "--noconfirm", "-Su"])
                yield temp_pacman
    finally:
        shutil.rmtree(repo_root, ignore_errors=True)
        # downgrade again
        run_cmd(msys2_root, ["pacman", "--noconfirm", "-Suuy"])
        run_cmd(msys2_root, ["pacman", "--noconfirm", "-Suu"])
|
||||
|
||||
|
||||
def build_package(build_type: BuildType, pkg: Package, msys2_root: PathLike, builddir: PathLike) -> None:
    """Build `pkg` for `build_type` inside `msys2_root` and upload the result.

    Checks out the package repo, stages its dependencies, runs the makepkg
    variant matching the build type, verifies the expected artifacts were
    produced, and uploads them to the matching staging release.  On failure
    a marker asset is uploaded to 'staging-failed' and BuildError is raised.
    """
    assert os.path.isabs(builddir)
    assert os.path.isabs(msys2_root)
    os.makedirs(builddir, exist_ok=True)

    # single-letter directory names to keep the overall build path short
    repo_name = {"MINGW-packages": "W", "MSYS2-packages": "S"}.get(pkg['repo'], pkg['repo'])
    repo_dir = os.path.join(builddir, repo_name)
    to_upload: list[str] = []

    repo = get_repo_for_build_type(build_type)

    with fresh_git_repo(pkg['repo_url'], repo_dir):
        orig_pkg_dir = os.path.join(repo_dir, pkg['repo_path'])
        # Rename it to get a shorter overall build path
        # https://github.com/msys2/msys2-autobuild/issues/71
        pkg_dir = os.path.join(repo_dir, 'B')
        assert not os.path.exists(pkg_dir)
        os.rename(orig_pkg_dir, pkg_dir)

        # Fetch all keys mentioned in the PKGBUILD
        validpgpkeys = to_pure_posix_path(os.path.join(SCRIPT_DIR, 'fetch-validpgpkeys.sh'))
        run_cmd(msys2_root, ['bash', validpgpkeys], cwd=pkg_dir)

        with staging_dependencies(build_type, pkg, msys2_root, builddir) as temp_pacman:
            try:
                env = get_build_environ(build_type)
                # this makes makepkg use our custom pacman script
                env['PACMAN'] = str(to_pure_posix_path(temp_pacman))
                # Each branch uses a drop-in makepkg conf to pick the zstd
                # compression level: -22 for source builds, -20 for binaries.
                if build_type == Config.MINGW_SRC_BUILD_TYPE:
                    with temp_makepkg_confd(msys2_root, "makepkg_mingw.conf") as makepkg_conf:
                        with open(makepkg_conf, "w", encoding="utf-8") as h:
                            h.write("COMPRESSZST=(zstd -c -T0 --ultra -22 -)\n")

                        env['MINGW_ARCH'] = Config.MINGW_SRC_ARCH
                        run_cmd(msys2_root, [
                            'makepkg-mingw',
                            '--noconfirm',
                            '--noprogressbar',
                            '--allsource'
                        ], env=env, cwd=pkg_dir)
                elif build_type == Config.MSYS_SRC_BUILD_TYPE:
                    with temp_makepkg_confd(msys2_root, "makepkg.conf") as makepkg_conf:
                        with open(makepkg_conf, "w", encoding="utf-8") as h:
                            h.write("COMPRESSZST=(zstd -c -T0 --ultra -22 -)\n")

                        run_cmd(msys2_root, [
                            'makepkg',
                            '--noconfirm',
                            '--noprogressbar',
                            '--allsource'
                        ], env=env, cwd=pkg_dir)
                elif build_type in Config.MINGW_ARCH_LIST:
                    with temp_makepkg_confd(msys2_root, "makepkg_mingw.conf") as makepkg_conf:
                        with open(makepkg_conf, "w", encoding="utf-8") as h:
                            h.write("COMPRESSZST=(zstd -c -T0 --ultra -20 -)\n")

                        env['MINGW_ARCH'] = build_type
                        run_cmd(msys2_root, [
                            'makepkg-mingw',
                            '--noconfirm',
                            '--noprogressbar',
                            '--nocheck',
                            '--syncdeps',
                            '--rmdeps',
                            '--cleanbuild'
                        ], env=env, cwd=pkg_dir)
                elif build_type in Config.MSYS_ARCH_LIST:
                    with temp_makepkg_confd(msys2_root, "makepkg.conf") as makepkg_conf:
                        with open(makepkg_conf, "w", encoding="utf-8") as h:
                            h.write("COMPRESSZST=(zstd -c -T0 --ultra -20 -)\n")

                        run_cmd(msys2_root, [
                            'makepkg',
                            '--noconfirm',
                            '--noprogressbar',
                            '--nocheck',
                            '--syncdeps',
                            '--rmdeps',
                            '--cleanbuild'
                        ], env=env, cwd=pkg_dir)
                else:
                    assert 0

                # verify every expected artifact exists before uploading
                entries = os.listdir(pkg_dir)
                for pattern in pkg.get_build_patterns(build_type):
                    found = fnmatch.filter(entries, pattern)
                    if not found:
                        raise BuildError(f"{pattern} not found, likely wrong version built")
                    to_upload.extend([os.path.join(pkg_dir, e) for e in found])

            except (subprocess.CalledProcessError, BuildError) as e:
                release = get_release(repo, "staging-failed")
                failed_data = {}
                content = json.dumps(failed_data).encode()
                upload_asset(repo, release, pkg.get_failed_name(build_type), text=True, content=content)

                raise BuildError(e)
            else:
                release = get_release(repo, "staging-" + build_type)
                for path in to_upload:
                    upload_asset(repo, release, path)
|
||||
102
msys2_autobuild/cmd_build.py
Normal file
102
msys2_autobuild/cmd_build.py
Normal file
@ -0,0 +1,102 @@
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
from typing import Any, Literal
|
||||
|
||||
from .build import BuildError, build_package, run_cmd
|
||||
from .config import BuildType, Config
|
||||
from .queue import (Package, PackageStatus, get_buildqueue_with_status,
|
||||
update_status)
|
||||
from .utils import apply_optional_deps, gha_group
|
||||
|
||||
BuildFrom = Literal["start", "middle", "end"]
|
||||
|
||||
|
||||
def get_package_to_build(
        pkgs: list[Package], build_types: list[BuildType] | None,
        build_from: BuildFrom) -> tuple[Package, BuildType] | None:
    """Pick the next (package, build type) pair waiting to be built.

    `build_types` restricts which build types are considered (None = all);
    `build_from` selects the position in the queue: "start", "middle" or
    "end".  Returns None when nothing is waiting.
    """
    candidates = [
        (pkg, bt)
        for pkg in pkgs
        for bt in pkg.get_build_types()
        if (build_types is None or bt in build_types)
        and pkg.get_status(bt) == PackageStatus.WAITING_FOR_BUILD
    ]

    if not candidates:
        return None

    if build_from == "end":
        return candidates[-1]
    if build_from == "middle":
        return candidates[len(candidates) // 2]
    if build_from == "start":
        return candidates[0]
    raise Exception("Unknown order:", build_from)
|
||||
|
||||
|
||||
def run_build(args: Any) -> None:
    """'build' subcommand: keep building waiting packages until done.

    Loops over the build queue, picking the next waiting (package, build
    type) pair and building it, until the queue is empty or the soft job
    timeout is reached.  Individual build failures are logged and skipped.
    """
    builddir = os.path.abspath(args.builddir)
    msys2_root = os.path.abspath(args.msys2_root)
    if args.build_types is None:
        build_types = None
    else:
        build_types = [p.strip() for p in args.build_types.split(",")]

    apply_optional_deps(args.optional_deps or "")

    start_time = time.monotonic()

    # sanity-check the environment before touching the queue
    if not sys.platform == "win32":
        raise SystemExit("ERROR: Needs to run under native Python")

    if not shutil.which("git"):
        raise SystemExit("ERROR: git not in PATH")

    if not os.path.isdir(msys2_root):
        raise SystemExit("ERROR: msys2_root doesn't exist")

    try:
        # no-op invocation to verify the MSYS2 bash can be spawned
        run_cmd(msys2_root, [])
    except Exception as e:
        raise SystemExit("ERROR: msys2_root not functional", e)

    print(f"Building {build_types} starting from {args.build_from}")

    while True:
        pkgs = get_buildqueue_with_status(full_details=True)
        update_status(pkgs)

        # stop before the CI hard timeout would kill a build mid-way
        if (time.monotonic() - start_time) >= Config.SOFT_JOB_TIMEOUT:
            print("timeout reached")
            break

        todo = get_package_to_build(pkgs, build_types, args.build_from)
        if not todo:
            break
        pkg, build_type = todo

        try:
            with gha_group(f"[{pkg['repo']}] [{build_type}] {pkg['name']}..."):
                build_package(build_type, pkg, msys2_root, builddir)
        except BuildError:
            with gha_group(f"[{pkg['repo']}] [{build_type}] {pkg['name']}: failed"):
                traceback.print_exc(file=sys.stdout)
            continue
|
||||
|
||||
|
||||
def add_parser(subparsers: Any) -> None:
    """Register the 'build' subcommand on `subparsers`."""
    parser = subparsers.add_parser("build", help="Build all packages")
    parser.add_argument("-t", "--build-types", action="store")
    parser.add_argument(
        "--build-from", action="store", default="start", help="Start building from start|end|middle")
    parser.add_argument("--optional-deps", action="store")
    parser.add_argument("msys2_root", help="The MSYS2 install used for building. e.g. C:\\msys64")
    parser.add_argument(
        "builddir",
        help="A directory used for saving temporary build results and the git repos")
    parser.set_defaults(func=run_build)
|
||||
90
msys2_autobuild/cmd_clean_assets.py
Normal file
90
msys2_autobuild/cmd_clean_assets.py
Normal file
@ -0,0 +1,90 @@
|
||||
import re
|
||||
import fnmatch
|
||||
from typing import Any
|
||||
|
||||
from gitea import Release, Attachment
|
||||
|
||||
from .config import get_all_build_types
|
||||
from .gh import (get_asset_filename, get_current_repo, get_release,
|
||||
get_release_assets, get_gitea)
|
||||
from .queue import get_buildqueue
|
||||
|
||||
|
||||
def get_assets_to_delete() -> tuple[list[Release], list[tuple[Release, Attachment]]]:
    """Collect staging releases and assets no queued package still needs.

    Returns (releases, assets): releases that should be reset wholesale,
    and (release, asset) pairs whose assets should be deleted individually.
    """

    print("Fetching packages to build...")
    keep_patterns = []
    for pkg in get_buildqueue():
        for build_type in pkg.get_build_types():
            keep_patterns.append(pkg.get_failed_name(build_type))
            keep_patterns.extend(pkg.get_build_patterns(build_type))
    keep_pattern_regex = re.compile('|'.join(fnmatch.translate(p) for p in keep_patterns))

    def should_be_deleted(asset: Attachment) -> bool:
        # delete anything not matched by a queued package's patterns
        filename = get_asset_filename(asset)
        return not keep_pattern_regex.match(filename)

    def get_to_delete(release: Release) -> tuple[list[Release], list[Attachment]]:
        assets = get_release_assets(release)
        to_delete = [asset for asset in assets if should_be_deleted(asset)]

        # Deleting and re-creating a release requires two write calls, so delete
        # the release if all assets should be deleted and there are more than 2.
        # min_to_delete = 3

        # XXX: re-creating releases causes notifications, so avoid unless possible
        # https://github.com/msys2/msys2-autobuild/issues/77#issuecomment-1657231719
        min_to_delete = 400*333

        if len(to_delete) >= min_to_delete and len(assets) == len(to_delete):
            return [release], []
        else:
            return [], to_delete

    def get_all_releases() -> list[Release]:
        repo = get_current_repo()

        releases = []
        for build_type in get_all_build_types():
            releases.append(get_release(repo, "staging-" + build_type))
        releases.append(get_release(repo, "staging-failed"))
        return releases

    print("Fetching assets...")
    releases = []
    assets = []
    for release in get_all_releases():
        r, a = get_to_delete(release)
        releases.extend(r)
        # BUG FIX: was `assets.extend(r, a)` — list.extend() takes exactly
        # one argument (TypeError at runtime).  Pair each asset with its
        # release so the result matches the declared return type and the
        # `for release, asset in assets` unpacking in clean_gha_assets.
        assets.extend((release, asset) for asset in a)

    return releases, assets
|
||||
|
||||
|
||||
def clean_gha_assets(args: Any) -> None:
    """Reset stale releases and delete stale assets; honors --dry-run."""
    repo = get_current_repo()
    releases, assets = get_assets_to_delete()
    dry_run = args.dry_run

    print("Resetting releases...")
    for release in releases:
        print(f"Resetting {release.tag_name}...")
        if not dry_run:
            # Drop the whole release, then re-create an empty one with the
            # same tag so later uploads have a target.
            release.delete_release()
            get_release(repo, release.tag_name)

    print("Deleting assets...")
    for release, asset in assets:
        print(f"Deleting {get_asset_filename(asset)}...")
        if not dry_run:
            gitea = get_gitea()
            gitea.repo_delete_release_attachment(
                repo.owner.login, repo.name, release.id, asset.id)
|
||||
|
||||
|
||||
def add_parser(subparsers: Any) -> None:
    """Register the 'clean-assets' subcommand."""
    sub = subparsers.add_parser(
        "clean-assets", help="Clean up GHA assets", allow_abbrev=False)
    sub.add_argument(
        "--dry-run", action="store_true",
        help="Only show what is going to be deleted")
    sub.set_defaults(func=clean_gha_assets)
|
||||
49
msys2_autobuild/cmd_clear_failed.py
Normal file
49
msys2_autobuild/cmd_clear_failed.py
Normal file
@ -0,0 +1,49 @@
|
||||
from typing import Any
|
||||
|
||||
from .gh import (get_asset_filename, get_current_repo, get_release,
|
||||
get_release_assets, get_gitea)
|
||||
from .queue import get_buildqueue_with_status
|
||||
|
||||
|
||||
def clear_failed_state(args: Any) -> None:
    """Delete failed-marker assets so the matching packages get rebuilt.

    At least one of --build-types / --packages must be given; both act as
    comma-separated allow-lists.
    """

    def split_csv(value: str | None) -> list[str]:
        return value.replace(" ", "").split(",") if value else []

    build_type_filter = args.build_types
    package_filter = args.packages
    build_type_list = split_csv(build_type_filter)
    package_list = split_csv(package_filter)

    if build_type_filter is None and package_filter is None:
        raise SystemExit("clear-failed: At least one of --build-types or --packages needs to be passed")

    repo = get_current_repo()
    release = get_release(repo, 'staging-failed')
    failed_map = {get_asset_filename(a): a for a in get_release_assets(release)}

    for pkg in get_buildqueue_with_status():

        if package_filter is not None and pkg["name"] not in package_list:
            continue

        for build_type in pkg.get_build_types():
            if build_type_filter is not None and build_type not in build_type_list:
                continue

            marker_name = pkg.get_failed_name(build_type)
            if marker_name in failed_map:
                asset = failed_map[marker_name]
                print(f"Deleting {get_asset_filename(asset)}...")
                if not args.dry_run:
                    gitea = get_gitea()
                    gitea.repo_delete_release_attachment(
                        repo.owner.login, repo.name, release.id, asset.id)
|
||||
|
||||
|
||||
def add_parser(subparsers: Any) -> None:
    """Register the 'clear-failed' subcommand and its filter options."""
    sub = subparsers.add_parser(
        "clear-failed", help="Clear the failed state for packages", allow_abbrev=False)
    sub.add_argument(
        "--dry-run", action="store_true",
        help="Only show what is going to be deleted")
    sub.add_argument(
        "--build-types", action="store",
        help="A comma separated list of build types (e.g. mingw64)")
    sub.add_argument(
        "--packages", action="store",
        help="A comma separated list of packages to clear (e.g. mingw-w64-qt-creator)")
    sub.set_defaults(func=clear_failed_state)
|
||||
178
msys2_autobuild/cmd_fetch_assets.py
Normal file
178
msys2_autobuild/cmd_fetch_assets.py
Normal file
@ -0,0 +1,178 @@
|
||||
import fnmatch
|
||||
import os
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
import subprocess
|
||||
|
||||
from gitea import Attachment
|
||||
|
||||
from .config import BuildType, Config
|
||||
from .gh import (CachedAssets, download_asset, get_asset_filename,
|
||||
get_asset_mtime_ns)
|
||||
from .queue import PackageStatus, get_buildqueue_with_status
|
||||
from .utils import ask_yes_no
|
||||
|
||||
|
||||
def get_repo_subdir(build_type: BuildType) -> Path:
    """Map a build type to the pacman repository subdirectory it belongs in.

    Raises:
        Exception: for an unrecognized build type.
    """
    # The categories are disjoint, so check order does not matter.
    if build_type == Config.MSYS_SRC_BUILD_TYPE:
        return Path("msys") / "sources"
    if build_type == Config.MINGW_SRC_BUILD_TYPE:
        return Path("mingw") / "sources"
    if build_type in Config.MSYS_ARCH_LIST:
        return Path("msys") / "x86_64"
    if build_type in Config.MINGW_ARCH_LIST:
        return Path("mingw") / build_type
    raise Exception("unknown type")
|
||||
|
||||
|
||||
def fetch_assets(args: Any) -> None:
    # Download all finished staging packages into a local mirror layout,
    # skipping files already present with matching size and mtime.
    # NOTE(review): --noconfirm is accepted and ask_yes_no is imported, but no
    # confirmation prompt exists in this function — confirm whether that was
    # dropped intentionally.
    target_dir = os.path.abspath(args.targetdir)
    fetch_all = args.fetch_all
    fetch_complete = args.fetch_complete

    # Collect filename patterns per build type for everything we want to have
    # locally; record the rest (blocked/incomplete) for reporting only.
    all_patterns: dict[BuildType, list[str]] = {}
    all_blocked = []
    for pkg in get_buildqueue_with_status():
        for build_type in pkg.get_build_types():
            if args.build_type and build_type not in args.build_type:
                continue
            status = pkg.get_status(build_type)
            pkg_patterns = pkg.get_build_patterns(build_type)
            if status == PackageStatus.FINISHED:
                all_patterns.setdefault(build_type, []).extend(pkg_patterns)
            elif status in [PackageStatus.FINISHED_BUT_BLOCKED,
                            PackageStatus.FINISHED_BUT_INCOMPLETE]:
                # Blocked/incomplete builds are only fetched when explicitly
                # requested (--fetch-all, or --fetch-complete for non-incomplete).
                if fetch_all or (fetch_complete and status != PackageStatus.FINISHED_BUT_INCOMPLETE):
                    all_patterns.setdefault(build_type, []).extend(pkg_patterns)
                else:
                    all_blocked.append(
                        (pkg["name"], build_type, pkg.get_status_details(build_type)))

    # Resolve patterns to actual release assets (asset listings are cached
    # per build type to limit API calls).
    all_assets = {}
    cached_assets = CachedAssets()
    assets_to_download: dict[BuildType, list[Attachment]] = {}
    for build_type, patterns in all_patterns.items():
        if build_type not in all_assets:
            all_assets[build_type] = cached_assets.get_assets(build_type)
        assets = all_assets[build_type]

        assets_mapping: dict[str, list[Attachment]] = {}
        for asset in assets:
            assets_mapping.setdefault(get_asset_filename(asset), []).append(asset)

        for pattern in patterns:
            # Only the first matching filename is used per pattern.
            matches = fnmatch.filter(assets_mapping.keys(), pattern)
            if matches:
                found = assets_mapping[matches[0]]
                assets_to_download.setdefault(build_type, []).extend(found)

    # Map each asset to its final on-disk path under target_dir.
    to_fetch = {}
    for build_type, assets in assets_to_download.items():
        for asset in assets:
            asset_dir = Path(target_dir) / get_repo_subdir(build_type)
            asset_path = asset_dir / get_asset_filename(asset)
            to_fetch[str(asset_path)] = asset

    def file_is_uptodate(path: str, asset: Attachment) -> bool:
        # A local file counts as up to date only if it exists and both its
        # size and nanosecond mtime match the remote asset.
        asset_path = Path(path)
        if not asset_path.exists():
            return False
        if asset_path.stat().st_size != asset.size:
            return False
        if get_asset_mtime_ns(asset) != asset_path.stat().st_mtime_ns:
            return False
        return True

    # find files that are either wrong or not what we want
    to_delete = []
    not_uptodate = []
    for root, dirs, files in os.walk(target_dir):
        for name in files:
            existing = os.path.join(root, name)
            if existing in to_fetch:
                asset = to_fetch[existing]
                if not file_is_uptodate(existing, asset):
                    to_delete.append(existing)
                    not_uptodate.append(existing)
            else:
                to_delete.append(existing)

    if args.delete and not args.pretend:
        # delete unwanted files
        for path in to_delete:
            os.remove(path)

        # delete empty directories
        for root, dirs, files in os.walk(target_dir, topdown=False):
            for name in dirs:
                path = os.path.join(root, name)
                if not os.listdir(path):
                    os.rmdir(path)

    # Finally figure out what to download
    todo = {}
    done = []
    for path, asset in to_fetch.items():
        if not os.path.exists(path) or path in not_uptodate:
            todo[path] = asset
            Path(path).parent.mkdir(parents=True, exist_ok=True)
        else:
            done.append(path)

    if args.verbose and all_blocked:
        import pprint
        print("Packages that are blocked and why:")
        pprint.pprint(all_blocked)

    print(f"downloading: {len(todo)}, done: {len(done)} "
          f"blocked: {len(all_blocked)} (related builds missing)")

    print("Pass --verbose to see the list of blocked packages.")
    print("Pass --fetch-complete to also fetch blocked but complete packages")
    print("Pass --fetch-all to fetch all packages.")
    print("Pass --delete to clear the target directory")

    def verify_file(path: str, target: str) -> None:
        # Integrity check: zstd archives self-validate with --test.
        try:
            subprocess.run(["zstd", "--quiet", "--test", path], capture_output=True, check=True, text=True)
        except subprocess.CalledProcessError as e:
            raise Exception(f"zstd test failed for {target!r}: {e.stderr}") from e

    def fetch_item(item: tuple[str, Attachment]) -> tuple[str, Attachment]:
        # Worker for the thread pool; --pretend skips the actual download.
        asset_path, asset = item
        if not args.pretend:
            download_asset(asset, asset_path, verify_file)
        return item

    # Download up to 8 assets concurrently; map() preserves todo order.
    with ThreadPoolExecutor(8) as executor:
        for i, item in enumerate(executor.map(fetch_item, todo.items())):
            print(f"[{i + 1}/{len(todo)}] {get_asset_filename(item[1])}")

    print("done")
|
||||
|
||||
|
||||
def add_parser(subparsers: Any) -> None:
    """Register the 'fetch-assets' subcommand and its options."""
    sub = subparsers.add_parser(
        "fetch-assets", help="Download all staging packages", allow_abbrev=False)
    sub.add_argument("targetdir")
    # All simple boolean switches share the same shape; declare them in bulk.
    for flag, help_text in (
            ("--delete", "Clear targetdir of unneeded files"),
            ("--verbose", "Show why things are blocked"),
            ("--pretend", "Don't actually download, just show what would be done"),
            ("--fetch-all", "Fetch all packages, even blocked ones"),
            ("--fetch-complete", "Fetch all packages, even blocked ones, except incomplete ones"),
            ("--noconfirm", "Don't require user confirmation")):
        sub.add_argument(flag, action="store_true", help=help_text)
    sub.add_argument(
        "-t", "--build-type", action="append",
        help="Only fetch packages for given build type(s) (may be used more than once)")
    sub.set_defaults(func=fetch_assets)
|
||||
66
msys2_autobuild/cmd_show_build.py
Normal file
66
msys2_autobuild/cmd_show_build.py
Normal file
@ -0,0 +1,66 @@
|
||||
from typing import Any
|
||||
|
||||
from tabulate import tabulate
|
||||
|
||||
from .queue import Package, PackageStatus, get_buildqueue_with_status, get_cycles
|
||||
from .utils import apply_optional_deps, gha_group
|
||||
|
||||
|
||||
def show_cycles(pkgs: list[Package]) -> None:
    """Print a table of dependency cycles among the queued packages, if any."""
    cycles = get_cycles(pkgs)
    if not cycles:
        return

    def fmt(p: Package) -> str:
        return f"{p['name']} [{p['version_repo']} -> {p['version']}]"

    with gha_group(f"Dependency Cycles ({len(cycles)})"):
        rows = [(fmt(a), "<-->", fmt(b)) for (a, b) in cycles]
        print(tabulate(rows, headers=["Package", "", "Package"]))
|
||||
|
||||
|
||||
def show_build(args: Any) -> None:
    """Print the build queue grouped into TODO / WAITING / FAILED / DONE tables."""
    apply_optional_deps(args.optional_deps or "")

    pkgs = get_buildqueue_with_status(full_details=args.details)

    show_cycles(pkgs)

    done_statuses = (PackageStatus.FINISHED, PackageStatus.FINISHED_BUT_BLOCKED,
                     PackageStatus.FINISHED_BUT_INCOMPLETE)
    waiting_statuses = (PackageStatus.WAITING_FOR_DEPENDENCIES,
                        PackageStatus.MANUAL_BUILD_REQUIRED)

    buckets: dict[str, list] = {"TODO": [], "WAITING": [], "FAILED": [], "DONE": []}
    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            status = pkg.get_status(build_type)
            details = pkg.get_status_details(build_type)
            # The raw "blocked" mapping is too noisy for the table view.
            details.pop("blocked", None)
            entry = (pkg, build_type, status, details)
            if status == PackageStatus.WAITING_FOR_BUILD:
                buckets["TODO"].append(entry)
            elif status in done_statuses:
                buckets["DONE"].append(entry)
            elif status in waiting_statuses:
                buckets["WAITING"].append(entry)
            else:
                # Anything unclassified counts as failed.
                buckets["FAILED"].append(entry)

    def show_table(name: str, items: list) -> None:
        with gha_group(f"{name} ({len(items)})"):
            print(tabulate([(p["name"], bt, p["version"], str(s), d) for (p, bt, s, d) in items],
                           headers=["Package", "Build", "Version", "Status", "Details"]))

    for table_name in ("TODO", "WAITING", "FAILED", "DONE"):
        show_table(table_name, buckets[table_name])
|
||||
|
||||
|
||||
def add_parser(subparsers: Any) -> None:
    """Register the 'show' subcommand."""
    sub = subparsers.add_parser(
        "show", help="Show all packages to be built", allow_abbrev=False)
    sub.add_argument(
        "--details", action="store_true",
        help="Show more details such as links to failed build logs (slow)")
    sub.add_argument("--optional-deps", action="store")
    sub.set_defaults(func=show_build)
|
||||
13
msys2_autobuild/cmd_update_status.py
Normal file
13
msys2_autobuild/cmd_update_status.py
Normal file
@ -0,0 +1,13 @@
|
||||
from typing import Any
|
||||
|
||||
from .queue import get_buildqueue_with_status, update_status
|
||||
|
||||
|
||||
def run_update_status(args: Any) -> None:
    """Regenerate and publish the status file from the full build queue."""
    pkgs = get_buildqueue_with_status(full_details=True)
    update_status(pkgs)
|
||||
|
||||
|
||||
def add_parser(subparsers: Any) -> None:
    """Register the 'update-status' subcommand."""
    sub = subparsers.add_parser(
        "update-status", help="Update the status file", allow_abbrev=False)
    sub.set_defaults(func=run_update_status)
|
||||
65
msys2_autobuild/cmd_upload_assets.py
Normal file
65
msys2_autobuild/cmd_upload_assets.py
Normal file
@ -0,0 +1,65 @@
|
||||
import glob
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
from .gh import get_release, get_repo_for_build_type, upload_asset
|
||||
from .queue import PackageStatus, get_buildqueue_with_status
|
||||
|
||||
|
||||
def upload_assets(args: Any) -> None:
    """Upload locally built packages to their staging releases.

    Looks for files in args.path matching the expected artifact patterns of
    every unfinished queue entry (optionally limited to one package via
    args.package) and uploads each match; --dry-run only prints.
    """
    package_name = args.package
    src_dir = args.path
    src_dir = os.path.abspath(src_dir)

    pkgs = get_buildqueue_with_status()

    if package_name is not None:
        for pkg in pkgs:
            if pkg["name"] == package_name:
                break
        else:
            raise SystemExit(f"Package '{package_name}' not in the queue, check the 'show' command")
        pkgs = [pkg]

    pattern_entries = []
    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            status = pkg.get_status(build_type)

            # ignore finished packages
            if status in (PackageStatus.FINISHED, PackageStatus.FINISHED_BUT_BLOCKED,
                          PackageStatus.FINISHED_BUT_INCOMPLETE):
                continue

            pattern_entries.append((build_type, pkg.get_build_patterns(build_type)))

    print(f"Looking for the following files in {src_dir}:")
    for build_type, patterns in pattern_entries:
        for pattern in patterns:
            print(" ", pattern)

    matches = []
    for build_type, patterns in pattern_entries:
        for pattern in patterns:
            for match in glob.glob(os.path.join(src_dir, pattern)):
                matches.append((build_type, match))
    print(f"Found {len(matches)} files..")

    for build_type, match in matches:
        repo = get_repo_for_build_type(build_type)
        release = get_release(repo, 'staging-' + build_type)
        print(f"Uploading {match}")
        if not args.dry_run:
            # BUG FIX: gh.upload_asset() takes (repo, release, path, ...);
            # the repo argument was previously missing, so the call bound
            # repo=release and release=match and could not work.
            upload_asset(repo, release, match)
    print("Done")
|
||||
|
||||
|
||||
def add_parser(subparsers: Any) -> None:
    """Register the 'upload-assets' subcommand."""
    sub = subparsers.add_parser(
        "upload-assets", help="Upload packages", allow_abbrev=False)
    sub.add_argument("path", help="Directory to look for packages in")
    sub.add_argument(
        "--dry-run", action="store_true", help="Only show what is going to be uploaded")
    # Typo fix in user-facing help: "particualr" -> "particular".
    sub.add_argument("-p", "--package", action="store", help=(
        "Only upload files belonging to a particular package (pkgbase)"))
    sub.set_defaults(func=upload_assets)
|
||||
114
msys2_autobuild/config.py
Normal file
114
msys2_autobuild/config.py
Normal file
@ -0,0 +1,114 @@
|
||||
from typing import Literal, TypeAlias
|
||||
|
||||
from urllib3.util import Retry
|
||||
|
||||
ArchType = Literal["mingw32", "mingw64", "ucrt64", "clang64", "clangarm64", "msys"]
|
||||
SourceType = Literal["mingw-src", "msys-src"]
|
||||
BuildType: TypeAlias = ArchType | SourceType
|
||||
|
||||
REQUESTS_TIMEOUT = (15, 30)
|
||||
REQUESTS_RETRY = Retry(total=3, backoff_factor=1, status_forcelist=[500, 502])
|
||||
|
||||
|
||||
def get_all_build_types() -> list[BuildType]:
    """Every supported build type: all arch build types plus both source types."""
    build_types: list[BuildType] = [
        *Config.MSYS_ARCH_LIST,
        *Config.MINGW_ARCH_LIST,
        Config.MINGW_SRC_BUILD_TYPE,
        Config.MSYS_SRC_BUILD_TYPE,
    ]
    return build_types
|
||||
|
||||
|
||||
def build_type_is_src(build_type: BuildType) -> bool:
    """True for the two source-package build types."""
    return build_type in (Config.MINGW_SRC_BUILD_TYPE, Config.MSYS_SRC_BUILD_TYPE)
|
||||
|
||||
|
||||
class Config:
    """Static configuration for the autobuild pipeline (constants only)."""

    ALLOWED_UPLOADERS = [
        "elieux",
        "lazka",
        "jeremyd2019",
    ]
    """Users that are allowed to upload assets. This is checked at download time"""

    MINGW_ARCH_LIST: list[ArchType] = ["mingw32", "mingw64", "ucrt64", "clang64", "clangarm64"]
    """Arches we try to build"""

    MINGW_SRC_ARCH: ArchType = "ucrt64"
    """The arch that is used to build the source package (any mingw one should work)"""

    # Pseudo build type for mingw source-only packages.
    MINGW_SRC_BUILD_TYPE: BuildType = "mingw-src"

    # Arches on the msys side (currently only one).
    MSYS_ARCH_LIST: list[ArchType] = ["msys"]

    # The arch used to build msys source packages.
    MSYS_SRC_ARCH: ArchType = "msys"

    # Pseudo build type for msys source-only packages.
    MSYS_SRC_BUILD_TYPE: BuildType = "msys-src"

    # Per-build-type CI runner selection: which repo hosts the workflow,
    # which runner labels to request, whether it is a hosted runner, and an
    # optional cap on parallel jobs ("max_jobs").
    RUNNER_CONFIG: dict[BuildType, dict] = {
        "msys-src": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
            "max_jobs": 1,
        },
        "msys": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "mingw-src": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
            "max_jobs": 1,
        },
        "mingw32": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "mingw64": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "ucrt64": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "clang64": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "clangarm64": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            # ARM builds need an ARM runner image.
            "labels": ["windows-11-arm"],
            "hosted": True,
        },
    }
    """Runner config to use for each build type."""

    # 3 hours, in seconds.
    SOFT_JOB_TIMEOUT = 60 * 60 * 3
    """Runtime after which we shouldn't start a new build"""

    MAXIMUM_JOB_COUNT = 15
    """Maximum number of jobs to spawn"""

    MANUAL_BUILD: list[tuple[str, list[BuildType]]] = [
    ]
    """Packages that take too long to build, or can't be build and should be handled manually"""

    IGNORE_RDEP_PACKAGES: list[str] = [
    ]
    """XXX: These would in theory block rdeps, but no one fixed them, so we ignore them"""

    OPTIONAL_DEPS: dict[str, list[str]] = {
        "mingw-w64-headers-git": ["mingw-w64-winpthreads", "mingw-w64-tools-git"],
        "mingw-w64-crt-git": ["mingw-w64-winpthreads"],
        "mingw-w64-llvm": ["mingw-w64-libc++"],
    }
    """XXX: In case of cycles we mark these deps as optional"""
|
||||
17
msys2_autobuild/fetch-validpgpkeys.sh
Normal file
17
msys2_autobuild/fetch-validpgpkeys.sh
Normal file
@ -0,0 +1,17 @@
|
||||
#!/bin/bash
# Import every PGP key listed in the PKGBUILD's validpgpkeys array, trying a
# list of keyservers until one succeeds for each key.

# Source the PKGBUILD to bring validpgpkeys into scope.
# NOTE(review): sourced before `set -e`, so errors inside the PKGBUILD itself
# do not abort the script — confirm that is intended.
. PKGBUILD

set -e

_keyserver=(
    "keyserver.ubuntu.com"
    "keys.gnupg.net"
    "pgp.mit.edu"
    "keys.openpgp.org"
)
for key in "${validpgpkeys[@]}"; do
    for server in "${_keyserver[@]}"; do
        # Cap each attempt at 20s; stop at the first server that succeeds
        # for this key, and never fail the script on a bad server (|| true).
        timeout 20 /usr/bin/gpg --keyserver "${server}" --recv "${key}" && break || true
    done
done
|
||||
183
msys2_autobuild/gh.py
Normal file
183
msys2_autobuild/gh.py
Normal file
@ -0,0 +1,183 @@
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
import hashlib
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime, UTC
|
||||
from functools import cache
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from collections.abc import Generator, Callable
|
||||
|
||||
import requests
|
||||
from gitea import Configuration, ApiClient, RepositoryApi, CreateReleaseOption
|
||||
from gitea import Repository, Release, Attachment
|
||||
from gitea.rest import ApiException
|
||||
|
||||
from .config import REQUESTS_TIMEOUT, BuildType, Config
|
||||
from .utils import PathLike, get_requests_session
|
||||
|
||||
|
||||
@cache
def _get_repo(name: str) -> Repository:
    """Look up a repository from an 'owner/name' string (memoized)."""
    parts = name.split("/")
    gitea = get_gitea()
    return gitea.repo_get(parts[0], parts[1])
|
||||
|
||||
|
||||
def get_current_repo() -> Repository:
    """The repository this run operates on (GITHUB_REPOSITORY env, with fallback)."""
    default_repo = "Befator-Inc-Firmen-Netzwerk/msys2-autobuild"
    return _get_repo(os.environ.get("GITHUB_REPOSITORY", default_repo))
|
||||
|
||||
|
||||
def get_repo_for_build_type(build_type: BuildType) -> Repository:
    """The repository whose staging releases hold assets for build_type."""
    runner_conf = Config.RUNNER_CONFIG[build_type]
    return _get_repo(runner_conf["repo"])
|
||||
|
||||
|
||||
@cache
def get_gitea() -> RepositoryApi:
    """Return a (cached) authenticated Gitea RepositoryApi client.

    SECURITY FIX: the API token was previously hard-coded in this file; a
    committed token is a leaked credential and must be revoked. Read it from
    the GITEA_TOKEN environment variable instead and fail loudly if missing.
    """
    token = os.environ.get("GITEA_TOKEN")
    if not token:
        raise SystemExit("GITEA_TOKEN environment variable is not set")
    configuration = Configuration()
    configuration.host = "https://git.befatorinc.de/api/v1"
    configuration.api_key["Authorization"] = "token " + token
    gitea = RepositoryApi(ApiClient(configuration))
    return gitea
|
||||
|
||||
|
||||
def download_text_asset(asset: Attachment, cache=False) -> str:
    """Fetch an asset's content as text; pass cache=True to allow HTTP caching."""
    session = get_requests_session(nocache=not cache)
    response = session.get(asset.browser_download_url, timeout=REQUESTS_TIMEOUT)
    with response as r:
        r.raise_for_status()
        return r.text
|
||||
|
||||
|
||||
def get_asset_mtime_ns(asset: Attachment) -> int:
    """Returns the mtime of an asset in nanoseconds"""

    ns_per_second = 10 ** 9
    return int(asset.created_at.timestamp() * ns_per_second)
|
||||
|
||||
|
||||
def download_asset(asset: Attachment, target_path: str,
                   onverify: Callable[[str, str], None] | None = None) -> None:
    # Stream the asset to a temp file first so a partial download never lands
    # at target_path; move into place only after the optional verify callback.
    session = get_requests_session(nocache=True)
    with session.get(asset.browser_download_url, stream=True, timeout=REQUESTS_TIMEOUT) as r:
        r.raise_for_status()
        fd, temppath = tempfile.mkstemp()
        try:
            # mkstemp() creates the file 0600; make the result world-readable.
            os.chmod(temppath, 0o644)
            with os.fdopen(fd, "wb") as h:
                for chunk in r.iter_content(256 * 1024):
                    h.write(chunk)
            # Mirror the asset's creation time onto the file mtime so callers
            # can compare timestamps against st_mtime_ns for up-to-date checks.
            mtime_ns = get_asset_mtime_ns(asset)
            os.utime(temppath, ns=(mtime_ns, mtime_ns))
            if onverify is not None:
                # Callback receives (temp file path, final target path) and
                # should raise to reject the download.
                onverify(temppath, target_path)
            shutil.move(temppath, target_path)
        finally:
            # Best-effort cleanup; after a successful move the temp file is gone.
            try:
                os.remove(temppath)
            except OSError:
                pass
|
||||
|
||||
|
||||
def get_gh_asset_name(basename: PathLike, text: bool = False) -> str:
    """Derive a hosting-safe asset name from basename.

    GitHub strips characters like '~' or '=' and insists on a file
    extension, so use the sha256 hex digest of the name plus a fixed suffix.
    """
    digest = hashlib.sha256(str(basename).encode("utf-8")).hexdigest()
    return digest + (".txt" if text else ".bin")
|
||||
|
||||
|
||||
def get_asset_filename(asset: Attachment) -> str:
    """The filename under which the asset is stored."""
    return asset.name
|
||||
|
||||
|
||||
def get_release_assets(release: Release) -> list[Attachment]:
    """All attachments of a release, as a fresh list."""
    # We allow uploads from GHA and some special users
    return [asset for asset in release.assets]
|
||||
|
||||
|
||||
def upload_asset(repo: Repository, release: Release, path: PathLike, replace: bool = False,
                 text: bool = False, content: bytes | None = None) -> None:
    """Upload a file (or in-memory content) as a release attachment.

    The stored asset name is a hash of the basename (see get_gh_asset_name);
    the original basename is kept as the attachment label. On an upload error
    the existing asset is checked: if it exists and replace=True it is deleted
    and the upload retried, otherwise the upload is skipped.
    """
    gitea = get_gitea()
    path = Path(path)
    basename = os.path.basename(str(path))
    asset_name = get_gh_asset_name(basename, text)
    asset_label = basename

    def can_try_upload_again() -> bool:
        for asset in get_release_assets(release):
            if asset_name == asset.name:
                # We want to treat incomplete assets as if they weren't there
                # so replace them always
                if replace:
                    gitea.repo_delete_release_attachment(
                        repo.owner.login, repo.name, release.id, asset.id)
                    break
                else:
                    print(f"Skipping upload for {asset_name} as {asset_label}, already exists")
                    return False
        return True

    def upload() -> None:
        if content is None:
            # BUG FIX: the file was previously opened here and the handle
            # ignored while the path was passed anyway; the API takes a path,
            # so pass it directly without the pointless open().
            gitea.repo_create_release_attachment(
                repo.owner.login, repo.name, release.id,
                name=asset_label, attachment=str(path))
        else:
            # In-memory content: spool through a temp file the API can read.
            tmp_path = None
            try:
                with tempfile.NamedTemporaryFile(delete=False) as tf:
                    tf.write(content)
                    tf.flush()
                    tmp_path = tf.name

                gitea.repo_create_release_attachment(
                    repo.owner.login, repo.name, release.id,
                    name=asset_label, attachment=tmp_path)
            finally:
                if tmp_path and os.path.exists(tmp_path):
                    os.remove(tmp_path)

    try:
        upload()
    except (ApiException, requests.RequestException):
        if can_try_upload_again():
            upload()

    print(f"Uploaded {asset_name} as {asset_label}")
|
||||
|
||||
|
||||
def get_release(repo: Repository, name: str, create: bool = True) -> Release:
    """Like Repository.get_release() but creates the referenced release if needed"""

    gitea = get_gitea()
    try:
        return gitea.repo_get_release_by_tag(repo.owner.login, repo.name, name)
    except ApiException:
        if not create:
            raise
        option = CreateReleaseOption(tag_name=name, prerelease=True)
        return gitea.repo_create_release(repo.owner.login, repo.name, body=option)
|
||||
|
||||
|
||||
class CachedAssets:
    """Caches release asset listings so repeated lookups avoid extra API calls."""

    def __init__(self) -> None:
        # Staging assets keyed by build type; failed-markers keyed by repo name.
        self._assets: dict[BuildType, list[Attachment]] = {}
        self._failed: dict[str, list[Attachment]] = {}

    def get_assets(self, build_type: BuildType) -> list[Attachment]:
        """Assets of the 'staging-<build_type>' release (cached)."""
        cached = self._assets.get(build_type)
        if cached is None:
            repo = get_repo_for_build_type(build_type)
            release = get_release(repo, 'staging-' + build_type)
            cached = self._assets[build_type] = get_release_assets(release)
        return cached

    def get_failed_assets(self, build_type: BuildType) -> list[Attachment]:
        """Failed-marker assets for build_type (cached per repository)."""
        repo = get_repo_for_build_type(build_type)
        key = repo.full_name
        if key not in self._failed:
            release = get_release(repo, 'staging-failed')
            self._failed[key] = get_release_assets(release)
        # XXX: This depends on the format of the filename
        prefix = build_type + "-"
        return [a for a in self._failed[key] if get_asset_filename(a).startswith(prefix)]
|
||||
41
msys2_autobuild/main.py
Normal file
41
msys2_autobuild/main.py
Normal file
@ -0,0 +1,41 @@
|
||||
import argparse
|
||||
import sys
|
||||
import logging
|
||||
|
||||
from . import (cmd_build, cmd_clean_assets, cmd_clear_failed, cmd_fetch_assets,
|
||||
cmd_show_build, cmd_update_status, cmd_upload_assets)
|
||||
from .utils import install_requests_cache
|
||||
|
||||
|
||||
def main(argv: list[str]) -> None:
    """Entry point: parse arguments, configure logging, dispatch the subcommand."""
    parser = argparse.ArgumentParser(description="Build packages", allow_abbrev=False)
    parser.add_argument(
        '-v', '--verbose',
        action='count',
        default=0,
        help='Increase verbosity (can be used multiple times)'
    )
    # With no subcommand given, just print the help text.
    parser.set_defaults(func=lambda *x: parser.print_help())
    subparsers = parser.add_subparsers(title="subcommands")
    for command_module in (cmd_build, cmd_show_build, cmd_update_status,
                           cmd_fetch_assets, cmd_upload_assets,
                           cmd_clear_failed, cmd_clean_assets):
        command_module.add_parser(subparsers)

    args = parser.parse_args(argv[1:])
    # -v/-vv raise the log level; anything beyond -vv stays at DEBUG.
    verbosity_levels = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    logging.basicConfig(
        level=verbosity_levels.get(args.verbose, logging.DEBUG),
        handlers=[logging.StreamHandler(sys.stderr)],
        format='[%(asctime)s] [%(levelname)8s] [%(name)s:%(module)s:%(lineno)d] %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')

    with install_requests_cache():
        args.func(args)
|
||||
|
||||
|
||||
def run() -> None:
    """Console-script entry point: dispatch with the process argv."""
    return main(sys.argv)
|
||||
464
msys2_autobuild/queue.py
Normal file
464
msys2_autobuild/queue.py
Normal file
@ -0,0 +1,464 @@
|
||||
import fnmatch
|
||||
import io
|
||||
import json
|
||||
import tempfile
|
||||
import os
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from enum import Enum
|
||||
from typing import Any, cast
|
||||
|
||||
import requests
|
||||
from gitea.rest import ApiException
|
||||
|
||||
from .config import (REQUESTS_TIMEOUT, ArchType, BuildType, Config,
|
||||
build_type_is_src, get_all_build_types)
|
||||
from .gh import (CachedAssets, download_text_asset, get_asset_filename,
|
||||
get_current_repo, get_release,
|
||||
get_gitea)
|
||||
from .utils import get_requests_session, queue_website_update
|
||||
|
||||
|
||||
class PackageStatus(Enum):
    # Lifecycle states for one (package, build type) pair in the build queue.
    # The string values are serialized (see __str__), so they must not change.
    FINISHED = 'finished'
    FINISHED_BUT_BLOCKED = 'finished-but-blocked'
    FINISHED_BUT_INCOMPLETE = 'finished-but-incomplete'
    FAILED_TO_BUILD = 'failed-to-build'
    WAITING_FOR_BUILD = 'waiting-for-build'
    WAITING_FOR_DEPENDENCIES = 'waiting-for-dependencies'
    MANUAL_BUILD_REQUIRED = 'manual-build-required'
    UNKNOWN = 'unknown'

    def __str__(self) -> str:
        # Print/serialize as the raw kebab-case value, not "PackageStatus.X".
        return self.value
|
||||
|
||||
|
||||
class Package(dict):
|
||||
|
||||
def __repr__(self) -> str:
    """Short identity representation keyed on the package name."""
    return f"Package({self['name']!r})"
|
||||
|
||||
def __hash__(self) -> int:  # type: ignore
    # Identity hash: packages are unique objects, not value-compared dicts.
    return id(self)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
    # Identity comparison, consistent with __hash__.
    return self is other
|
||||
|
||||
@property
def _active_builds(self) -> dict:
    """The builds limited to the known arch build types (mingw + msys)."""
    known_arches = Config.MINGW_ARCH_LIST + Config.MSYS_ARCH_LIST
    return {k: v for k, v in self["builds"].items() if k in known_arches}
|
||||
|
||||
def _get_build(self, build_type: BuildType) -> dict:
    """The raw build dict for build_type, or {} if none is recorded."""
    return self["builds"].get(build_type, {})
|
||||
|
||||
def get_status(self, build_type: BuildType) -> PackageStatus:
    """The status recorded for build_type; UNKNOWN if none set yet."""
    return self._get_build(build_type).get("status", PackageStatus.UNKNOWN)
|
||||
|
||||
def get_status_details(self, build_type: BuildType) -> dict[str, Any]:
    """A shallow copy of the status details recorded for build_type."""
    return dict(self._get_build(build_type).get("status_details", {}))
|
||||
|
||||
def set_status(self, build_type: BuildType, status: PackageStatus,
|
||||
description: str | None = None,
|
||||
urls: dict[str, str] | None = None) -> None:
|
||||
build = self["builds"].setdefault(build_type, {})
|
||||
build["status"] = status
|
||||
meta: dict[str, Any] = {}
|
||||
meta["desc"] = description
|
||||
if urls is None:
|
||||
urls = {}
|
||||
meta["urls"] = urls
|
||||
build["status_details"] = meta
|
||||
|
||||
def set_blocked(
|
||||
self, build_type: BuildType, status: PackageStatus,
|
||||
dep: "Package", dep_type: BuildType) -> None:
|
||||
dep_details = dep.get_status_details(dep_type)
|
||||
dep_blocked = dep_details.get("blocked", {})
|
||||
details = self.get_status_details(build_type)
|
||||
blocked = details.get("blocked", {})
|
||||
if dep_blocked:
|
||||
blocked = dict(dep_blocked)
|
||||
else:
|
||||
blocked.setdefault(dep, set()).add(dep_type)
|
||||
descs = []
|
||||
for pkg, types in blocked.items():
|
||||
descs.append("{} ({})".format(pkg["name"], "/".join(types)))
|
||||
self.set_status(build_type, status, "Blocked by: " + ", ".join(descs))
|
||||
build = self._get_build(build_type)
|
||||
build.setdefault("status_details", {})["blocked"] = blocked
|
||||
|
||||
def is_new(self, build_type: BuildType) -> bool:
|
||||
build = self._get_build(build_type)
|
||||
return build.get("new", False)
|
||||
|
||||
def get_build_patterns(self, build_type: BuildType) -> list[str]:
|
||||
patterns = []
|
||||
if build_type_is_src(build_type):
|
||||
patterns.append(f"{self['name']}-{self['version']}.src.tar.[!s]*")
|
||||
elif build_type in (Config.MINGW_ARCH_LIST + Config.MSYS_ARCH_LIST):
|
||||
for item in self._get_build(build_type).get('packages', []):
|
||||
patterns.append(f"{item}-{self['version']}-*.pkg.tar.zst")
|
||||
else:
|
||||
assert 0
|
||||
return patterns
|
||||
|
||||
def get_failed_name(self, build_type: BuildType) -> str:
|
||||
return f"{build_type}-{self['name']}-{self['version']}.failed"
|
||||
|
||||
def get_build_types(self) -> list[BuildType]:
|
||||
build_types = list(self._active_builds)
|
||||
if self["source"]:
|
||||
if any((k in Config.MINGW_ARCH_LIST) for k in build_types):
|
||||
build_types.append(Config.MINGW_SRC_BUILD_TYPE)
|
||||
if any((k in Config.MSYS_ARCH_LIST) for k in build_types):
|
||||
build_types.append(Config.MSYS_SRC_BUILD_TYPE)
|
||||
return build_types
|
||||
|
||||
def _get_dep_build(self, build_type: BuildType) -> dict:
|
||||
if build_type == Config.MINGW_SRC_BUILD_TYPE:
|
||||
build_type = Config.MINGW_SRC_ARCH
|
||||
elif build_type == Config.MSYS_SRC_BUILD_TYPE:
|
||||
build_type = Config.MSYS_SRC_ARCH
|
||||
return self._get_build(build_type)
|
||||
|
||||
def is_optional_dep(self, dep: "Package", dep_type: BuildType) -> bool:
|
||||
# Some deps are manually marked as optional to break cycles.
|
||||
# This requires them to be in the main repo though, otherwise the cycle has to
|
||||
# be fixed manually.
|
||||
return dep["name"] in Config.OPTIONAL_DEPS.get(self["name"], []) and not dep.is_new(dep_type)
|
||||
|
||||
def get_depends(self, build_type: BuildType) -> "dict[ArchType, set[Package]]":
|
||||
build = self._get_dep_build(build_type)
|
||||
return build.get('ext-depends', {})
|
||||
|
||||
def get_rdepends(self, build_type: BuildType) -> "dict[ArchType, set[Package]]":
|
||||
build = self._get_dep_build(build_type)
|
||||
return build.get('ext-rdepends', {})
|
||||
|
||||
|
||||
def get_buildqueue() -> list[Package]:
    """Fetch the current build queue from the local API and parse it."""
    session = get_requests_session()
    response = session.get("http://localhost:8160/api/buildqueue2", timeout=REQUESTS_TIMEOUT)
    response.raise_for_status()
    return parse_buildqueue(response.text)
|
||||
|
||||
|
||||
def parse_buildqueue(payload: str) -> list[Package]:
    """Turn the JSON build queue payload into linked Package objects.

    Fills in 'repo' on each package and resolves the textual dependency
    lists into Package references ('ext-depends' / 'ext-rdepends').
    """
    pkgs: list[Package] = []
    for entry in json.loads(payload):
        pkg = Package(entry)
        pkg['repo'] = pkg['repo_url'].rsplit('/', 1)[-1]
        pkgs.append(pkg)

    # map every binary package name to the queue entry that provides it
    providers: dict[str, Package] = {}
    for pkg in pkgs:
        for build in pkg._active_builds.values():
            for name in build['packages']:
                providers[name] = pkg

    # resolve the textual depends lists into real Package references
    for pkg in pkgs:
        for build in pkg._active_builds.values():
            resolved: dict[str, set[Package]] = {}
            for repo, dep_names in build['depends'].items():
                for dep_name in dep_names:
                    resolved.setdefault(repo, set()).add(providers[dep_name])
            build['ext-depends'] = resolved

    # derive reverse dependencies from the forward edges
    for pkg in pkgs:
        for build in pkg._active_builds.values():
            reverse: dict[str, set[Package]] = {}
            for other in pkgs:
                for other_arch, other_build in other._active_builds.items():
                    for deps in other_build['ext-depends'].values():
                        if pkg in deps:
                            reverse.setdefault(other_arch, set()).add(other)
            build['ext-rdepends'] = reverse

    return pkgs
|
||||
|
||||
|
||||
def get_cycles(pkgs: list[Package]) -> set[tuple[Package, Package]]:
    """Find pairs of packages whose unbuilt dependency closures contain
    each other.

    Returns a set of 2-tuples, each sorted by package name so a cycle is
    reported only once regardless of traversal order.
    """
    cycles: set[tuple[Package, Package]] = set()

    # In case the package is already built it doesn't matter if it is part of a cycle
    def pkg_is_finished(pkg: Package, build_type: BuildType) -> bool:
        return pkg.get_status(build_type) in [
            PackageStatus.FINISHED,
            PackageStatus.FINISHED_BUT_BLOCKED,
            PackageStatus.FINISHED_BUT_INCOMPLETE,
        ]

    # Transitive dependencies of a package. Excluding branches where a root is finished
    def get_buildqueue_deps(pkg: Package, build_type: ArchType) -> "dict[ArchType, set[Package]]":
        # worklist traversal over (build_type, pkg) nodes; finished nodes are
        # skipped and do not contribute their children
        start = (build_type, pkg)
        todo = set([start])
        done = set()
        result = set()

        while todo:
            build_type, pkg = todo.pop()
            item = (build_type, pkg)
            done.add(item)
            if pkg_is_finished(pkg, build_type):
                continue
            result.add(item)
            for dep_build_type, deps in pkg.get_depends(build_type).items():
                for dep in deps:
                    dep_item = (dep_build_type, dep)
                    if dep_item not in done:
                        todo.add(dep_item)
        # the starting node itself is not one of its own dependencies
        result.discard(start)

        # regroup the flat (type, pkg) set by build type
        d: dict[ArchType, set[Package]] = {}
        for build_type, pkg in result:
            d.setdefault(build_type, set()).add(pkg)
        return d

    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            # src builds have no dependency edges of their own here
            if build_type_is_src(build_type):
                continue
            build_type = cast(ArchType, build_type)
            for dep_build_type, deps in get_buildqueue_deps(pkg, build_type).items():
                for dep in deps:
                    # manually broken cycle
                    if pkg.is_optional_dep(dep, dep_build_type) or dep.is_optional_dep(pkg, build_type):
                        continue
                    # a cycle exists if pkg is also in dep's transitive deps
                    dep_deps = get_buildqueue_deps(dep, dep_build_type)
                    if pkg in dep_deps.get(build_type, set()):
                        cycles.add(tuple(sorted([pkg, dep], key=lambda p: p["name"])))  # type: ignore

    return cycles
|
||||
|
||||
|
||||
def get_buildqueue_with_status(full_details: bool = False) -> list[Package]:
    """Fetch the build queue and annotate every (package, build type) with a
    PackageStatus.

    Statuses are assigned in passes:
      1. basic state from the uploaded assets (done/failed/manual/waiting),
      2. waiting packages become WAITING_FOR_DEPENDENCIES if a dep is not done,
      3. a fixpoint loop that downgrades FINISHED builds to
         FINISHED_BUT_BLOCKED while deps/rdeps are unfinished,
      4. a final pass blocking packages whose build types are only partially
         finished (lone source uploads would change nothing).

    full_details additionally downloads the .failed marker assets.
    """
    cached_assets = CachedAssets()

    # every ".failed" marker asset across all build types
    assets_failed = []
    for build_type in get_all_build_types():
        assets_failed.extend(cached_assets.get_failed_assets(build_type))

    failed_urls = {}
    if full_details:
        # This might take a while, so only in full mode
        with ThreadPoolExecutor(8) as executor:
            for i, (asset, content) in enumerate(
                    zip(assets_failed, executor.map(download_text_asset, assets_failed))):
                # NOTE(review): result is currently unused — the URL
                # extraction below was disabled, so failed_urls stays empty
                result = json.loads(content)
                #No more Github Action URLs
                #if result["urls"]:
                #    failed_urls[get_asset_filename(asset)] = result["urls"]

    def pkg_is_done(build_type: BuildType, pkg: Package) -> bool:
        # done means every expected artifact pattern matches an uploaded asset
        done_names = [get_asset_filename(a) for a in cached_assets.get_assets(build_type)]
        for pattern in pkg.get_build_patterns(build_type):
            if not fnmatch.filter(done_names, pattern):
                return False
        return True

    def get_failed_urls(build_type: BuildType, pkg: Package) -> dict[str, str] | None:
        failed_names = [get_asset_filename(a) for a in assets_failed]
        name = pkg.get_failed_name(build_type)
        if name in failed_names:
            return failed_urls.get(name)
        return None

    def pkg_has_failed(build_type: BuildType, pkg: Package) -> bool:
        # a build failed if its ".failed" marker asset was uploaded
        failed_names = [get_asset_filename(a) for a in assets_failed]
        name = pkg.get_failed_name(build_type)
        return name in failed_names

    def pkg_is_manual(build_type: BuildType, pkg: Package) -> bool:
        # src builds are never marked manual
        if build_type_is_src(build_type):
            return False
        for pattern, types in Config.MANUAL_BUILD:
            # an empty types list means the pattern applies to all build types
            type_matches = not types or build_type in types
            if type_matches and fnmatch.fnmatchcase(pkg['name'], pattern):
                return True
        return False

    pkgs = get_buildqueue()

    # basic state
    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            if pkg_is_done(build_type, pkg):
                pkg.set_status(build_type, PackageStatus.FINISHED)
            elif pkg_has_failed(build_type, pkg):
                urls = get_failed_urls(build_type, pkg)
                pkg.set_status(build_type, PackageStatus.FAILED_TO_BUILD, urls=urls)
            elif pkg_is_manual(build_type, pkg):
                pkg.set_status(build_type, PackageStatus.MANUAL_BUILD_REQUIRED)
            else:
                pkg.set_status(build_type, PackageStatus.WAITING_FOR_BUILD)

    # wait for dependencies to be finished before starting a build
    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            status = pkg.get_status(build_type)
            if status == PackageStatus.WAITING_FOR_BUILD:

                for dep_type, deps in pkg.get_depends(build_type).items():
                    for dep in deps:
                        dep_status = dep.get_status(dep_type)
                        if dep_status != PackageStatus.FINISHED:
                            if pkg.is_optional_dep(dep, dep_type):
                                continue
                            pkg.set_blocked(
                                build_type, PackageStatus.WAITING_FOR_DEPENDENCIES, dep, dep_type)

    # Block packages where not all deps/rdeps/related are finished
    # (fixpoint: blocking one package can block others that depend on it)
    changed = True
    while changed:
        changed = False
        for pkg in pkgs:
            for build_type in pkg.get_build_types():
                status = pkg.get_status(build_type)
                if status == PackageStatus.FINISHED:
                    # src builds are independent
                    if build_type_is_src(build_type):
                        continue

                    for dep_type, deps in pkg.get_depends(build_type).items():
                        for dep in deps:
                            dep_status = dep.get_status(dep_type)
                            if dep_status != PackageStatus.FINISHED:
                                pkg.set_blocked(
                                    build_type, PackageStatus.FINISHED_BUT_BLOCKED, dep, dep_type)
                                changed = True

                    for dep_type, deps in pkg.get_rdepends(build_type).items():
                        for dep in deps:
                            if dep["name"] in Config.IGNORE_RDEP_PACKAGES:
                                continue
                            dep_status = dep.get_status(dep_type)
                            dep_new = dep.is_new(dep_type)
                            # if the rdep isn't in the repo we can't break it by uploading
                            if dep_status != PackageStatus.FINISHED and not dep_new:
                                pkg.set_blocked(
                                    build_type, PackageStatus.FINISHED_BUT_BLOCKED, dep, dep_type)
                                changed = True

    # Block packages where not every build type is finished
    for pkg in pkgs:
        unfinished = []
        blocked = []
        finished = []
        for build_type in pkg.get_build_types():
            status = pkg.get_status(build_type)
            if status != PackageStatus.FINISHED:
                if status == PackageStatus.FINISHED_BUT_BLOCKED:
                    blocked.append(build_type)
                # if the package isn't in the repo better not block on it
                elif not pkg.is_new(build_type):
                    unfinished.append(build_type)
            else:
                finished.append(build_type)

        # We track source packages by assuming they are in the repo if there is
        # at least one binary package in the repo. Uploading lone source
        # packages will not change anything, so block them.
        if not blocked and not unfinished and finished and \
                all(build_type_is_src(bt) for bt in finished):
            for build_type in pkg.get_build_types():
                status = pkg.get_status(build_type)
                if status in (PackageStatus.FINISHED, PackageStatus.FINISHED_BUT_BLOCKED):
                    # NOTE(review): 'changed' is a leftover from the loop
                    # above; nothing reads it after this point
                    changed = True
                    pkg.set_status(build_type, PackageStatus.FINISHED_BUT_INCOMPLETE)
        elif unfinished:
            for build_type in pkg.get_build_types():
                status = pkg.get_status(build_type)
                if status in (PackageStatus.FINISHED, PackageStatus.FINISHED_BUT_BLOCKED):
                    changed = True
                    for bt in unfinished:
                        pkg.set_blocked(build_type, PackageStatus.FINISHED_BUT_INCOMPLETE, pkg, bt)
        elif blocked:
            for build_type in pkg.get_build_types():
                status = pkg.get_status(build_type)
                if status == PackageStatus.FINISHED:
                    changed = True
                    for bt in blocked:
                        pkg.set_blocked(build_type, PackageStatus.FINISHED_BUT_BLOCKED, pkg, bt)

    return pkgs
|
||||
|
||||
|
||||
def update_status(pkgs: list[Package]) -> None:
    """Build status.json from pkgs and upload it to the "status" release.

    The upload is skipped when the already published asset has identical
    content, to reduce API write calls.  All API/network errors are only
    logged: concurrent jobs may race on this asset and the other writer's
    result is assumed to be equivalent.
    """
    repo = get_current_repo()
    release = get_release(repo, "status")

    status_object: dict[str, Any] = {}

    # per-package status; the internal "blocked" links are stripped since
    # they contain Package references that are not JSON serializable
    packages = []
    for pkg in pkgs:
        pkg_result = {}
        pkg_result["name"] = pkg["name"]
        pkg_result["version"] = pkg["version"]
        builds = {}
        for build_type in pkg.get_build_types():
            details = pkg.get_status_details(build_type)
            details.pop("blocked", None)
            details["status"] = pkg.get_status(build_type).value
            builds[build_type] = details
        pkg_result["builds"] = builds
        packages.append(pkg_result)
    status_object["packages"] = packages

    cycles = []
    for a, b in get_cycles(pkgs):
        cycles.append([a["name"], b["name"]])
    status_object["cycles"] = sorted(cycles)

    content = json.dumps(status_object, indent=2).encode()

    # If multiple jobs update this at the same time things can fail,
    # assume the other one went through and just ignore all errors
    try:
        asset_name = "status.json"
        # Find the existing status asset, if any.  (A bare for/break loop
        # here would leave the loop variable pointing at the *last* asset
        # when no name matches, making the code below delete an unrelated
        # asset.)
        asset = next((a for a in release.assets if a.name == asset_name), None)

        do_replace = True

        # Avoid uploading the same file twice, to reduce API write calls
        if asset is not None and asset.size == len(content):
            try:
                old_content = download_text_asset(asset, cache=True)
                if old_content == content.decode():
                    do_replace = False
            except requests.RequestException:
                # github sometimes returns 404 for a short time after uploading
                pass

        if do_replace:
            if asset is not None:
                gitea = get_gitea()
                gitea.repo_delete_release_attachment(repo.owner.login, repo.name, release.id, asset.id)

            # the attachment API takes a file path, so stage the content in a
            # temporary file and always clean it up afterwards
            tmp_path = None
            try:
                with tempfile.NamedTemporaryFile(delete=False) as tf:
                    tf.write(content)
                    tf.flush()
                    tmp_path = tf.name

                gitea = get_gitea()
                new_asset = gitea.repo_create_release_attachment(repo.owner.login, repo.name, release.id, name=asset_name, attachment=tmp_path)
            finally:
                if tmp_path and os.path.exists(tmp_path):
                    os.remove(tmp_path)

            print(f"Uploaded status file for {len(packages)} packages: {new_asset.browser_download_url}")
            queue_website_update()
        else:
            print("Status unchanged")
    except (ApiException, requests.RequestException) as e:
        print(e)
|
||||
122
msys2_autobuild/utils.py
Normal file
122
msys2_autobuild/utils.py
Normal file
@ -0,0 +1,122 @@
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
from datetime import timedelta
|
||||
from functools import cache
|
||||
from typing import Any, AnyStr, TypeAlias
|
||||
from collections.abc import Generator
|
||||
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
|
||||
from .config import REQUESTS_RETRY, REQUESTS_TIMEOUT, Config
|
||||
|
||||
PathLike: TypeAlias = os.PathLike | AnyStr
|
||||
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
|
||||
def requests_cache_disabled() -> Any:
    """Return a context manager under which requests_cache is bypassed.

    requests_cache is imported lazily so it is only required when used.
    """
    import requests_cache
    return requests_cache.disabled()
|
||||
|
||||
|
||||
@cache
def get_requests_session(nocache: bool = False) -> requests.Session:
    """Return a shared requests Session with retries mounted.

    Because of @cache, one session per nocache value is created and then
    reused; with nocache=True the session is created while the global
    requests_cache patching is bypassed.
    """
    if nocache:
        with requests_cache_disabled():
            session = requests.Session()
    else:
        session = requests.Session()
    retry_adapter = HTTPAdapter(max_retries=REQUESTS_RETRY)
    session.mount("https://", retry_adapter)
    session.mount("http://", retry_adapter)
    return session
|
||||
|
||||
|
||||
@contextmanager
def install_requests_cache() -> Generator:
    """Globally install an etag-based HTTP cache for the duration of the
    block, and prune/uninstall it again on exit.
    """
    # This adds basic etag based caching, to avoid hitting API rate limiting

    import requests_cache
    from requests_cache.backends.sqlite import SQLiteCache

    # Monkey patch globally, so pygithub uses it as well.
    # Only do re-validation with etag/date etc and ignore the cache-control headers that
    # github sends by default with 60 seconds.
    cache_dir = os.path.join(os.getcwd(), '.autobuild_cache')
    os.makedirs(cache_dir, exist_ok=True)
    # the library version is part of the file name, so an upgrade never
    # reads an incompatible database
    cache_file = f'http_cache_{requests_cache.__version__}.sqlite'
    # delete other versions
    for f in os.listdir(cache_dir):
        if f.startswith('http_cache') and f != cache_file:
            os.remove(os.path.join(cache_dir, f))
    requests_cache.install_cache(
        always_revalidate=True,
        cache_control=False,
        expire_after=requests_cache.EXPIRE_IMMEDIATELY,
        backend=SQLiteCache(os.path.join(cache_dir, cache_file)))

    # Call this once, so it gets cached from the main thread and can be used in a thread pool
    get_requests_session(nocache=True)

    try:
        yield
    finally:
        # Delete old cache entries, so this doesn't grow indefinitely
        cache = requests_cache.get_cache()
        assert cache is not None
        cache.delete(older_than=timedelta(hours=3))

        # un-monkey-patch again
        requests_cache.uninstall_cache()
|
||||
|
||||
|
||||
@contextmanager
def gha_group(title: str) -> Generator:
    """Group the wrapped output in the GitHub Actions log viewer.

    Emits the ::group::/::endgroup:: workflow commands around the body;
    the group is always closed, even if the body raises.
    """
    print(f'\n::group::{title}')
    try:
        yield
    finally:
        print('::endgroup::')
|
||||
|
||||
|
||||
def queue_website_update() -> None:
    """Ask packages.msys2.org to refresh its data.

    Best effort: a failing trigger is only logged, never raised.
    """
    session = get_requests_session()
    response = session.post('https://packages.msys2.org/api/trigger_update', timeout=REQUESTS_TIMEOUT)
    try:
        # it's not worth stopping the build if this fails, so just log it
        response.raise_for_status()
    except requests.RequestException as exc:
        print(exc)
|
||||
|
||||
|
||||
def parse_optional_deps(optional_deps: str) -> dict[str, list[str]]:
    """Parse a comma separated list of "pkg:dep" pairs into a mapping.

    Example: "a:b, a:c" -> {"a": ["b", "c"]}.  Whitespace is ignored and
    an empty/blank input yields an empty mapping.
    """
    res: dict[str, list[str]] = {}
    optional_deps = optional_deps.replace(" ", "")
    if not optional_deps:
        return res
    for entry in optional_deps.split(","):
        assert ":" in entry
        # maxsplit=1: split only on the first colon, so an entry containing
        # further colons no longer raises ValueError on unpacking
        first, second = entry.split(":", 1)
        res.setdefault(first, []).append(second)
    return res
|
||||
|
||||
|
||||
def apply_optional_deps(optional_deps: str) -> None:
    """Merge user supplied optional deps into Config.OPTIONAL_DEPS."""
    parsed = parse_optional_deps(optional_deps)
    for dep, ignored in parsed.items():
        existing = Config.OPTIONAL_DEPS.setdefault(dep, [])
        existing.extend(ignored)
|
||||
|
||||
|
||||
def ask_yes_no(prompt: str, default_no: bool = True) -> bool:
    """Ask a yes/no question via input() and return the answer.

    An empty answer selects the default; otherwise only the literal "y"
    (case insensitive) counts as yes.  Annotations added for consistency
    with the rest of the module, and the redundant conditional return
    collapsed to `not default_no`.
    """
    if default_no:
        prompt += " [y/N] "
    else:
        prompt += " [Y/n] "

    user_input = input(prompt).strip().lower()

    if not user_input:
        return not default_no
    return user_input == 'y'
|
||||
830
poetry.lock
generated
Normal file
830
poetry.lock
generated
Normal file
@ -0,0 +1,830 @@
|
||||
# This file is automatically @generated by Poetry 2.2.0 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "25.3.0"
|
||||
description = "Classes Without Boilerplate"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"},
|
||||
{file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
|
||||
tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
|
||||
|
||||
[[package]]
|
||||
name = "cattrs"
|
||||
version = "25.2.0"
|
||||
description = "Composable complex class support for attrs and dataclasses."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "cattrs-25.2.0-py3-none-any.whl", hash = "sha256:539d7eedee7d2f0706e4e109182ad096d608ba84633c32c75ef3458f1d11e8f1"},
|
||||
{file = "cattrs-25.2.0.tar.gz", hash = "sha256:f46c918e955db0177be6aa559068390f71988e877c603ae2e56c71827165cc06"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=24.3.0"
|
||||
typing-extensions = ">=4.12.2"
|
||||
|
||||
[package.extras]
|
||||
bson = ["pymongo (>=4.4.0)"]
|
||||
cbor2 = ["cbor2 (>=5.4.6)"]
|
||||
msgpack = ["msgpack (>=1.0.5)"]
|
||||
msgspec = ["msgspec (>=0.19.0) ; implementation_name == \"cpython\""]
|
||||
orjson = ["orjson (>=3.10.7) ; implementation_name == \"cpython\""]
|
||||
pyyaml = ["pyyaml (>=6.0)"]
|
||||
tomlkit = ["tomlkit (>=0.11.8)"]
|
||||
ujson = ["ujson (>=5.10.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2025.8.3"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"},
|
||||
{file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "2.0.0"
|
||||
description = "Foreign Function Interface for Python calling C code."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
markers = "platform_python_implementation != \"PyPy\""
|
||||
files = [
|
||||
{file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"},
|
||||
{file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"},
|
||||
{file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"},
|
||||
{file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"},
|
||||
{file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"},
|
||||
{file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"},
|
||||
{file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"},
|
||||
{file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.4.3"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f"},
|
||||
{file = "charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849"},
|
||||
{file = "charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37"},
|
||||
{file = "charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce"},
|
||||
{file = "charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce"},
|
||||
{file = "charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f2be7e0cf7754b9a30eb01f4295cc3d4358a479843b31f328afd210e2c7598c"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c60e092517a73c632ec38e290eba714e9627abe9d301c8c8a12ec32c314a2a4b"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:252098c8c7a873e17dd696ed98bbe91dbacd571da4b87df3736768efa7a792e4"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3653fad4fe3ed447a596ae8638b437f827234f01a8cd801842e43f3d0a6b281b"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8999f965f922ae054125286faf9f11bc6932184b93011d138925a1773830bbe9"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d95bfb53c211b57198bb91c46dd5a2d8018b3af446583aab40074bf7988401cb"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:5b413b0b1bfd94dbf4023ad6945889f374cd24e3f62de58d6bb102c4d9ae534a"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:b5e3b2d152e74e100a9e9573837aba24aab611d39428ded46f4e4022ea7d1942"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a2d08ac246bb48479170408d6c19f6385fa743e7157d716e144cad849b2dd94b"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-win32.whl", hash = "sha256:ec557499516fc90fd374bf2e32349a2887a876fbf162c160e3c01b6849eaf557"},
|
||||
{file = "charset_normalizer-3.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:5d8d01eac18c423815ed4f4a2ec3b439d654e55ee4ad610e153cf02faf67ea40"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = "sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432"},
|
||||
{file = "charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca"},
|
||||
{file = "charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a"},
|
||||
{file = "charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
description = "Cross-platform colored terminal text."
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
groups = ["dev"]
|
||||
markers = "sys_platform == \"win32\""
|
||||
files = [
|
||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "46.0.1"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
optional = false
|
||||
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "cryptography-46.0.1-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:1cd6d50c1a8b79af1a6f703709d8973845f677c8e97b1268f5ff323d38ce8475"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0ff483716be32690c14636e54a1f6e2e1b7bf8e22ca50b989f88fa1b2d287080"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9873bf7c1f2a6330bdfe8621e7ce64b725784f9f0c3a6a55c3047af5849f920e"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0dfb7c88d4462a0cfdd0d87a3c245a7bc3feb59de101f6ff88194f740f72eda6"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e22801b61613ebdebf7deb18b507919e107547a1d39a3b57f5f855032dd7cfb8"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:757af4f6341ce7a1e47c326ca2a81f41d236070217e5fbbad61bbfe299d55d28"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f7a24ea78de345cfa7f6a8d3bde8b242c7fac27f2bd78fa23474ca38dfaeeab9"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e8776dac9e660c22241b6587fae51a67b4b0147daa4d176b172c3ff768ad736"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9f40642a140c0c8649987027867242b801486865277cbabc8c6059ddef16dc8b"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:449ef2b321bec7d97ef2c944173275ebdab78f3abdd005400cc409e27cd159ab"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2dd339ba3345b908fa3141ddba4025568fa6fd398eabce3ef72a29ac2d73ad75"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7411c910fb2a412053cf33cfad0153ee20d27e256c6c3f14d7d7d1d9fec59fd5"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-win32.whl", hash = "sha256:cbb8e769d4cac884bb28e3ff620ef1001b75588a5c83c9c9f1fdc9afbe7f29b0"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-win_amd64.whl", hash = "sha256:92e8cfe8bd7dd86eac0a677499894862cd5cc2fd74de917daa881d00871ac8e7"},
|
||||
{file = "cryptography-46.0.1-cp311-abi3-win_arm64.whl", hash = "sha256:db5597a4c7353b2e5fb05a8e6cb74b56a4658a2b7bf3cb6b1821ae7e7fd6eaa0"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:4c49eda9a23019e11d32a0eb51a27b3e7ddedde91e099c0ac6373e3aacc0d2ee"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9babb7818fdd71394e576cf26c5452df77a355eac1a27ddfa24096665a27f8fd"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9f2c4cc63be3ef43c0221861177cee5d14b505cd4d4599a89e2cd273c4d3542a"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:41c281a74df173876da1dc9a9b6953d387f06e3d3ed9284e3baae3ab3f40883a"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0a17377fa52563d730248ba1f68185461fff36e8bc75d8787a7dd2e20a802b7a"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:0d1922d9280e08cde90b518a10cd66831f632960a8d08cb3418922d83fce6f12"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:af84e8e99f1a82cea149e253014ea9dc89f75b82c87bb6c7242203186f465129"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:ef648d2c690703501714588b2ba640facd50fd16548133b11b2859e8655a69da"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:e94eb5fa32a8a9f9bf991f424f002913e3dd7c699ef552db9b14ba6a76a6313b"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:534b96c0831855e29fc3b069b085fd185aa5353033631a585d5cd4dd5d40d657"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9b55038b5c6c47559aa33626d8ecd092f354e23de3c6975e4bb205df128a2a0"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ec13b7105117dbc9afd023300fb9954d72ca855c274fe563e72428ece10191c0"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-win32.whl", hash = "sha256:504e464944f2c003a0785b81668fe23c06f3b037e9cb9f68a7c672246319f277"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c52fded6383f7e20eaf70a60aeddd796b3677c3ad2922c801be330db62778e05"},
|
||||
{file = "cryptography-46.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:9495d78f52c804b5ec8878b5b8c7873aa8e63db9cd9ee387ff2db3fffe4df784"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d84c40bdb8674c29fa192373498b6cb1e84f882889d21a471b45d1f868d8d44b"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9ed64e5083fa806709e74fc5ea067dfef9090e5b7a2320a49be3c9df3583a2d8"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:341fb7a26bc9d6093c1b124b9f13acc283d2d51da440b98b55ab3f79f2522ead"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6ef1488967e729948d424d09c94753d0167ce59afba8d0f6c07a22b629c557b2"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7823bc7cdf0b747ecfb096d004cc41573c2f5c7e3a29861603a2871b43d3ef32"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:f736ab8036796f5a119ff8211deda416f8c15ce03776db704a7a4e17381cb2ef"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:e46710a240a41d594953012213ea8ca398cd2448fbc5d0f1be8160b5511104a0"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:84ef1f145de5aee82ea2447224dc23f065ff4cc5791bb3b506615957a6ba8128"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9394c7d5a7565ac5f7d9ba38b2617448eba384d7b107b262d63890079fad77ca"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ed957044e368ed295257ae3d212b95456bd9756df490e1ac4538857f67531fcc"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f7de12fa0eee6234de9a9ce0ffcfa6ce97361db7a50b09b65c63ac58e5f22fc7"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7fab1187b6c6b2f11a326f33b036f7168f5b996aedd0c059f9738915e4e8f53a"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-win32.whl", hash = "sha256:45f790934ac1018adeba46a0f7289b2b8fe76ba774a88c7f1922213a56c98bc1"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-win_amd64.whl", hash = "sha256:7176a5ab56fac98d706921f6416a05e5aff7df0e4b91516f450f8627cda22af3"},
|
||||
{file = "cryptography-46.0.1-cp38-abi3-win_arm64.whl", hash = "sha256:efc9e51c3e595267ff84adf56e9b357db89ab2279d7e375ffcaf8f678606f3d9"},
|
||||
{file = "cryptography-46.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd4b5e2ee4e60425711ec65c33add4e7a626adef79d66f62ba0acfd493af282d"},
|
||||
{file = "cryptography-46.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:48948940d0ae00483e85e9154bb42997d0b77c21e43a77b7773c8c80de532ac5"},
|
||||
{file = "cryptography-46.0.1-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b9c79af2c3058430d911ff1a5b2b96bbfe8da47d5ed961639ce4681886614e70"},
|
||||
{file = "cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0ca4be2af48c24df689a150d9cd37404f689e2968e247b6b8ff09bff5bcd786f"},
|
||||
{file = "cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:13e67c4d3fb8b6bc4ef778a7ccdd8df4cd15b4bcc18f4239c8440891a11245cc"},
|
||||
{file = "cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:15b5fd9358803b0d1cc42505a18d8bca81dabb35b5cfbfea1505092e13a9d96d"},
|
||||
{file = "cryptography-46.0.1-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e34da95e29daf8a71cb2841fd55df0511539a6cdf33e6f77c1e95e44006b9b46"},
|
||||
{file = "cryptography-46.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:34f04b7311174469ab3ac2647469743720f8b6c8b046f238e5cb27905695eb2a"},
|
||||
{file = "cryptography-46.0.1.tar.gz", hash = "sha256:ed570874e88f213437f5cf758f9ef26cbfc3f336d889b1e592ee11283bb8d1c7"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
|
||||
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
|
||||
nox = ["nox[uv] (>=2024.4.15)"]
|
||||
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
|
||||
sdist = ["build (>=1.0.0)"]
|
||||
ssh = ["bcrypt (>=3.1.5)"]
|
||||
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
|
||||
test-randomorder = ["pytest-randomly"]
|
||||
|
||||
[[package]]
|
||||
name = "flake8"
|
||||
version = "7.3.0"
|
||||
description = "the modular source code checker: pep8 pyflakes and co"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"},
|
||||
{file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
mccabe = ">=0.7.0,<0.8.0"
|
||||
pycodestyle = ">=2.14.0,<2.15.0"
|
||||
pyflakes = ">=3.4.0,<3.5.0"
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.10"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
|
||||
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.1.0"
|
||||
description = "brain-dead simple config-ini parsing"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"},
|
||||
{file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mccabe"
|
||||
version = "0.7.0"
|
||||
description = "McCabe checker, plugin for flake8"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
|
||||
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mypy"
|
||||
version = "1.18.1"
|
||||
description = "Optional static typing for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "mypy-1.18.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2761b6ae22a2b7d8e8607fb9b81ae90bc2e95ec033fd18fa35e807af6c657763"},
|
||||
{file = "mypy-1.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b10e3ea7f2eec23b4929a3fabf84505da21034a4f4b9613cda81217e92b74f3"},
|
||||
{file = "mypy-1.18.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:261fbfced030228bc0f724d5d92f9ae69f46373bdfd0e04a533852677a11dbea"},
|
||||
{file = "mypy-1.18.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4dc6b34a1c6875e6286e27d836a35c0d04e8316beac4482d42cfea7ed2527df8"},
|
||||
{file = "mypy-1.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1cabb353194d2942522546501c0ff75c4043bf3b63069cb43274491b44b773c9"},
|
||||
{file = "mypy-1.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:738b171690c8e47c93569635ee8ec633d2cdb06062f510b853b5f233020569a9"},
|
||||
{file = "mypy-1.18.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c903857b3e28fc5489e54042684a9509039ea0aedb2a619469438b544ae1961"},
|
||||
{file = "mypy-1.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a0c8392c19934c2b6c65566d3a6abdc6b51d5da7f5d04e43f0eb627d6eeee65"},
|
||||
{file = "mypy-1.18.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f85eb7efa2ec73ef63fc23b8af89c2fe5bf2a4ad985ed2d3ff28c1bb3c317c92"},
|
||||
{file = "mypy-1.18.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:82ace21edf7ba8af31c3308a61dc72df30500f4dbb26f99ac36b4b80809d7e94"},
|
||||
{file = "mypy-1.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a2dfd53dfe632f1ef5d161150a4b1f2d0786746ae02950eb3ac108964ee2975a"},
|
||||
{file = "mypy-1.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:320f0ad4205eefcb0e1a72428dde0ad10be73da9f92e793c36228e8ebf7298c0"},
|
||||
{file = "mypy-1.18.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:502cde8896be8e638588b90fdcb4c5d5b8c1b004dfc63fd5604a973547367bb9"},
|
||||
{file = "mypy-1.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7509549b5e41be279afc1228242d0e397f1af2919a8f2877ad542b199dc4083e"},
|
||||
{file = "mypy-1.18.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5956ecaabb3a245e3f34100172abca1507be687377fe20e24d6a7557e07080e2"},
|
||||
{file = "mypy-1.18.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8750ceb014a96c9890421c83f0db53b0f3b8633e2864c6f9bc0a8e93951ed18d"},
|
||||
{file = "mypy-1.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fb89ea08ff41adf59476b235293679a6eb53a7b9400f6256272fb6029bec3ce5"},
|
||||
{file = "mypy-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:2657654d82fcd2a87e02a33e0d23001789a554059bbf34702d623dafe353eabf"},
|
||||
{file = "mypy-1.18.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d70d2b5baf9b9a20bc9c730015615ae3243ef47fb4a58ad7b31c3e0a59b5ef1f"},
|
||||
{file = "mypy-1.18.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b8367e33506300f07a43012fc546402f283c3f8bcff1dc338636affb710154ce"},
|
||||
{file = "mypy-1.18.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:913f668ec50c3337b89df22f973c1c8f0b29ee9e290a8b7fe01cc1ef7446d42e"},
|
||||
{file = "mypy-1.18.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a0e70b87eb27b33209fa4792b051c6947976f6ab829daa83819df5f58330c71"},
|
||||
{file = "mypy-1.18.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c378d946e8a60be6b6ede48c878d145546fb42aad61df998c056ec151bf6c746"},
|
||||
{file = "mypy-1.18.1-cp313-cp313-win_amd64.whl", hash = "sha256:2cd2c1e0f3a7465f22731987fff6fc427e3dcbb4ca5f7db5bbeaff2ff9a31f6d"},
|
||||
{file = "mypy-1.18.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ba24603c58e34dd5b096dfad792d87b304fc6470cbb1c22fd64e7ebd17edcc61"},
|
||||
{file = "mypy-1.18.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ed36662fb92ae4cb3cacc682ec6656208f323bbc23d4b08d091eecfc0863d4b5"},
|
||||
{file = "mypy-1.18.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:040ecc95e026f71a9ad7956fea2724466602b561e6a25c2e5584160d3833aaa8"},
|
||||
{file = "mypy-1.18.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:937e3ed86cb731276706e46e03512547e43c391a13f363e08d0fee49a7c38a0d"},
|
||||
{file = "mypy-1.18.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1f95cc4f01c0f1701ca3b0355792bccec13ecb2ec1c469e5b85a6ef398398b1d"},
|
||||
{file = "mypy-1.18.1-cp314-cp314-win_amd64.whl", hash = "sha256:e4f16c0019d48941220ac60b893615be2f63afedaba6a0801bdcd041b96991ce"},
|
||||
{file = "mypy-1.18.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e37763af63a8018308859bc83d9063c501a5820ec5bd4a19f0a2ac0d1c25c061"},
|
||||
{file = "mypy-1.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:51531b6e94f34b8bd8b01dee52bbcee80daeac45e69ec5c36e25bce51cbc46e6"},
|
||||
{file = "mypy-1.18.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dbfdea20e90e9c5476cea80cfd264d8e197c6ef2c58483931db2eefb2f7adc14"},
|
||||
{file = "mypy-1.18.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99f272c9b59f5826fffa439575716276d19cbf9654abc84a2ba2d77090a0ba14"},
|
||||
{file = "mypy-1.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8c05a7f8c00300a52f3a4fcc95a185e99bf944d7e851ff141bae8dcf6dcfeac4"},
|
||||
{file = "mypy-1.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:2fbcecbe5cf213ba294aa8c0b8c104400bf7bb64db82fb34fe32a205da4b3531"},
|
||||
{file = "mypy-1.18.1-py3-none-any.whl", hash = "sha256:b76a4de66a0ac01da1be14ecc8ae88ddea33b8380284a9e3eae39d57ebcbe26e"},
|
||||
{file = "mypy-1.18.1.tar.gz", hash = "sha256:9e988c64ad3ac5987f43f5154f884747faf62141b7f842e87465b45299eea5a9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
mypy_extensions = ">=1.0.0"
|
||||
pathspec = ">=0.9.0"
|
||||
typing_extensions = ">=4.6.0"
|
||||
|
||||
[package.extras]
|
||||
dmypy = ["psutil (>=4.0)"]
|
||||
faster-cache = ["orjson"]
|
||||
install-types = ["pip"]
|
||||
mypyc = ["setuptools (>=50)"]
|
||||
reports = ["lxml"]
|
||||
|
||||
[[package]]
|
||||
name = "mypy-extensions"
|
||||
version = "1.1.0"
|
||||
description = "Type system extensions for programs checked with the mypy type checker."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"},
|
||||
{file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "25.0"
|
||||
description = "Core utilities for Python packages"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
|
||||
{file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pathspec"
|
||||
version = "0.12.1"
|
||||
description = "Utility library for gitignore style pattern matching of file paths."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
|
||||
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "platformdirs"
|
||||
version = "4.4.0"
|
||||
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"},
|
||||
{file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"]
|
||||
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"]
|
||||
type = ["mypy (>=1.14.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.6.0"
|
||||
description = "plugin and hook calling mechanisms for python"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
|
||||
{file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["pre-commit", "tox"]
|
||||
testing = ["coverage", "pytest", "pytest-benchmark"]
|
||||
|
||||
[[package]]
|
||||
name = "pycodestyle"
|
||||
version = "2.14.0"
|
||||
description = "Python style guide checker"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"},
|
||||
{file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycparser"
|
||||
version = "2.23"
|
||||
description = "C parser in Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\""
|
||||
files = [
|
||||
{file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"},
|
||||
{file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyflakes"
|
||||
version = "3.4.0"
|
||||
description = "passive checker of Python programs"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"},
|
||||
{file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pygithub"
|
||||
version = "2.8.1"
|
||||
description = "Use the full Github API v3"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pygithub-2.8.1-py3-none-any.whl", hash = "sha256:23a0a5bca93baef082e03411bf0ce27204c32be8bfa7abc92fe4a3e132936df0"},
|
||||
{file = "pygithub-2.8.1.tar.gz", hash = "sha256:341b7c78521cb07324ff670afd1baa2bf5c286f8d9fd302c1798ba594a5400c9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pyjwt = {version = ">=2.4.0", extras = ["crypto"]}
|
||||
pynacl = ">=1.4.0"
|
||||
requests = ">=2.14.0"
|
||||
typing-extensions = ">=4.5.0"
|
||||
urllib3 = ">=1.26.0"
|
||||
|
||||
[[package]]
|
||||
name = "pygments"
|
||||
version = "2.19.2"
|
||||
description = "Pygments is a syntax highlighting package written in Python."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
|
||||
{file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
windows-terminal = ["colorama (>=0.4.6)"]
|
||||
|
||||
[[package]]
|
||||
name = "pyjwt"
|
||||
version = "2.10.1"
|
||||
description = "JSON Web Token implementation in Python"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"},
|
||||
{file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""}
|
||||
|
||||
[package.extras]
|
||||
crypto = ["cryptography (>=3.4.0)"]
|
||||
dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
|
||||
docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
|
||||
tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "pynacl"
|
||||
version = "1.6.0"
|
||||
description = "Python binding to the Networking and Cryptography (NaCl) library"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:f46386c24a65383a9081d68e9c2de909b1834ec74ff3013271f1bca9c2d233eb"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:dea103a1afcbc333bc0e992e64233d360d393d1e63d0bc88554f572365664348"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:04f20784083014e265ad58c1b2dd562c3e35864b5394a14ab54f5d150ee9e53e"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbcc4452a1eb10cd5217318c822fde4be279c9de8567f78bad24c773c21254f8"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51fed9fe1bec9e7ff9af31cd0abba179d0e984a2960c77e8e5292c7e9b7f7b5d"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:10d755cf2a455d8c0f8c767a43d68f24d163b8fe93ccfaabfa7bafd26be58d73"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:536703b8f90e911294831a7fbcd0c062b837f3ccaa923d92a6254e11178aaf42"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6b08eab48c9669d515a344fb0ef27e2cbde847721e34bba94a343baa0f33f1f4"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5789f016e08e5606803161ba24de01b5a345d24590a80323379fc4408832d290"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-win32.whl", hash = "sha256:4853c154dc16ea12f8f3ee4b7e763331876316cc3a9f06aeedf39bcdca8f9995"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:347dcddce0b4d83ed3f32fd00379c83c425abee5a9d2cd0a2c84871334eaff64"},
|
||||
{file = "pynacl-1.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2d6cd56ce4998cb66a6c112fda7b1fdce5266c9f05044fa72972613bef376d15"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:f4b3824920e206b4f52abd7de621ea7a44fd3cb5c8daceb7c3612345dfc54f2e"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:16dd347cdc8ae0b0f6187a2608c0af1c8b7ecbbe6b4a06bff8253c192f696990"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16c60daceee88d04f8d41d0a4004a7ed8d9a5126b997efd2933e08e93a3bd850"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25720bad35dfac34a2bcdd61d9e08d6bfc6041bebc7751d9c9f2446cf1e77d64"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8bfaa0a28a1ab718bad6239979a5a57a8d1506d0caf2fba17e524dbb409441cf"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ef214b90556bb46a485b7da8258e59204c244b1b5b576fb71848819b468c44a7"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:49c336dd80ea54780bcff6a03ee1a476be1612423010472e60af83452aa0f442"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f3482abf0f9815e7246d461fab597aa179b7524628a4bc36f86a7dc418d2608d"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:140373378e34a1f6977e573033d1dd1de88d2a5d90ec6958c9485b2fd9f3eb90"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6b393bc5e5a0eb86bb85b533deb2d2c815666665f840a09e0aa3362bb6088736"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a25cfede801f01e54179b8ff9514bd7b5944da560b7040939732d1804d25419"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-win32.whl", hash = "sha256:dcdeb41c22ff3c66eef5e63049abf7639e0db4edee57ba70531fc1b6b133185d"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-win_amd64.whl", hash = "sha256:cf831615cc16ba324240de79d925eacae8265b7691412ac6b24221db157f6bd1"},
|
||||
{file = "pynacl-1.6.0-cp38-abi3-win_arm64.whl", hash = "sha256:84709cea8f888e618c21ed9a0efdb1a59cc63141c403db8bf56c469b71ad56f2"},
|
||||
{file = "pynacl-1.6.0.tar.gz", hash = "sha256:cb36deafe6e2bce3b286e5d1f3e1c246e0ccdb8808ddb4550bb2792f2df298f2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cffi = [
|
||||
{version = ">=1.4.1", markers = "platform_python_implementation != \"PyPy\" and python_version < \"3.14\""},
|
||||
{version = ">=2.0.0", markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.14\""},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx (<7)", "sphinx_rtd_theme"]
|
||||
tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "8.4.2"
|
||||
description = "pytest: simple powerful testing with Python"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"},
|
||||
{file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
|
||||
iniconfig = ">=1"
|
||||
packaging = ">=20"
|
||||
pluggy = ">=1.5,<2"
|
||||
pygments = ">=2.7.2"
|
||||
|
||||
[package.extras]
|
||||
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.32.5"
|
||||
description = "Python HTTP for Humans."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"},
|
||||
{file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
charset_normalizer = ">=2,<4"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.21.1,<3"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "requests-cache"
|
||||
version = "1.2.1"
|
||||
description = "A persistent cache for python requests"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "requests_cache-1.2.1-py3-none-any.whl", hash = "sha256:1285151cddf5331067baa82598afe2d47c7495a1334bfe7a7d329b43e9fd3603"},
|
||||
{file = "requests_cache-1.2.1.tar.gz", hash = "sha256:68abc986fdc5b8d0911318fbb5f7c80eebcd4d01bfacc6685ecf8876052511d1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=21.2"
|
||||
cattrs = ">=22.2"
|
||||
platformdirs = ">=2.5"
|
||||
requests = ">=2.22"
|
||||
url-normalize = ">=1.4"
|
||||
urllib3 = ">=1.25.5"
|
||||
|
||||
[package.extras]
|
||||
all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"]
|
||||
bson = ["bson (>=0.5)"]
|
||||
docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"]
|
||||
dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"]
|
||||
json = ["ujson (>=5.4)"]
|
||||
mongodb = ["pymongo (>=3)"]
|
||||
redis = ["redis (>=3)"]
|
||||
security = ["itsdangerous (>=2.0)"]
|
||||
yaml = ["pyyaml (>=6.0.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "tabulate"
|
||||
version = "0.9.0"
|
||||
description = "Pretty-print tabular data"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
|
||||
{file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
widechars = ["wcwidth"]
|
||||
|
||||
[[package]]
|
||||
name = "types-requests"
|
||||
version = "2.32.4.20250913"
|
||||
description = "Typing stubs for requests"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1"},
|
||||
{file = "types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
urllib3 = ">=2"
|
||||
|
||||
[[package]]
|
||||
name = "types-tabulate"
|
||||
version = "0.9.0.20241207"
|
||||
description = "Typing stubs for tabulate"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "types_tabulate-0.9.0.20241207-py3-none-any.whl", hash = "sha256:b8dad1343c2a8ba5861c5441370c3e35908edd234ff036d4298708a1d4cf8a85"},
|
||||
{file = "types_tabulate-0.9.0.20241207.tar.gz", hash = "sha256:ac1ac174750c0a385dfd248edc6279fa328aaf4ea317915ab879a2ec47833230"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.15.0"
|
||||
description = "Backported and Experimental Type Hints for Python 3.9+"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"},
|
||||
{file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "url-normalize"
|
||||
version = "2.2.1"
|
||||
description = "URL normalization for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "url_normalize-2.2.1-py3-none-any.whl", hash = "sha256:3deb687587dc91f7b25c9ae5162ffc0f057ae85d22b1e15cf5698311247f567b"},
|
||||
{file = "url_normalize-2.2.1.tar.gz", hash = "sha256:74a540a3b6eba1d95bdc610c24f2c0141639f3ba903501e61a52a8730247ff37"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
idna = ">=3.3"
|
||||
|
||||
[package.extras]
|
||||
dev = ["mypy", "pre-commit", "pytest", "pytest-cov", "pytest-socket", "ruff"]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.5.0"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"},
|
||||
{file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
|
||||
h2 = ["h2 (>=4,<5)"]
|
||||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.12.0,<4.0"
|
||||
content-hash = "46744ebe6eae1328613d276f75270cb598a1a7d928f80148bcd7c31873650b7d"
|
||||
32
pyproject.toml
Normal file
32
pyproject.toml
Normal file
@ -0,0 +1,32 @@
|
||||
[project]
|
||||
name = "msys2-autobuild"
|
||||
version = "0.1.0"
|
||||
description = ""
|
||||
license = "MIT"
|
||||
authors = [
|
||||
{ name = "Christoph Reiter", email = "reiter.christoph@gmail.com" }
|
||||
]
|
||||
requires-python = ">=3.12.0,<4.0"
|
||||
dependencies = [
|
||||
"PyGithub>=2.8.1,<3",
|
||||
"tabulate>=0.9.0,<0.10",
|
||||
"requests>=2.28.1,<3",
|
||||
"requests-cache>=1.0.0,<2",
|
||||
"urllib3>=2.2.1,<3",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
msys2-autobuild = "msys2_autobuild.main:run"
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"pytest>=8.0.0,<9",
|
||||
"mypy==1.18.1",
|
||||
"flake8>=7.0.0,<8",
|
||||
"types-tabulate>=0.9.0.0,<0.10",
|
||||
"types-requests>=2.25.0,<3",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core>=2.2.0"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
@ -1,3 +1,18 @@
|
||||
PyGithub
|
||||
tabulate
|
||||
requests
|
||||
attrs==25.3.0 ; python_version >= "3.12" and python_version < "4.0"
|
||||
cattrs==25.2.0 ; python_version >= "3.12" and python_version < "4.0"
|
||||
certifi==2025.8.3 ; python_version >= "3.12" and python_version < "4.0"
|
||||
cffi==2.0.0 ; python_version >= "3.12" and python_version < "4.0" and platform_python_implementation != "PyPy"
|
||||
charset-normalizer==3.4.3 ; python_version >= "3.12" and python_version < "4.0"
|
||||
cryptography==46.0.1 ; python_version >= "3.12" and python_version < "4.0"
|
||||
idna==3.10 ; python_version >= "3.12" and python_version < "4.0"
|
||||
platformdirs==4.4.0 ; python_version >= "3.12" and python_version < "4.0"
|
||||
pycparser==2.23 ; python_version >= "3.12" and python_version < "4.0" and platform_python_implementation != "PyPy" and implementation_name != "PyPy"
|
||||
pygithub==2.8.1 ; python_version >= "3.12" and python_version < "4.0"
|
||||
pyjwt==2.10.1 ; python_version >= "3.12" and python_version < "4.0"
|
||||
pynacl==1.6.0 ; python_version >= "3.12" and python_version < "4.0"
|
||||
requests-cache==1.2.1 ; python_version >= "3.12" and python_version < "4.0"
|
||||
requests==2.32.5 ; python_version >= "3.12" and python_version < "4.0"
|
||||
tabulate==0.9.0 ; python_version >= "3.12" and python_version < "4.0"
|
||||
typing-extensions==4.15.0 ; python_version >= "3.12" and python_version < "4.0"
|
||||
url-normalize==2.2.1 ; python_version >= "3.12" and python_version < "4.0"
|
||||
urllib3==2.5.0 ; python_version >= "3.12" and python_version < "4.0"
|
||||
|
||||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
140
tests/main_test.py
Normal file
140
tests/main_test.py
Normal file
@ -0,0 +1,140 @@
|
||||
# type: ignore
|
||||
|
||||
import os
|
||||
import stat
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
from msys2_autobuild.utils import parse_optional_deps
|
||||
from msys2_autobuild.queue import parse_buildqueue, get_cycles
|
||||
from msys2_autobuild.build import make_tree_writable, remove_junctions
|
||||
|
||||
|
||||
def test_make_tree_writable():
|
||||
with tempfile.TemporaryDirectory() as tempdir:
|
||||
nested_dir = Path(tempdir) / "nested"
|
||||
nested_junction = nested_dir / "junction"
|
||||
nested_dir.mkdir()
|
||||
file_path = nested_dir / "test_file.txt"
|
||||
file_path.write_text("content")
|
||||
|
||||
# Create a junction loop if possible, to make sure we ignore it
|
||||
if os.name == 'nt':
|
||||
import _winapi
|
||||
_winapi.CreateJunction(str(nested_dir), str(nested_junction))
|
||||
else:
|
||||
nested_junction.mkdir()
|
||||
|
||||
# Remove permissions
|
||||
for p in [tempdir, nested_dir, file_path, nested_junction]:
|
||||
os.chmod(p, os.stat(p).st_mode & ~stat.S_IWRITE & ~stat.S_IREAD)
|
||||
|
||||
make_tree_writable(tempdir)
|
||||
|
||||
assert os.access(tempdir, os.W_OK) and os.access(tempdir, os.R_OK)
|
||||
assert os.access(nested_dir, os.W_OK) and os.access(nested_dir, os.R_OK)
|
||||
assert os.access(file_path, os.W_OK) and os.access(file_path, os.R_OK)
|
||||
assert os.access(nested_junction, os.W_OK) and os.access(nested_junction, os.R_OK)
|
||||
|
||||
|
||||
def test_remove_junctions():
|
||||
with tempfile.TemporaryDirectory() as tempdir:
|
||||
nested_dir = Path(tempdir) / "nested"
|
||||
nested_junction = nested_dir / "junction"
|
||||
nested_dir.mkdir()
|
||||
|
||||
# Create a junction loop if possible, to make sure we ignore it
|
||||
if os.name == 'nt':
|
||||
import _winapi
|
||||
_winapi.CreateJunction(str(nested_dir), str(nested_junction))
|
||||
assert nested_junction.exists()
|
||||
assert os.path.isjunction(nested_junction)
|
||||
|
||||
remove_junctions(tempdir)
|
||||
assert not nested_junction.exists()
|
||||
|
||||
|
||||
def test_parse_optional_deps():
|
||||
assert parse_optional_deps("a:b,c:d,a:x") == {'a': ['b', 'x'], 'c': ['d']}
|
||||
|
||||
|
||||
def test_get_cycles():
|
||||
buildqueue = """
|
||||
[
|
||||
{
|
||||
"name": "c-ares",
|
||||
"version": "1.34.2-1",
|
||||
"version_repo": "1.33.1-1",
|
||||
"repo_url": "https://github.com/msys2/MSYS2-packages",
|
||||
"repo_path": "c-ares",
|
||||
"source": true,
|
||||
"builds": {
|
||||
"msys": {
|
||||
"packages": [
|
||||
"libcares",
|
||||
"libcares-devel"
|
||||
],
|
||||
"depends": {
|
||||
"msys": [
|
||||
"libnghttp2",
|
||||
"libuv"
|
||||
]
|
||||
},
|
||||
"new": false
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "nghttp2",
|
||||
"version": "1.64.0-1",
|
||||
"version_repo": "1.63.0-1",
|
||||
"repo_url": "https://github.com/msys2/MSYS2-packages",
|
||||
"repo_path": "nghttp2",
|
||||
"source": true,
|
||||
"builds": {
|
||||
"msys": {
|
||||
"packages": [
|
||||
"libnghttp2",
|
||||
"libnghttp2-devel",
|
||||
"nghttp2"
|
||||
],
|
||||
"depends": {
|
||||
"msys": [
|
||||
"libcares",
|
||||
"libcares-devel"
|
||||
]
|
||||
},
|
||||
"new": false
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "libuv",
|
||||
"version": "1.49.2-1",
|
||||
"version_repo": "1.49.1-1",
|
||||
"repo_url": "https://github.com/msys2/MSYS2-packages",
|
||||
"repo_path": "libuv",
|
||||
"source": true,
|
||||
"builds": {
|
||||
"msys": {
|
||||
"packages": [
|
||||
"libuv",
|
||||
"libuv-devel"
|
||||
],
|
||||
"depends": {
|
||||
"msys": [
|
||||
"libnghttp2"
|
||||
]
|
||||
},
|
||||
"new": false
|
||||
}
|
||||
}
|
||||
}
|
||||
]"""
|
||||
|
||||
pkgs = parse_buildqueue(buildqueue)
|
||||
cycles = get_cycles(pkgs)
|
||||
assert len(cycles) == 3
|
||||
assert (pkgs[0], pkgs[2]) in cycles
|
||||
assert (pkgs[0], pkgs[1]) in cycles
|
||||
assert (pkgs[2], pkgs[1]) in cycles
|
||||
2
update-status.bat
Normal file
2
update-status.bat
Normal file
@ -0,0 +1,2 @@
|
||||
@echo off
|
||||
C:\msys64\msys2_shell.cmd -here -mingw64 -no-start -defterm -c "pacman --needed --noconfirm -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-requests-cache && python -m msys2_autobuild update-status"
|
||||
Loading…
x
Reference in New Issue
Block a user