Compare commits
443 Commits
SHA1:

50276fb9a2 84c03f504e 553846537b c6213b4d1a ecd1d51f4d fd1952d205 19926ce9c5 33a052a413
59740a3f2e 4704486d49 dc632d9934 3687fa3a0b 42b02362e1 05abf4e953 a3bae5a40c 456089ba22
d15bda6f83 de38d16edd fd77359a5a 3581de3619 84d3306857 5bbfb7bb18 69ce064955 ab3c2437e8
70dec0bd33 54197e6af4 c237bc163a 5f5d7aafa2 5c2504702e 776a26e021 999e4e9327 3a5fc4c416
663b7acdc1 e8d10d7e9e caa6a73b53 839b8befc3 a2fb8db0e7 311b4cd295 0d471ea5b7 8d9cbcb54c
23845c53e0 e9e823c2e7 fe4bcd08a9 47cc05c39f a2ebb72da0 4413e41389 d45f6720f4 e2042058f1
bb54adc298 1ef3f8f5f5 ca6dd299ee 5f9bed8409 625631832e 7ec5a79b46 a187346d08 b442168127
bdd38ec73c 98f6ea2875 a977f9deb9 4f60392b3e 35ff0b71b6 1575848e81 657fd89531 0f20d6bfa8
c553f33cf0 c5b593a34c 1bc0a28e35 0f71ee73cf 4deb3111d3 5bf958fd1b 7eed3d8bc1 7c78444174
19c8f00aba a6b3079ae3 acafab9b5f ef67d84096 7c56a1d764 cfdccd0a03 22f1e5ad0b 05a051162d
f968d2f0ca 67d510ec4b f44d95e7c2 00495cb263 40ab937954 59bb7f6f18 bf3cf80161 63ea6585cd
ea149103be 9c7e8d3135 8d08599c2e 3e617554bb 9a0b6a31c9 8d7df1587a dad6671556 bf9a4e2862
719254cb89 281ad3e16e d4515ba2fe b78070c653 aa0637d87b d5779cd65d 1c45f2ab2e 0eca067dd7
1ed7c15c97 dae5e305db 1d8af300c4 1f4971c293 fd1d5cc9ef e6700d2089 d1048413f8 3e0391eb26
049635cd1a ca30448b74 a79a8c4c7a 3f5f60aa62 79a45bf6c7 0852421d17 f368fb4951 0af6deb998
c9fb5c61ab a3a5c1da40 1f1fabade2 4db4e22d09 5b61a937a1 edc9089808 a1540964f5 95ab14dfe7
d68ad18de2 305e7b4c68 79096b753c 6d6d83ea3e f78c47f441 13b6b27fea dfc132af9d 1a8a881082
aa61bfdedd b51cfd02af 76a815c145 236220ef8e 60a287290d cc301e1e62 f3bf1b80b0 3ef72c5eed
ccaad93b62 fb16cedabf 3116e844be e3bb36afac 30fbfffb96 8cb3c65f55 7417496d9e c27f9a7c40
19857e3fa0 956ac59246 ba632451ef 606b782bb0 e2ca121180 b453032363 98697683a5 6f93057f83
88871c4cb0 ad34ca14b6 e0e19de2c1 5085f864b3 6f40845ba3 6788467670 87f0603c87 0d25d51a04
d0ddf60737 91ab34350f 38e6bc6e47 6ccea00bba c152a6dbbf b7df29ff56 77c2d02a4d aea263ec2c
1666f6d3b0 63a1b6020e 77a21114a8 6e2c5b47d4 e2f4f874a2 63f65d30bc 307799fd27 bf82f9fff2
a9862b27c1 2ae439cd00 21a84297d8 e22cc1cc17 eee25ec33f 59e8e1af5d 1fd41adbfa e94b92f73e
5d06444a57 9d582e19b1 bf34129d62 c9dd9afe5e b40229daa6 253f8b8c4c c03c642719 f581199930
3637fea711 e23492ee15 9f4f288d00 b36a4da1da 5ecdbc97a7 0d4680c01f 9374b1d9b4 22ea970beb
45c6b89ec7 70c6903191 f33be41b0f 5f53dab6de 4dbd2618fb a43bdf9479 9360a8eebe 7d84a7e086
ea46306e71 97faefb5b3 745e5e2c40 84315e8e56 0a302154b0 4a5355f5dc 88b49f2c6a 1684dff8bc
8870b3a342 258256e739 133ce88284 099438dc3f 94d87dac25 4384e62d01 cb4434c72b 892e1a3206
5f5d895cb1 e4c2d446d2 cfe519fbb0 1b14e2ed4d 8c060f3142 c2f77181d7 cd67c3a66a 5e037680d6
5f628fb63a 777bbb73af c1807c19a7 20ba53752d 81dd6cabad 5c6f39a511 e7fdb6dab2 d423d68901
4cc7908a95 0cf933cc9b 93dd330288 548cd95a30 a8d63e2852 1e254ee060 154402b355 0ed108506a
be6f6f2a28 8144f50ad5 51e8ee9f76 7e96898a06 451dca0a27 a316cb96c2 9ff6282fd6 8b9b746cfa
9b01428dde 3e28396ab0 6c461095e0 3a63bf21e1 e93758b39c 7c422261fc 58fac3caaf 6a436ac4e9
698f9f514f f765fe5ea7 f49b8afb91 456f0a1e57 1aaafbed38 91bb7945cb f712bbd622 8ecac52817
310a1fa4e4 3ae4835f34 a0a0b3f47b 46400708d0 51e711deb1 6e469e2c56 a4ab5bc26b 067f5c1ecd
0cfe547446 87dbe7aebc 4cc7035246 aea50264e2 41742850ce 5f728e1eb2 8c7ef11f69 d74753f0e5
c23ca57bed 9c67f65b7c 7f3417441c 6add89827b a0713fbf40 ed2cdb03c6 2707697dc4 c861ee86d0
5b58993660 c2c24e50e3 c5688a7839 09475aabfd 79d4cbda1a b738d09014 10764cd166 640d714345
916fd75c11 8d057042c4 10fdc3ec57 64a3c0b94e 842072fe55 d37effda22 975e479034 b4c259019b
d028d3acbd edf78a3862 51666786b6 be1f0f71e0 7f481fdb1a 71e061429e e2279e671a 41f566d371
082e6ba927 8618aa349c ae604e8cac e45ceae224 32d83dcdad 788340e1bb 235648ed1b 44337498b1
5eb08f94cd 37d15cdc42 d3fa21febc 246029f842 eef874b68e f54ad41f4e de1083d03e 78b3da8727
a55b4f0bfd 58b4e7747c 7a1e258101 219634574f 7ca0610513 d8c110587c 9e3bd5306d 3c86ba12f9
a7489361f5 1690ff155c 4a2b2ad7b0 d20d37a631 676af4c2d7 16f20bf2ec cf48da40ba d4e9a3a4b1
b7465338f6 f6d048f250 a3fb4f818f 30bf4b08b4 d50c183681 ea7ae5138e c0cea6bff9 e13dda0bb9
4d99bee231 4ba4930f7e 99330be9d6 fba4a9e16e 478184ad37 cbff3ed167 41e990ace1 6938d8b09d
dad24d4aef e779c2595f 41ce6dcf6f 12703c6cd3 a609a9d398 8ebdca930c 67d78714f7 3f115655b3
177fa71ff2 79ab25aa7d 251a70c6d0 0ba23c7f0a 79a11d4c1d ffebe82072 4e74dcd802 3b01ae2d7a
1be021c37c 99ee497121 5e435f16c5 1cf6bcd510 535a1cb670 e45ba0dde5 0d948c349d 94e8b7f8d3
533127815b 3e10bb5f32 1d87fa448c 2e47253d7c f16726abdc ed06d345cd 18d1bd382d 571bdbec92
73b6385940 9ae54273b1 231d8b7214 a6df7c30a3 efdb55d12f ef7d26c3d9 b489f676fe a149e02742
770dca45d1 171962d948 cf51c634ca d131e8417c 693c8fb667 63e2681784 f6c0aa9068 0b88e29e87
dd5df302c9 279cabaa98 04735f8c7f 6df6f9b7ee c0ece6b0ee bd3d4d38c4 2ce31a81f6 4eaa5d3941
87d7916308 4daf82d87f 4ccc958f85 b6111084a4 26d91ee4c2 d428412330 3a2c7dbccf 8360a63dea
294a27a650 2c024794af 584cea762b
202  .github/workflows/build.yml  (vendored)

```diff
@@ -2,104 +2,206 @@ name: 'build'
 
 on:
   workflow_dispatch:
+    inputs:
+      optional_deps:
+        description: 'optional_deps=pkg-A:optional-dep-B,pkg-C:optional-dep-D'
+        default: ''
+        required: false
+        type: string
+      context:
+        description: 'Extra information from invoker'
+        default: ''
+        required: false
+        type: string
   schedule:
-    - cron: '0 0/2 * * *'
+    - cron: '0 0/3 * * *'
 
 env:
-  CI: true
-  PY_COLORS: 1
+  PYTHONUNBUFFERED: 1
 
+permissions: {}
+
 jobs:
 
-  build:
-    runs-on: windows-latest
+  schedule:
+    runs-on: ubuntu-24.04
 
+    permissions:
+      contents: write
+
+    concurrency: autobuild-maint
+
+    outputs:
+      build-plan: ${{ steps.check.outputs.build-plan }}
+
     steps:
 
-    - uses: actions/checkout@v2
+    - name: Dump inputs
+      if: ${{ github.event_name == 'workflow_dispatch' }}
+      env:
+        CONTEXT: '${{ toJSON(github.event.inputs) }}'
+      run: |
+        echo "$CONTEXT"
 
-    - uses: actions/setup-python@v2
+    - uses: actions/checkout@v5
       with:
-        python-version: '3.8'
+        persist-credentials: false
 
-    - uses: actions/cache@v2
+    - uses: actions/setup-python@v5
       with:
-        path: ~\AppData\Local\pip\Cache
-        key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-
+        python-version: '3.13'
+        cache: 'pip'
+        cache-dependency-path: 'requirements.txt'
 
     - name: Install deps
       env:
         PIP_DISABLE_PIP_VERSION_CHECK: 1
       run: |
-        python -m pip install --user wheel
-        python -m pip install --user -r requirements.txt
+        python -m venv .venv
+        source .venv/bin/activate
+        python -m pip install -r requirements.txt
+        echo "$VIRTUAL_ENV/bin" >> $GITHUB_PATH
 
-    - name: Check if we should run
+    - name: autobuild cache
+      uses: actions/cache@v4
+      with:
+        path: ${{ github.workspace }}/.autobuild_cache
+        key: autobuild_cache-${{ github.job }}-${{ github.run_id }}-${{ github.run_attempt }}
+        restore-keys: autobuild_cache-
+
+    - name: Check what we should run
       id: check
       env:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
+        OPTIONAL_DEPS: ${{ github.event.inputs.optional_deps }}
       run: |
-        python autobuild.py should-run
-        $skipBuild = ($LASTEXITCODE -ne 0)
-        If ($skipBuild) {echo '::set-output name=skip-build::true'}
-        exit 0
+        python -m msys2_autobuild write-build-plan --optional-deps "$OPTIONAL_DEPS" build_plan.json
+        buildPlan="$(cat build_plan.json)"
+        echo "build-plan=$buildPlan" >> $GITHUB_OUTPUT
 
     - name: Clean up assets
-      if: steps.check.outputs.skip-build != 'true'
+      if: steps.check.outputs.build-plan != '[]'
       env:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
       run: |
-        python autobuild.py clean-assets
+        python -m msys2_autobuild clean-assets
 
     - name: Show build queue
-      if: steps.check.outputs.skip-build != 'true'
+      if: steps.check.outputs.build-plan != '[]'
       env:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
+        OPTIONAL_DEPS: ${{ github.event.inputs.optional_deps }}
       run: |
-        python autobuild.py show
+        python -m msys2_autobuild show --optional-deps "$OPTIONAL_DEPS"
 
-    - uses: msys2/setup-msys2@v2
-      if: steps.check.outputs.skip-build != 'true'
+  build:
+    timeout-minutes: 4320
+    needs: schedule
+
+    permissions:
+      contents: write
+
+    concurrency: autobuild-build-${{ matrix.name }}
+
+    if: ${{ needs.schedule.outputs.build-plan != '[]' }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include: ${{ fromJson(needs.schedule.outputs.build-plan) }}
+    name: ${{ matrix.name }}
+    runs-on: ${{ matrix.runner }}
+
+    steps:
+
+    - name: Configure Pagefile
+      if: ${{ matrix.hosted }}
+      # https://github.com/al-cheb/configure-pagefile-action/issues/16
+      continue-on-error: true
+      uses: al-cheb/configure-pagefile-action@a3b6ebd6b634da88790d9c58d4b37a7f4a7b8708
+      with:
+        minimum-size: 4GB
+        maximum-size: 16GB
+        disk-root: "C:"
+
+    - name: Runner details
+      run: |
+        Get-PSDrive -PSProvider FileSystem
+        Get-CIMInstance -Class Win32_Processor | Select-Object -Property Name
+
+    - uses: actions/checkout@v5
+      with:
+        persist-credentials: false
+
+    - uses: actions/setup-python@v5
+      id: python
+      with:
+        python-version: '3.13'
+        # Avoid it setting CMake/pkg-config variables
+        # https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#environment-variables
+        update-environment: false
+
+    # Work around https://github.com/actions/setup-python/issues/1050
+    - name: Cache pip dependencies
+      uses: actions/cache@v4
+      with:
+        path: ~\AppData\Local\pip\Cache
+        key: ${{ runner.os }}-${{ runner.arch }}-pip-${{ hashFiles('requirements.txt') }}
+        restore-keys: |
+          ${{ runner.os }}-${{ runner.arch }}-pip-
+
+    - name: Install deps
+      env:
+        PIP_DISABLE_PIP_VERSION_CHECK: 1
+        PYTHON_PATH: ${{ steps.python.outputs.python-path }}
+      run: |
+        & "$env:PYTHON_PATH" -m venv .venv
+        .\.venv\Scripts\activate
+        python -m pip install -r requirements.txt
+        echo "$env:VIRTUAL_ENV\Scripts" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
+
+    - name: autobuild cache
+      uses: actions/cache@v4
+      with:
+        path: ${{ github.workspace }}/.autobuild_cache
+        key: autobuild_cache-${{ github.job }}-${{ github.run_id }}-${{ github.run_attempt }}
+        restore-keys: autobuild_cache-
+
+    # Note that ARM64 prior to Win11 requires x86 msys, but this will install x64
+    - uses: msys2/setup-msys2@v2  # zizmor: ignore[unpinned-uses]
+      id: msys2
       with:
         msystem: MSYS
         update: true
-        install: msys2-devel base-devel mingw-w64-x86_64-toolchain mingw-w64-i686-toolchain git
+        install: ${{ matrix.packages }}
+        location: '\M'
+        release: ${{ matrix.hosted }}
+        cache: ${{ matrix.hosted }}
 
     - name: Switch to the main mirror
-      if: steps.check.outputs.skip-build != 'true'
       shell: msys2 {0}
       run: |
-        sed -e "s|Include = /etc/pacman.d/mirrorlist.mingw32|Server = http://repo.msys2.org/mingw/i686/|g" -i /etc/pacman.conf
-        sed -e "s|Include = /etc/pacman.d/mirrorlist.mingw64|Server = http://repo.msys2.org/mingw/x86_64/|g" -i /etc/pacman.conf
-        sed -e "s|Include = /etc/pacman.d/mirrorlist.msys|Server = http://repo.msys2.org/msys/\$arch/|g" -i /etc/pacman.conf
+        echo 'Server = https://repo.msys2.org/mingw/$repo/' > /etc/pacman.d/mirrorlist.mingw
+        echo 'Server = https://repo.msys2.org/msys/$arch/' > /etc/pacman.d/mirrorlist.msys
         pacman-conf.exe
 
-    - name: Update using the main mirror
-      if: steps.check.outputs.skip-build != 'true'
+    - name: Update using the main mirror & Check install
       run: |
         msys2 -c 'pacman --noconfirm -Suuy'
         msys2 -c 'pacman --noconfirm -Suu'
 
-    - name: Check install
-      if: steps.check.outputs.skip-build != 'true'
-      run: |
         msys2 -c 'pacman -Qkq'
 
-    - name: Init git
-      if: steps.check.outputs.skip-build != 'true'
-      shell: msys2 {0}
-      run: |
-        git config --global user.email 'ci@msys2.org'
-        git config --global user.name 'MSYS2 Continuous Integration'
-
     - name: Process build queue
-      if: steps.check.outputs.skip-build != 'true'
       env:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
+        # https://github.com/actions/runner/issues/324#issuecomment-3324382354
+        # https://github.com/actions/runner/pull/4053
+        JOB_CHECK_RUN_ID: ${{ job.check_run_id }}
+        MSYS2_ROOT: ${{ steps.msys2.outputs.msys2-location }}
       run: |
-        $env:PACKAGER='CI (msys2-autobuild/' + $env:GITHUB_SHA.Substring(0, 8) + '/' + $env:GITHUB_RUN_ID + ')'
-        $BUILD_ROOT='C:\_'
-        $MSYS2_ROOT=(msys2 -c 'cygpath -w /')
-        Get-PSDrive -PSProvider FileSystem
-        python autobuild.py build "$MSYS2_ROOT" "$BUILD_ROOT"
+        echo "JOB_CHECK_RUN_ID=$env:JOB_CHECK_RUN_ID"
+        $BUILD_ROOT=Join-Path (Split-Path $env:GITHUB_WORKSPACE -Qualifier) "\"
+        python -m msys2_autobuild build ${{ matrix.build-args }} "$env:MSYS2_ROOT" "$BUILD_ROOT"
```
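The new two-job layout hinges on the `build-plan` output: the `schedule` job writes `build_plan.json` and publishes it, and the `build` job expands it via `fromJson` into its matrix, with the whole job skipped when the plan is the empty array `[]`. As a rough sketch of that data flow (the JSON schema is an assumption, inferred only from the `matrix.name`, `matrix.runner`, `matrix.packages`, `matrix.hosted`, and `matrix.build-args` references above; the field values are invented for illustration):

```console
$ python -m msys2_autobuild write-build-plan build_plan.json
$ cat build_plan.json   # hypothetical shape, one matrix entry per build job
[{"name": "msys", "runner": "windows-2022", "hosted": true,
  "packages": "msys2-devel base-devel git", "build-args": ""}]
```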
80  .github/workflows/maint.yml  (vendored, new file)

@@ -0,0 +1,80 @@
```yaml
name: 'maint'

on:
  workflow_dispatch:
    inputs:
      clear_failed_packages:
        description: 'clear_failed_packages=mingw-w64-foo,mingw-w64-bar'
        default: ''
        required: false
        type: string
      clear_failed_build_types:
        description: 'clear_failed_build_types=mingw64,clang64'
        default: ''
        required: false
        type: string
      context:
        description: 'Extra information from invoker'
        default: ''
        required: false
        type: string

permissions: {}

concurrency: autobuild-maint

jobs:

  schedule:
    runs-on: ubuntu-24.04

    permissions:
      contents: write

    steps:

    - name: Dump inputs
      if: ${{ github.event_name == 'workflow_dispatch' }}
      env:
        CONTEXT: '${{ toJSON(github.event.inputs) }}'
      run: |
        echo "$CONTEXT"

    - uses: actions/checkout@v5
      with:
        persist-credentials: false

    - uses: actions/setup-python@v5
      with:
        python-version: '3.13'
        cache: 'pip'
        cache-dependency-path: 'requirements.txt'

    - name: Install deps
      env:
        PIP_DISABLE_PIP_VERSION_CHECK: 1
      run: |
        python -m venv .venv
        source .venv/bin/activate
        python -m pip install -r requirements.txt
        echo "$VIRTUAL_ENV/bin" >> $GITHUB_PATH

    - name: Clear failed build types
      if: ${{ github.event.inputs.clear_failed_build_types != '' }}
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
        CLEAR_FAILED_BUILD_TYPES: ${{ github.event.inputs.clear_failed_build_types }}
      run: |
        python -m msys2_autobuild clear-failed --build-types "$CLEAR_FAILED_BUILD_TYPES"
        python -m msys2_autobuild update-status

    - name: Clear failed packages
      if: ${{ github.event.inputs.clear_failed_packages != '' }}
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        GITHUB_TOKEN_READONLY: ${{ secrets.GITHUBTOKENREADONLY }}
        CLEAR_FAILED_PACKAGES: ${{ github.event.inputs.clear_failed_packages }}
      run: |
        python -m msys2_autobuild clear-failed --packages "$CLEAR_FAILED_PACKAGES"
        python -m msys2_autobuild update-status
```
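Both maintenance operations are plain `workflow_dispatch` string inputs, so the job can be triggered from the Actions UI or, for example, with the `gh` CLI; the package and build-type values below are the placeholders from the input descriptions, not real names:

```console
$ gh workflow run maint.yml -f clear_failed_packages=mingw-w64-foo,mingw-w64-bar
$ gh workflow run maint.yml -f clear_failed_build_types=mingw64,clang64
```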
61  .github/workflows/test.yml  (vendored, new file)

@@ -0,0 +1,61 @@
```yaml
name: test

on: [push, pull_request]

permissions:
  contents: read

jobs:

  test:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-24.04, windows-2022, windows-11-arm]
        python-version: ['3.12', '3.13']

    steps:
    - uses: actions/checkout@v5
      with:
        persist-credentials: false
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ matrix.python-version }}

    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        python -m pip install poetry
        python -m poetry install

    - name: Run mypy
      run: |
        python -m poetry run mypy .

    - name: Run flake8
      run: |
        python -m poetry run flake8 .

    - name: Run tests
      run: |
        python -m poetry run pytest

  zizmor:
    runs-on: ubuntu-24.04

    permissions:
      contents: read
      security-events: write

    steps:
    - name: Checkout repository
      uses: actions/checkout@v5
      with:
        persist-credentials: false

    - name: Run zizmor
      run: pipx run zizmor .
      env:
        GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
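The `test` job is a plain poetry setup, so the same checks can be reproduced locally before pushing, mirroring the workflow steps above:

```console
$ python -m pip install poetry
$ python -m poetry install
$ python -m poetry run mypy .
$ python -m poetry run flake8 .
$ python -m poetry run pytest
```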
1  .gitignore  (vendored)

```diff
@@ -1,3 +1,4 @@
 *.pyc
 .vscode/
 .mypy_cache/
+.autobuild_cache/
```
66  README.md

````diff
@@ -1,60 +1,50 @@
 # msys2-autobuild
 
-## autobuild.py
+msys2-autobuild is a Python tool for
+
+* automatically building MSYS2 packages in GitHub Actions
+* manually uploading packages, or retrying builds
+* retrieving the built packages for upload to the pacman repo
+
+## Installation
 
 ```console
-$ python -m pip install --user -r requirements.txt
+$ pacman -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-pygithub mingw-w64-x86_64-python-requests
 # or
 $ poetry install
+# or
+$ python -m pip install --user -r requirements.txt
+# or
+$ pipx install git+https://github.com/msys2/msys2-autobuild
 ```
 
+## Usage
+
 ```console
-$ python autobuild.py --help
-usage: autobuild.py [-h] {build,show,should-run,fetch-assets,clean-assets} ...
+$ msys2-autobuild --help
+usage: msys2-autobuild [-h]
+                       {build,show,write-build-plan,update-status,fetch-assets,upload-assets,clear-failed,clean-assets}
+                       ...
 
 Build packages
 
-optional arguments:
+options:
   -h, --help            show this help message and exit
 
 subcommands:
-  {build,show,should-run,fetch-assets,clean-assets}
+  {build,show,write-build-plan,update-status,fetch-assets,upload-assets,clear-failed,clean-assets}
     build               Build all packages
     show                Show all packages to be built
-    should-run          Fails if the workflow shouldn't run
+    write-build-plan    Write a GHA build matrix setup
+    update-status       Update the status file
     fetch-assets        Download all staging packages
+    upload-assets       Upload packages
+    clear-failed        Clear the failed state for packages
     clean-assets        Clean up GHA assets
 ```
 
-## Automated Build Process
+## Configuration
 
-The following graph shows what happens between a PKGBUILD getting changed in git
-and the built package being available in the pacman repo.
+* `GITHUB_TOKEN` (required) - a GitHub token with write access to the current repo.
+* `GITHUB_TOKEN_READONLY` (optional) - a GitHub token with read access to the current repo. This is used for read operations to not get limited by the API access limits.
+* `GITHUB_REPOSITORY` (optional) - the path to the GitHub repo this is uploading to. Used for deciding which things can be built and where to upload them to. Defaults to `msys2/msys2-autobuild`.
 
-![]()
-
-### Security Considerations
-
-Assuming changes to PKGBUILDs are properly reviewed, the pacman signature
-checking works, the upstream source is OK and all MSYS2 organization members are
-trusted we need to consider a bad actor controlling some part of the building
-process between the PKGBUILD getting changed and the package ending up signed in
-the pacman repo.
-
-A bad actor would need to get a package on the machine of the developer signing
-the package and adding it to the pacman repo. We take the following precautions:
-
-* We only build packages automatically with GitHub Actions without third party
-  actions, excluding the official GitHub ones. We assume the GHA images and
-  official actions are safe.
-* The download tool used by the person signing the package checks that the
-  binaries where uploaded by GHA so that uploading a package with a personal
-  account leads to an error. Someone would need to push a workflow change to the
-  repo which gets run and uploads a package to the release assets to avoid that.
-  We assume the bad actor doesn't have git push rights.
-* Packages too large for GHA get built/signed by MSYS2 developers on their
-  machines. We assume the developer machines are safe.
-* We enforce 2FA for the MSYS2 organization to make account takeovers of
-  existing MSYS2 developers harder.
-
-Feedback and ideas on how to improve this welcome.
````
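To sketch how the configuration variables from the new README combine with a typical invocation (the `./staging` target directory is an illustrative assumption; `fetch-assets` takes the download directory as its argument, per the removed `autobuild.py` below):

```console
$ export GITHUB_TOKEN=<token-with-write-access>
$ export GITHUB_TOKEN_READONLY=<token-with-read-access>  # optional, avoids API rate limits
$ msys2-autobuild fetch-assets ./staging                 # target directory assumed here
```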
892
autobuild.py
892
autobuild.py
@ -1,892 +0,0 @@
|
|||||||
import sys
|
|
||||||
import os
|
|
||||||
import argparse
|
|
||||||
from os import environ
|
|
||||||
from github import Github
|
|
||||||
from github.GitRelease import GitRelease
|
|
||||||
from github.GitReleaseAsset import GitReleaseAsset
|
|
||||||
from github.Repository import Repository
|
|
||||||
from pathlib import Path, PurePosixPath, PurePath
|
|
||||||
from subprocess import check_call
|
|
||||||
import subprocess
|
|
||||||
from sys import stdout
|
|
||||||
import fnmatch
|
|
||||||
import traceback
|
|
||||||
from tabulate import tabulate
|
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
|
||||||
from contextlib import contextmanager
|
|
||||||
import requests
|
|
||||||
import shlex
|
|
||||||
import time
|
|
||||||
import tempfile
|
|
||||||
import shutil
|
|
||||||
import json
|
|
||||||
from hashlib import sha256
|
|
||||||
from typing import Generator, Union, AnyStr, List, Any, Dict, Tuple, Set, Sequence, \
|
|
||||||
Collection, Optional
|
|
||||||
|
|
||||||
_PathLike = Union[os.PathLike, AnyStr]
|
|
||||||
|
|
||||||
|
|
||||||
class _Package(dict):
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "Package(%r)" % self["name"]
|
|
||||||
|
|
||||||
def __hash__(self):
|
|
||||||
return id(self)
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return self is other
|
|
||||||
|
|
||||||
def get_build_patterns(self, build_type: str) -> List[str]:
|
|
||||||
patterns = []
|
|
||||||
if build_type in ["mingw-src", "msys-src"]:
|
|
||||||
patterns.append(f"{self['name']}-{self['version']}.src.tar.*")
|
|
||||||
elif build_type in ["mingw32", "mingw64", "msys"]:
|
|
||||||
for item in self['packages'].get(build_type, []):
|
|
||||||
patterns.append(f"{item}-{self['version']}-*.pkg.tar.*")
|
|
||||||
else:
|
|
||||||
assert 0
|
|
||||||
return patterns
|
|
||||||
|
|
||||||
def get_failed_names(self, build_type: str) -> List[str]:
|
|
||||||
names = []
|
|
||||||
if build_type in ["mingw-src", "msys-src"]:
|
|
||||||
names.append(f"{self['name']}-{self['version']}.failed")
|
|
||||||
elif build_type in ["mingw32", "mingw64", "msys"]:
|
|
||||||
for item in self['packages'].get(build_type, []):
|
|
||||||
names.append(f"{item}-{self['version']}.failed")
|
|
||||||
else:
|
|
||||||
assert 0
|
|
||||||
return names
|
|
||||||
|
|
||||||
def get_build_types(self) -> List[str]:
|
|
||||||
build_types = list(self["packages"].keys())
|
|
||||||
if any(k.startswith("mingw") for k in self["packages"].keys()):
|
|
||||||
build_types.append("mingw-src")
|
|
||||||
if "msys" in self["packages"].keys():
|
|
||||||
build_types.append("msys-src")
|
|
||||||
return build_types
|
|
||||||
|
|
||||||
def get_repo_type(self) -> str:
|
|
||||||
return "msys" if self['repo'].startswith('MSYS2') else "mingw"
|
|
||||||
|
|
||||||
|
|
||||||
# After which we shouldn't start a new build
|
|
||||||
SOFT_TIMEOUT = 60 * 60 * 3
|
|
||||||
|
|
||||||
# Packages that take too long to build, and should be handled manually
|
|
||||||
SKIP: List[str] = [
|
|
||||||
# 'mingw-w64-clang',
|
|
||||||
# 'mingw-w64-arm-none-eabi-gcc',
|
|
||||||
# 'mingw-w64-gcc',
|
|
||||||
'mingw-w64-gcc-git',
|
|
||||||
'mingw-w64-firebird-git',
|
|
||||||
'mingw-w64-qt5-static',
|
|
||||||
'mingw-w64-blender',
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
# FIXME: Packages that should be ignored if they depend on other things
|
|
||||||
# in the queue. Ideally this list should be empty..
|
|
||||||
IGNORE_RDEP_PACKAGES: List[str] = [
|
|
||||||
"mingw-w64-vrpn",
|
|
||||||
"mingw-w64-cocos2d-x",
|
|
||||||
"mingw-w64-mlpack",
|
|
||||||
"mingw-w64-qemu",
|
|
||||||
"mingw-w64-ghc",
|
|
||||||
"mingw-w64-python-notebook",
|
|
||||||
"mingw-w64-python-pywin32",
|
|
||||||
"mingw-w64-usbmuxd",
|
|
||||||
"mingw-w64-ldns",
|
|
||||||
"mingw-w64-npm",
|
|
||||||
"mingw-w64-yarn",
|
|
||||||
"mingw-w64-bower",
|
|
||||||
"mingw-w64-nodejs",
|
|
||||||
"mingw-w64-cross-conemu-git",
|
|
||||||
"mingw-w64-blender",
|
|
||||||
"mingw-w64-godot-cpp",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
REPO = "msys2/msys2-autobuild"
|
|
||||||
|
|
||||||
|
|
||||||
def get_current_run_url() -> Optional[str]:
|
|
||||||
if "GITHUB_RUN_ID" in os.environ and "GITHUB_REPOSITORY" in os.environ:
|
|
||||||
run_id = os.environ["GITHUB_RUN_ID"]
|
|
||||||
repo = os.environ["GITHUB_REPOSITORY"]
|
|
||||||
return f"https://github.com/{repo}/actions/runs/{run_id}"
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def run_cmd(msys2_root: _PathLike, args, **kwargs):
|
|
||||||
executable = os.path.join(msys2_root, 'usr', 'bin', 'bash.exe')
|
|
||||||
env = kwargs.pop("env", os.environ.copy())
|
|
||||||
env["CHERE_INVOKING"] = "1"
|
|
||||||
env["MSYSTEM"] = "MSYS"
|
|
||||||
env["MSYS2_PATH_TYPE"] = "minimal"
|
|
||||||
check_call([executable, '-lc'] + [shlex.join([str(a) for a in args])], env=env, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def fresh_git_repo(url: str, path: _PathLike) -> Generator:
|
|
||||||
if not os.path.exists(path):
|
|
||||||
check_call(["git", "clone", url, path])
|
|
||||||
else:
|
|
||||||
check_call(["git", "fetch", "origin"], cwd=path)
|
|
||||||
check_call(["git", "reset", "--hard", "origin/master"], cwd=path)
|
|
||||||
try:
|
|
||||||
yield
|
|
||||||
finally:
|
|
||||||
assert os.path.exists(path)
|
|
||||||
try:
|
|
||||||
check_call(["git", "clean", "-xfdf"], cwd=path)
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
# sometimes it fails right after the build has failed
|
|
||||||
# not sure why
|
|
||||||
pass
|
|
||||||
check_call(["git", "reset", "--hard", "HEAD"], cwd=path)
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def gha_group(title: str) -> Generator:
|
|
||||||
print(f'\n::group::{title}')
|
|
||||||
stdout.flush()
|
|
||||||
try:
|
|
||||||
yield
|
|
||||||
finally:
|
|
||||||
print('::endgroup::')
|
|
||||||
stdout.flush()
|
|
||||||
|
|
||||||
|
|
||||||
class BuildError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def asset_is_complete(asset: GitReleaseAsset) -> bool:
|
|
||||||
# assets can stay around in a weird incomplete state
|
|
||||||
# in which case asset.state == "starter". GitHub shows
|
|
||||||
# them with a red warning sign in the edit UI.
|
|
||||||
return asset.state == "uploaded"
|
|
||||||
|
|
||||||
|
|
||||||
def download_asset(asset: GitReleaseAsset, target_path: str) -> None:
|
|
||||||
assert asset_is_complete(asset)
|
|
||||||
with requests.get(asset.browser_download_url, stream=True, timeout=(15, 30)) as r:
|
|
||||||
r.raise_for_status()
|
|
||||||
fd, temppath = tempfile.mkstemp()
|
|
||||||
try:
|
|
||||||
os.chmod(temppath, 0o644)
|
|
||||||
with os.fdopen(fd, "wb") as h:
|
|
||||||
for chunk in r.iter_content(4096):
|
|
||||||
h.write(chunk)
|
|
||||||
shutil.move(temppath, target_path)
|
|
||||||
finally:
|
|
||||||
try:
|
|
||||||
os.remove(temppath)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def upload_asset(release: GitRelease, path: _PathLike, replace: bool = False,
|
|
||||||
text: bool = False) -> None:
|
|
||||||
# type_: msys/mingw/failed
|
|
||||||
if not environ.get("CI"):
|
|
||||||
print("WARNING: upload skipped, not running in CI")
|
|
||||||
return
|
|
||||||
path = Path(path)
|
|
||||||
|
|
||||||
basename = os.path.basename(str(path))
|
|
||||||
asset_name = get_gh_asset_name(basename, text)
|
|
||||||
asset_label = basename
|
|
||||||
|
|
||||||
for asset in get_release_assets(release, include_incomplete=True):
|
|
||||||
if asset_name == asset.name:
|
|
||||||
# We want to tread incomplete assets as if they weren't there
|
|
||||||
# so replace them always
|
|
||||||
if replace or not asset_is_complete(asset):
|
|
||||||
asset.delete_asset()
|
|
||||||
else:
|
|
||||||
print(f"Skipping upload for {asset_name} as {asset_label}, already exists")
|
|
||||||
return
|
|
||||||
|
|
||||||
release.upload_asset(str(path), label=asset_label, name=asset_name)
|
|
||||||
print(f"Uploaded {asset_name} as {asset_label}")
|
|
||||||
|
|
||||||
|
|
||||||
def get_python_path(msys2_root: _PathLike, msys2_path: _PathLike) -> Path:
|
|
||||||
return Path(os.path.normpath(str(msys2_root) + str(msys2_path)))
|
|
||||||
|
|
||||||
|
|
||||||
def to_pure_posix_path(path: _PathLike) -> PurePath:
|
|
||||||
return PurePosixPath("/" + str(path).replace(":", "", 1).replace("\\", "/"))
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def backup_pacman_conf(msys2_root: _PathLike) -> Generator:
|
|
||||||
conf = get_python_path(msys2_root, "/etc/pacman.conf")
|
|
||||||
backup = get_python_path(msys2_root, "/etc/pacman.conf.backup")
|
|
||||||
shutil.copyfile(conf, backup)
|
|
||||||
try:
|
|
||||||
yield
|
|
||||||
finally:
|
|
||||||
os.replace(backup, conf)
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def auto_key_retrieve(msys2_root: _PathLike) -> Generator:
|
|
||||||
home_dir = os.path.join(msys2_root, "home", environ["USERNAME"])
|
|
||||||
assert os.path.exists(home_dir)
|
|
||||||
gnupg_dir = os.path.join(home_dir, ".gnupg")
|
|
||||||
os.makedirs(gnupg_dir, exist_ok=True)
|
|
||||||
conf = os.path.join(gnupg_dir, "gpg.conf")
|
|
||||||
backup = None
|
|
||||||
if os.path.exists(conf):
|
|
||||||
backup = conf + ".backup"
|
|
||||||
shutil.copyfile(conf, backup)
|
|
||||||
try:
|
|
||||||
with open(conf, "w", encoding="utf-8") as h:
|
|
||||||
h.write("""
|
|
||||||
keyserver hkp://keys.gnupg.net
|
|
||||||
keyserver-options auto-key-retrieve
|
|
||||||
""")
|
|
||||||
yield
|
|
||||||
finally:
|
|
||||||
if backup is not None:
|
|
||||||
os.replace(backup, conf)
|
|
||||||
|
|
||||||
|
|
||||||
def build_type_to_dep_type(build_type):
|
|
||||||
if build_type == "mingw-src":
|
|
||||||
dep_type = "mingw64"
|
|
||||||
elif build_type == "msys-src":
|
|
||||||
dep_type = "msys"
|
|
||||||
else:
|
|
||||||
dep_type = build_type
|
|
||||||
return dep_type
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def staging_dependencies(
|
|
||||||
build_type: str, pkg: _Package, msys2_root: _PathLike,
|
|
||||||
builddir: _PathLike) -> Generator:
|
|
||||||
repo = get_repo()
|
|
||||||
|
|
||||||
def add_to_repo(repo_root, repo_type, asset):
|
|
||||||
repo_dir = Path(repo_root) / get_repo_subdir(repo_type, asset)
|
|
||||||
os.makedirs(repo_dir, exist_ok=True)
|
|
||||||
print(f"Downloading {get_asset_filename(asset)}...")
|
|
||||||
package_path = os.path.join(repo_dir, get_asset_filename(asset))
|
|
||||||
download_asset(asset, package_path)
|
|
||||||
|
|
||||||
repo_name = "autobuild-" + (
|
|
||||||
str(get_repo_subdir(repo_type, asset)).replace("/", "-").replace("\\", "-"))
|
|
||||||
repo_db_path = os.path.join(repo_dir, f"{repo_name}.db.tar.gz")
|
|
||||||
|
|
||||||
conf = get_python_path(msys2_root, "/etc/pacman.conf")
|
|
||||||
with open(conf, "r", encoding="utf-8") as h:
|
|
||||||
text = h.read()
|
|
||||||
uri = to_pure_posix_path(repo_dir).as_uri()
|
|
||||||
if uri not in text:
|
|
||||||
with open(conf, "w", encoding="utf-8") as h2:
|
|
||||||
h2.write(f"""[{repo_name}]
|
|
||||||
Server={uri}
|
|
||||||
SigLevel=Never
|
|
||||||
""")
|
|
||||||
h2.write(text)
|
|
||||||
|
|
||||||
run_cmd(msys2_root, ["repo-add", to_pure_posix_path(repo_db_path),
|
|
||||||
to_pure_posix_path(package_path)], cwd=repo_dir)
|
|
||||||
|
|
||||||
def get_cached_assets(
|
|
||||||
repo: Repository, release_name: str, *, _cache={}) -> List[GitReleaseAsset]:
|
|
||||||
key = (repo.full_name, release_name)
|
|
||||||
if key not in _cache:
|
|
||||||
release = repo.get_release(release_name)
|
|
||||||
_cache[key] = get_release_assets(release)
|
|
||||||
return _cache[key]
|
|
||||||
|
|
||||||
repo_root = os.path.join(builddir, "_REPO")
|
|
||||||
try:
|
|
||||||
shutil.rmtree(repo_root, ignore_errors=True)
|
|
||||||
os.makedirs(repo_root, exist_ok=True)
|
|
||||||
with backup_pacman_conf(msys2_root):
|
|
||||||
to_add = []
|
|
||||||
dep_type = build_type_to_dep_type(build_type)
|
|
||||||
for name, dep in pkg['ext-depends'].get(dep_type, {}).items():
|
|
||||||
pattern = f"{name}-{dep['version']}-*.pkg.*"
|
|
||||||
repo_type = dep.get_repo_type()
|
|
||||||
for asset in get_cached_assets(repo, "staging-" + repo_type):
|
|
||||||
if fnmatch.fnmatch(get_asset_filename(asset), pattern):
|
|
||||||
to_add.append((repo_type, asset))
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
raise SystemExit(f"asset for {pattern} in {repo_type} not found")
|
|
||||||
|
|
||||||
for repo_type, asset in to_add:
|
|
||||||
add_to_repo(repo_root, repo_type, asset)
|
|
||||||
|
|
||||||
# in case they are already installed we need to upgrade
|
|
||||||
run_cmd(msys2_root, ["pacman", "--noconfirm", "-Suy"])
|
|
||||||
yield
|
|
||||||
finally:
|
|
||||||
shutil.rmtree(repo_root, ignore_errors=True)
|
|
||||||
# downgrade again
|
|
||||||
run_cmd(msys2_root, ["pacman", "--noconfirm", "-Suuy"])
|
|
||||||
|
|
||||||
|
|
||||||
def build_package(build_type: str, pkg, msys2_root: _PathLike, builddir: _PathLike) -> None:
|
|
||||||
assert os.path.isabs(builddir)
|
|
||||||
assert os.path.isabs(msys2_root)
|
|
||||||
os.makedirs(builddir, exist_ok=True)
|
|
||||||
|
|
||||||
repo_name = {"MINGW-packages": "M", "MSYS2-packages": "S"}.get(pkg['repo'], pkg['repo'])
|
|
||||||
repo_dir = os.path.join(builddir, repo_name)
|
|
||||||
to_upload: List[str] = []
|
|
||||||
|
|
||||||
repo = get_repo()
|
|
||||||
|
|
||||||
with staging_dependencies(build_type, pkg, msys2_root, builddir), \
|
|
||||||
auto_key_retrieve(msys2_root), \
|
|
||||||
fresh_git_repo(pkg['repo_url'], repo_dir):
|
|
||||||
pkg_dir = os.path.join(repo_dir, pkg['repo_path'])
|
|
||||||
|
|
||||||
try:
|
|
||||||
if build_type == "mingw-src":
|
|
||||||
env = environ.copy()
|
|
||||||
env['MINGW_INSTALLS'] = 'mingw64'
|
|
||||||
run_cmd(msys2_root, [
|
|
||||||
'makepkg-mingw',
|
|
||||||
'--noconfirm',
|
|
||||||
'--noprogressbar',
|
|
||||||
'--allsource'
|
|
||||||
], env=env, cwd=pkg_dir)
|
|
||||||
elif build_type == "msys-src":
|
|
||||||
run_cmd(msys2_root, [
|
|
||||||
'makepkg',
|
|
||||||
'--noconfirm',
|
|
||||||
'--noprogressbar',
|
|
||||||
'--allsource'
|
|
||||||
], cwd=pkg_dir)
|
|
||||||
elif build_type in ["mingw32", "mingw64"]:
|
|
||||||
env = environ.copy()
|
|
||||||
env['MINGW_INSTALLS'] = build_type
|
|
||||||
run_cmd(msys2_root, [
|
|
||||||
'makepkg-mingw',
|
|
||||||
'--noconfirm',
|
|
||||||
'--noprogressbar',
|
|
||||||
'--nocheck',
|
|
||||||
'--syncdeps',
|
|
||||||
'--rmdeps',
|
|
||||||
'--cleanbuild'
|
|
||||||
], env=env, cwd=pkg_dir)
|
|
||||||
elif build_type == "msys":
|
|
||||||
run_cmd(msys2_root, [
|
|
||||||
'makepkg',
|
|
||||||
'--noconfirm',
|
|
||||||
'--noprogressbar',
|
|
||||||
'--nocheck',
|
|
||||||
'--syncdeps',
|
|
||||||
'--rmdeps',
|
|
||||||
'--cleanbuild'
|
|
||||||
], cwd=pkg_dir)
|
|
||||||
else:
|
|
||||||
assert 0
|
|
||||||
|
|
||||||
entries = os.listdir(pkg_dir)
|
|
||||||
for pattern in pkg.get_build_patterns(build_type):
|
|
||||||
found = fnmatch.filter(entries, pattern)
|
|
||||||
if not found:
|
|
||||||
raise BuildError(f"{pattern} not found, likely wrong version built")
|
|
||||||
to_upload.extend([os.path.join(pkg_dir, e) for e in found])
|
|
||||||
|
|
||||||
except (subprocess.CalledProcessError, BuildError) as e:
|
|
||||||
release = repo.get_release("staging-failed")
|
|
||||||
for entry in pkg.get_failed_names(build_type):
|
|
||||||
with tempfile.TemporaryDirectory() as tempdir:
|
|
||||||
failed_path = os.path.join(tempdir, entry)
|
|
||||||
failed_data = {}
|
|
||||||
run_url = get_current_run_url()
|
|
||||||
if run_url is not None:
|
|
||||||
failed_data["url"] = run_url
|
|
||||||
with open(failed_path, 'w') as h:
|
|
||||||
h.write(json.dumps(failed_data))
|
|
||||||
upload_asset(release, failed_path, text=True)
|
|
||||||
|
|
||||||
raise BuildError(e)
|
|
||||||
else:
|
|
||||||
release = repo.get_release("staging-" + pkg.get_repo_type())
|
|
||||||
for path in to_upload:
|
|
||||||
upload_asset(release, path)
|
|
||||||
|
|
||||||
|
|
||||||
def run_build(args: Any) -> None:
|
|
||||||
builddir = os.path.abspath(args.builddir)
|
|
||||||
msys2_root = os.path.abspath(args.msys2_root)
|
|
||||||
start_time = time.monotonic()
|
|
||||||
|
|
||||||
if not sys.platform == "win32":
|
|
||||||
raise SystemExit("ERROR: Needs to run under native Python")
|
|
||||||
|
|
||||||
if not shutil.which("git"):
|
|
||||||
raise SystemExit("ERROR: git not in PATH")
|
|
||||||
|
|
||||||
if not os.path.isdir(msys2_root):
|
|
||||||
raise SystemExit("ERROR: msys2_root doesn't exist")
|
|
||||||
|
|
||||||
try:
|
|
||||||
run_cmd(msys2_root, [])
|
|
||||||
except Exception as e:
|
|
||||||
raise SystemExit("ERROR: msys2_root not functional", e)
|
|
||||||
|
|
||||||
done = set()
|
|
||||||
while True:
|
|
||||||
todo = get_package_to_build()
|
|
||||||
if not todo:
|
|
||||||
break
|
|
||||||
pkg, build_type = todo
|
|
||||||
key = pkg['repo'] + build_type + pkg['name'] + pkg['version']
|
|
||||||
if key in done:
|
|
||||||
raise SystemExit("ERROR: building package again in the same run", pkg)
|
|
||||||
done.add(key)
|
|
||||||
|
|
||||||
if (time.monotonic() - start_time) >= SOFT_TIMEOUT:
|
|
||||||
print("timeout reached")
|
|
||||||
break
|
|
||||||
|
|
||||||
try:
|
|
||||||
with gha_group(f"[{ pkg['repo'] }] [{ build_type }] { pkg['name'] }..."):
|
|
||||||
build_package(build_type, pkg, msys2_root, builddir)
|
|
||||||
except BuildError:
|
|
||||||
with gha_group(f"[{ pkg['repo'] }] [{ build_type }] { pkg['name'] }: failed"):
|
|
||||||
traceback.print_exc(file=sys.stdout)
|
|
||||||
continue
|
|
||||||
|
|
||||||
|
|
||||||
def get_buildqueue() -> List[_Package]:
|
|
||||||
pkgs = []
|
|
||||||
r = requests.get("https://packages.msys2.org/api/buildqueue")
|
|
||||||
r.raise_for_status()
|
|
||||||
dep_mapping = {}
|
|
||||||
for received in r.json():
|
|
||||||
pkg = _Package(received)
|
|
||||||
pkg['repo'] = pkg['repo_url'].split('/')[-1]
|
|
||||||
pkgs.append(pkg)
|
|
||||||
for repo, names in pkg['packages'].items():
|
|
||||||
for name in names:
|
|
||||||
dep_mapping[name] = pkg
|
|
||||||
|
|
||||||
# We need to pull in all packages of that particular build because they can
|
|
||||||
# depend on each other with a fixed version
|
|
||||||
for pkg in pkgs:
|
|
||||||
for repo, deps in pkg['depends'].items():
|
|
||||||
all_deps = set(deps)
|
|
||||||
for dep in deps:
|
|
||||||
dep_pkg = dep_mapping[dep]
|
|
||||||
all_deps.update(dep_pkg['packages'][repo])
|
|
||||||
pkg['depends'][repo] = sorted(all_deps)
|
|
||||||
|
|
||||||
# link up dependencies with the real package in the queue
|
|
||||||
for pkg in pkgs:
|
|
||||||
ver_depends: Dict[str, Dict[str, _Package]] = {}
|
|
||||||
for repo, deps in pkg['depends'].items():
|
|
||||||
for dep in deps:
|
|
||||||
ver_depends.setdefault(repo, {})[dep] = dep_mapping[dep]
|
|
||||||
pkg['ext-depends'] = ver_depends
|
|
||||||
|
|
||||||
# reverse dependencies
|
|
||||||
for pkg in pkgs:
|
|
||||||
r_depends: Dict[str, Set[_Package]] = {}
|
|
||||||
for pkg2 in pkgs:
|
|
||||||
for repo, deps in pkg2['ext-depends'].items():
|
|
||||||
if pkg in deps.values():
|
|
||||||
r_depends.setdefault(repo, set()).add(pkg2)
|
|
||||||
pkg['ext-rdepends'] = r_depends
|
|
||||||
|
|
||||||
return pkgs
|
|
||||||
|
|
||||||
|
|
||||||
def get_gh_asset_name(basename: _PathLike, text: bool = False) -> str:
|
|
||||||
# GitHub will throw out charaters like '~' or '='. It also doesn't like
|
|
||||||
# when there is no file extension and will try to add one
|
|
||||||
return sha256(str(basename).encode("utf-8")).hexdigest() + (".bin" if not text else ".txt")
|
|
||||||
|
|
||||||
|
|
||||||
def get_asset_filename(asset: GitReleaseAsset) -> str:
|
|
||||||
if not asset.label:
|
|
||||||
return asset.name
|
|
||||||
else:
|
|
||||||
assert os.path.splitext(get_gh_asset_name(asset.label))[0] == \
|
|
||||||
os.path.splitext(asset.name)[0]
|
|
||||||
return asset.label
|
|
||||||
|
|
||||||
|
|
||||||
def get_release_assets(release: GitRelease, include_incomplete=False) -> List[GitReleaseAsset]:
|
|
||||||
assets = []
|
|
||||||
for asset in release.get_assets():
|
|
||||||
# skip in case not fully uploaded yet (or uploading failed)
|
|
||||||
if not asset_is_complete(asset) and not include_incomplete:
|
|
||||||
continue
|
|
||||||
uploader = asset.uploader
|
|
||||||
if uploader.type != "Bot" or uploader.login != "github-actions[bot]":
|
|
||||||
raise SystemExit(f"ERROR: Asset '{get_asset_filename(asset)}' not uploaded "
|
|
||||||
f"by GHA but '{uploader.login}'. Aborting.")
|
|
||||||
assets.append(asset)
|
|
||||||
return assets
|
|
||||||
|
|
||||||
|
|
||||||
def get_packages_to_build() -> Tuple[
|
|
||||||
List[Tuple[_Package, str]], List[Tuple[_Package, str, str]],
|
|
||||||
List[Tuple[_Package, str]]]:
|
|
||||||
repo = get_repo(optional_credentials=True)
|
|
||||||
assets = []
|
|
||||||
for name in ["msys", "mingw"]:
|
|
||||||
release = repo.get_release('staging-' + name)
|
|
||||||
assets.extend([
|
|
||||||
get_asset_filename(a) for a in get_release_assets(release)])
|
|
||||||
release = repo.get_release('staging-failed')
|
|
||||||
assets_failed = [
|
|
||||||
get_asset_filename(a) for a in get_release_assets(release)]
|
|
||||||
|
|
||||||
def pkg_is_done(build_type: str, pkg: _Package) -> bool:
|
|
||||||
for pattern in pkg.get_build_patterns(build_type):
|
|
||||||
if not fnmatch.filter(assets, pattern):
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def pkg_has_failed(build_type: str, pkg: _Package) -> bool:
|
|
||||||
for name in pkg.get_failed_names(build_type):
|
|
||||||
if name in assets_failed:
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def pkg_is_skipped(build_type: str, pkg: _Package) -> bool:
|
|
||||||
for other, other_type, msg in skipped:
|
|
||||||
if build_type == other_type and pkg is other:
|
|
||||||
return True
|
|
||||||
|
|
||||||
return pkg['name'] in SKIP
|
|
||||||
|
|
||||||
todo = []
|
|
||||||
done = []
|
|
||||||
skipped = []
|
|
||||||
for pkg in get_buildqueue():
|
|
||||||
for build_type in pkg.get_build_types():
|
|
||||||
if pkg_is_done(build_type, pkg):
|
|
||||||
done.append((pkg, build_type))
|
|
||||||
elif pkg_has_failed(build_type, pkg):
|
|
||||||
skipped.append((pkg, build_type, "failed"))
|
|
||||||
elif pkg_is_skipped(build_type, pkg):
|
|
||||||
skipped.append((pkg, build_type, "skipped"))
|
|
||||||
else:
|
|
||||||
dep_type = build_type_to_dep_type(build_type)
|
|
||||||
for dep in pkg['ext-depends'].get(dep_type, {}).values():
|
|
||||||
if pkg_has_failed(dep_type, dep) or pkg_is_skipped(dep_type, dep):
|
|
||||||
skipped.append((pkg, build_type, "requires: " + dep['name']))
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
todo.append((pkg, build_type))
|
|
||||||
|
|
||||||
return done, skipped, todo
|
|
||||||
|
|
||||||
|
|
||||||
def get_package_to_build() -> Optional[Tuple[_Package, str]]:
|
|
||||||
done, skipped, todo = get_packages_to_build()
|
|
||||||
if todo:
|
|
||||||
return todo[0]
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def get_workflow():
|
|
||||||
workflow_name = os.environ.get("GITHUB_WORKFLOW", None)
|
|
||||||
if workflow_name is None:
|
|
||||||
raise Exception("GITHUB_WORKFLOW not set")
|
|
||||||
repo = get_repo()
|
|
||||||
for workflow in repo.get_workflows():
|
|
||||||
if workflow.name == workflow_name:
|
|
||||||
return workflow
|
|
||||||
else:
|
|
||||||
raise Exception("workflow not found:", workflow_name)
|
|
||||||
|
|
||||||
|
|
||||||
def should_run(args: Any) -> None:
|
|
||||||
current_id = None
|
|
||||||
if "GITHUB_RUN_ID" in os.environ:
|
|
||||||
current_id = int(os.environ["GITHUB_RUN_ID"])
|
|
||||||
|
|
||||||
workflow = get_workflow()
|
|
||||||
runs = list(workflow.get_runs(status="in_progress"))
|
|
||||||
runs += list(workflow.get_runs(status="queued"))
|
|
||||||
for run in runs:
|
|
||||||
if current_id is not None and current_id == run.id:
|
|
||||||
# Ignore this run itself
|
|
||||||
continue
|
|
||||||
raise SystemExit(
|
|
||||||
f"Another workflow is currently running or has something queued: {run.html_url}")
|
|
||||||
|
|
||||||
if not get_package_to_build():
|
|
||||||
raise SystemExit("Nothing to build")
|
|
||||||
|
|
||||||
|
|
||||||
def show_build(args: Any) -> None:
|
|
||||||
done, skipped, todo = get_packages_to_build()
|
|
||||||
|
|
||||||
with gha_group(f"TODO ({len(todo)})"):
|
|
||||||
print(tabulate([(p["name"], bt, p["version"]) for (p, bt) in todo],
|
|
||||||
headers=["Package", "Build", "Version"]))
|
|
||||||
|
|
||||||
with gha_group(f"SKIPPED ({len(skipped)})"):
|
|
||||||
print(tabulate([(p["name"], bt, p["version"], r) for (p, bt, r) in skipped],
|
|
||||||
headers=["Package", "Build", "Version", "Reason"]))
|
|
||||||
|
|
||||||
with gha_group(f"DONE ({len(done)})"):
|
|
||||||
print(tabulate([(p["name"], bt, p["version"]) for (p, bt) in done],
|
|
||||||
headers=["Package", "Build", "Version"]))
|
|
||||||
|
|
||||||
|
|
||||||
def get_repo_subdir(type_: str, asset: GitReleaseAsset) -> Path:
|
|
||||||
entry = get_asset_filename(asset)
|
|
||||||
t = Path(type_)
|
|
||||||
if type_ == "msys":
|
|
||||||
if fnmatch.fnmatch(entry, '*.pkg.tar.*'):
|
|
||||||
return t / "x86_64"
|
|
||||||
elif fnmatch.fnmatch(entry, '*.src.tar.*'):
|
|
||||||
return t / "sources"
|
|
||||||
else:
|
|
||||||
raise Exception("unknown file type")
|
|
||||||
elif type_ == "mingw":
|
|
||||||
if fnmatch.fnmatch(entry, '*.src.tar.*'):
|
|
||||||
return t / "sources"
|
|
||||||
elif entry.startswith("mingw-w64-x86_64-"):
|
|
||||||
return t / "x86_64"
|
|
||||||
elif entry.startswith("mingw-w64-i686-"):
|
|
||||||
return t / "i686"
|
|
||||||
else:
|
|
||||||
raise Exception("unknown file type")
|
|
||||||
else:
|
|
||||||
raise Exception("unknown type")
|
|
||||||
|
|
||||||
|
|
||||||
def fetch_assets(args: Any) -> None:
|
|
||||||
repo = get_repo(optional_credentials=True)
|
|
||||||
|
|
||||||
pkgs = get_buildqueue()
|
|
||||||
|
|
||||||
todo = []
|
|
||||||
done = []
|
|
||||||
all_blocked = {}
|
|
||||||
for name, repo_name in [("msys", "MSYS2-packages"), ("mingw", "MINGW-packages")]:
|
|
||||||
p = Path(args.targetdir)
|
|
||||||
release = repo.get_release('staging-' + name)
|
|
||||||
release_assets = get_release_assets(release)
|
|
||||||
repo_pkgs = [p for p in pkgs if p["repo"] == repo_name]
|
|
||||||
finished_assets, blocked = get_finished_assets(
|
|
||||||
repo_pkgs, release_assets, args.fetch_all)
|
|
||||||
all_blocked.update(blocked)
|
|
||||||
|
|
||||||
for pkg, assets in finished_assets.items():
|
|
||||||
for asset in assets:
|
|
||||||
asset_dir = p / get_repo_subdir(name, asset)
|
|
||||||
asset_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
asset_path = asset_dir / get_asset_filename(asset)
|
|
||||||
if asset_path.exists():
|
|
||||||
if asset_path.stat().st_size != asset.size:
|
|
||||||
print(f"Warning: {asset_path} already exists "
|
|
||||||
f"but has a different size")
|
|
||||||
done.append(asset)
|
|
||||||
continue
|
|
||||||
todo.append((asset, asset_path))
|
|
||||||
|
|
||||||
if args.verbose and all_blocked:
|
|
||||||
import pprint
|
|
||||||
print("Packages that are blocked and why:")
|
|
||||||
pprint.pprint(all_blocked)
|
|
||||||
|
|
||||||
print(f"downloading: {len(todo)}, done: {len(done)}, "
|
|
||||||
f"blocked: {len(all_blocked)} (related builds missing)")
|
|
||||||
|
|
||||||
print("Pass --verbose to see the list of blocked packages.")
|
|
||||||
print("Pass --fetch-all to also fetch blocked packages.")
|
|
||||||
|
|
||||||
def fetch_item(item):
|
|
||||||
asset, asset_path = item
|
|
||||||
if not args.pretend:
|
|
||||||
download_asset(asset, asset_path)
|
|
||||||
return item
|
|
||||||
|
|
||||||
with ThreadPoolExecutor(8) as executor:
|
|
||||||
for i, item in enumerate(executor.map(fetch_item, todo)):
|
|
||||||
print(f"[{i + 1}/{len(todo)}] {get_asset_filename(item[0])}")
|
|
||||||
|
|
||||||
print("done")
|
|
||||||
|
|
||||||
|
|
||||||
def get_assets_to_delete(repo: Repository) -> List[GitReleaseAsset]:
    print("Fetching packages to build...")
    patterns = []
    for pkg in get_buildqueue():
        for build_type in pkg.get_build_types():
            patterns.extend(pkg.get_failed_names(build_type))
            patterns.extend(pkg.get_build_patterns(build_type))

    print("Fetching assets...")
    assets: Dict[str, List[GitReleaseAsset]] = {}
    for release_name in ['staging-msys', 'staging-mingw', 'staging-failed']:
        release = repo.get_release(release_name)
        for asset in get_release_assets(release, include_incomplete=True):
            assets.setdefault(get_asset_filename(asset), []).append(asset)

    for pattern in patterns:
        for key in fnmatch.filter(assets.keys(), pattern):
            del assets[key]

    result = []
    for items in assets.values():
        for asset in items:
            result.append(asset)
    return result

def get_finished_assets(pkgs: Collection[_Package],
                        assets: Sequence[GitReleaseAsset],
                        ignore_blocked: bool) -> Tuple[
                            Dict[_Package, List[GitReleaseAsset]], Dict[_Package, str]]:
    """Returns assets for packages where all package results are available"""

    assets_mapping: Dict[str, List[GitReleaseAsset]] = {}
    for asset in assets:
        assets_mapping.setdefault(get_asset_filename(asset), []).append(asset)

    finished = {}
    for pkg in pkgs:
        # Only returns assets for packages where everything has been
        # built already
        patterns = []
        for build_type in pkg.get_build_types():
            patterns.extend(pkg.get_build_patterns(build_type))

        finished_maybe = []
        for pattern in patterns:
            matches = fnmatch.filter(assets_mapping.keys(), pattern)
            if matches:
                found = assets_mapping[matches[0]]
                finished_maybe.extend(found)

        if len(finished_maybe) == len(patterns):
            finished[pkg] = finished_maybe

    blocked = {}

    if not ignore_blocked:
        for pkg in finished:
            blocked_reason = set()

            # skip packages where not all dependencies have been built
            for repo, deps in pkg["ext-depends"].items():
                for dep in deps.values():
                    if dep in pkgs and dep not in finished:
                        blocked_reason.add(dep)

            # skip packages where not all reverse dependencies have been built
            for repo, deps in pkg["ext-rdepends"].items():
                for dep in deps:
                    if dep["name"] in IGNORE_RDEP_PACKAGES:
                        continue
                    if dep in pkgs and dep not in finished:
                        blocked_reason.add(dep)

            if blocked_reason:
                blocked[pkg] = "waiting on %r" % blocked_reason

        for pkg in blocked:
            finished.pop(pkg, None)

    return finished, blocked

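# Editor's note, not part of the original source: the "finished" check above
# relies on each build pattern matching at least one uploaded asset name, e.g.:
#
#     import fnmatch
#     names = ["libfoo-1.0-1-x86_64.pkg.tar.zst", "libfoo-1.0-1.src.tar.gz"]
#     fnmatch.filter(names, "libfoo-1.0-1-x86_64.pkg.tar.*")
#     # -> ['libfoo-1.0-1-x86_64.pkg.tar.zst']
#
# Only when every pattern produced a match (len(finished_maybe) == len(patterns))
# is the package considered fully built.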
def clean_gha_assets(args: Any) -> None:
    repo = get_repo()
    assets = get_assets_to_delete(repo)

    for asset in assets:
        print(f"Deleting {get_asset_filename(asset)}...")
        if not args.dry_run:
            asset.delete_asset()

    if not assets:
        print("Nothing to delete")

def get_credentials(optional: bool = False) -> Dict[str, Any]:
    if "GITHUB_TOKEN" in environ:
        return {'login_or_token': environ["GITHUB_TOKEN"]}
    elif "GITHUB_USER" in environ and "GITHUB_PASS" in environ:
        return {'login_or_token': environ["GITHUB_USER"], 'password': environ["GITHUB_PASS"]}
    else:
        if optional:
            print("[Warning] 'GITHUB_TOKEN' or 'GITHUB_USER'/'GITHUB_PASS' env vars "
                  "not set which might lead to API rate limiting", file=sys.stderr)
            return {}
        else:
            raise Exception("'GITHUB_TOKEN' or 'GITHUB_USER'/'GITHUB_PASS' env vars not set")

def get_repo(optional_credentials: bool = False) -> Repository:
    kwargs = get_credentials(optional=optional_credentials)
    has_creds = bool(kwargs)
    # 100 is the maximum allowed
    kwargs['per_page'] = 100
    gh = Github(**kwargs)
    if not has_creds and optional_credentials:
        print(f"[Warning] Rate limit status: {gh.get_rate_limit().core}", file=sys.stderr)
    return gh.get_repo(REPO, lazy=True)

def main(argv: List[str]):
    parser = argparse.ArgumentParser(description="Build packages", allow_abbrev=False)
    parser.set_defaults(func=lambda *x: parser.print_help())
    subparser = parser.add_subparsers(title="subcommands")

    sub = subparser.add_parser("build", help="Build all packages")
    sub.add_argument("msys2_root", help="The MSYS2 install used for building. e.g. C:\\msys64")
    sub.add_argument(
        "builddir",
        help="A directory used for saving temporary build results and the git repos")
    sub.set_defaults(func=run_build)

    sub = subparser.add_parser(
        "show", help="Show all packages to be built", allow_abbrev=False)
    sub.add_argument(
        "--fail-on-idle", action="store_true", help="Fails if there is nothing to do")
    sub.set_defaults(func=show_build)

    sub = subparser.add_parser(
        "should-run", help="Fails if the workflow shouldn't run", allow_abbrev=False)
    sub.set_defaults(func=should_run)

    sub = subparser.add_parser(
        "fetch-assets", help="Download all staging packages", allow_abbrev=False)
    sub.add_argument("targetdir")
    sub.add_argument(
        "--verbose", action="store_true", help="Show why things are blocked")
    sub.add_argument(
        "--pretend", action="store_true",
        help="Don't actually download, just show what would be done")
    sub.add_argument(
        "--fetch-all", action="store_true", help="Fetch all packages, even blocked ones")
    sub.set_defaults(func=fetch_assets)

    sub = subparser.add_parser("clean-assets", help="Clean up GHA assets", allow_abbrev=False)
    sub.add_argument(
        "--dry-run", action="store_true", help="Only show what is going to be deleted")
    sub.set_defaults(func=clean_gha_assets)

    args = parser.parse_args(argv[1:])
    return args.func(args)


if __name__ == "__main__":
    main(sys.argv)
2  build.bat  Normal file
@@ -0,0 +1,2 @@
@echo off
C:\msys64\msys2_shell.cmd -here -msys -no-start -defterm -c "./build.sh"
5  build.sh  Normal file
@@ -0,0 +1,5 @@
pacman --needed --noconfirm -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-requests-cache
OLD_ACLOCAL_PATH="${ACLOCAL_PATH}"
unset ACLOCAL_PATH
python -m msys2_autobuild build / ~/build-temp -t msys,msys-src,mingw64,mingw32,mingw-src
ACLOCAL_PATH="${OLD_ACLOCAL_PATH}"
43  docs/docs.md
@@ -1,43 +0,0 @@
https://mermaid-js.github.io

```
sequenceDiagram
    participant GIT as MSYS2/MINGW-packages
    participant API as packages.msys2.org
    participant GHA as GitHub Actions
    participant DT as msys2-autobuild
    participant DEV as Developer
    participant REPO as Pacman Repo

    GIT->>GHA: GIT push trigger
    GHA->>GHA: parse PKGBUILDs
    GHA-->>GIT: upload parsed PKGBUILDs

    loop Every 5 minutes
        API->>GIT: fetch parsed PKGBUILDs
        GIT-->>API:
    end

    loop Every 2 hours
        DT->>GHA: cron trigger
        GHA->>API: fetch TODO list
        API-->>GHA:
        GHA->>GIT: fetch PKGBUILDs
        GIT-->>GHA:
        GHA->>DT: fetch staging
        DT-->>GHA:
        GHA->>GHA: build packages
        GHA-->>DT: upload packages
    end

    DEV->>DT: fetch packages
    DT-->>DEV:
    DEV->>DEV: sign packages
    DEV->>REPO: push to repo
```

```
{
  "theme": "forest"
}
```
File diff suppressed because one or more lines are too long
(binary image removed, previously 24 KiB)
0  msys2_autobuild/__init__.py  Executable file

3  msys2_autobuild/__main__.py  Normal file
@@ -0,0 +1,3 @@
from .main import run

run()
421  msys2_autobuild/build.py  Normal file
@@ -0,0 +1,421 @@
import fnmatch
import json
import os
import time
import shlex
import shutil
import stat
import subprocess
import tempfile
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
from pathlib import Path, PurePath, PurePosixPath
from subprocess import check_call
from typing import Any, TypeVar
from collections.abc import Generator, Sequence

from gitea import Attachment

from .config import ArchType, BuildType, Config
from .gh import (CachedAssets, download_asset, get_asset_filename,
                 get_release, get_repo_for_build_type, upload_asset)
from .queue import Package
from .utils import SCRIPT_DIR, PathLike


class BuildError(Exception):
    pass


def get_python_path(msys2_root: PathLike, msys2_path: PathLike) -> Path:
    return Path(os.path.normpath(str(msys2_root) + str(msys2_path)))


def to_pure_posix_path(path: PathLike) -> PurePath:
    return PurePosixPath("/" + str(path).replace(":", "", 1).replace("\\", "/"))

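# Editor's note, not part of the original source: to_pure_posix_path() converts
# a native Windows path to the MSYS2 POSIX-style form, e.g.:
#
#     >>> to_pure_posix_path("C:\\msys64\\tmp\\build")
#     PurePosixPath('/C/msys64/tmp/build')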
def get_build_environ(build_type: BuildType) -> dict[str, str]:
    environ = os.environ.copy()

    # Set PACKAGER for makepkg
    packager_ref = Config.RUNNER_CONFIG[build_type]["repo"]
    if "GITHUB_SHA" in environ and "GITHUB_RUN_ID" in environ:
        packager_ref += "/" + environ["GITHUB_SHA"][:8] + "/" + environ["GITHUB_RUN_ID"]
    environ["PACKAGER"] = f"CI ({packager_ref})"

    return environ

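# Editor's note, not part of the original source: with GITHUB_SHA and
# GITHUB_RUN_ID set, the resulting makepkg PACKAGER looks roughly like
# "CI (<owner>/msys2-autobuild/abcd1234/1234567890)" (SHA and run ID here are
# made-up placeholders); without them it is just "CI (<repo>)".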
@contextmanager
def temp_pacman_script(pacman_config: PathLike) -> Generator[PathLike, None, None]:
    """Gives a temporary pacman script which uses the passed in pacman config
    without having to pass --config to it. Required because makepkg doesn't allow
    setting the pacman conf path, but it allows setting the pacman executable path
    via the 'PACMAN' env var.
    """

    fd, filename = tempfile.mkstemp("pacman")
    os.close(fd)

    try:
        with open(filename, "w", encoding="utf-8") as h:
            cli = shlex.join(['/usr/bin/pacman', '--config', str(to_pure_posix_path(pacman_config))])
            h.write(f"""\
#!/bin/bash
set -e
exec {cli} "$@"
""")
        yield filename
    finally:
        try:
            os.unlink(filename)
        except OSError:
            pass

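# Editor's note, not part of the original source: a minimal usage sketch of the
# context managers in this module (paths are made up for illustration):
#
#     with temp_pacman_conf(msys2_root) as conf:        # defined below
#         with temp_pacman_script(conf) as pacman:
#             env = os.environ.copy()
#             env["PACMAN"] = str(to_pure_posix_path(pacman))
#             # makepkg now resolves packages via the temporary pacman.conf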
@contextmanager
def temp_pacman_conf(msys2_root: PathLike) -> Generator[Path, None, None]:
    """Gives a unix path to a temporary copy of pacman.conf"""

    fd, filename = tempfile.mkstemp("pacman.conf")
    os.close(fd)
    try:
        conf = get_python_path(msys2_root, "/etc/pacman.conf")
        with open(conf, "rb") as src:
            with open(filename, "wb") as dest:
                shutil.copyfileobj(src, dest)

        yield Path(filename)
    finally:
        try:
            os.unlink(filename)
        except OSError:
            pass


@contextmanager
def temp_makepkg_confd(msys2_root: PathLike, config_name: str) -> Generator[Path, None, None]:
    """Gives a path to a temporary $config_name.d file"""

    conf_dir = get_python_path(msys2_root, f"/etc/{config_name}.d")
    os.makedirs(conf_dir, exist_ok=True)
    conf_file = conf_dir / "msys2_autobuild.conf"
    try:
        open(conf_file, "wb").close()
        yield conf_file
    finally:
        try:
            os.unlink(conf_file)
        except OSError:
            pass
        try:
            os.rmdir(conf_dir)
        except OSError:
            pass


def clean_environ(environ: dict[str, str]) -> dict[str, str]:
    """Returns an environment without any CI related variables.

    This is to avoid leaking secrets to package build scripts we call.
    While in theory we trust them this can't hurt.
    """

    new_env = environ.copy()
    for key in list(new_env):
        if key.startswith(("GITHUB_", "RUNNER_")):
            del new_env[key]
    return new_env


def run_cmd(msys2_root: PathLike, args: Sequence[PathLike], **kwargs: Any) -> None:
    executable = os.path.join(msys2_root, 'usr', 'bin', 'bash.exe')
    env = clean_environ(kwargs.pop("env", os.environ.copy()))
    env["CHERE_INVOKING"] = "1"
    env["MSYSTEM"] = "MSYS"
    env["MSYS2_PATH_TYPE"] = "minimal"

    check_call([executable, '-lc'] + [shlex.join([str(a) for a in args])], env=env, **kwargs)


def make_tree_writable(topdir: PathLike) -> None:
    # Ensure all files and directories under topdir are writable
    # (and readable) by owner.
    # Taken from meson, and adjusted

    def chmod(p: PathLike) -> None:
        os.chmod(p, os.stat(p).st_mode | stat.S_IWRITE | stat.S_IREAD)

    chmod(topdir)
    for root, dirs, files in os.walk(topdir):
        for d in dirs:
            chmod(os.path.join(root, d))
        # Work around Python bug following junctions
        # https://github.com/python/cpython/issues/67596#issuecomment-1918112817
        dirs[:] = [d for d in dirs if not os.path.isjunction(os.path.join(root, d))]
        for fname in files:
            fpath = os.path.join(root, fname)
            if os.path.isfile(fpath):
                chmod(fpath)


def remove_junctions(topdir: PathLike) -> None:
    # work around a git issue where it can't handle junctions
    # https://github.com/git-for-windows/git/issues/5320
    for root, dirs, _ in os.walk(topdir):
        no_junctions = []
        for d in dirs:
            if not os.path.isjunction(os.path.join(root, d)):
                no_junctions.append(d)
            else:
                os.remove(os.path.join(root, d))
        dirs[:] = no_junctions


def reset_git_repo(path: PathLike):

    def clean():
        assert os.path.exists(path)

        # Try to avoid git hanging in a junction loop, by removing them
        # before running git clean/reset
        # https://github.com/msys2/msys2-autobuild/issues/108#issuecomment-2776420879
        try:
            remove_junctions(path)
        except OSError as e:
            print("Removing junctions failed", e)

        check_call(["git", "clean", "-xfdf"], cwd=path)
        check_call(["git", "reset", "--hard", "HEAD"], cwd=path)

    made_writable = False
    for i in range(10):
        try:
            clean()
        except subprocess.CalledProcessError:
            try:
                if not made_writable:
                    print("Trying to make files writable")
                    make_tree_writable(path)
                    remove_junctions(path)
                    made_writable = True
            except OSError as e:
                print("Making files writable failed", e)
            print(f"git clean/reset failed, sleeping for {i} seconds")
            time.sleep(i)
        else:
            break
    else:
        # run it one more time to raise
        clean()


@contextmanager
def fresh_git_repo(url: str, path: PathLike) -> Generator:
    if not os.path.exists(path):
        check_call(["git", "clone", url, path])
        check_call(["git", "config", "core.longpaths", "true"], cwd=path)
    else:
        reset_git_repo(path)
        check_call(["git", "fetch", "origin"], cwd=path)
        check_call(["git", "reset", "--hard", "origin/master"], cwd=path)
    try:
        yield
    finally:
        assert os.path.exists(path)
        reset_git_repo(path)


@contextmanager
def staging_dependencies(
        build_type: BuildType, pkg: Package, msys2_root: PathLike,
        builddir: PathLike) -> Generator[PathLike, None, None]:

    def add_to_repo(repo_root: PathLike, pacman_config: PathLike, repo_name: str,
                    assets: list[Attachment]) -> None:
        repo_dir = Path(repo_root) / repo_name
        os.makedirs(repo_dir, exist_ok=True)

        todo = []
        for asset in assets:
            asset_path = os.path.join(repo_dir, get_asset_filename(asset))
            todo.append((asset_path, asset))

        def fetch_item(item: tuple[str, Attachment]) -> tuple[str, Attachment]:
            asset_path, asset = item
            download_asset(asset, asset_path)
            return item

        package_paths = []
        with ThreadPoolExecutor(8) as executor:
            for i, item in enumerate(executor.map(fetch_item, todo)):
                asset_path, asset = item
                print(f"[{i + 1}/{len(todo)}] {get_asset_filename(asset)}")
                package_paths.append(asset_path)

        repo_name = f"autobuild-{repo_name}"
        repo_db_path = os.path.join(repo_dir, f"{repo_name}.db.tar.gz")

        with open(pacman_config, encoding="utf-8") as h:
            text = h.read()
        uri = to_pure_posix_path(repo_dir).as_uri()
        if uri not in text:
            with open(pacman_config, "w", encoding="utf-8") as h2:
                h2.write(f"""[{repo_name}]
Server={uri}
SigLevel=Never
""")
                h2.write(text)

        # repo-add 15 packages at a time so we don't hit the size limit for CLI arguments
        ChunkItem = TypeVar("ChunkItem")

        def chunks(lst: list[ChunkItem], n: int) -> Generator[list[ChunkItem], None, None]:
            for i in range(0, len(lst), n):
                yield lst[i:i + n]

        base_args: list[PathLike] = ["repo-add", to_pure_posix_path(repo_db_path)]
        posix_paths: list[PathLike] = [to_pure_posix_path(p) for p in package_paths]
        for chunk in chunks(posix_paths, 15):
            args = base_args + chunk
            run_cmd(msys2_root, args, cwd=repo_dir)

    cached_assets = CachedAssets()
    repo_root = os.path.join(builddir, "_REPO")
    try:
        shutil.rmtree(repo_root, ignore_errors=True)
        os.makedirs(repo_root, exist_ok=True)
        with temp_pacman_conf(msys2_root) as pacman_config:
            to_add: dict[ArchType, list[Attachment]] = {}
            for dep_type, deps in pkg.get_depends(build_type).items():
                assets = cached_assets.get_assets(dep_type)
                for dep in deps:
                    for pattern in dep.get_build_patterns(dep_type):
                        for asset in assets:
                            if fnmatch.fnmatch(get_asset_filename(asset), pattern):
                                to_add.setdefault(dep_type, []).append(asset)
                                break
                        else:
                            if pkg.is_optional_dep(dep, dep_type):
                                # If it's there, good, if not we ignore it since it's part of a cycle
                                pass
                            else:
                                raise SystemExit(f"asset for {pattern} in {dep_type} not found")

            for dep_type, assets in to_add.items():
                add_to_repo(repo_root, pacman_config, dep_type, assets)

            with temp_pacman_script(pacman_config) as temp_pacman:
                # in case they are already installed we need to upgrade
                run_cmd(msys2_root, [to_pure_posix_path(temp_pacman), "--noconfirm", "-Suy"])
                run_cmd(msys2_root, [to_pure_posix_path(temp_pacman), "--noconfirm", "-Su"])
                yield temp_pacman
    finally:
        shutil.rmtree(repo_root, ignore_errors=True)
        # downgrade again
        run_cmd(msys2_root, ["pacman", "--noconfirm", "-Suuy"])
        run_cmd(msys2_root, ["pacman", "--noconfirm", "-Suu"])

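# Editor's note, not part of the original source: the chunks() helper used by
# add_to_repo() above simply slices a list into fixed-size batches so repo-add
# is never invoked with an oversized argument list:
#
#     list(chunks([1, 2, 3, 4, 5], 2))  # -> [[1, 2], [3, 4], [5]]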
def build_package(build_type: BuildType, pkg: Package, msys2_root: PathLike, builddir: PathLike) -> None:
    assert os.path.isabs(builddir)
    assert os.path.isabs(msys2_root)
    os.makedirs(builddir, exist_ok=True)

    repo_name = {"MINGW-packages": "W", "MSYS2-packages": "S"}.get(pkg['repo'], pkg['repo'])
    repo_dir = os.path.join(builddir, repo_name)
    to_upload: list[str] = []

    repo = get_repo_for_build_type(build_type)

    with fresh_git_repo(pkg['repo_url'], repo_dir):
        orig_pkg_dir = os.path.join(repo_dir, pkg['repo_path'])
        # Rename it to get a shorter overall build path
        # https://github.com/msys2/msys2-autobuild/issues/71
        pkg_dir = os.path.join(repo_dir, 'B')
        assert not os.path.exists(pkg_dir)
        os.rename(orig_pkg_dir, pkg_dir)

        # Fetch all keys mentioned in the PKGBUILD
        validpgpkeys = to_pure_posix_path(os.path.join(SCRIPT_DIR, 'fetch-validpgpkeys.sh'))
        run_cmd(msys2_root, ['bash', validpgpkeys], cwd=pkg_dir)

        with staging_dependencies(build_type, pkg, msys2_root, builddir) as temp_pacman:
            try:
                env = get_build_environ(build_type)
                # this makes makepkg use our custom pacman script
                env['PACMAN'] = str(to_pure_posix_path(temp_pacman))
                if build_type == Config.MINGW_SRC_BUILD_TYPE:
                    with temp_makepkg_confd(msys2_root, "makepkg_mingw.conf") as makepkg_conf:
                        with open(makepkg_conf, "w", encoding="utf-8") as h:
                            h.write("COMPRESSZST=(zstd -c -T0 --ultra -22 -)\n")

                        env['MINGW_ARCH'] = Config.MINGW_SRC_ARCH
                        run_cmd(msys2_root, [
                            'makepkg-mingw',
                            '--noconfirm',
                            '--noprogressbar',
                            '--allsource'
                        ], env=env, cwd=pkg_dir)
                elif build_type == Config.MSYS_SRC_BUILD_TYPE:
                    with temp_makepkg_confd(msys2_root, "makepkg.conf") as makepkg_conf:
                        with open(makepkg_conf, "w", encoding="utf-8") as h:
                            h.write("COMPRESSZST=(zstd -c -T0 --ultra -22 -)\n")

                        run_cmd(msys2_root, [
                            'makepkg',
                            '--noconfirm',
                            '--noprogressbar',
                            '--allsource'
                        ], env=env, cwd=pkg_dir)
                elif build_type in Config.MINGW_ARCH_LIST:
                    with temp_makepkg_confd(msys2_root, "makepkg_mingw.conf") as makepkg_conf:
                        with open(makepkg_conf, "w", encoding="utf-8") as h:
                            h.write("COMPRESSZST=(zstd -c -T0 --ultra -20 -)\n")

                        env['MINGW_ARCH'] = build_type
                        run_cmd(msys2_root, [
                            'makepkg-mingw',
                            '--noconfirm',
                            '--noprogressbar',
                            '--nocheck',
                            '--syncdeps',
                            '--rmdeps',
                            '--cleanbuild'
                        ], env=env, cwd=pkg_dir)
                elif build_type in Config.MSYS_ARCH_LIST:
                    with temp_makepkg_confd(msys2_root, "makepkg.conf") as makepkg_conf:
                        with open(makepkg_conf, "w", encoding="utf-8") as h:
                            h.write("COMPRESSZST=(zstd -c -T0 --ultra -20 -)\n")

                        run_cmd(msys2_root, [
                            'makepkg',
                            '--noconfirm',
                            '--noprogressbar',
                            '--nocheck',
                            '--syncdeps',
                            '--rmdeps',
                            '--cleanbuild'
                        ], env=env, cwd=pkg_dir)
                else:
                    assert 0

                entries = os.listdir(pkg_dir)
                for pattern in pkg.get_build_patterns(build_type):
                    found = fnmatch.filter(entries, pattern)
                    if not found:
                        raise BuildError(f"{pattern} not found, likely wrong version built")
                    to_upload.extend([os.path.join(pkg_dir, e) for e in found])

            except (subprocess.CalledProcessError, BuildError) as e:
                release = get_release(repo, "staging-failed")
                failed_data = {}
                content = json.dumps(failed_data).encode()
                upload_asset(repo, release, pkg.get_failed_name(build_type), text=True, content=content)

                raise BuildError(e)
            else:
                release = get_release(repo, "staging-" + build_type)
                for path in to_upload:
                    upload_asset(repo, release, path)
102  msys2_autobuild/cmd_build.py  Normal file
@@ -0,0 +1,102 @@
import os
import shutil
import sys
import time
import traceback
from typing import Any, Literal

from .build import BuildError, build_package, run_cmd
from .config import BuildType, Config
from .queue import (Package, PackageStatus, get_buildqueue_with_status,
                    update_status)
from .utils import apply_optional_deps, gha_group

BuildFrom = Literal["start", "middle", "end"]


def get_package_to_build(
        pkgs: list[Package], build_types: list[BuildType] | None,
        build_from: BuildFrom) -> tuple[Package, BuildType] | None:

    can_build = []
    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            if build_types is not None and build_type not in build_types:
                continue
            if pkg.get_status(build_type) == PackageStatus.WAITING_FOR_BUILD:
                can_build.append((pkg, build_type))

    if not can_build:
        return None

    if build_from == "end":
        return can_build[-1]
    elif build_from == "middle":
        return can_build[len(can_build) // 2]
    elif build_from == "start":
        return can_build[0]
    else:
        raise Exception("Unknown order:", build_from)

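# Editor's note, not part of the original source: "start"/"middle"/"end" just
# pick different entries from the same ordered queue, so several runners can
# work through it from different directions without coordinating, e.g.:
#
#     can_build = ["a", "b", "c", "d", "e"]
#     # "start" -> "a", "middle" -> "c" (index len // 2 == 2), "end" -> "e"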
def run_build(args: Any) -> None:
    builddir = os.path.abspath(args.builddir)
    msys2_root = os.path.abspath(args.msys2_root)
    if args.build_types is None:
        build_types = None
    else:
        build_types = [p.strip() for p in args.build_types.split(",")]

    apply_optional_deps(args.optional_deps or "")

    start_time = time.monotonic()

    if not sys.platform == "win32":
        raise SystemExit("ERROR: Needs to run under native Python")

    if not shutil.which("git"):
        raise SystemExit("ERROR: git not in PATH")

    if not os.path.isdir(msys2_root):
        raise SystemExit("ERROR: msys2_root doesn't exist")

    try:
        run_cmd(msys2_root, [])
    except Exception as e:
        raise SystemExit("ERROR: msys2_root not functional", e)

    print(f"Building {build_types} starting from {args.build_from}")

    while True:
        pkgs = get_buildqueue_with_status(full_details=True)
        update_status(pkgs)

        if (time.monotonic() - start_time) >= Config.SOFT_JOB_TIMEOUT:
            print("timeout reached")
            break

        todo = get_package_to_build(pkgs, build_types, args.build_from)
        if not todo:
            break
        pkg, build_type = todo

        try:
            with gha_group(f"[{pkg['repo']}] [{build_type}] {pkg['name']}..."):
                build_package(build_type, pkg, msys2_root, builddir)
        except BuildError:
            with gha_group(f"[{pkg['repo']}] [{build_type}] {pkg['name']}: failed"):
                traceback.print_exc(file=sys.stdout)
            continue


def add_parser(subparsers: Any) -> None:
    sub = subparsers.add_parser("build", help="Build all packages")
    sub.add_argument("-t", "--build-types", action="store")
    sub.add_argument(
        "--build-from", action="store", default="start", help="Start building from start|end|middle")
    sub.add_argument("--optional-deps", action="store")
    sub.add_argument("msys2_root", help="The MSYS2 install used for building. e.g. C:\\msys64")
    sub.add_argument(
        "builddir",
        help="A directory used for saving temporary build results and the git repos")
    sub.set_defaults(func=run_build)
90  msys2_autobuild/cmd_clean_assets.py  Normal file
@@ -0,0 +1,90 @@
import re
import fnmatch
from typing import Any

from gitea import Release, Attachment

from .config import get_all_build_types
from .gh import (get_asset_filename, get_current_repo, get_release,
                 get_release_assets, get_gitea)
from .queue import get_buildqueue


def get_assets_to_delete() -> tuple[list[Release], list[tuple[Release, Attachment]]]:

    print("Fetching packages to build...")
    keep_patterns = []
    for pkg in get_buildqueue():
        for build_type in pkg.get_build_types():
            keep_patterns.append(pkg.get_failed_name(build_type))
            keep_patterns.extend(pkg.get_build_patterns(build_type))
    keep_pattern_regex = re.compile('|'.join(fnmatch.translate(p) for p in keep_patterns))

    def should_be_deleted(asset: Attachment) -> bool:
        filename = get_asset_filename(asset)
        return not keep_pattern_regex.match(filename)

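    # Editor's note, not part of the original source: fnmatch.translate turns
    # each shell-style pattern into a regex, and joining them with "|" yields a
    # single compiled regex instead of one fnmatch call per pattern per asset:
    #
    #     fnmatch.translate("libfoo-*.pkg.tar.*")
    #     # roughly -> '(?s:libfoo\\-.*\\.pkg\\.tar\\..*)\\Z'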
    def get_to_delete(release: Release) -> tuple[list[Release], list[Attachment]]:
        assets = get_release_assets(release)
        to_delete = []
        for asset in assets:
            if should_be_deleted(asset):
                to_delete.append(asset)

        # Deleting and re-creating a release requires two write calls, so delete
        # the release if all assets should be deleted and there are more than 2.
        # min_to_delete = 3

        # XXX: re-creating releases causes notifications, so avoid unless possible
        # https://github.com/msys2/msys2-autobuild/issues/77#issuecomment-1657231719
        min_to_delete = 400 * 333

        if len(to_delete) >= min_to_delete and len(assets) == len(to_delete):
            return [release], []
        else:
            return [], to_delete

    def get_all_releases() -> list[Release]:
        repo = get_current_repo()

        releases = []
        for build_type in get_all_build_types():
            releases.append(get_release(repo, "staging-" + build_type))
        releases.append(get_release(repo, "staging-failed"))
        return releases

    print("Fetching assets...")
    releases = []
    assets = []
    for release in get_all_releases():
        r, a = get_to_delete(release)
        releases.extend(r)
        assets.extend((release, asset) for asset in a)

    return releases, assets


def clean_gha_assets(args: Any) -> None:
    repo = get_current_repo()
    releases, assets = get_assets_to_delete()

    print("Resetting releases...")
    for release in releases:
        print(f"Resetting {release.tag_name}...")
        if not args.dry_run:
            release.delete_release()
            get_release(repo, release.tag_name)

    print("Deleting assets...")
    for release, asset in assets:
        print(f"Deleting {get_asset_filename(asset)}...")
        if not args.dry_run:
            gitea = get_gitea()
            gitea.repo_delete_release_attachment(repo.owner.login, repo.name, release.id, asset.id)


def add_parser(subparsers: Any) -> None:
    sub = subparsers.add_parser("clean-assets", help="Clean up GHA assets", allow_abbrev=False)
    sub.add_argument(
        "--dry-run", action="store_true", help="Only show what is going to be deleted")
    sub.set_defaults(func=clean_gha_assets)
49  msys2_autobuild/cmd_clear_failed.py  Normal file
@@ -0,0 +1,49 @@
from typing import Any

from .gh import (get_asset_filename, get_current_repo, get_release,
                 get_release_assets, get_gitea)
from .queue import get_buildqueue_with_status


def clear_failed_state(args: Any) -> None:
    build_type_filter = args.build_types
    build_type_list = build_type_filter.replace(" ", "").split(",") if build_type_filter else []
    package_filter = args.packages
    package_list = package_filter.replace(" ", "").split(",") if package_filter else []

    if build_type_filter is None and package_filter is None:
        raise SystemExit("clear-failed: At least one of --build-types or --packages needs to be passed")

    repo = get_current_repo()
    release = get_release(repo, 'staging-failed')
    assets_failed = get_release_assets(release)
    failed_map = dict((get_asset_filename(a), a) for a in assets_failed)

    for pkg in get_buildqueue_with_status():

        if package_filter is not None and pkg["name"] not in package_list:
            continue

        for build_type in pkg.get_build_types():
            if build_type_filter is not None and build_type not in build_type_list:
                continue

            name = pkg.get_failed_name(build_type)
            if name in failed_map:
                asset = failed_map[name]
                print(f"Deleting {get_asset_filename(asset)}...")
                if not args.dry_run:
                    gitea = get_gitea()
                    gitea.repo_delete_release_attachment(repo.owner.login, repo.name, release.id, asset.id)


def add_parser(subparsers: Any) -> None:
    sub = subparsers.add_parser(
        "clear-failed", help="Clear the failed state for packages", allow_abbrev=False)
    sub.add_argument(
        "--dry-run", action="store_true", help="Only show what is going to be deleted")
    sub.add_argument("--build-types", action="store", help=(
        "A comma separated list of build types (e.g. mingw64)"))
    sub.add_argument("--packages", action="store", help=(
        "A comma separated list of packages to clear (e.g. mingw-w64-qt-creator)"))
    sub.set_defaults(func=clear_failed_state)
178  msys2_autobuild/cmd_fetch_assets.py  Normal file
@@ -0,0 +1,178 @@
import fnmatch
import os
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from typing import Any
import subprocess

from gitea import Attachment

from .config import BuildType, Config
from .gh import (CachedAssets, download_asset, get_asset_filename,
                 get_asset_mtime_ns)
from .queue import PackageStatus, get_buildqueue_with_status
from .utils import ask_yes_no


def get_repo_subdir(build_type: BuildType) -> Path:
    if build_type in Config.MSYS_ARCH_LIST:
        return Path("msys") / "x86_64"
    elif build_type == Config.MSYS_SRC_BUILD_TYPE:
        return Path("msys") / "sources"
    elif build_type == Config.MINGW_SRC_BUILD_TYPE:
        return Path("mingw") / "sources"
    elif build_type in Config.MINGW_ARCH_LIST:
        return Path("mingw") / build_type
    else:
        raise Exception("unknown type")


def fetch_assets(args: Any) -> None:
    target_dir = os.path.abspath(args.targetdir)
    fetch_all = args.fetch_all
    fetch_complete = args.fetch_complete

    all_patterns: dict[BuildType, list[str]] = {}
    all_blocked = []
    for pkg in get_buildqueue_with_status():
        for build_type in pkg.get_build_types():
            if args.build_type and build_type not in args.build_type:
                continue
            status = pkg.get_status(build_type)
            pkg_patterns = pkg.get_build_patterns(build_type)
            if status == PackageStatus.FINISHED:
                all_patterns.setdefault(build_type, []).extend(pkg_patterns)
            elif status in [PackageStatus.FINISHED_BUT_BLOCKED,
                            PackageStatus.FINISHED_BUT_INCOMPLETE]:
                if fetch_all or (fetch_complete and status != PackageStatus.FINISHED_BUT_INCOMPLETE):
                    all_patterns.setdefault(build_type, []).extend(pkg_patterns)
                else:
                    all_blocked.append(
                        (pkg["name"], build_type, pkg.get_status_details(build_type)))

    all_assets = {}
    cached_assets = CachedAssets()
    assets_to_download: dict[BuildType, list[Attachment]] = {}
    for build_type, patterns in all_patterns.items():
        if build_type not in all_assets:
            all_assets[build_type] = cached_assets.get_assets(build_type)
        assets = all_assets[build_type]

        assets_mapping: dict[str, list[Attachment]] = {}
        for asset in assets:
            assets_mapping.setdefault(get_asset_filename(asset), []).append(asset)

        for pattern in patterns:
            matches = fnmatch.filter(assets_mapping.keys(), pattern)
            if matches:
                found = assets_mapping[matches[0]]
                assets_to_download.setdefault(build_type, []).extend(found)

    to_fetch = {}
    for build_type, assets in assets_to_download.items():
        for asset in assets:
            asset_dir = Path(target_dir) / get_repo_subdir(build_type)
            asset_path = asset_dir / get_asset_filename(asset)
            to_fetch[str(asset_path)] = asset

    def file_is_uptodate(path: str, asset: Attachment) -> bool:
        asset_path = Path(path)
        if not asset_path.exists():
            return False
        if asset_path.stat().st_size != asset.size:
            return False
        if get_asset_mtime_ns(asset) != asset_path.stat().st_mtime_ns:
            return False
        return True

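    # Editor's note, not part of the original source: the up-to-date check
    # deliberately avoids hashing. download_asset() (in gh.py) stamps each
    # downloaded file with the asset's creation time, so size plus mtime_ns
    # together act as a cheap fingerprint:
    #
    #     file_is_uptodate("msys/x86_64/foo.pkg.tar.zst", asset)
    #     # -> True only if the file exists with matching size and mtime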
    # find files that are either wrong or not what we want
    to_delete = []
    not_uptodate = []
    for root, dirs, files in os.walk(target_dir):
        for name in files:
            existing = os.path.join(root, name)
            if existing in to_fetch:
                asset = to_fetch[existing]
                if not file_is_uptodate(existing, asset):
                    to_delete.append(existing)
                    not_uptodate.append(existing)
            else:
                to_delete.append(existing)

    if args.delete and not args.pretend:
        # delete unwanted files
        for path in to_delete:
            os.remove(path)

        # delete empty directories
        for root, dirs, files in os.walk(target_dir, topdown=False):
            for name in dirs:
                path = os.path.join(root, name)
                if not os.listdir(path):
                    os.rmdir(path)

    # Finally figure out what to download
    todo = {}
    done = []
    for path, asset in to_fetch.items():
        if not os.path.exists(path) or path in not_uptodate:
            todo[path] = asset
            Path(path).parent.mkdir(parents=True, exist_ok=True)
        else:
            done.append(path)

    if args.verbose and all_blocked:
        import pprint
        print("Packages that are blocked and why:")
        pprint.pprint(all_blocked)

    print(f"downloading: {len(todo)}, done: {len(done)}, "
          f"blocked: {len(all_blocked)} (related builds missing)")

    print("Pass --verbose to see the list of blocked packages.")
    print("Pass --fetch-complete to also fetch blocked but complete packages.")
    print("Pass --fetch-all to fetch all packages.")
    print("Pass --delete to clear the target directory.")

    def verify_file(path: str, target: str) -> None:
        try:
            subprocess.run(["zstd", "--quiet", "--test", path], capture_output=True, check=True, text=True)
        except subprocess.CalledProcessError as e:
            raise Exception(f"zstd test failed for {target!r}: {e.stderr}") from e

    def fetch_item(item: tuple[str, Attachment]) -> tuple[str, Attachment]:
        asset_path, asset = item
        if not args.pretend:
            download_asset(asset, asset_path, verify_file)
        return item

    with ThreadPoolExecutor(8) as executor:
        for i, item in enumerate(executor.map(fetch_item, todo.items())):
            print(f"[{i + 1}/{len(todo)}] {get_asset_filename(item[1])}")

    print("done")


def add_parser(subparsers: Any) -> None:
    sub = subparsers.add_parser(
        "fetch-assets", help="Download all staging packages", allow_abbrev=False)
    sub.add_argument("targetdir")
    sub.add_argument(
        "--delete", action="store_true", help="Clear targetdir of unneeded files")
    sub.add_argument(
        "--verbose", action="store_true", help="Show why things are blocked")
    sub.add_argument(
        "--pretend", action="store_true",
        help="Don't actually download, just show what would be done")
    sub.add_argument(
        "--fetch-all", action="store_true", help="Fetch all packages, even blocked ones")
    sub.add_argument(
        "--fetch-complete", action="store_true",
        help="Fetch all packages, even blocked ones, except incomplete ones")
    sub.add_argument(
        "-t", "--build-type", action="append",
        help="Only fetch packages for given build type(s) (may be used more than once)")
    sub.add_argument(
        "--noconfirm", action="store_true",
        help="Don't require user confirmation")
    sub.set_defaults(func=fetch_assets)
66  msys2_autobuild/cmd_show_build.py  Normal file
@@ -0,0 +1,66 @@
from typing import Any

from tabulate import tabulate

from .queue import Package, PackageStatus, get_buildqueue_with_status, get_cycles
from .utils import apply_optional_deps, gha_group


def show_cycles(pkgs: list[Package]) -> None:
    cycles = get_cycles(pkgs)
    if cycles:
        def format_package(p: Package) -> str:
            return f"{p['name']} [{p['version_repo']} -> {p['version']}]"

        with gha_group(f"Dependency Cycles ({len(cycles)})"):
            print(tabulate([
                (format_package(a), "<-->", format_package(b)) for (a, b) in cycles],
                headers=["Package", "", "Package"]))


def show_build(args: Any) -> None:
    todo = []
    waiting = []
    done = []
    failed = []

    apply_optional_deps(args.optional_deps or "")

    pkgs = get_buildqueue_with_status(full_details=args.details)

    show_cycles(pkgs)

    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            status = pkg.get_status(build_type)
            details = pkg.get_status_details(build_type)
            details.pop("blocked", None)
            if status == PackageStatus.WAITING_FOR_BUILD:
                todo.append((pkg, build_type, status, details))
            elif status in (PackageStatus.FINISHED, PackageStatus.FINISHED_BUT_BLOCKED,
                            PackageStatus.FINISHED_BUT_INCOMPLETE):
                done.append((pkg, build_type, status, details))
            elif status in (PackageStatus.WAITING_FOR_DEPENDENCIES,
                            PackageStatus.MANUAL_BUILD_REQUIRED):
                waiting.append((pkg, build_type, status, details))
            else:
                failed.append((pkg, build_type, status, details))

    def show_table(name: str, items: list) -> None:
        with gha_group(f"{name} ({len(items)})"):
            print(tabulate([(p["name"], bt, p["version"], str(s), d) for (p, bt, s, d) in items],
                           headers=["Package", "Build", "Version", "Status", "Details"]))

    show_table("TODO", todo)
    show_table("WAITING", waiting)
    show_table("FAILED", failed)
    show_table("DONE", done)


def add_parser(subparsers: Any) -> None:
    sub = subparsers.add_parser(
        "show", help="Show all packages to be built", allow_abbrev=False)
    sub.add_argument(
        "--details", action="store_true", help="Show more details such as links to failed build logs (slow)")
    sub.add_argument("--optional-deps", action="store")
    sub.set_defaults(func=show_build)
13  msys2_autobuild/cmd_update_status.py  Normal file
@@ -0,0 +1,13 @@
from typing import Any

from .queue import get_buildqueue_with_status, update_status


def run_update_status(args: Any) -> None:
    update_status(get_buildqueue_with_status(full_details=True))


def add_parser(subparsers: Any) -> None:
    sub = subparsers.add_parser(
        "update-status", help="Update the status file", allow_abbrev=False)
    sub.set_defaults(func=run_update_status)
65  msys2_autobuild/cmd_upload_assets.py  Normal file
@@ -0,0 +1,65 @@
import glob
import os
from typing import Any

from .gh import get_release, get_repo_for_build_type, upload_asset
from .queue import PackageStatus, get_buildqueue_with_status


def upload_assets(args: Any) -> None:
    package_name = args.package
    src_dir = args.path
    src_dir = os.path.abspath(src_dir)

    pkgs = get_buildqueue_with_status()

    if package_name is not None:
        for pkg in pkgs:
            if pkg["name"] == package_name:
                break
        else:
            raise SystemExit(f"Package '{package_name}' not in the queue, check the 'show' command")
        pkgs = [pkg]

    pattern_entries = []
    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            status = pkg.get_status(build_type)

            # ignore finished packages
            if status in (PackageStatus.FINISHED, PackageStatus.FINISHED_BUT_BLOCKED,
                          PackageStatus.FINISHED_BUT_INCOMPLETE):
                continue

            pattern_entries.append((build_type, pkg.get_build_patterns(build_type)))

    print(f"Looking for the following files in {src_dir}:")
    for build_type, patterns in pattern_entries:
        for pattern in patterns:
            print(" ", pattern)

    matches = []
    for build_type, patterns in pattern_entries:
        for pattern in patterns:
            for match in glob.glob(os.path.join(src_dir, pattern)):
                matches.append((build_type, match))
    print(f"Found {len(matches)} files..")

    for build_type, match in matches:
        repo = get_repo_for_build_type(build_type)
        release = get_release(repo, 'staging-' + build_type)
        print(f"Uploading {match}")
        if not args.dry_run:
            upload_asset(repo, release, match)
    print("Done")


def add_parser(subparsers: Any) -> None:
    sub = subparsers.add_parser(
        "upload-assets", help="Upload packages", allow_abbrev=False)
    sub.add_argument("path", help="Directory to look for packages in")
    sub.add_argument(
        "--dry-run", action="store_true", help="Only show what is going to be uploaded")
    sub.add_argument("-p", "--package", action="store", help=(
        "Only upload files belonging to a particular package (pkgbase)"))
    sub.set_defaults(func=upload_assets)
114  msys2_autobuild/config.py  Normal file
@@ -0,0 +1,114 @@
from typing import Literal, TypeAlias

from urllib3.util import Retry

ArchType = Literal["mingw32", "mingw64", "ucrt64", "clang64", "clangarm64", "msys"]
SourceType = Literal["mingw-src", "msys-src"]
BuildType: TypeAlias = ArchType | SourceType

REQUESTS_TIMEOUT = (15, 30)
REQUESTS_RETRY = Retry(total=3, backoff_factor=1, status_forcelist=[500, 502])


def get_all_build_types() -> list[BuildType]:
    all_build_types: list[BuildType] = []
    all_build_types.extend(Config.MSYS_ARCH_LIST)
    all_build_types.extend(Config.MINGW_ARCH_LIST)
    all_build_types.append(Config.MINGW_SRC_BUILD_TYPE)
    all_build_types.append(Config.MSYS_SRC_BUILD_TYPE)
    return all_build_types


def build_type_is_src(build_type: BuildType) -> bool:
    return build_type in [Config.MINGW_SRC_BUILD_TYPE, Config.MSYS_SRC_BUILD_TYPE]


class Config:

    ALLOWED_UPLOADERS = [
        "elieux",
        "lazka",
        "jeremyd2019",
    ]
    """Users that are allowed to upload assets. This is checked at download time"""

    MINGW_ARCH_LIST: list[ArchType] = ["mingw32", "mingw64", "ucrt64", "clang64", "clangarm64"]
    """Arches we try to build"""

    MINGW_SRC_ARCH: ArchType = "ucrt64"
    """The arch that is used to build the source package (any mingw one should work)"""

    MINGW_SRC_BUILD_TYPE: BuildType = "mingw-src"

    MSYS_ARCH_LIST: list[ArchType] = ["msys"]

    MSYS_SRC_ARCH: ArchType = "msys"

    MSYS_SRC_BUILD_TYPE: BuildType = "msys-src"

    RUNNER_CONFIG: dict[BuildType, dict] = {
        "msys-src": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
            "max_jobs": 1,
        },
        "msys": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "mingw-src": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
            "max_jobs": 1,
        },
        "mingw32": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "mingw64": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "ucrt64": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "clang64": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-2022"],
            "hosted": True,
        },
        "clangarm64": {
            "repo": "Befator-Inc-Firmen-Netzwerk/msys2-autobuild",
            "labels": ["windows-11-arm"],
            "hosted": True,
        },
    }
    """Runner config to use for each build type."""

    SOFT_JOB_TIMEOUT = 60 * 60 * 3
    """Runtime after which we shouldn't start a new build"""

    MAXIMUM_JOB_COUNT = 15
    """Maximum number of jobs to spawn"""

    MANUAL_BUILD: list[tuple[str, list[BuildType]]] = [
    ]
    """Packages that take too long to build, or can't be built and should be handled manually"""

    IGNORE_RDEP_PACKAGES: list[str] = [
    ]
    """XXX: These would in theory block rdeps, but no one fixed them, so we ignore them"""

    OPTIONAL_DEPS: dict[str, list[str]] = {
        "mingw-w64-headers-git": ["mingw-w64-winpthreads", "mingw-w64-tools-git"],
        "mingw-w64-crt-git": ["mingw-w64-winpthreads"],
        "mingw-w64-llvm": ["mingw-w64-libc++"],
    }
    """XXX: In case of cycles we mark these deps as optional"""
17  msys2_autobuild/fetch-validpgpkeys.sh  Normal file
@@ -0,0 +1,17 @@
#!/bin/bash

. PKGBUILD

set -e

_keyserver=(
    "keyserver.ubuntu.com"
    "keys.gnupg.net"
    "pgp.mit.edu"
    "keys.openpgp.org"
)
for key in "${validpgpkeys[@]}"; do
    for server in "${_keyserver[@]}"; do
        timeout 20 /usr/bin/gpg --keyserver "${server}" --recv "${key}" && break || true
    done
done
183  msys2_autobuild/gh.py  Normal file
@@ -0,0 +1,183 @@
import io
import os
import shutil
import sys
import tempfile
import time
import hashlib
from contextlib import contextmanager
from datetime import datetime, UTC
from functools import cache
from pathlib import Path
from typing import Any
from collections.abc import Generator, Callable

import requests
from gitea import Configuration, ApiClient, RepositoryApi, CreateReleaseOption
from gitea import Repository, Release, Attachment
from gitea.rest import ApiException

from .config import REQUESTS_TIMEOUT, BuildType, Config
from .utils import PathLike, get_requests_session


@cache
def _get_repo(name: str) -> Repository:
    gitea = get_gitea()
    split = name.split("/")
    return gitea.repo_get(split[0], split[1])


def get_current_repo() -> Repository:
    repo_full_name = os.environ.get("GITHUB_REPOSITORY", "Befator-Inc-Firmen-Netzwerk/msys2-autobuild")
    return _get_repo(repo_full_name)


def get_repo_for_build_type(build_type: BuildType) -> Repository:
    return _get_repo(Config.RUNNER_CONFIG[build_type]["repo"])


@cache
def get_gitea() -> RepositoryApi:
    configuration = Configuration()
    configuration.host = "https://git.befatorinc.de/api/v1"
    # Read the API token from the environment rather than hardcoding a
    # credential in the repository (env var name assumed)
    configuration.api_key["Authorization"] = "token " + os.environ["GITEA_TOKEN"]
    gitea = RepositoryApi(ApiClient(configuration))
    return gitea


def download_text_asset(asset: Attachment, cache: bool = False) -> str:
    session = get_requests_session(nocache=not cache)
    with session.get(asset.browser_download_url, timeout=REQUESTS_TIMEOUT) as r:
        r.raise_for_status()
        return r.text


def get_asset_mtime_ns(asset: Attachment) -> int:
    """Returns the mtime of an asset in nanoseconds"""

    return int(asset.created_at.timestamp() * (1000 ** 3))


def download_asset(asset: Attachment, target_path: str,
                   onverify: Callable[[str, str], None] | None = None) -> None:
    session = get_requests_session(nocache=True)
    with session.get(asset.browser_download_url, stream=True, timeout=REQUESTS_TIMEOUT) as r:
        r.raise_for_status()
        fd, temppath = tempfile.mkstemp()
        try:
            os.chmod(temppath, 0o644)
            with os.fdopen(fd, "wb") as h:
                for chunk in r.iter_content(256 * 1024):
                    h.write(chunk)
            mtime_ns = get_asset_mtime_ns(asset)
            os.utime(temppath, ns=(mtime_ns, mtime_ns))
            if onverify is not None:
                onverify(temppath, target_path)
            shutil.move(temppath, target_path)
        finally:
            try:
                os.remove(temppath)
            except OSError:
                pass

def get_gh_asset_name(basename: PathLike, text: bool = False) -> str:
|
||||||
|
# GitHub will throw out charaters like '~' or '='. It also doesn't like
|
||||||
|
# when there is no file extension and will try to add one
|
||||||
|
return hashlib.sha256(str(basename).encode("utf-8")).hexdigest() + (".bin" if not text else ".txt")
|
||||||
|
|
||||||
|
|
||||||
|
def get_asset_filename(asset: Attachment) -> str:
|
||||||
|
return asset.name
|
||||||
|
|
||||||
|
|
||||||
|
def get_release_assets(release: Release) -> list[Attachment]:
|
||||||
|
assets = []
|
||||||
|
for asset in release.assets:
|
||||||
|
# We allow uploads from GHA and some special users
|
||||||
|
assets.append(asset)
|
||||||
|
return assets
|
||||||
|
|
||||||
|
|
||||||
|
def upload_asset(repo: Repository, release: Release, path: PathLike, replace: bool = False,
|
||||||
|
text: bool = False, content: bytes | None = None) -> None:
|
||||||
|
gitea = get_gitea()
|
||||||
|
path = Path(path)
|
||||||
|
basename = os.path.basename(str(path))
|
||||||
|
asset_name = get_gh_asset_name(basename, text)
|
||||||
|
asset_label = basename
|
||||||
|
|
||||||
|
def can_try_upload_again() -> bool:
|
||||||
|
for asset in get_release_assets(release):
|
||||||
|
if asset_name == asset.name:
|
||||||
|
# We want to treat incomplete assets as if they weren't there
|
||||||
|
# so replace them always
|
||||||
|
if replace:
|
||||||
|
gitea.repo_delete_release_attachment(repo.owner.login, repo.name, release.id, asset.id)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
print(f"Skipping upload for {asset_name} as {asset_label}, already exists")
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def upload() -> None:
|
||||||
|
if content is None:
|
||||||
|
with open(path, "rb") as fileobj:
|
||||||
|
gitea.repo_create_release_attachment(repo.owner.login, repo.name, release.id, name=asset_label, attachment=path)
|
||||||
|
else:
|
||||||
|
tmp_path = None
|
||||||
|
try:
|
||||||
|
with tempfile.NamedTemporaryFile(delete=False) as tf:
|
||||||
|
tf.write(content)
|
||||||
|
tf.flush()
|
||||||
|
tmp_path = tf.name
|
||||||
|
|
||||||
|
new_asset = gitea.repo_create_release_attachment(repo.owner.login, repo.name, release.id, name=asset_label, attachment=tmp_path)
|
||||||
|
finally:
|
||||||
|
if tmp_path and os.path.exists(tmp_path):
|
||||||
|
os.remove(tmp_path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
upload()
|
||||||
|
except (ApiException, requests.RequestException):
|
||||||
|
if can_try_upload_again():
|
||||||
|
upload()
|
||||||
|
|
||||||
|
print(f"Uploaded {asset_name} as {asset_label}")
|
||||||
|
|
||||||
|
|
||||||
|
def get_release(repo: Repository, name: str, create: bool = True) -> Release:
|
||||||
|
"""Like Repository.get_release() but creates the referenced release if needed"""
|
||||||
|
|
||||||
|
gitea = get_gitea()
|
||||||
|
try:
|
||||||
|
return gitea.repo_get_release_by_tag(repo.owner.login, repo.name, name)
|
||||||
|
except ApiException:
|
||||||
|
if not create:
|
||||||
|
raise
|
||||||
|
return gitea.repo_create_release(repo.owner.login, repo.name, body=CreateReleaseOption(tag_name = name, prerelease = True))
|
||||||
|
|
||||||
|
|
||||||
|
class CachedAssets:
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self._assets: dict[BuildType, list[Attachment]] = {}
|
||||||
|
self._failed: dict[str, list[Attachment]] = {}
|
||||||
|
|
||||||
|
def get_assets(self, build_type: BuildType) -> list[Attachment]:
|
||||||
|
if build_type not in self._assets:
|
||||||
|
repo = get_repo_for_build_type(build_type)
|
||||||
|
release = get_release(repo, 'staging-' + build_type)
|
||||||
|
self._assets[build_type] = get_release_assets(release)
|
||||||
|
return self._assets[build_type]
|
||||||
|
|
||||||
|
def get_failed_assets(self, build_type: BuildType) -> list[Attachment]:
|
||||||
|
repo = get_repo_for_build_type(build_type)
|
||||||
|
key = repo.full_name
|
||||||
|
if key not in self._failed:
|
||||||
|
release = get_release(repo, 'staging-failed')
|
||||||
|
self._failed[key] = get_release_assets(release)
|
||||||
|
assets = self._failed[key]
|
||||||
|
# XXX: This depends on the format of the filename
|
||||||
|
return [a for a in assets if get_asset_filename(a).startswith(build_type + "-")]
|
||||||
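
As a quick sketch of the asset-name scheme above: get_gh_asset_name() hides the real filename behind a sha256 digest, while the original basename survives as the attachment label. The filename here is made up for illustration:

    import hashlib

    # Hypothetical package filename, for illustration only.
    basename = "mingw-w64-x86_64-foo-1.0-1-any.pkg.tar.zst"

    # Binary payloads get ".bin", text payloads ".txt"; the digest avoids
    # characters the release API would reject or rewrite.
    asset_name = hashlib.sha256(basename.encode("utf-8")).hexdigest() + ".bin"
    print(asset_name)  # 64 hex characters followed by ".bin"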

msys2_autobuild/main.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import argparse
import sys
import logging

from . import (cmd_build, cmd_clean_assets, cmd_clear_failed, cmd_fetch_assets,
               cmd_show_build, cmd_update_status, cmd_upload_assets)
from .utils import install_requests_cache


def main(argv: list[str]) -> None:
    parser = argparse.ArgumentParser(description="Build packages", allow_abbrev=False)
    parser.add_argument(
        '-v', '--verbose',
        action='count',
        default=0,
        help='Increase verbosity (can be used multiple times)'
    )
    parser.set_defaults(func=lambda *x: parser.print_help())
    subparsers = parser.add_subparsers(title="subcommands")
    cmd_build.add_parser(subparsers)
    cmd_show_build.add_parser(subparsers)
    cmd_update_status.add_parser(subparsers)
    cmd_fetch_assets.add_parser(subparsers)
    cmd_upload_assets.add_parser(subparsers)
    cmd_clear_failed.add_parser(subparsers)
    cmd_clean_assets.add_parser(subparsers)

    args = parser.parse_args(argv[1:])
    level_map = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    logging.basicConfig(
        level=level_map.get(args.verbose, logging.DEBUG),
        handlers=[logging.StreamHandler(sys.stderr)],
        format='[%(asctime)s] [%(levelname)8s] [%(name)s:%(module)s:%(lineno)d] %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')

    with install_requests_cache():
        args.func(args)


def run() -> None:
    return main(sys.argv)
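
As a usage sketch, the same entry point can be driven programmatically; update-status is the subcommand also used by update-status.bat at the end of this change:

    from msys2_autobuild.main import main

    # Equivalent to `msys2-autobuild -vv update-status` from a shell;
    # -vv maps to DEBUG logging via level_map above. Needs network access
    # to the configured Gitea instance to do anything useful.
    main(["msys2-autobuild", "-vv", "update-status"])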

msys2_autobuild/queue.py (new file, 464 lines)
@@ -0,0 +1,464 @@
import fnmatch
import io
import json
import tempfile
import os
from concurrent.futures import ThreadPoolExecutor
from enum import Enum
from typing import Any, cast

import requests
from gitea.rest import ApiException

from .config import (REQUESTS_TIMEOUT, ArchType, BuildType, Config,
                     build_type_is_src, get_all_build_types)
from .gh import (CachedAssets, download_text_asset, get_asset_filename,
                 get_current_repo, get_release,
                 get_gitea)
from .utils import get_requests_session, queue_website_update


class PackageStatus(Enum):
    FINISHED = 'finished'
    FINISHED_BUT_BLOCKED = 'finished-but-blocked'
    FINISHED_BUT_INCOMPLETE = 'finished-but-incomplete'
    FAILED_TO_BUILD = 'failed-to-build'
    WAITING_FOR_BUILD = 'waiting-for-build'
    WAITING_FOR_DEPENDENCIES = 'waiting-for-dependencies'
    MANUAL_BUILD_REQUIRED = 'manual-build-required'
    UNKNOWN = 'unknown'

    def __str__(self) -> str:
        return self.value


class Package(dict):

    def __repr__(self) -> str:
        return "Package({!r})".format(self["name"])

    def __hash__(self) -> int:  # type: ignore
        return id(self)

    def __eq__(self, other: object) -> bool:
        return self is other

    @property
    def _active_builds(self) -> dict:
        return {
            k: v for k, v in self["builds"].items()
            if k in (Config.MINGW_ARCH_LIST + Config.MSYS_ARCH_LIST)}

    def _get_build(self, build_type: BuildType) -> dict:
        return self["builds"].get(build_type, {})

    def get_status(self, build_type: BuildType) -> PackageStatus:
        build = self._get_build(build_type)
        return build.get("status", PackageStatus.UNKNOWN)

    def get_status_details(self, build_type: BuildType) -> dict[str, Any]:
        build = self._get_build(build_type)
        return dict(build.get("status_details", {}))

    def set_status(self, build_type: BuildType, status: PackageStatus,
                   description: str | None = None,
                   urls: dict[str, str] | None = None) -> None:
        build = self["builds"].setdefault(build_type, {})
        build["status"] = status
        meta: dict[str, Any] = {}
        meta["desc"] = description
        if urls is None:
            urls = {}
        meta["urls"] = urls
        build["status_details"] = meta

    def set_blocked(
            self, build_type: BuildType, status: PackageStatus,
            dep: "Package", dep_type: BuildType) -> None:
        dep_details = dep.get_status_details(dep_type)
        dep_blocked = dep_details.get("blocked", {})
        details = self.get_status_details(build_type)
        blocked = details.get("blocked", {})
        if dep_blocked:
            blocked = dict(dep_blocked)
        else:
            blocked.setdefault(dep, set()).add(dep_type)
        descs = []
        for pkg, types in blocked.items():
            descs.append("{} ({})".format(pkg["name"], "/".join(types)))
        self.set_status(build_type, status, "Blocked by: " + ", ".join(descs))
        build = self._get_build(build_type)
        build.setdefault("status_details", {})["blocked"] = blocked

    def is_new(self, build_type: BuildType) -> bool:
        build = self._get_build(build_type)
        return build.get("new", False)

    def get_build_patterns(self, build_type: BuildType) -> list[str]:
        patterns = []
        if build_type_is_src(build_type):
            patterns.append(f"{self['name']}-{self['version']}.src.tar.[!s]*")
        elif build_type in (Config.MINGW_ARCH_LIST + Config.MSYS_ARCH_LIST):
            for item in self._get_build(build_type).get('packages', []):
                patterns.append(f"{item}-{self['version']}-*.pkg.tar.zst")
        else:
            assert 0
        return patterns

    def get_failed_name(self, build_type: BuildType) -> str:
        return f"{build_type}-{self['name']}-{self['version']}.failed"

    def get_build_types(self) -> list[BuildType]:
        build_types = list(self._active_builds)
        if self["source"]:
            if any((k in Config.MINGW_ARCH_LIST) for k in build_types):
                build_types.append(Config.MINGW_SRC_BUILD_TYPE)
            if any((k in Config.MSYS_ARCH_LIST) for k in build_types):
                build_types.append(Config.MSYS_SRC_BUILD_TYPE)
        return build_types

    def _get_dep_build(self, build_type: BuildType) -> dict:
        if build_type == Config.MINGW_SRC_BUILD_TYPE:
            build_type = Config.MINGW_SRC_ARCH
        elif build_type == Config.MSYS_SRC_BUILD_TYPE:
            build_type = Config.MSYS_SRC_ARCH
        return self._get_build(build_type)

    def is_optional_dep(self, dep: "Package", dep_type: BuildType) -> bool:
        # Some deps are manually marked as optional to break cycles.
        # This requires them to be in the main repo though, otherwise the
        # cycle has to be fixed manually.
        return dep["name"] in Config.OPTIONAL_DEPS.get(self["name"], []) and not dep.is_new(dep_type)

    def get_depends(self, build_type: BuildType) -> "dict[ArchType, set[Package]]":
        build = self._get_dep_build(build_type)
        return build.get('ext-depends', {})

    def get_rdepends(self, build_type: BuildType) -> "dict[ArchType, set[Package]]":
        build = self._get_dep_build(build_type)
        return build.get('ext-rdepends', {})


def get_buildqueue() -> list[Package]:
    session = get_requests_session()
    r = session.get("http://localhost:8160/api/buildqueue2", timeout=REQUESTS_TIMEOUT)
    r.raise_for_status()

    return parse_buildqueue(r.text)


def parse_buildqueue(payload: str) -> list[Package]:
    pkgs = []
    for received in json.loads(payload):
        pkg = Package(received)
        pkg['repo'] = pkg['repo_url'].split('/')[-1]
        pkgs.append(pkg)

    # extract the package mapping
    dep_mapping = {}
    for pkg in pkgs:
        for build in pkg._active_builds.values():
            for name in build['packages']:
                dep_mapping[name] = pkg

    # link up dependencies with the real package in the queue
    for pkg in pkgs:
        for build in pkg._active_builds.values():
            ver_depends: dict[str, set[Package]] = {}
            for repo, deps in build['depends'].items():
                for dep in deps:
                    ver_depends.setdefault(repo, set()).add(dep_mapping[dep])
            build['ext-depends'] = ver_depends

    # reverse dependencies
    for pkg in pkgs:
        for build in pkg._active_builds.values():
            r_depends: dict[str, set[Package]] = {}
            for pkg2 in pkgs:
                for r_repo, build2 in pkg2._active_builds.items():
                    for repo, deps in build2['ext-depends'].items():
                        if pkg in deps:
                            r_depends.setdefault(r_repo, set()).add(pkg2)
            build['ext-rdepends'] = r_depends

    return pkgs


def get_cycles(pkgs: list[Package]) -> set[tuple[Package, Package]]:
    cycles: set[tuple[Package, Package]] = set()

    # In case the package is already built it doesn't matter if it is part of a cycle
    def pkg_is_finished(pkg: Package, build_type: BuildType) -> bool:
        return pkg.get_status(build_type) in [
            PackageStatus.FINISHED,
            PackageStatus.FINISHED_BUT_BLOCKED,
            PackageStatus.FINISHED_BUT_INCOMPLETE,
        ]

    # Transitive dependencies of a package. Excluding branches where a root is finished
    def get_buildqueue_deps(pkg: Package, build_type: ArchType) -> "dict[ArchType, set[Package]]":
        start = (build_type, pkg)
        todo = set([start])
        done = set()
        result = set()

        while todo:
            build_type, pkg = todo.pop()
            item = (build_type, pkg)
            done.add(item)
            if pkg_is_finished(pkg, build_type):
                continue
            result.add(item)
            for dep_build_type, deps in pkg.get_depends(build_type).items():
                for dep in deps:
                    dep_item = (dep_build_type, dep)
                    if dep_item not in done:
                        todo.add(dep_item)
        result.discard(start)

        d: dict[ArchType, set[Package]] = {}
        for build_type, pkg in result:
            d.setdefault(build_type, set()).add(pkg)
        return d

    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            if build_type_is_src(build_type):
                continue
            build_type = cast(ArchType, build_type)
            for dep_build_type, deps in get_buildqueue_deps(pkg, build_type).items():
                for dep in deps:
                    # manually broken cycle
                    if pkg.is_optional_dep(dep, dep_build_type) or dep.is_optional_dep(pkg, build_type):
                        continue
                    dep_deps = get_buildqueue_deps(dep, dep_build_type)
                    if pkg in dep_deps.get(build_type, set()):
                        cycles.add(tuple(sorted([pkg, dep], key=lambda p: p["name"])))  # type: ignore

    return cycles


def get_buildqueue_with_status(full_details: bool = False) -> list[Package]:
    cached_assets = CachedAssets()

    assets_failed = []
    for build_type in get_all_build_types():
        assets_failed.extend(cached_assets.get_failed_assets(build_type))

    failed_urls = {}
    if full_details:
        # This might take a while, so only in full mode
        with ThreadPoolExecutor(8) as executor:
            for asset, content in zip(
                    assets_failed, executor.map(download_text_asset, assets_failed)):
                result = json.loads(content)
                # No more GitHub Actions URLs
                # if result["urls"]:
                #     failed_urls[get_asset_filename(asset)] = result["urls"]

    def pkg_is_done(build_type: BuildType, pkg: Package) -> bool:
        done_names = [get_asset_filename(a) for a in cached_assets.get_assets(build_type)]
        for pattern in pkg.get_build_patterns(build_type):
            if not fnmatch.filter(done_names, pattern):
                return False
        return True

    def get_failed_urls(build_type: BuildType, pkg: Package) -> dict[str, str] | None:
        failed_names = [get_asset_filename(a) for a in assets_failed]
        name = pkg.get_failed_name(build_type)
        if name in failed_names:
            return failed_urls.get(name)
        return None

    def pkg_has_failed(build_type: BuildType, pkg: Package) -> bool:
        failed_names = [get_asset_filename(a) for a in assets_failed]
        name = pkg.get_failed_name(build_type)
        return name in failed_names

    def pkg_is_manual(build_type: BuildType, pkg: Package) -> bool:
        if build_type_is_src(build_type):
            return False
        for pattern, types in Config.MANUAL_BUILD:
            type_matches = not types or build_type in types
            if type_matches and fnmatch.fnmatchcase(pkg['name'], pattern):
                return True
        return False

    pkgs = get_buildqueue()

    # basic state
    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            if pkg_is_done(build_type, pkg):
                pkg.set_status(build_type, PackageStatus.FINISHED)
            elif pkg_has_failed(build_type, pkg):
                urls = get_failed_urls(build_type, pkg)
                pkg.set_status(build_type, PackageStatus.FAILED_TO_BUILD, urls=urls)
            elif pkg_is_manual(build_type, pkg):
                pkg.set_status(build_type, PackageStatus.MANUAL_BUILD_REQUIRED)
            else:
                pkg.set_status(build_type, PackageStatus.WAITING_FOR_BUILD)

    # wait for dependencies to be finished before starting a build
    for pkg in pkgs:
        for build_type in pkg.get_build_types():
            status = pkg.get_status(build_type)
            if status == PackageStatus.WAITING_FOR_BUILD:
                for dep_type, deps in pkg.get_depends(build_type).items():
                    for dep in deps:
                        dep_status = dep.get_status(dep_type)
                        if dep_status != PackageStatus.FINISHED:
                            if pkg.is_optional_dep(dep, dep_type):
                                continue
                            pkg.set_blocked(
                                build_type, PackageStatus.WAITING_FOR_DEPENDENCIES, dep, dep_type)

    # Block packages where not all deps/rdeps/related are finished
    changed = True
    while changed:
        changed = False
        for pkg in pkgs:
            for build_type in pkg.get_build_types():
                status = pkg.get_status(build_type)
                if status == PackageStatus.FINISHED:
                    # src builds are independent
                    if build_type_is_src(build_type):
                        continue

                    for dep_type, deps in pkg.get_depends(build_type).items():
                        for dep in deps:
                            dep_status = dep.get_status(dep_type)
                            if dep_status != PackageStatus.FINISHED:
                                pkg.set_blocked(
                                    build_type, PackageStatus.FINISHED_BUT_BLOCKED, dep, dep_type)
                                changed = True

                    for dep_type, deps in pkg.get_rdepends(build_type).items():
                        for dep in deps:
                            if dep["name"] in Config.IGNORE_RDEP_PACKAGES:
                                continue
                            dep_status = dep.get_status(dep_type)
                            dep_new = dep.is_new(dep_type)
                            # if the rdep isn't in the repo we can't break it by uploading
                            if dep_status != PackageStatus.FINISHED and not dep_new:
                                pkg.set_blocked(
                                    build_type, PackageStatus.FINISHED_BUT_BLOCKED, dep, dep_type)
                                changed = True

    # Block packages where not every build type is finished
    for pkg in pkgs:
        unfinished = []
        blocked = []
        finished = []
        for build_type in pkg.get_build_types():
            status = pkg.get_status(build_type)
            if status != PackageStatus.FINISHED:
                if status == PackageStatus.FINISHED_BUT_BLOCKED:
                    blocked.append(build_type)
                # if the package isn't in the repo better not block on it
                elif not pkg.is_new(build_type):
                    unfinished.append(build_type)
            else:
                finished.append(build_type)

        # We track source packages by assuming they are in the repo if there is
        # at least one binary package in the repo. Uploading lone source
        # packages will not change anything, so block them.
        if not blocked and not unfinished and finished and \
                all(build_type_is_src(bt) for bt in finished):
            for build_type in pkg.get_build_types():
                status = pkg.get_status(build_type)
                if status in (PackageStatus.FINISHED, PackageStatus.FINISHED_BUT_BLOCKED):
                    changed = True
                    pkg.set_status(build_type, PackageStatus.FINISHED_BUT_INCOMPLETE)
        elif unfinished:
            for build_type in pkg.get_build_types():
                status = pkg.get_status(build_type)
                if status in (PackageStatus.FINISHED, PackageStatus.FINISHED_BUT_BLOCKED):
                    changed = True
                    for bt in unfinished:
                        pkg.set_blocked(build_type, PackageStatus.FINISHED_BUT_INCOMPLETE, pkg, bt)
        elif blocked:
            for build_type in pkg.get_build_types():
                status = pkg.get_status(build_type)
                if status == PackageStatus.FINISHED:
                    changed = True
                    for bt in blocked:
                        pkg.set_blocked(build_type, PackageStatus.FINISHED_BUT_BLOCKED, pkg, bt)

    return pkgs


def update_status(pkgs: list[Package]) -> None:
    repo = get_current_repo()
    release = get_release(repo, "status")

    status_object: dict[str, Any] = {}

    packages = []
    for pkg in pkgs:
        pkg_result = {}
        pkg_result["name"] = pkg["name"]
        pkg_result["version"] = pkg["version"]
        builds = {}
        for build_type in pkg.get_build_types():
            details = pkg.get_status_details(build_type)
            details.pop("blocked", None)
            details["status"] = pkg.get_status(build_type).value
            builds[build_type] = details
        pkg_result["builds"] = builds
        packages.append(pkg_result)
    status_object["packages"] = packages

    cycles = []
    for a, b in get_cycles(pkgs):
        cycles.append([a["name"], b["name"]])
    status_object["cycles"] = sorted(cycles)

    content = json.dumps(status_object, indent=2).encode()

    # If multiple jobs update this at the same time things can fail,
    # assume the other one went through and just ignore all errors
    try:
        asset = None
        asset_name = "status.json"
        # Look up an existing status.json; leave asset as None if there is
        # none (iterating without this check could pick up an unrelated
        # asset and delete the wrong attachment below).
        for existing in release.assets:
            if existing.name == asset_name:
                asset = existing
                break

        do_replace = True

        # Avoid uploading the same file twice, to reduce API write calls
        if asset is not None and asset.size == len(content):
            try:
                old_content = download_text_asset(asset, cache=True)
                if old_content == content.decode():
                    do_replace = False
            except requests.RequestException:
                # the server sometimes returns 404 for a short time after uploading
                pass

        if do_replace:
            if asset is not None:
                gitea = get_gitea()
                gitea.repo_delete_release_attachment(repo.owner.login, repo.name, release.id, asset.id)

            tmp_path = None
            try:
                with tempfile.NamedTemporaryFile(delete=False) as tf:
                    tf.write(content)
                    tf.flush()
                    tmp_path = tf.name

                gitea = get_gitea()
                new_asset = gitea.repo_create_release_attachment(
                    repo.owner.login, repo.name, release.id, name=asset_name, attachment=tmp_path)
            finally:
                if tmp_path and os.path.exists(tmp_path):
                    os.remove(tmp_path)

            print(f"Uploaded status file for {len(packages)} packages: {new_asset.browser_download_url}")
            queue_website_update()
        else:
            print("Status unchanged")
    except (ApiException, requests.RequestException) as e:
        print(e)
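
For orientation, update_status() above uploads a status.json of roughly this shape; a sketch with made-up package data, with key names mirroring the code:

    # Sketch of the uploaded status document, not real data.
    status_object = {
        "packages": [
            {
                "name": "nghttp2",
                "version": "1.64.0-1",
                "builds": {
                    "msys": {
                        "desc": None,  # set_status() description
                        "urls": {},    # build log URLs, if any
                        "status": "waiting-for-build",
                    },
                },
            },
        ],
        "cycles": [["c-ares", "nghttp2"]],  # sorted pairs of package names
    }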

msys2_autobuild/utils.py (new file, 122 lines)
@@ -0,0 +1,122 @@
import os
from contextlib import contextmanager
from datetime import timedelta
from functools import cache
from typing import Any, AnyStr, TypeAlias
from collections.abc import Generator

import requests
from requests.adapters import HTTPAdapter

from .config import REQUESTS_RETRY, REQUESTS_TIMEOUT, Config

PathLike: TypeAlias = os.PathLike | AnyStr
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))


def requests_cache_disabled() -> Any:
    import requests_cache
    return requests_cache.disabled()


@cache
def get_requests_session(nocache: bool = False) -> requests.Session:
    adapter = HTTPAdapter(max_retries=REQUESTS_RETRY)
    if nocache:
        with requests_cache_disabled():
            http = requests.Session()
    else:
        http = requests.Session()
    http.mount("https://", adapter)
    http.mount("http://", adapter)
    return http


@contextmanager
def install_requests_cache() -> Generator:
    # This adds basic etag based caching, to avoid hitting API rate limiting

    import requests_cache
    from requests_cache.backends.sqlite import SQLiteCache

    # Monkey patch globally, so pygithub uses it as well.
    # Only do re-validation with etag/date etc and ignore the cache-control
    # headers that github sends by default with 60 seconds.
    cache_dir = os.path.join(os.getcwd(), '.autobuild_cache')
    os.makedirs(cache_dir, exist_ok=True)
    cache_file = f'http_cache_{requests_cache.__version__}.sqlite'
    # delete other versions
    for f in os.listdir(cache_dir):
        if f.startswith('http_cache') and f != cache_file:
            os.remove(os.path.join(cache_dir, f))
    requests_cache.install_cache(
        always_revalidate=True,
        cache_control=False,
        expire_after=requests_cache.EXPIRE_IMMEDIATELY,
        backend=SQLiteCache(os.path.join(cache_dir, cache_file)))

    # Call this once, so it gets cached from the main thread and can be used in a thread pool
    get_requests_session(nocache=True)

    try:
        yield
    finally:
        # Delete old cache entries, so this doesn't grow indefinitely
        cache = requests_cache.get_cache()
        assert cache is not None
        cache.delete(older_than=timedelta(hours=3))

        # un-monkey-patch again
        requests_cache.uninstall_cache()


@contextmanager
def gha_group(title: str) -> Generator:
    print(f'\n::group::{title}')
    try:
        yield
    finally:
        print('::endgroup::')


def queue_website_update() -> None:
    session = get_requests_session()
    r = session.post('https://packages.msys2.org/api/trigger_update', timeout=REQUESTS_TIMEOUT)
    try:
        # it's not worth stopping the build if this fails, so just log it
        r.raise_for_status()
    except requests.RequestException as e:
        print(e)


def parse_optional_deps(optional_deps: str) -> dict[str, list[str]]:
    res: dict[str, list[str]] = {}
    optional_deps = optional_deps.replace(" ", "")
    if not optional_deps:
        return res
    for entry in optional_deps.split(","):
        assert ":" in entry
        first, second = entry.split(":", 1)
        res.setdefault(first, []).append(second)
    return res


def apply_optional_deps(optional_deps: str) -> None:
    for dep, ignored in parse_optional_deps(optional_deps).items():
        Config.OPTIONAL_DEPS.setdefault(dep, []).extend(ignored)


def ask_yes_no(prompt: str, default_no: bool = True) -> bool:
    """Ask a yes/no question via input() and return the answer."""

    if default_no:
        prompt += " [y/N] "
    else:
        prompt += " [Y/n] "

    user_input = input(prompt).strip().lower()

    if not user_input:
        return not default_no
    else:
        return user_input == 'y'
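
The optional-deps string format handled above is comma-separated package:dependency pairs, with repeated packages accumulating; this matches the assertion in tests/main_test.py below:

    from msys2_autobuild.utils import apply_optional_deps, parse_optional_deps

    # "a:b,c:d,a:x": for package "a" the deps "b" and "x" are optional,
    # for package "c" the dep "d" is.
    assert parse_optional_deps("a:b,c:d,a:x") == {'a': ['b', 'x'], 'c': ['d']}

    # apply_optional_deps() merges such a mapping into Config.OPTIONAL_DEPS,
    # e.g. to break a cycle from the command line:
    apply_optional_deps("mingw-w64-llvm:mingw-w64-libc++")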

poetry.lock (generated, 979 lines)
File diff suppressed because it is too large.

pyproject.toml
@@ -1,19 +1,32 @@
-[tool.poetry]
+[project]
 name = "msys2-autobuild"
 version = "0.1.0"
 description = ""
-authors = ["Christoph Reiter <reiter.christoph@gmail.com>"]
-license = "MIT"
+authors = [
+    { name = "Christoph Reiter", email = "reiter.christoph@gmail.com" }
+]
+requires-python = ">=3.12.0,<4.0"
+dependencies = [
+    "PyGithub>=2.8.1,<3",
+    "tabulate>=0.9.0,<0.10",
+    "requests>=2.28.1,<3",
+    "requests-cache>=1.0.0,<2",
+    "urllib3>=2.2.1,<3",
+]
 
-[tool.poetry.dependencies]
-python = "^3.7"
-PyGithub = "^1.54.1"
-tabulate = "^0.8.7"
-requests = "^2.25.1"
+[project.scripts]
+msys2-autobuild = "msys2_autobuild.main:run"
 
-[tool.poetry.dev-dependencies]
-mypy = "^0.790"
-flake8 = "^3.8.4"
+[dependency-groups]
+dev = [
+    "pytest>=8.0.0,<9",
+    "mypy==1.18.1",
+    "flake8>=7.0.0,<8",
+    "types-tabulate>=0.9.0.0,<0.10",
+    "types-requests>=2.25.0,<3",
+]
 
 [build-system]
-requires = ["poetry-core>=1.0.0"]
+requires = ["poetry-core>=2.2.0"]
 build-backend = "poetry.core.masonry.api"

requirements.txt
@@ -1,29 +1,18 @@
-certifi==2020.12.5; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" \
-    --hash=sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830 \
-    --hash=sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c
-chardet==4.0.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" \
-    --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 \
-    --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa
-deprecated==1.2.11; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \
-    --hash=sha256:924b6921f822b64ec54f49be6700a126bab0640cfafca78f22c9d429ed590560 \
-    --hash=sha256:471ec32b2755172046e28102cd46c481f21c6036a0ec027521eba8521aa4ef35
-idna==2.10; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" \
-    --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 \
-    --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6
-pygithub==1.54.1; python_version >= "3.6" \
-    --hash=sha256:87afd6a67ea582aa7533afdbf41635725f13d12581faed7e3e04b1579c0c0627 \
-    --hash=sha256:300bc16e62886ca6537b0830e8f516ea4bc3ef12d308e0c5aff8bdbd099173d4
-pyjwt==1.7.1; python_version >= "3.6" \
-    --hash=sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e \
-    --hash=sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96
-requests==2.25.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") \
-    --hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e \
-    --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804
-tabulate==0.8.7 \
-    --hash=sha256:ac64cb76d53b1231d364babcd72abbb16855adac7de6665122f97b593f1eb2ba \
-    --hash=sha256:db2723a20d04bcda8522165c73eea7c300eda74e0ce852d9022e0159d7895007
-urllib3==1.26.2; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version < "4" and python_version >= "3.6" \
-    --hash=sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473 \
-    --hash=sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08
-wrapt==1.12.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" \
-    --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7
+attrs==25.3.0 ; python_version >= "3.12" and python_version < "4.0"
+cattrs==25.2.0 ; python_version >= "3.12" and python_version < "4.0"
+certifi==2025.8.3 ; python_version >= "3.12" and python_version < "4.0"
+cffi==2.0.0 ; python_version >= "3.12" and python_version < "4.0" and platform_python_implementation != "PyPy"
+charset-normalizer==3.4.3 ; python_version >= "3.12" and python_version < "4.0"
+cryptography==46.0.1 ; python_version >= "3.12" and python_version < "4.0"
+idna==3.10 ; python_version >= "3.12" and python_version < "4.0"
+platformdirs==4.4.0 ; python_version >= "3.12" and python_version < "4.0"
+pycparser==2.23 ; python_version >= "3.12" and python_version < "4.0" and platform_python_implementation != "PyPy" and implementation_name != "PyPy"
+pygithub==2.8.1 ; python_version >= "3.12" and python_version < "4.0"
+pyjwt==2.10.1 ; python_version >= "3.12" and python_version < "4.0"
+pynacl==1.6.0 ; python_version >= "3.12" and python_version < "4.0"
+requests-cache==1.2.1 ; python_version >= "3.12" and python_version < "4.0"
+requests==2.32.5 ; python_version >= "3.12" and python_version < "4.0"
+tabulate==0.9.0 ; python_version >= "3.12" and python_version < "4.0"
+typing-extensions==4.15.0 ; python_version >= "3.12" and python_version < "4.0"
+url-normalize==2.2.1 ; python_version >= "3.12" and python_version < "4.0"
+urllib3==2.5.0 ; python_version >= "3.12" and python_version < "4.0"

tests/__init__.py (new file, empty)

tests/main_test.py (new file, 140 lines)
@@ -0,0 +1,140 @@
# type: ignore

import os
import stat
import tempfile
from pathlib import Path

from msys2_autobuild.utils import parse_optional_deps
from msys2_autobuild.queue import parse_buildqueue, get_cycles
from msys2_autobuild.build import make_tree_writable, remove_junctions


def test_make_tree_writable():
    with tempfile.TemporaryDirectory() as tempdir:
        nested_dir = Path(tempdir) / "nested"
        nested_junction = nested_dir / "junction"
        nested_dir.mkdir()
        file_path = nested_dir / "test_file.txt"
        file_path.write_text("content")

        # Create a junction loop if possible, to make sure we ignore it
        if os.name == 'nt':
            import _winapi
            _winapi.CreateJunction(str(nested_dir), str(nested_junction))
        else:
            nested_junction.mkdir()

        # Remove permissions
        for p in [tempdir, nested_dir, file_path, nested_junction]:
            os.chmod(p, os.stat(p).st_mode & ~stat.S_IWRITE & ~stat.S_IREAD)

        make_tree_writable(tempdir)

        assert os.access(tempdir, os.W_OK) and os.access(tempdir, os.R_OK)
        assert os.access(nested_dir, os.W_OK) and os.access(nested_dir, os.R_OK)
        assert os.access(file_path, os.W_OK) and os.access(file_path, os.R_OK)
        assert os.access(nested_junction, os.W_OK) and os.access(nested_junction, os.R_OK)


def test_remove_junctions():
    with tempfile.TemporaryDirectory() as tempdir:
        nested_dir = Path(tempdir) / "nested"
        nested_junction = nested_dir / "junction"
        nested_dir.mkdir()

        # Create a junction loop if possible, to make sure we ignore it
        if os.name == 'nt':
            import _winapi
            _winapi.CreateJunction(str(nested_dir), str(nested_junction))
            assert nested_junction.exists()
            assert os.path.isjunction(nested_junction)

            remove_junctions(tempdir)
            assert not nested_junction.exists()


def test_parse_optional_deps():
    assert parse_optional_deps("a:b,c:d,a:x") == {'a': ['b', 'x'], 'c': ['d']}


def test_get_cycles():
    buildqueue = """
[
  {
    "name": "c-ares",
    "version": "1.34.2-1",
    "version_repo": "1.33.1-1",
    "repo_url": "https://github.com/msys2/MSYS2-packages",
    "repo_path": "c-ares",
    "source": true,
    "builds": {
      "msys": {
        "packages": [
          "libcares",
          "libcares-devel"
        ],
        "depends": {
          "msys": [
            "libnghttp2",
            "libuv"
          ]
        },
        "new": false
      }
    }
  },
  {
    "name": "nghttp2",
    "version": "1.64.0-1",
    "version_repo": "1.63.0-1",
    "repo_url": "https://github.com/msys2/MSYS2-packages",
    "repo_path": "nghttp2",
    "source": true,
    "builds": {
      "msys": {
        "packages": [
          "libnghttp2",
          "libnghttp2-devel",
          "nghttp2"
        ],
        "depends": {
          "msys": [
            "libcares",
            "libcares-devel"
          ]
        },
        "new": false
      }
    }
  },
  {
    "name": "libuv",
    "version": "1.49.2-1",
    "version_repo": "1.49.1-1",
    "repo_url": "https://github.com/msys2/MSYS2-packages",
    "repo_path": "libuv",
    "source": true,
    "builds": {
      "msys": {
        "packages": [
          "libuv",
          "libuv-devel"
        ],
        "depends": {
          "msys": [
            "libnghttp2"
          ]
        },
        "new": false
      }
    }
  }
]"""

    pkgs = parse_buildqueue(buildqueue)
    cycles = get_cycles(pkgs)
    assert len(cycles) == 3
    assert (pkgs[0], pkgs[2]) in cycles
    assert (pkgs[0], pkgs[1]) in cycles
    assert (pkgs[2], pkgs[1]) in cycles
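
To run this test module locally (a sketch; assumes the dev dependency group with pytest is installed):

    import pytest

    # Equivalent to `python -m pytest tests/` from the repository root.
    raise SystemExit(pytest.main(["tests"]))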

update-status.bat (new file, 2 lines)
@@ -0,0 +1,2 @@
@echo off
C:\msys64\msys2_shell.cmd -here -mingw64 -no-start -defterm -c "pacman --needed --noconfirm -S mingw-w64-x86_64-python-tabulate mingw-w64-x86_64-python-requests-cache && python -m msys2_autobuild update-status"