mirror of
https://github.com/actions/runner.git
synced 2026-01-16 08:42:55 +08:00
Compare commits
591 Commits
users/tihu
...
rentziass/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b652350bda | ||
|
|
2525a1f9a3 | ||
|
|
ff85ab7fe0 | ||
|
|
2800573f56 | ||
|
|
f1a0d1a9f8 | ||
|
|
15b7034088 | ||
|
|
bbe97ff1c8 | ||
|
|
7a36a68b15 | ||
|
|
f45c5d0785 | ||
|
|
7e4f99337f | ||
|
|
186656e153 | ||
|
|
2e02381901 | ||
|
|
a55696a429 | ||
|
|
379ac038b2 | ||
|
|
14e8e1f667 | ||
|
|
3f43560cb9 | ||
|
|
73f7dbb681 | ||
|
|
f554a6446d | ||
|
|
bdceac4ab3 | ||
|
|
3f1dd45172 | ||
|
|
cf8f50b4d8 | ||
|
|
2cf22c4858 | ||
|
|
04d77df0c7 | ||
|
|
651077689d | ||
|
|
c96dcd4729 | ||
|
|
4b0058f15c | ||
|
|
87d1dfb798 | ||
|
|
c992a2b406 | ||
|
|
b2204f1fab | ||
|
|
f99c3e6ee8 | ||
|
|
463496e4fb | ||
|
|
3f9f6f3994 | ||
|
|
221f65874f | ||
|
|
9a21440691 | ||
|
|
54bcc001e5 | ||
|
|
7df164d2c7 | ||
|
|
a54f380b0e | ||
|
|
8b184c3871 | ||
|
|
b56b161118 | ||
|
|
69aca04de1 | ||
|
|
b3a60e6b06 | ||
|
|
334df748d1 | ||
|
|
b08f962182 | ||
|
|
b8144769c6 | ||
|
|
2a00363a90 | ||
|
|
a1c09806c3 | ||
|
|
c0776daddb | ||
|
|
b5b7986cd6 | ||
|
|
53d69ff441 | ||
|
|
bca18f71d0 | ||
|
|
1b8efb99f6 | ||
|
|
0b2c71fc31 | ||
|
|
60af948051 | ||
|
|
ff775ca101 | ||
|
|
f74be39e77 | ||
|
|
1eb15f28a7 | ||
|
|
afe4fc8446 | ||
|
|
a12731d34d | ||
|
|
18f2450d71 | ||
|
|
2c5f29c3ca | ||
|
|
c9de9a8699 | ||
|
|
68ff57dbc4 | ||
|
|
c774eb8d46 | ||
|
|
f184048a9a | ||
|
|
338d83a941 | ||
|
|
0b074a3e93 | ||
|
|
25faeabaa8 | ||
|
|
b121ef832b | ||
|
|
170033c92b | ||
|
|
f9c4e17fd9 | ||
|
|
646da708ba | ||
|
|
bf8236344b | ||
|
|
720f16aef6 | ||
|
|
f77066a6a8 | ||
|
|
df83df2a32 | ||
|
|
97b2254146 | ||
|
|
7f72ba9e48 | ||
|
|
f8ae5bb1a7 | ||
|
|
a5631456a2 | ||
|
|
65dfa460ba | ||
|
|
80ee51f164 | ||
|
|
c95883f28e | ||
|
|
6e940643a9 | ||
|
|
629f2384a4 | ||
|
|
c3bf70becb | ||
|
|
8b65f5f9df | ||
|
|
5f1efec208 | ||
|
|
20d82ad357 | ||
|
|
0ebdf9e83d | ||
|
|
6543bf206b | ||
|
|
a942627965 | ||
|
|
83539166c9 | ||
|
|
1c1e8bfd18 | ||
|
|
59177fa379 | ||
|
|
2d7635a7f0 | ||
|
|
0203cf24d3 | ||
|
|
5e74a4d8e4 | ||
|
|
6ca97eeb88 | ||
|
|
8a9b96806d | ||
|
|
dc9cf684c9 | ||
|
|
c765c990b9 | ||
|
|
ed48ddd08c | ||
|
|
a1e6ad8d2e | ||
|
|
14856e63bc | ||
|
|
0d24afa114 | ||
|
|
20912234a5 | ||
|
|
5969cbe208 | ||
|
|
9f57d37642 | ||
|
|
60563d82d1 | ||
|
|
097ada9374 | ||
|
|
9b457781d6 | ||
|
|
9709b69571 | ||
|
|
acf3f2ba12 | ||
|
|
f03fcc8a01 | ||
|
|
e4e103c5ed | ||
|
|
a906ec302b | ||
|
|
d9e714496d | ||
|
|
df189ba6e3 | ||
|
|
4c1de69e1c | ||
|
|
26185d43d0 | ||
|
|
e911d2908d | ||
|
|
ce4b7f4dd6 | ||
|
|
505fa60905 | ||
|
|
57459ad274 | ||
|
|
890e43f6c5 | ||
|
|
3a27ca292a | ||
|
|
282f7cd2b2 | ||
|
|
f060fe5c85 | ||
|
|
1a092a24a3 | ||
|
|
26eff8e55a | ||
|
|
d7cfd2e341 | ||
|
|
a3a7b6a77e | ||
|
|
db6005b0a7 | ||
|
|
9155c42c09 | ||
|
|
1c319b4d42 | ||
|
|
fe10d4ae82 | ||
|
|
27d9c886ab | ||
|
|
5106d6578e | ||
|
|
d5ccbd10d1 | ||
|
|
f1b5b5bd5c | ||
|
|
aaf1b92847 | ||
|
|
c1095ae2d1 | ||
|
|
a0a0a76378 | ||
|
|
d47013928b | ||
|
|
cdeec012aa | ||
|
|
2cb1f9431a | ||
|
|
e86c9487ab | ||
|
|
dc9695f123 | ||
|
|
6654f6b3de | ||
|
|
f5e4e7e47c | ||
|
|
68ca457917 | ||
|
|
77700abf81 | ||
|
|
a0ba8fd399 | ||
|
|
6b08f23b6c | ||
|
|
8131246933 | ||
|
|
7211a53c9e | ||
|
|
07310cabc0 | ||
|
|
0195d7ca77 | ||
|
|
259af3eda2 | ||
|
|
0ce29d09c6 | ||
|
|
a84e1c2b15 | ||
|
|
de51cd0ed6 | ||
|
|
3333de3a36 | ||
|
|
b065e5abbe | ||
|
|
bae52e28f9 | ||
|
|
c2c91438e8 | ||
|
|
3486c54ccb | ||
|
|
a61328a7e7 | ||
|
|
52dc98b10f | ||
|
|
a7b319530e | ||
|
|
54f082722f | ||
|
|
ed9d8fc9f7 | ||
|
|
fccbe8fb0b | ||
|
|
e3bc10a931 | ||
|
|
ba50bf6482 | ||
|
|
8eef71d93d | ||
|
|
7ae9fc03a2 | ||
|
|
8e97ad4d86 | ||
|
|
aa76aa476f | ||
|
|
0738df9702 | ||
|
|
8bf52ffe7d | ||
|
|
9df3fc825d | ||
|
|
fde5227fbf | ||
|
|
27f6ca8177 | ||
|
|
078eb3b381 | ||
|
|
c46dac6736 | ||
|
|
e640a9fef3 | ||
|
|
6d266a7c44 | ||
|
|
4700649bb5 | ||
|
|
27580ef8de | ||
|
|
6c94f78f37 | ||
|
|
074d9c0922 | ||
|
|
59f2be2158 | ||
|
|
1e1f7845fa | ||
|
|
694ae12b23 | ||
|
|
d16fb2c593 | ||
|
|
d37a7ae14d | ||
|
|
6ef5803f24 | ||
|
|
2c03d74f11 | ||
|
|
3d34a3c6d6 | ||
|
|
59ec9b4139 | ||
|
|
4a99838fa2 | ||
|
|
af8dee51e1 | ||
|
|
9b3b554758 | ||
|
|
4d8402c260 | ||
|
|
aa0ee2bf64 | ||
|
|
dcc64fead2 | ||
|
|
149123c232 | ||
|
|
e292ec220e | ||
|
|
3696b7d89f | ||
|
|
6d7446a45e | ||
|
|
ddf41af767 | ||
|
|
0b0cb5520d | ||
|
|
4c0a43f0e4 | ||
|
|
65764d9ddc | ||
|
|
36c66c8083 | ||
|
|
99b464e102 | ||
|
|
e1fa1fcbc3 | ||
|
|
2979fbad94 | ||
|
|
a77fe8a53f | ||
|
|
7e84ae0b30 | ||
|
|
fb6d1adb43 | ||
|
|
7303cb5673 | ||
|
|
43d67e46db | ||
|
|
ae04147f96 | ||
|
|
12506842c0 | ||
|
|
2190396357 | ||
|
|
41bc0da6fe | ||
|
|
2a7f327d93 | ||
|
|
dbcaa7cf3d | ||
|
|
8df87a82b0 | ||
|
|
70746ff593 | ||
|
|
054fc2e046 | ||
|
|
ecb732eaf4 | ||
|
|
3dab1f1fb0 | ||
|
|
8f1c723ba0 | ||
|
|
1e74a8137b | ||
|
|
3f28dd845f | ||
|
|
edfdbb9661 | ||
|
|
00888c10f9 | ||
|
|
84b1bea43e | ||
|
|
ce4d7be00f | ||
|
|
bd7235ef62 | ||
|
|
0f15173045 | ||
|
|
76dc3a28c0 | ||
|
|
c67e7f2813 | ||
|
|
54052b94fb | ||
|
|
f2c05de91c | ||
|
|
18803bdff6 | ||
|
|
04b07b6675 | ||
|
|
dd9fcfc5b2 | ||
|
|
5107c5efb2 | ||
|
|
1b61d78c07 | ||
|
|
2e0eb2c11f | ||
|
|
2d83e1d88f | ||
|
|
4a1e38095b | ||
|
|
f467e9e125 | ||
|
|
77e0bfbb8a | ||
|
|
a52c53955c | ||
|
|
8ebf298bcd | ||
|
|
4b85145661 | ||
|
|
bc8b6e0152 | ||
|
|
82e01c6173 | ||
|
|
93bc1cd918 | ||
|
|
692d910868 | ||
|
|
2c8c941622 | ||
|
|
86d6211c75 | ||
|
|
aa90563cae | ||
|
|
4cb3cb2962 | ||
|
|
d7777fd632 | ||
|
|
d8bce88c4f | ||
|
|
601d3de3f3 | ||
|
|
034c51cd0b | ||
|
|
d296014f99 | ||
|
|
3449d5fa52 | ||
|
|
6603bfb74c | ||
|
|
b19b9462d8 | ||
|
|
3db5c90cc4 | ||
|
|
927b26a364 | ||
|
|
72559572f6 | ||
|
|
31318d81ba | ||
|
|
1d47bfa6c7 | ||
|
|
651ea42e00 | ||
|
|
bcc665a7a1 | ||
|
|
cd812f0395 | ||
|
|
fa874cf314 | ||
|
|
bf0e76631b | ||
|
|
1d82031a2c | ||
|
|
d1a619ff09 | ||
|
|
11680fc78f | ||
|
|
3e5433ec86 | ||
|
|
b647b890c5 | ||
|
|
894c50073a | ||
|
|
5268d74ade | ||
|
|
7414e08fbd | ||
|
|
dcb790f780 | ||
|
|
b7ab810945 | ||
|
|
7310ba0a08 | ||
|
|
e842959e3e | ||
|
|
9f19310b5b | ||
|
|
84220a21d1 | ||
|
|
8e0cd36cd8 | ||
|
|
f1f18f67e1 | ||
|
|
ac39c4bd0a | ||
|
|
3f3d9b0d99 | ||
|
|
af485fb660 | ||
|
|
9e3e57ff90 | ||
|
|
ac89b31d2f | ||
|
|
65201ff6be | ||
|
|
661b261959 | ||
|
|
8a25302ba3 | ||
|
|
c7d65c42d6 | ||
|
|
a9bae6f37a | ||
|
|
3136ce3a71 | ||
|
|
a4c57f2747 | ||
|
|
ce4e62c849 | ||
|
|
121f080023 | ||
|
|
cbcb4c568a | ||
|
|
5d4b391f06 | ||
|
|
85fdc9b6b4 | ||
|
|
7f58504d35 | ||
|
|
611a7a85ed | ||
|
|
fb4bdbe440 | ||
|
|
940f4f4f40 | ||
|
|
4647f3ed5f | ||
|
|
544f19042b | ||
|
|
c851794f04 | ||
|
|
22d4310b69 | ||
|
|
65361e0fb5 | ||
|
|
36e37a0885 | ||
|
|
a5cd1ba4b6 | ||
|
|
acdc6edf7c | ||
|
|
b4a7bb0969 | ||
|
|
f47384b46e | ||
|
|
f672567acc | ||
|
|
e25c754744 | ||
|
|
f57ecd8e3c | ||
|
|
463ec00cb4 | ||
|
|
c3a7188eca | ||
|
|
2a6f271afa | ||
|
|
462337a4a4 | ||
|
|
8f1c070506 | ||
|
|
bf445e2750 | ||
|
|
67d70803a9 | ||
|
|
8c917b4ad3 | ||
|
|
440238adc4 | ||
|
|
8250726be1 | ||
|
|
5b2bc388ca | ||
|
|
6a2381f525 | ||
|
|
1f0c91e23e | ||
|
|
020a1ed790 | ||
|
|
c1a5dc71a5 | ||
|
|
c68e28788d | ||
|
|
a823a7f669 | ||
|
|
21ca5e6f04 | ||
|
|
f4197fb5a5 | ||
|
|
3a8cb43022 | ||
|
|
80a17a2f0c | ||
|
|
16834edc67 | ||
|
|
2908d82845 | ||
|
|
3f5b813499 | ||
|
|
7b703d667d | ||
|
|
d2f0a46865 | ||
|
|
143639ddac | ||
|
|
474d0fb354 | ||
|
|
15c0fe6c1d | ||
|
|
2b66cbe699 | ||
|
|
0e9e9f1e8d | ||
|
|
be65955a9d | ||
|
|
e419ae3c7e | ||
|
|
bb40cd2788 | ||
|
|
e0acb14bfc | ||
|
|
1ff8ad7860 | ||
|
|
8dd2cec3af | ||
|
|
7b53c38294 | ||
|
|
e22452c2d6 | ||
|
|
9bbfed0740 | ||
|
|
cf5afc63da | ||
|
|
a00db53b0d | ||
|
|
73ef82ff85 | ||
|
|
7892066256 | ||
|
|
8b9a81c952 | ||
|
|
460d9ae5a8 | ||
|
|
e94e744bed | ||
|
|
94080812f7 | ||
|
|
1183100ab8 | ||
|
|
4f40f29cff | ||
|
|
d88823c634 | ||
|
|
a8783c023f | ||
|
|
2606425cc5 | ||
|
|
8fb038b0e0 | ||
|
|
8b30f9381b | ||
|
|
8206cf4e73 | ||
|
|
6680a3b142 | ||
|
|
b882f6696a | ||
|
|
e76de55cda | ||
|
|
9eb4b96713 | ||
|
|
719348e0bf | ||
|
|
9fe5aa2a9a | ||
|
|
765a5c3efc | ||
|
|
e752edf7b5 | ||
|
|
e350f35217 | ||
|
|
8fa970a1e6 | ||
|
|
8eefd849c1 | ||
|
|
f6e9809844 | ||
|
|
5b2e4049bc | ||
|
|
7cb61925b0 | ||
|
|
a61d3f37dc | ||
|
|
e30b9d6d12 | ||
|
|
496904c0b7 | ||
|
|
b91ad56f92 | ||
|
|
f25c9dfba3 | ||
|
|
7d432fb24c | ||
|
|
e8ee6f7b1b | ||
|
|
d4bbbb8419 | ||
|
|
4ffd081aea | ||
|
|
c05e6748c3 | ||
|
|
a2b7856c9c | ||
|
|
5f1c6f4708 | ||
|
|
8415f13bab | ||
|
|
471e3ae2d9 | ||
|
|
1096b975e4 | ||
|
|
282ba4cfc8 | ||
|
|
b737a5ac5c | ||
|
|
20721bc950 | ||
|
|
fde86b0666 | ||
|
|
efffbaeabc | ||
|
|
3a1376f90e | ||
|
|
50b3edff3c | ||
|
|
58f7a379a1 | ||
|
|
e13627df81 | ||
|
|
48cbee08f9 | ||
|
|
21b49c542c | ||
|
|
8db8bbe13a | ||
|
|
49b04976f4 | ||
|
|
eeb0cf6f1e | ||
|
|
f8a28c3c4e | ||
|
|
1bc14f0607 | ||
|
|
22d1938ac4 | ||
|
|
229b9b8ecc | ||
|
|
896152d78e | ||
|
|
8d74a9ead6 | ||
|
|
77b8586a03 | ||
|
|
c8c47d4f27 | ||
|
|
58f3ff55aa | ||
|
|
6353ac84d7 | ||
|
|
ad9a4a45d1 | ||
|
|
a41397ae93 | ||
|
|
c4d41e95cb | ||
|
|
af6ed41bcb | ||
|
|
e8b2380a20 | ||
|
|
38ab9dedf4 | ||
|
|
c7629700ad | ||
|
|
b9a0b5dba9 | ||
|
|
766cefe599 | ||
|
|
2ecd7d2fc6 | ||
|
|
0484afeec7 | ||
|
|
1ceb1a67f2 | ||
|
|
9f778b814d | ||
|
|
92258f9ea1 | ||
|
|
74eeb82684 | ||
|
|
0e7ca9aedb | ||
|
|
bb7b1e8259 | ||
|
|
440c81b770 | ||
|
|
9958fc0374 | ||
|
|
81b07eb1c4 | ||
|
|
514ecec5a3 | ||
|
|
128b212b13 | ||
|
|
2dfa28e6e0 | ||
|
|
fd96246580 | ||
|
|
8ef48200b4 | ||
|
|
d61b27b839 | ||
|
|
542e8a3c98 | ||
|
|
e8975514fd | ||
|
|
0befa62f64 | ||
|
|
aaf02ab34c | ||
|
|
02c9d1c704 | ||
|
|
982784d704 | ||
|
|
8c096baf49 | ||
|
|
8d6972e38b | ||
|
|
1ab35b0938 | ||
|
|
f86e968d38 | ||
|
|
e979331be4 | ||
|
|
97195bad58 | ||
|
|
6d1d2460ac | ||
|
|
67356a3305 | ||
|
|
9a228e52e9 | ||
|
|
3cd76671dd | ||
|
|
e6e5f36dd0 | ||
|
|
24a27efd4f | ||
|
|
ca7be16dd3 | ||
|
|
f1c57ac0ef | ||
|
|
8581a041a5 | ||
|
|
6412390a22 | ||
|
|
7306014861 | ||
|
|
d6f8633efc | ||
|
|
130f6788d5 | ||
|
|
9b390e0531 | ||
|
|
a7101008a2 | ||
|
|
4a6630531b | ||
|
|
caec043085 | ||
|
|
a1244d2269 | ||
|
|
332b97f838 | ||
|
|
72830cfc12 | ||
|
|
29a28a870f | ||
|
|
0dd7a113f1 | ||
|
|
83b8baa45e | ||
|
|
d5e566ad17 | ||
|
|
64381cca6a | ||
|
|
f1b1532f32 | ||
|
|
04761e5353 | ||
|
|
f9e2fa939c | ||
|
|
92acb625fb | ||
|
|
6b9e8a6be4 | ||
|
|
f41f5d259d | ||
|
|
369a4eccad | ||
|
|
088981a372 | ||
|
|
852a80fcbd | ||
|
|
63640e91fa | ||
|
|
9122fe7e10 | ||
|
|
6b8452170a | ||
|
|
cc49e65356 | ||
|
|
1632e4a343 | ||
|
|
b465102e7f | ||
|
|
98c857b927 | ||
|
|
dda53af485 | ||
|
|
c0bc4c02f8 | ||
|
|
c6630ce285 | ||
|
|
40ed7f8a40 | ||
|
|
7f5067a8b5 | ||
|
|
4adaf9c1e6 | ||
|
|
d301c06a7e | ||
|
|
3e196355de | ||
|
|
dad7ad0384 | ||
|
|
b18bda773f | ||
|
|
3fc993da59 | ||
|
|
5421fe3f71 | ||
|
|
b87b4aac5c | ||
|
|
daba735b52 | ||
|
|
46ce960fd2 | ||
|
|
f4b7f91c21 | ||
|
|
ff65183e43 | ||
|
|
0f13055428 | ||
|
|
252f4de577 | ||
|
|
b6a46f2114 | ||
|
|
2145432f81 | ||
|
|
86d0ee8389 | ||
|
|
1379ed2c72 | ||
|
|
4935be5526 | ||
|
|
920fba93dc | ||
|
|
949269104d | ||
|
|
dca4f67143 | ||
|
|
01ff38f975 | ||
|
|
bc67f99bae | ||
|
|
ae2f4a6f27 | ||
|
|
15cbadb4af | ||
|
|
0678e8df09 | ||
|
|
3a1c89715c | ||
|
|
6cdd27263b | ||
|
|
32845a5448 | ||
|
|
6e6410d300 | ||
|
|
ed191b78ae | ||
|
|
75786756bb | ||
|
|
5e0c2ef816 | ||
|
|
95459dea5f | ||
|
|
59894790de | ||
|
|
cba19c4d7e | ||
|
|
01fd04464d | ||
|
|
1cb1779d6b | ||
|
|
42c86665a7 | ||
|
|
f9c2bf1dd7 | ||
|
|
84e7949457 | ||
|
|
694d73d43c | ||
|
|
352f201c62 | ||
|
|
503e50acb9 | ||
|
|
813af29886 | ||
|
|
72e2107b5e | ||
|
|
3567c042ea | ||
|
|
e646b6fec4 | ||
|
|
8d2be3d4fa | ||
|
|
407a347f83 | ||
|
|
7e74f8c9d5 | ||
|
|
efdda93aeb | ||
|
|
1d1998aabb | ||
|
|
d2c6a4e4bc | ||
|
|
d11bd3d8be | ||
|
|
761785620f | ||
|
|
416771d4b1 | ||
|
|
9499f477a2 | ||
|
|
6bc6d475f9 | ||
|
|
ca2b1bc6d5 |
27
.devcontainer/devcontainer.json
Normal file
27
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"name": "Actions Runner Devcontainer",
|
||||
"image": "mcr.microsoft.com/devcontainers/base:focal",
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
|
||||
"ghcr.io/devcontainers/features/dotnet": {
|
||||
"version": "8.0.416"
|
||||
},
|
||||
"ghcr.io/devcontainers/features/node:1": {
|
||||
"version": "20"
|
||||
},
|
||||
"ghcr.io/devcontainers/features/sshd:1": {
|
||||
"version": "latest"
|
||||
}
|
||||
},
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"ms-azuretools.vscode-docker",
|
||||
"ms-dotnettools.csharp",
|
||||
"eamodio.gitlens"
|
||||
]
|
||||
}
|
||||
},
|
||||
"postCreateCommand": "dotnet restore src/Test && dotnet restore src/Runner.PluginHost",
|
||||
"remoteUser": "vscode"
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
# https://editorconfig.org/
|
||||
|
||||
[*]
|
||||
insert_final_newline = true # ensure all files end with a single newline
|
||||
trim_trailing_whitespace = true # attempt to remove trailing whitespace on save
|
||||
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false # in markdown, "two trailing spaces" is unfortunately meaningful; it means `<br>`
|
||||
5
.github/ISSUE_TEMPLATE/config.yml
vendored
5
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,10 +1,13 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: 🛑 Request a feature in the runner application
|
||||
url: https://github.com/orgs/community/discussions/categories/actions
|
||||
about: If you have feature requests for GitHub Actions, please use the Actions section on the Github Product Feedback page.
|
||||
- name: ✅ Support for GitHub Actions
|
||||
url: https://github.community/c/code-to-cloud/52
|
||||
about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.
|
||||
- name: ✅ Feedback and suggestions for GitHub Actions
|
||||
url: https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback
|
||||
url: https://github.com/github/feedback/discussions/categories/actions
|
||||
about: If you have feedback or suggestions about GitHub Actions, please open a discussion (or add to an existing one) in the GitHub Actions Feedback. GitHub Actions Product Managers and Engineers monitor the feedback forum.
|
||||
- name: ‼️ GitHub Security Bug Bounty
|
||||
url: https://bounty.github.com/
|
||||
|
||||
32
.github/ISSUE_TEMPLATE/enhancement_request.md
vendored
32
.github/ISSUE_TEMPLATE/enhancement_request.md
vendored
@@ -1,32 +0,0 @@
|
||||
---
|
||||
name: 🛑 Request a feature in the runner application
|
||||
about: If you have feature requests for GitHub Actions, please use the "feedback and suggestions for GitHub Actions" link below.
|
||||
title: ''
|
||||
labels: enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
👋 You're opening a request for an enhancement in the GitHub Actions **runner application**.
|
||||
|
||||
🛑 Please stop if you're not certain that the feature you want is in the runner application - if you have a suggestion for improving GitHub Actions, please see the [GitHub Actions Feedback](https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback) discussion forum which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃
|
||||
|
||||
Some additional useful links:
|
||||
* If you have found a security issue [please submit it here](https://hackerone.com/github)
|
||||
* If you have questions or issues with the service, writing workflows or actions, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
|
||||
* If you are having an issue or have a question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
|
||||
|
||||
If you have a feature request that is relevant to this repository, the runner, then please include the information below:
|
||||
-->
|
||||
|
||||
**Describe the enhancement**
|
||||
A clear and concise description of what the features or enhancement you need.
|
||||
|
||||
**Code Snippet**
|
||||
If applicable, add a code snippet.
|
||||
|
||||
**Additional information**
|
||||
Add any other context about the feature here.
|
||||
|
||||
NOTE: if the feature request has been agreed upon then the assignee will create an ADR. See docs/adrs/README.md
|
||||
25
.github/copilot-instructions.md
vendored
Normal file
25
.github/copilot-instructions.md
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
## Making changes
|
||||
|
||||
### Tests
|
||||
|
||||
Whenever possible, changes should be accompanied by non-trivial tests that meaningfully exercise the core functionality of the new code being introduced.
|
||||
|
||||
All tests are in the `Test/` directory at the repo root. Fast unit tests are in the `Test/L0` directory and by convention have the suffix `L0.cs`. For example: unit tests for a hypothetical `src/Runner.Worker/Foo.cs` would go in `src/Test/L0/Worker/FooL0.cs`.
|
||||
|
||||
Run tests using this command:
|
||||
|
||||
```sh
|
||||
cd src && ./dev.sh test
|
||||
```
|
||||
|
||||
### Formatting
|
||||
|
||||
After editing .cs files, always format the code using this command:
|
||||
|
||||
```sh
|
||||
cd src && ./dev.sh format
|
||||
```
|
||||
|
||||
### Feature Flags
|
||||
|
||||
Wherever possible, all changes should be safeguarded by a feature flag; `Features` are declared in [Constants.cs](src/Runner.Common/Constants.cs).
|
||||
25
.github/dependabot.yml
vendored
Normal file
25
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "docker"
|
||||
directory: "/images"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
target-branch: "main"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
target-branch: "main"
|
||||
- package-ecosystem: "nuget"
|
||||
directory: "/src"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
target-branch: "main"
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/src/Misc/expressionFunc/hashFiles"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
target-branch: "main"
|
||||
allow:
|
||||
- dependency-type: direct
|
||||
- dependency-type: production # check only dependencies, which are going to the compiled app, not supporting tools like @vue-cli
|
||||
94
.github/workflows/build.yml
vendored
94
.github/workflows/build.yml
vendored
@@ -10,15 +10,18 @@ on:
|
||||
- '**.md'
|
||||
pull_request:
|
||||
branches:
|
||||
- '*'
|
||||
- '**'
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
matrix:
|
||||
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64 ]
|
||||
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
|
||||
include:
|
||||
- runtime: linux-x64
|
||||
os: ubuntu-latest
|
||||
@@ -41,12 +44,16 @@ jobs:
|
||||
devScript: ./dev.sh
|
||||
|
||||
- runtime: win-x64
|
||||
os: windows-2019
|
||||
os: windows-latest
|
||||
devScript: ./dev
|
||||
|
||||
- runtime: win-arm64
|
||||
os: windows-latest
|
||||
devScript: ./dev
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
# Build runner layout
|
||||
- name: Build & Layout Release
|
||||
@@ -54,51 +61,70 @@ jobs:
|
||||
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
|
||||
working-directory: src
|
||||
|
||||
# Check runtime/externals hash
|
||||
- name: Compute/Compare runtime and externals Hash
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
|
||||
echo "Current Externals hash result: $EXTERNALS_HASH"
|
||||
|
||||
NeedUpdate=0
|
||||
if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
|
||||
echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
|
||||
NeedUpdate=1
|
||||
fi
|
||||
|
||||
if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
|
||||
echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
|
||||
NeedUpdate=1
|
||||
fi
|
||||
|
||||
exit $NeedUpdate
|
||||
env:
|
||||
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
|
||||
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
|
||||
|
||||
# Run tests
|
||||
- name: L0
|
||||
run: |
|
||||
${{ matrix.devScript }} test
|
||||
working-directory: src
|
||||
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'osx-arm64'
|
||||
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'osx-arm64' && matrix.runtime != 'win-arm64'
|
||||
|
||||
# Create runner package tar.gz/zip
|
||||
- name: Package Release
|
||||
if: github.event_name != 'pull_request'
|
||||
run: |
|
||||
${{ matrix.devScript }} package Release
|
||||
${{ matrix.devScript }} package Release ${{ matrix.runtime }}
|
||||
working-directory: src
|
||||
|
||||
# Upload runner package tar.gz/zip as artifact
|
||||
- name: Publish Artifact
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: runner-package-${{ matrix.runtime }}
|
||||
path: |
|
||||
path: |
|
||||
_package
|
||||
_package_trims/trim_externals
|
||||
_package_trims/trim_runtime
|
||||
_package_trims/trim_runtime_externals
|
||||
|
||||
docker:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ ubuntu-latest, ubuntu-24.04-arm ]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
docker_platform: linux/amd64
|
||||
- os: ubuntu-24.04-arm
|
||||
docker_platform: linux/arm64
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Get latest runner version
|
||||
id: latest_runner
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const release = await github.rest.repos.getLatestRelease({
|
||||
owner: 'actions',
|
||||
repo: 'runner',
|
||||
});
|
||||
const version = release.data.tag_name.replace(/^v/, '');
|
||||
core.setOutput('version', version);
|
||||
|
||||
- name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build Docker image
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ./images
|
||||
load: true
|
||||
platforms: ${{ matrix.docker_platform }}
|
||||
tags: |
|
||||
${{ github.sha }}:latest
|
||||
build-args: |
|
||||
RUNNER_VERSION=${{ steps.latest_runner.outputs.version }}
|
||||
|
||||
- name: Test Docker image
|
||||
run: |
|
||||
docker run --rm ${{ github.sha }}:latest ./run.sh --version
|
||||
|
||||
|
||||
17
.github/workflows/close-bugs-bot.yml
vendored
Normal file
17
.github/workflows/close-bugs-bot.yml
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
name: Close Bugs Bot
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 0 * * *' # every day at midnight
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v10
|
||||
with:
|
||||
close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
|
||||
exempt-issue-labels: "keep"
|
||||
stale-issue-label: "actions-bug"
|
||||
only-labels: "actions-bug"
|
||||
days-before-stale: 0
|
||||
days-before-close: 1
|
||||
17
.github/workflows/close-features-bot.yml
vendored
Normal file
17
.github/workflows/close-features-bot.yml
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
name: Close Features Bot
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 0 * * *' # every day at midnight
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v10
|
||||
with:
|
||||
close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
|
||||
exempt-issue-labels: "keep"
|
||||
stale-issue-label: "actions-feature"
|
||||
only-labels: "actions-feature"
|
||||
days-before-stale: 0
|
||||
days-before-close: 1
|
||||
6
.github/workflows/codeql.yml
vendored
6
.github/workflows/codeql.yml
vendored
@@ -23,11 +23,11 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v6
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
uses: github/codeql-action/init@v4
|
||||
# Override language selection by uncommenting this and choosing your languages
|
||||
# with:
|
||||
# languages: go, javascript, csharp, python, cpp, java
|
||||
@@ -38,4 +38,4 @@ jobs:
|
||||
working-directory: src
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
uses: github/codeql-action/analyze@v4
|
||||
|
||||
211
.github/workflows/dependency-check.yml
vendored
Normal file
211
.github/workflows/dependency-check.yml
vendored
Normal file
@@ -0,0 +1,211 @@
|
||||
name: Dependency Status Check
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
check_type:
|
||||
description: "Type of dependency check"
|
||||
required: false
|
||||
default: "all"
|
||||
type: choice
|
||||
options:
|
||||
- all
|
||||
- node
|
||||
- dotnet
|
||||
- docker
|
||||
- npm
|
||||
schedule:
|
||||
- cron: "0 11 * * 1" # Weekly on Monday at 11 AM
|
||||
|
||||
jobs:
|
||||
dependency-status:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
node20-status: ${{ steps.check-versions.outputs.node20-status }}
|
||||
node24-status: ${{ steps.check-versions.outputs.node24-status }}
|
||||
dotnet-status: ${{ steps.check-versions.outputs.dotnet-status }}
|
||||
docker-status: ${{ steps.check-versions.outputs.docker-status }}
|
||||
buildx-status: ${{ steps.check-versions.outputs.buildx-status }}
|
||||
npm-vulnerabilities: ${{ steps.check-versions.outputs.npm-vulnerabilities }}
|
||||
open-dependency-prs: ${{ steps.check-prs.outputs.open-dependency-prs }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: Check dependency versions
|
||||
id: check-versions
|
||||
run: |
|
||||
echo "## Dependency Status Report" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Generated on: $(date)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# Check Node versions
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "node" ]]; then
|
||||
echo "### Node.js Versions" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
VERSIONS_JSON=$(curl -s https://raw.githubusercontent.com/actions/node-versions/main/versions-manifest.json)
|
||||
LATEST_NODE20=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("20.")) | .version' | head -1)
|
||||
LATEST_NODE24=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("24.")) | .version' | head -1)
|
||||
|
||||
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
|
||||
NODE20_STATUS="✅ up-to-date"
|
||||
NODE24_STATUS="✅ up-to-date"
|
||||
|
||||
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
|
||||
NODE20_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
|
||||
NODE24_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
echo "| Version | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|---------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Node 20 | $CURRENT_NODE20 | $LATEST_NODE20 | $NODE20_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Node 24 | $CURRENT_NODE24 | $LATEST_NODE24 | $NODE24_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "node20-status=$NODE20_STATUS" >> $GITHUB_OUTPUT
|
||||
echo "node24-status=$NODE24_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Check .NET version
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "dotnet" ]]; then
|
||||
echo "### .NET SDK Version" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
current_dotnet_version=$(jq -r .sdk.version ./src/global.json)
|
||||
current_major_minor=$(echo "$current_dotnet_version" | cut -d '.' -f 1,2)
|
||||
latest_dotnet_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/$current_major_minor/latest.version")
|
||||
|
||||
DOTNET_STATUS="✅ up-to-date"
|
||||
if [ "$current_dotnet_version" != "$latest_dotnet_version" ]; then
|
||||
DOTNET_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| .NET SDK | $current_dotnet_version | $latest_dotnet_version | $DOTNET_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "dotnet-status=$DOTNET_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Check Docker versions
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "docker" ]]; then
|
||||
echo "### Docker Versions" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
current_docker=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
current_buildx=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
|
||||
latest_docker=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
|
||||
latest_buildx=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')
|
||||
|
||||
DOCKER_STATUS="✅ up-to-date"
|
||||
BUILDX_STATUS="✅ up-to-date"
|
||||
|
||||
if [ "$current_docker" != "$latest_docker" ]; then
|
||||
DOCKER_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
if [ "$current_buildx" != "$latest_buildx" ]; then
|
||||
BUILDX_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Docker | $current_docker | $latest_docker | $DOCKER_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Docker Buildx | $current_buildx | $latest_buildx | $BUILDX_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "docker-status=$DOCKER_STATUS" >> $GITHUB_OUTPUT
|
||||
echo "buildx-status=$BUILDX_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Check npm vulnerabilities
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "npm" ]]; then
|
||||
echo "### NPM Security Audit" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
cd src/Misc/expressionFunc/hashFiles
|
||||
npm install --silent
|
||||
|
||||
AUDIT_OUTPUT=""
|
||||
AUDIT_EXIT_CODE=0
|
||||
# Run npm audit and capture output and exit code
|
||||
if ! AUDIT_OUTPUT=$(npm audit --json 2>&1); then
|
||||
AUDIT_EXIT_CODE=$?
|
||||
fi
|
||||
|
||||
# Check if output is valid JSON
|
||||
if echo "$AUDIT_OUTPUT" | jq . >/dev/null 2>&1; then
|
||||
VULN_COUNT=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.total // 0')
|
||||
# Ensure VULN_COUNT is a number
|
||||
VULN_COUNT=$(echo "$VULN_COUNT" | grep -o '[0-9]*' | head -1)
|
||||
VULN_COUNT=${VULN_COUNT:-0}
|
||||
|
||||
NPM_STATUS="✅ no vulnerabilities"
|
||||
if [ "$VULN_COUNT" -gt 0 ] 2>/dev/null; then
|
||||
NPM_STATUS="⚠️ $VULN_COUNT vulnerabilities found"
|
||||
|
||||
# Get vulnerability details
|
||||
HIGH_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.high // 0')
|
||||
CRITICAL_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.critical // 0')
|
||||
|
||||
echo "| Severity | Count |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Critical | $CRITICAL_VULNS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| High | $HIGH_VULNS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
else
|
||||
echo "No npm vulnerabilities found ✅" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
else
|
||||
NPM_STATUS="❌ npm audit failed"
|
||||
echo "npm audit failed to run or returned invalid JSON ❌" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Exit code: $AUDIT_EXIT_CODE" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Output: $AUDIT_OUTPUT" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
echo "npm-vulnerabilities=$NPM_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Check for open dependency PRs
|
||||
id: check-prs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
echo "### Open Dependency PRs" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# Get open PRs with dependency label
|
||||
OPEN_PRS=$(gh pr list --label "dependencies" --state open --json number,title,url)
|
||||
PR_COUNT=$(echo "$OPEN_PRS" | jq '. | length')
|
||||
|
||||
if [ "$PR_COUNT" -gt 0 ]; then
|
||||
echo "Found $PR_COUNT open dependency PR(s):" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "$OPEN_PRS" | jq -r '.[] | "- [#\(.number)](\(.url)) \(.title)"' >> $GITHUB_STEP_SUMMARY
|
||||
else
|
||||
echo "No open dependency PRs found ✅" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "open-dependency-prs=$PR_COUNT" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "### Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Check for open PRs with the \`dependency\` label before releases" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Review and merge dependency updates regularly" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Critical vulnerabilities should be addressed immediately" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Automated workflows run weekly to check for updates:**" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Node.js versions (Mondays at 6 AM)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- NPM audit fix (Mondays at 7 AM)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- .NET SDK updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Docker/Buildx updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
|
||||
166
.github/workflows/docker-buildx-upgrade.yml
vendored
Normal file
166
.github/workflows/docker-buildx-upgrade.yml
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
name: "Docker/Buildx Version Upgrade"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * 1" # Run every Monday at midnight
|
||||
workflow_dispatch: # Allow manual triggering
|
||||
|
||||
jobs:
|
||||
check-versions:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
DOCKER_SHOULD_UPDATE: ${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}
|
||||
DOCKER_LATEST_VERSION: ${{ steps.check_docker_version.outputs.LATEST_VERSION }}
|
||||
DOCKER_CURRENT_VERSION: ${{ steps.check_docker_version.outputs.CURRENT_VERSION }}
|
||||
BUILDX_SHOULD_UPDATE: ${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}
|
||||
BUILDX_LATEST_VERSION: ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}
|
||||
BUILDX_CURRENT_VERSION: ${{ steps.check_buildx_version.outputs.CURRENT_VERSION }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Check Docker version
|
||||
id: check_docker_version
|
||||
shell: bash
|
||||
run: |
|
||||
# Extract current Docker version from Dockerfile
|
||||
current_version=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
|
||||
# Fetch latest Docker Engine version from Docker's download site
|
||||
# This gets the latest Linux static binary version which matches what's used in the Dockerfile
|
||||
latest_version=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
|
||||
|
||||
# Extra check to ensure we got a valid version
|
||||
if [[ ! $latest_version =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
echo "Failed to retrieve a valid Docker version"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
should_update=0
|
||||
[ "$current_version" != "$latest_version" ] && should_update=1
|
||||
|
||||
echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT
|
||||
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT
|
||||
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check Buildx version
|
||||
id: check_buildx_version
|
||||
shell: bash
|
||||
run: |
|
||||
# Extract current Buildx version from Dockerfile
|
||||
current_version=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
|
||||
# Fetch latest Buildx version
|
||||
latest_version=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')
|
||||
|
||||
should_update=0
|
||||
[ "$current_version" != "$latest_version" ] && should_update=1
|
||||
|
||||
echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT
|
||||
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT
|
||||
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Create annotations for versions
|
||||
run: |
|
||||
docker_should_update="${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}"
|
||||
buildx_should_update="${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}"
|
||||
|
||||
# Show annotation if only Docker needs update
|
||||
if [[ "$docker_should_update" == "1" && "$buildx_should_update" == "0" ]]; then
|
||||
echo "::warning ::Docker version (${{ steps.check_docker_version.outputs.LATEST_VERSION }}) needs update but Buildx is current. Only updating when both need updates."
|
||||
fi
|
||||
|
||||
# Show annotation if only Buildx needs update
|
||||
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "1" ]]; then
|
||||
echo "::warning ::Buildx version (${{ steps.check_buildx_version.outputs.LATEST_VERSION }}) needs update but Docker is current. Only updating when both need updates."
|
||||
fi
|
||||
|
||||
# Show annotation when both are current
|
||||
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "0" ]]; then
|
||||
echo "::warning ::Latest Docker version is ${{ steps.check_docker_version.outputs.LATEST_VERSION }} and Buildx version is ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}. No updates needed."
|
||||
fi
|
||||
|
||||
update-versions:
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
needs: [check-versions]
|
||||
if: ${{ needs.check-versions.outputs.DOCKER_SHOULD_UPDATE == 1 && needs.check-versions.outputs.BUILDX_SHOULD_UPDATE == 1 }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Update Docker version
|
||||
shell: bash
|
||||
run: |
|
||||
latest_version="${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}"
|
||||
current_version="${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }}"
|
||||
|
||||
# Update version in Dockerfile
|
||||
sed -i "s/ARG DOCKER_VERSION=$current_version/ARG DOCKER_VERSION=$latest_version/g" ./images/Dockerfile
|
||||
|
||||
- name: Update Buildx version
|
||||
shell: bash
|
||||
run: |
|
||||
latest_version="${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||
current_version="${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }}"
|
||||
|
||||
# Update version in Dockerfile
|
||||
sed -i "s/ARG BUILDX_VERSION=$current_version/ARG BUILDX_VERSION=$latest_version/g" ./images/Dockerfile
|
||||
|
||||
- name: Commit changes and create Pull Request
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Setup branch and commit information
|
||||
branch_name="feature/docker-buildx-upgrade"
|
||||
commit_message="Upgrade Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||
pr_title="Update Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
# Create branch or switch to it if it exists
|
||||
if git show-ref --quiet refs/remotes/origin/$branch_name; then
|
||||
git fetch origin
|
||||
git checkout -B "$branch_name" origin/$branch_name
|
||||
else
|
||||
git checkout -b "$branch_name"
|
||||
fi
|
||||
|
||||
# Commit and push changes
|
||||
git commit -a -m "$commit_message"
|
||||
git push --force origin "$branch_name"
|
||||
|
||||
# Create PR body using here-doc for proper formatting
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated Docker and Buildx version update:
|
||||
|
||||
- Docker: ${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}
|
||||
- Buildx: ${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}
|
||||
|
||||
This update ensures we're using the latest stable Docker and Buildx versions for security and performance improvements.
|
||||
|
||||
**Release notes:** https://docs.docker.com/engine/release-notes/
|
||||
|
||||
**Next steps:**
|
||||
- Review the version changes
|
||||
- Verify container builds work as expected
|
||||
- Test multi-platform builds if applicable
|
||||
- Merge when ready
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [Docker/Buildx Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/docker-buildx-upgrade.yml)
|
||||
EOF
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "$pr_title" \
|
||||
--label "dependencies" \
|
||||
--label "dependencies-weekly-check" \
|
||||
--label "dependencies-not-dependabot" \
|
||||
--label "docker" \
|
||||
--body-file pr_body.txt
|
||||
75
.github/workflows/docker-publish.yml
vendored
Normal file
75
.github/workflows/docker-publish.yml
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
name: Publish DockerImage from Release Branch
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
releaseBranch:
|
||||
description: 'Release Branch (releases/mXXX)'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
publish-image:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
attestations: write
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ github.event.inputs.releaseBranch }}
|
||||
|
||||
- name: Compute image version
|
||||
id: image
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '');
|
||||
console.log(`Using runner version ${runnerVersion}`);
|
||||
if (!/^\d+\.\d+\.\d+$/.test(runnerVersion)) {
|
||||
throw new Error(`Invalid runner version: ${runnerVersion}`);
|
||||
}
|
||||
core.setOutput('version', runnerVersion);
|
||||
|
||||
- name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ./images
|
||||
platforms: |
|
||||
linux/amd64
|
||||
linux/arm64
|
||||
tags: |
|
||||
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
|
||||
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
|
||||
build-args: |
|
||||
RUNNER_VERSION=${{ steps.image.outputs.version }}
|
||||
push: true
|
||||
labels: |
|
||||
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
||||
org.opencontainers.image.licenses=MIT
|
||||
annotations: |
|
||||
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
||||
|
||||
- name: Generate attestation
|
||||
uses: actions/attest-build-provenance@v3
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
105
.github/workflows/dotnet-upgrade.yml
vendored
Normal file
105
.github/workflows/dotnet-upgrade.yml
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
name: "DotNet SDK Upgrade"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 8 * * 1" # Weekly on Monday at 8 AM UTC (independent of Node.js/NPM)
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
dotnet-update:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
SHOULD_UPDATE: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE }}
|
||||
BRANCH_EXISTS: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS }}
|
||||
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
- name: Get current major minor version
|
||||
id: fetch_current_version
|
||||
shell: bash
|
||||
run: |
|
||||
current_major_minor_patch_version=$(jq .sdk.version ./src/global.json | xargs)
|
||||
current_major_minor_version=$(cut -d '.' -f 1,2 <<< "$current_major_minor_patch_version")
|
||||
|
||||
echo "DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION=${current_major_minor_patch_version}" >> $GITHUB_OUTPUT
|
||||
echo "DOTNET_CURRENT_MAJOR_MINOR_VERSION=${current_major_minor_version}" >> $GITHUB_OUTPUT
|
||||
- name: Check patch version
|
||||
id: fetch_latest_version
|
||||
shell: bash
|
||||
run: |
|
||||
latest_patch_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version")
|
||||
current_patch_version=${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}
|
||||
|
||||
should_update=0
|
||||
[ "$current_patch_version" != "$latest_patch_version" ] && should_update=1
|
||||
|
||||
# check if git branch already exists for the upgrade
|
||||
branch_already_exists=0
|
||||
|
||||
if git ls-remote --heads --exit-code origin refs/heads/feature/dotnetsdk-upgrade/${latest_patch_version};
|
||||
then
|
||||
branch_already_exists=1
|
||||
should_update=0
|
||||
fi
|
||||
echo "DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION=${latest_patch_version}" >> $GITHUB_OUTPUT
|
||||
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||
echo "BRANCH_EXISTS=${branch_already_exists}" >> $GITHUB_OUTPUT
|
||||
- name: Create an error annotation if branch exists
|
||||
if: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS == 1 }}
|
||||
run: echo "::error links::feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} https://github.com/actions/runner/tree/feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}::Branch feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} already exists. Please take a look and delete that branch if you wish to recreate"
|
||||
- name: Create a warning annotation if no need to update
|
||||
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 0 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
|
||||
run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update"
|
||||
- name: Update patch version
|
||||
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
|
||||
shell: bash
|
||||
run: |
|
||||
patch_version="${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
|
||||
current_version="${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}"
|
||||
|
||||
# Update globals
|
||||
echo Updating globals
|
||||
globals_temp=$(mktemp)
|
||||
jq --unbuffered --arg patch_version "$patch_version" '.sdk.version = $patch_version' ./src/global.json > "$globals_temp" && mv "$globals_temp" ./src/global.json
|
||||
|
||||
# Update devcontainer
|
||||
echo Updating devcontainer
|
||||
devcontainer_temp=$(mktemp)
|
||||
jq --unbuffered --arg patch_version "$patch_version" '.features."ghcr.io/devcontainers/features/dotnet".version = $patch_version' ./.devcontainer/devcontainer.json > "$devcontainer_temp" && mv "$devcontainer_temp" ./.devcontainer/devcontainer.json
|
||||
|
||||
# Update dev.sh
|
||||
echo Updating start script
|
||||
sed -i "s/DOTNETSDK_VERSION=\"$current_version\"/DOTNETSDK_VERSION=\"$patch_version\"/g" ./src/dev.sh
|
||||
- name: GIT commit and push all changed files
|
||||
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
|
||||
id: create_branch
|
||||
run: |
|
||||
branch_name="feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
git checkout -b $branch_name
|
||||
git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
|
||||
git push --set-upstream origin $branch_name
|
||||
|
||||
create-pr:
|
||||
needs: [dotnet-update]
|
||||
if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
- name: Create Pull Request
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --label "dependencies" --label "dependencies-weekly-check" --label "dependencies-not-dependabot" --label "dotnet" --body "
|
||||
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
|
||||
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
|
||||
194
.github/workflows/node-upgrade.yml
vendored
Normal file
194
.github/workflows/node-upgrade.yml
vendored
Normal file
@@ -0,0 +1,194 @@
|
||||
name: Auto Update Node Version
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 6 * * 1" # Weekly, every Monday
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-node:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Get latest Node versions
|
||||
id: node-versions
|
||||
run: |
|
||||
# Get latest Node.js releases from official GitHub releases
|
||||
echo "Fetching latest Node.js releases..."
|
||||
|
||||
# Get latest v20.x release
|
||||
LATEST_NODE20=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
|
||||
jq -r '.[] | select(.tag_name | startswith("v20.")) | .tag_name' | \
|
||||
head -1 | sed 's/^v//')
|
||||
|
||||
# Get latest v24.x release
|
||||
LATEST_NODE24=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
|
||||
jq -r '.[] | select(.tag_name | startswith("v24.")) | .tag_name' | \
|
||||
head -1 | sed 's/^v//')
|
||||
|
||||
echo "Found Node.js releases: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
|
||||
|
||||
# Verify these versions are available in alpine_nodejs releases
|
||||
echo "Verifying availability in alpine_nodejs..."
|
||||
ALPINE_RELEASES=$(curl -s https://api.github.com/repos/actions/alpine_nodejs/releases | jq -r '.[].tag_name')
|
||||
|
||||
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE20$"; then
|
||||
echo "::warning title=Node 20 Fallback::Node 20 version $LATEST_NODE20 not found in alpine_nodejs releases, using fallback"
|
||||
# Fall back to latest available alpine_nodejs v20 release
|
||||
LATEST_NODE20=$(echo "$ALPINE_RELEASES" | grep "^v20\." | head -1 | sed 's/^v//')
|
||||
echo "Using latest available alpine_nodejs Node 20: $LATEST_NODE20"
|
||||
fi
|
||||
|
||||
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE24$"; then
|
||||
echo "::warning title=Node 24 Fallback::Node 24 version $LATEST_NODE24 not found in alpine_nodejs releases, using fallback"
|
||||
# Fall back to latest available alpine_nodejs v24 release
|
||||
LATEST_NODE24=$(echo "$ALPINE_RELEASES" | grep "^v24\." | head -1 | sed 's/^v//')
|
||||
echo "Using latest available alpine_nodejs Node 24: $LATEST_NODE24"
|
||||
fi
|
||||
|
||||
# Validate that we have non-empty version numbers
|
||||
if [ -z "$LATEST_NODE20" ] || [ "$LATEST_NODE20" = "" ]; then
|
||||
echo "::error title=Invalid Node 20 Version::Failed to determine valid Node 20 version. Got: '$LATEST_NODE20'"
|
||||
echo "Available alpine_nodejs releases:"
|
||||
echo "$ALPINE_RELEASES" | head -10
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$LATEST_NODE24" ] || [ "$LATEST_NODE24" = "" ]; then
|
||||
echo "::error title=Invalid Node 24 Version::Failed to determine valid Node 24 version. Got: '$LATEST_NODE24'"
|
||||
echo "Available alpine_nodejs releases:"
|
||||
echo "$ALPINE_RELEASES" | head -10
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Additional validation: ensure versions match expected format (x.y.z)
|
||||
if ! echo "$LATEST_NODE20" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 20 Format::Node 20 version '$LATEST_NODE20' does not match expected format (x.y.z)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! echo "$LATEST_NODE24" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 24 Format::Node 24 version '$LATEST_NODE24' does not match expected format (x.y.z)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Validated Node versions: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
|
||||
echo "latest_node20=$LATEST_NODE20" >> $GITHUB_OUTPUT
|
||||
echo "latest_node24=$LATEST_NODE24" >> $GITHUB_OUTPUT
|
||||
|
||||
# Check current versions in externals.sh
|
||||
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
|
||||
echo "current_node20=$CURRENT_NODE20" >> $GITHUB_OUTPUT
|
||||
echo "current_node24=$CURRENT_NODE24" >> $GITHUB_OUTPUT
|
||||
|
||||
# Determine if updates are needed
|
||||
NEEDS_UPDATE20="false"
|
||||
NEEDS_UPDATE24="false"
|
||||
|
||||
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
|
||||
NEEDS_UPDATE20="true"
|
||||
echo "::notice title=Node 20 Update Available::Current: $CURRENT_NODE20 → Latest: $LATEST_NODE20"
|
||||
fi
|
||||
|
||||
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
|
||||
NEEDS_UPDATE24="true"
|
||||
echo "::notice title=Node 24 Update Available::Current: $CURRENT_NODE24 → Latest: $LATEST_NODE24"
|
||||
fi
|
||||
|
||||
if [ "$NEEDS_UPDATE20" == "false" ] && [ "$NEEDS_UPDATE24" == "false" ]; then
|
||||
echo "::notice title=No Updates Needed::All Node.js versions are up to date"
|
||||
fi
|
||||
|
||||
echo "needs_update20=$NEEDS_UPDATE20" >> $GITHUB_OUTPUT
|
||||
echo "needs_update24=$NEEDS_UPDATE24" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update externals.sh and create PR
|
||||
if: steps.node-versions.outputs.needs_update20 == 'true' || steps.node-versions.outputs.needs_update24 == 'true'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Final validation before making changes
|
||||
NODE20_VERSION="${{ steps.node-versions.outputs.latest_node20 }}"
|
||||
NODE24_VERSION="${{ steps.node-versions.outputs.latest_node24 }}"
|
||||
|
||||
echo "Final validation of versions before PR creation:"
|
||||
echo "Node 20: '$NODE20_VERSION'"
|
||||
echo "Node 24: '$NODE24_VERSION'"
|
||||
|
||||
# Validate versions are not empty and match expected format
|
||||
if [ -z "$NODE20_VERSION" ] || ! echo "$NODE20_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 20 Version::Refusing to create PR with invalid Node 20 version: '$NODE20_VERSION'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$NODE24_VERSION" ] || ! echo "$NODE24_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 24 Version::Refusing to create PR with invalid Node 24 version: '$NODE24_VERSION'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ All versions validated successfully"
|
||||
|
||||
# Update the files
|
||||
if [ "${{ steps.node-versions.outputs.needs_update20 }}" == "true" ]; then
|
||||
sed -i 's/NODE20_VERSION="[^"]*"/NODE20_VERSION="'"$NODE20_VERSION"'"/' src/Misc/externals.sh
|
||||
fi
|
||||
|
||||
if [ "${{ steps.node-versions.outputs.needs_update24 }}" == "true" ]; then
|
||||
sed -i 's/NODE24_VERSION="[^"]*"/NODE24_VERSION="'"$NODE24_VERSION"'"/' src/Misc/externals.sh
|
||||
fi
|
||||
|
||||
# Verify the changes were applied correctly
|
||||
echo "Verifying changes in externals.sh:"
|
||||
grep "NODE20_VERSION=" src/Misc/externals.sh
|
||||
grep "NODE24_VERSION=" src/Misc/externals.sh
|
||||
|
||||
# Ensure we actually have valid versions in the file
|
||||
UPDATED_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
UPDATED_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
|
||||
if [ -z "$UPDATED_NODE20" ] || [ -z "$UPDATED_NODE24" ]; then
|
||||
echo "::error title=Update Failed::Failed to properly update externals.sh"
|
||||
echo "Updated Node 20: '$UPDATED_NODE20'"
|
||||
echo "Updated Node 24: '$UPDATED_NODE24'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
# Create branch and commit changes
|
||||
branch_name="chore/update-node"
|
||||
git checkout -b "$branch_name"
|
||||
git commit -a -m "chore: update Node versions (20: $NODE20_VERSION, 24: $NODE24_VERSION)"
|
||||
git push --force origin "$branch_name"
|
||||
|
||||
# Create PR body using here-doc for proper formatting
|
||||
cat > pr_body.txt << EOF
|
||||
Automated Node.js version update:
|
||||
|
||||
- Node 20: ${{ steps.node-versions.outputs.current_node20 }} → $NODE20_VERSION
|
||||
- Node 24: ${{ steps.node-versions.outputs.current_node24 }} → $NODE24_VERSION
|
||||
|
||||
This update ensures we're using the latest stable Node.js versions for security and performance improvements.
|
||||
|
||||
**Note**: When updating Node versions, remember to also create a new release of alpine_nodejs at the updated version following the instructions at: https://github.com/actions/alpine_nodejs
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [Node Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/node-upgrade.yml)
|
||||
EOF
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "chore: update Node versions" \
|
||||
--label "dependencies" \
|
||||
--label "dependencies-weekly-check" \
|
||||
--label "dependencies-not-dependabot" \
|
||||
--label "node" \
|
||||
--label "javascript" \
|
||||
--body-file pr_body.txt
|
||||
|
||||
echo "::notice title=PR Created::Successfully created Node.js version update PR on branch $branch_name"
|
||||
235
.github/workflows/npm-audit-typescript.yml
vendored
Normal file
235
.github/workflows/npm-audit-typescript.yml
vendored
Normal file
@@ -0,0 +1,235 @@
|
||||
name: NPM Audit Fix with TypeScript Auto-Fix
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
npm-audit-with-ts-fix:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: NPM install and audit fix with TypeScript auto-repair
|
||||
working-directory: src/Misc/expressionFunc/hashFiles
|
||||
run: |
|
||||
npm install
|
||||
|
||||
# Check for vulnerabilities first
|
||||
echo "Checking for npm vulnerabilities..."
|
||||
if npm audit --audit-level=moderate; then
|
||||
echo "✅ No moderate or higher vulnerabilities found"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "⚠️ Vulnerabilities found, attempting npm audit fix..."
|
||||
|
||||
# Attempt audit fix and capture the result
|
||||
if npm audit fix; then
|
||||
echo "✅ npm audit fix completed successfully"
|
||||
AUDIT_FIX_STATUS="success"
|
||||
else
|
||||
echo "⚠️ npm audit fix failed or had issues"
|
||||
AUDIT_FIX_STATUS="failed"
|
||||
|
||||
# Try audit fix with --force as a last resort for critical/high vulns only
|
||||
echo "Checking if critical/high vulnerabilities remain..."
|
||||
if ! npm audit --audit-level=high; then
|
||||
echo "🚨 Critical/high vulnerabilities remain, attempting --force fix..."
|
||||
if npm audit fix --force; then
|
||||
echo "⚠️ npm audit fix --force completed (may have breaking changes)"
|
||||
AUDIT_FIX_STATUS="force-fixed"
|
||||
else
|
||||
echo "❌ npm audit fix --force also failed"
|
||||
AUDIT_FIX_STATUS="force-failed"
|
||||
fi
|
||||
else
|
||||
echo "✅ Only moderate/low vulnerabilities remain after failed fix"
|
||||
AUDIT_FIX_STATUS="partial-success"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "AUDIT_FIX_STATUS=$AUDIT_FIX_STATUS" >> $GITHUB_ENV
|
||||
|
||||
# Try to fix TypeScript issues automatically
|
||||
echo "Attempting to fix TypeScript compatibility issues..."
|
||||
|
||||
# Check if build fails
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Build failed, attempting automated fixes..."
|
||||
|
||||
# Common fix 1: Update @types/node to latest compatible version
|
||||
echo "Trying to update @types/node to latest version..."
|
||||
npm update @types/node
|
||||
|
||||
# Common fix 2: If that doesn't work, try installing a specific known-good version
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Trying specific @types/node version..."
|
||||
# Try Node 20 compatible version
|
||||
npm install --save-dev @types/node@^20.0.0
|
||||
fi
|
||||
|
||||
# Common fix 3: Clear node_modules and reinstall if still failing
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Clearing node_modules and reinstalling..."
|
||||
rm -rf node_modules package-lock.json
|
||||
npm install
|
||||
|
||||
# Re-run audit fix after clean install if it was successful before
|
||||
if [[ "$AUDIT_FIX_STATUS" == "success" || "$AUDIT_FIX_STATUS" == "force-fixed" ]]; then
|
||||
echo "Re-running npm audit fix after clean install..."
|
||||
npm audit fix || echo "Audit fix failed on second attempt"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Common fix 4: Try updating TypeScript itself
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Trying to update TypeScript..."
|
||||
npm update typescript
|
||||
fi
|
||||
|
||||
# Final check
|
||||
if npm run build 2>/dev/null; then
|
||||
echo "✅ Successfully fixed TypeScript issues automatically"
|
||||
else
|
||||
echo "⚠️ Could not automatically fix TypeScript issues"
|
||||
fi
|
||||
else
|
||||
echo "✅ Build passes after audit fix"
|
||||
fi
|
||||
|
||||
- name: Create PR if changes exist
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
HUSKY: 0 # Disable husky hooks for automated commits
|
||||
run: |
|
||||
# Check if there are any changes
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
# Create branch and commit changes
|
||||
branch_name="chore/npm-audit-fix-with-ts-repair"
|
||||
git checkout -b "$branch_name"
|
||||
|
||||
# Commit with --no-verify to skip husky hooks
|
||||
git commit -a -m "chore: npm audit fix with automated TypeScript compatibility fixes" --no-verify
|
||||
git push --force origin "$branch_name"
|
||||
|
||||
# Check final build status and gather info about what was changed
|
||||
build_status="✅ Build passes"
|
||||
fixes_applied=""
|
||||
cd src/Misc/expressionFunc/hashFiles
|
||||
|
||||
# Check what packages were updated
|
||||
if git diff HEAD~1 package.json | grep -q "@types/node"; then
|
||||
fixes_applied+="\n- Updated @types/node version for TypeScript compatibility"
|
||||
fi
|
||||
if git diff HEAD~1 package.json | grep -q "typescript"; then
|
||||
fixes_applied+="\n- Updated TypeScript version"
|
||||
fi
|
||||
if git diff HEAD~1 package-lock.json | grep -q "resolved"; then
|
||||
fixes_applied+="\n- Updated package dependencies via npm audit fix"
|
||||
fi
|
||||
|
||||
if ! npm run build 2>/dev/null; then
|
||||
build_status="⚠️ Build fails - manual review required"
|
||||
fi
|
||||
cd - > /dev/null
|
||||
|
||||
# Create enhanced PR body using here-doc for proper formatting
|
||||
audit_status_msg=""
|
||||
case "$AUDIT_FIX_STATUS" in
|
||||
"success")
|
||||
audit_status_msg="✅ **Audit Fix**: Completed successfully"
|
||||
;;
|
||||
"partial-success")
|
||||
audit_status_msg="⚠️ **Audit Fix**: Partial success (only moderate/low vulnerabilities remain)"
|
||||
;;
|
||||
"force-fixed")
|
||||
audit_status_msg="⚠️ **Audit Fix**: Completed with --force (may have breaking changes)"
|
||||
;;
|
||||
"failed"|"force-failed")
|
||||
audit_status_msg="❌ **Audit Fix**: Failed to resolve vulnerabilities"
|
||||
;;
|
||||
*)
|
||||
audit_status_msg="❓ **Audit Fix**: Status unknown"
|
||||
;;
|
||||
esac
|
||||
|
||||
if [[ "$build_status" == *"fails"* ]]; then
|
||||
cat > pr_body.txt << EOF
|
||||
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.
|
||||
|
||||
**Build Status**: ⚠️ Build fails - manual review required
|
||||
$audit_status_msg
|
||||
|
||||
This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.
|
||||
|
||||
⚠️ **Manual Review Required**: The build is currently failing after automated fixes were attempted.
|
||||
|
||||
Common issues and solutions:
|
||||
- Check for TypeScript version compatibility with Node.js types
|
||||
- Review breaking changes in updated dependencies
|
||||
- Consider pinning problematic dependency versions temporarily
|
||||
- Review tsconfig.json for compatibility settings
|
||||
|
||||
**Automated Fix Strategy**:
|
||||
1. Run npm audit fix with proper error handling
|
||||
2. Update @types/node to latest compatible version
|
||||
3. Try Node 20 specific @types/node version if needed
|
||||
4. Clean reinstall dependencies if conflicts persist
|
||||
5. Update TypeScript compiler if necessary
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
|
||||
EOF
|
||||
else
|
||||
cat > pr_body.txt << EOF
|
||||
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.
|
||||
|
||||
**Build Status**: ✅ Build passes
|
||||
$audit_status_msg
|
||||
|
||||
This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.
|
||||
|
||||
✅ **Ready to Merge**: All automated fixes were successful and the build passes.
|
||||
|
||||
**Automated Fix Strategy**:
|
||||
1. Run npm audit fix with proper error handling
|
||||
2. Update @types/node to latest compatible version
|
||||
3. Try Node 20 specific @types/node version if needed
|
||||
4. Clean reinstall dependencies if conflicts persist
|
||||
5. Update TypeScript compiler if necessary
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
|
||||
EOF
|
||||
fi
|
||||
|
||||
if [ -n "$fixes_applied" ]; then
|
||||
# Add the fixes applied section to the file
|
||||
sed -i "/This workflow attempts/a\\
|
||||
\\
|
||||
**Automated Fixes Applied**:$fixes_applied" pr_body.txt
|
||||
fi
|
||||
|
||||
# Create PR with appropriate labels
|
||||
labels="dependencies,dependencies-not-dependabot,typescript,npm,security"
|
||||
if [[ "$build_status" == *"fails"* ]]; then
|
||||
labels="dependencies,dependencies-not-dependabot,typescript,npm,security,needs-manual-review"
|
||||
fi
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "chore: npm audit fix with TypeScript auto-repair" \
|
||||
--label "$labels" \
|
||||
--body-file pr_body.txt
|
||||
else
|
||||
echo "No changes to commit"
|
||||
fi
|
||||
137
.github/workflows/npm-audit.yml
vendored
Normal file
137
.github/workflows/npm-audit.yml
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
name: NPM Audit Fix
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 7 * * 1" # Weekly on Monday at 7 AM UTC
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
npm-audit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: NPM install and audit fix
|
||||
working-directory: src/Misc/expressionFunc/hashFiles
|
||||
run: |
|
||||
npm install
|
||||
|
||||
# Check what vulnerabilities exist
|
||||
echo "=== Checking current vulnerabilities ==="
|
||||
npm audit || true
|
||||
|
||||
# Apply audit fix --force to get security updates
|
||||
echo "=== Applying npm audit fix --force ==="
|
||||
npm audit fix --force
|
||||
|
||||
# Test if build still works and set status
|
||||
echo "=== Testing build compatibility ==="
|
||||
if npm run all; then
|
||||
echo "✅ Build successful after audit fix"
|
||||
echo "AUDIT_FIX_STATUS=success" >> $GITHUB_ENV
|
||||
else
|
||||
echo "❌ Build failed after audit fix - will create PR with fix instructions"
|
||||
echo "AUDIT_FIX_STATUS=build_failed" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Create PR if changes exist
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Check if there are any changes
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Create branch and commit changes
|
||||
branch_name="chore/npm-audit-fix-$(date +%Y%m%d)"
|
||||
git checkout -b "$branch_name"
|
||||
git add .
|
||||
git commit -m "chore: npm audit fix for hashFiles dependencies" --no-verify
|
||||
git push origin "$branch_name"
|
||||
|
||||
# Create PR body based on what actually happened
|
||||
if [ "$AUDIT_FIX_STATUS" = "success" ]; then
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
|
||||
|
||||
**✅ Full Fix Applied Successfully**
|
||||
This update addresses npm security advisories and ensures dependencies are secure and up-to-date.
|
||||
|
||||
**Changes made:**
|
||||
- Applied `npm audit fix --force` to resolve security vulnerabilities
|
||||
- Updated package-lock.json with security patches
|
||||
- Verified build compatibility with `npm run all`
|
||||
|
||||
**Next steps:**
|
||||
- Review the dependency changes
|
||||
- Verify the hashFiles functionality still works as expected
|
||||
- Merge when ready
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||
EOF
|
||||
elif [ "$AUDIT_FIX_STATUS" = "build_failed" ]; then
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
|
||||
|
||||
**⚠️ Security Fixes Applied - Build Issues Need Manual Resolution**
|
||||
This update applies important security patches but causes build failures that require manual fixes.
|
||||
|
||||
**Changes made:**
|
||||
- Applied `npm audit fix --force` to resolve security vulnerabilities
|
||||
- Updated package-lock.json with security patches
|
||||
|
||||
**⚠️ Build Issues Detected:**
|
||||
The build fails after applying security fixes, likely due to TypeScript compatibility issues with updated `@types/node`.
|
||||
|
||||
**Required Manual Fixes:**
|
||||
1. Review TypeScript compilation errors in the build output
|
||||
2. Update TypeScript configuration if needed
|
||||
3. Consider pinning `@types/node` to a compatible version
|
||||
4. Run `npm run all` locally to verify fixes
|
||||
|
||||
**Next steps:**
|
||||
- **DO NOT merge until build issues are resolved**
|
||||
- Apply manual fixes for TypeScript compatibility
|
||||
- Test the hashFiles functionality still works as expected
|
||||
- Merge when build passes
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||
EOF
|
||||
else
|
||||
# Fallback case
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated npm audit attempted for security vulnerabilities in hashFiles dependencies.
|
||||
|
||||
**ℹ️ No Changes Applied**
|
||||
No security vulnerabilities were found or no changes were needed.
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "chore: npm audit fix for hashFiles dependencies" \
|
||||
--label "dependencies" \
|
||||
--label "dependencies-weekly-check" \
|
||||
--label "dependencies-not-dependabot" \
|
||||
--label "npm" \
|
||||
--label "typescript" \
|
||||
--label "security" \
|
||||
--body-file pr_body.txt
|
||||
else
|
||||
echo "✅ No changes to commit - npm audit fix did not modify any files"
|
||||
fi
|
||||
511
.github/workflows/release.yml
vendored
511
.github/workflows/release.yml
vendored
@@ -11,16 +11,15 @@ jobs:
|
||||
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
# Make sure ./releaseVersion match ./src/runnerversion
|
||||
# Query GitHub release ensure version is not used
|
||||
- name: Check version
|
||||
uses: actions/github-script@0.3.0
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
const releaseVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '')
|
||||
@@ -30,7 +29,7 @@ jobs:
|
||||
return
|
||||
}
|
||||
try {
|
||||
const release = await github.repos.getReleaseByTag({
|
||||
const release = await github.rest.repos.getReleaseByTag({
|
||||
owner: '${{ github.event.repository.owner.name }}',
|
||||
repo: '${{ github.event.repository.name }}',
|
||||
tag: 'v' + runnerVersion
|
||||
@@ -50,29 +49,12 @@ jobs:
|
||||
linux-arm64-sha: ${{ steps.sha.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha: ${{ steps.sha.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha: ${{ steps.sha.outputs.win-x64-sha256 }}
|
||||
win-arm64-sha: ${{ steps.sha.outputs.win-arm64-sha256 }}
|
||||
osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }}
|
||||
linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
|
||||
linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
|
||||
osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }}
|
||||
linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
|
||||
linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
|
||||
osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }}
|
||||
linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
|
||||
linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
|
||||
osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }}
|
||||
strategy:
|
||||
matrix:
|
||||
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64 ]
|
||||
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64, win-arm64 ]
|
||||
include:
|
||||
- runtime: linux-x64
|
||||
os: ubuntu-latest
|
||||
@@ -89,18 +71,22 @@ jobs:
|
||||
- runtime: osx-x64
|
||||
os: macOS-latest
|
||||
devScript: ./dev.sh
|
||||
|
||||
|
||||
- runtime: osx-arm64
|
||||
os: macOS-latest
|
||||
devScript: ./dev.sh
|
||||
|
||||
- runtime: win-x64
|
||||
os: windows-2019
|
||||
os: windows-latest
|
||||
devScript: ./dev
|
||||
|
||||
- runtime: win-arm64
|
||||
os: windows-latest
|
||||
devScript: ./dev
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
# Build runner layout
|
||||
- name: Build & Layout Release
|
||||
@@ -123,154 +109,91 @@ jobs:
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
|
||||
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
id: sha
|
||||
name: Compute SHA256
|
||||
working-directory: _package
|
||||
- run: |
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
|
||||
echo "::set-output name=sha256::$sha"
|
||||
shell: bash
|
||||
id: sha_noexternals
|
||||
name: Compute SHA256
|
||||
working-directory: _package_trims/trim_externals
|
||||
- run: |
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
|
||||
echo "::set-output name=sha256::$sha"
|
||||
shell: bash
|
||||
id: sha_noruntime
|
||||
name: Compute SHA256
|
||||
working-directory: _package_trims/trim_runtime
|
||||
- run: |
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
|
||||
echo "::set-output name=sha256::$sha"
|
||||
shell: bash
|
||||
id: sha_noruntime_noexternals
|
||||
name: Compute SHA256
|
||||
working-directory: _package_trims/trim_runtime_externals
|
||||
|
||||
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
|
||||
if: matrix.runtime == 'win-x64'
|
||||
uses: actions/github-script@0.3.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
|
||||
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
|
||||
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
|
||||
|
||||
console.log(trimmedPackages)
|
||||
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
|
||||
|
||||
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
|
||||
if: matrix.runtime != 'win-x64'
|
||||
uses: actions/github-script@0.3.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
|
||||
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
|
||||
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
|
||||
|
||||
console.log(trimmedPackages)
|
||||
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
|
||||
|
||||
# Upload runner package tar.gz/zip as artifact.
|
||||
# Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
|
||||
- name: Publish Artifact
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: runner-packages
|
||||
name: runner-packages-${{ matrix.runtime }}
|
||||
path: |
|
||||
_package
|
||||
_package_trims/trim_externals
|
||||
_package_trims/trim_runtime
|
||||
_package_trims/trim_runtime_externals
|
||||
${{ matrix.runtime }}-trimmedpackages.json
|
||||
|
||||
release:
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
# Download runner package tar.gz/zip produced by 'build' job
|
||||
- name: Download Artifact
|
||||
uses: actions/download-artifact@v1
|
||||
- name: Download Artifact (win-x64)
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: runner-packages
|
||||
name: runner-packages-win-x64
|
||||
path: ./
|
||||
- name: Download Artifact (win-arm64)
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: runner-packages-win-arm64
|
||||
path: ./
|
||||
- name: Download Artifact (osx-x64)
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: runner-packages-osx-x64
|
||||
path: ./
|
||||
- name: Download Artifact (osx-arm64)
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: runner-packages-osx-arm64
|
||||
path: ./
|
||||
- name: Download Artifact (linux-x64)
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: runner-packages-linux-x64
|
||||
path: ./
|
||||
- name: Download Artifact (linux-arm)
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: runner-packages-linux-arm
|
||||
path: ./
|
||||
- name: Download Artifact (linux-arm64)
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: runner-packages-linux-arm64
|
||||
path: ./
|
||||
|
||||
# Create ReleaseNote file
|
||||
- name: Create ReleaseNote
|
||||
id: releaseNote
|
||||
uses: actions/github-script@0.3.0
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
var releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA>/g, '${{needs.build.outputs.win-x64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA>/g, '${{needs.build.outputs.win-arm64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA>/g, '${{needs.build.outputs.osx-x64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA>/g, '${{needs.build.outputs.osx-arm64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
|
||||
console.log(releaseNote)
|
||||
core.setOutput('version', runnerVersion);
|
||||
core.setOutput('note', releaseNote);
|
||||
|
||||
- name: Validate Packages HASH
|
||||
working-directory: _package
|
||||
run: |
|
||||
ls -l
|
||||
echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
|
||||
echo "${{needs.build.outputs.win-arm64-sha}} actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
|
||||
echo "${{needs.build.outputs.osx-x64-sha}} actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
|
||||
echo "${{needs.build.outputs.osx-arm64-sha}} actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
|
||||
echo "${{needs.build.outputs.linux-x64-sha}} actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
|
||||
@@ -291,305 +214,133 @@ jobs:
|
||||
|
||||
# Upload release assets (full runner packages)
|
||||
- name: Upload Release Asset (win-x64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
- name: Upload Release Asset (win-arm64)
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64)
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim externals)
|
||||
- name: Upload Release Asset (win-x64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
publish-image:
|
||||
needs: release
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
attestations: write
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Upload Release Asset (linux-x64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
- name: Compute image version
|
||||
id: image
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '')
|
||||
console.log(`Using runner version ${runnerVersion}`)
|
||||
core.setOutput('version', runnerVersion);
|
||||
|
||||
- name: Upload Release Asset (osx-x64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
- name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload Release Asset (linux-arm-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ./images
|
||||
platforms: |
|
||||
linux/amd64
|
||||
linux/arm64
|
||||
tags: |
|
||||
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
|
||||
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
|
||||
build-args: |
|
||||
RUNNER_VERSION=${{ steps.image.outputs.version }}
|
||||
push: true
|
||||
labels: |
|
||||
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
||||
org.opencontainers.image.licenses=MIT
|
||||
annotations: |
|
||||
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime)
|
||||
- name: Upload Release Asset (win-x64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime and externals)
|
||||
- name: Upload Release Asset (win-x64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trimmedpackages.json)
|
||||
- name: Upload Release Asset (win-x64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
- name: Generate attestation
|
||||
uses: actions/attest-build-provenance@v3
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
|
||||
16
.github/workflows/stale-bot.yml
vendored
Normal file
16
.github/workflows/stale-bot.yml
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
name: Stale Bot
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 0 * * 1' # every monday at midnight
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v10
|
||||
with:
|
||||
stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
|
||||
close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."
|
||||
exempt-issue-labels: "keep"
|
||||
days-before-stale: 365
|
||||
days-before-close: 15
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -26,4 +26,5 @@ _dotnetsdk
|
||||
TestResults
|
||||
TestLogs
|
||||
.DS_Store
|
||||
.mono
|
||||
**/*.DotSettings.user
|
||||
1
.husky/pre-commit
Executable file
1
.husky/pre-commit
Executable file
@@ -0,0 +1 @@
|
||||
cd src/Misc/expressionFunc/hashFiles && npx lint-staged
|
||||
1176
.opencode/plans/dap-browser-extension.md
Normal file
1176
.opencode/plans/dap-browser-extension.md
Normal file
File diff suppressed because it is too large
Load Diff
346
.opencode/plans/dap-cancellation-support.md
Normal file
346
.opencode/plans/dap-cancellation-support.md
Normal file
@@ -0,0 +1,346 @@
|
||||
# DAP Cancellation Support
|
||||
|
||||
**Status:** Implemented
|
||||
**Author:** OpenCode
|
||||
**Date:** January 2026
|
||||
|
||||
## Problem
|
||||
|
||||
When a cancellation signal for the current job comes in from the server, the DAP debugging session doesn't properly respond. If the runner is paused at a breakpoint waiting for debugger commands (or if a debugger never connects), the job gets stuck forever and requires manually deleting the runner.
|
||||
|
||||
### Root Cause
|
||||
|
||||
The `DapDebugSession.WaitForCommandAsync()` method uses a `TaskCompletionSource` that only completes when a DAP command arrives from the debugger. There's no mechanism to interrupt this wait when the job is cancelled externally.
|
||||
|
||||
Additionally, REPL shell commands use `CancellationToken.None`, so they also ignore job cancellation.
|
||||
|
||||
## Solution
|
||||
|
||||
Add proper cancellation token support throughout the DAP debugging flow:
|
||||
|
||||
1. Pass the job cancellation token to `OnStepStartingAsync` and `WaitForCommandAsync`
|
||||
2. Register cancellation callbacks to release blocking waits
|
||||
3. Add a `CancelSession()` method for external cancellation
|
||||
4. Send DAP `terminated` and `exited` events to notify the debugger before cancelling
|
||||
5. Use the cancellation token for REPL shell command execution
|
||||
|
||||
## Progress Checklist
|
||||
|
||||
- [x] **Phase 1:** Update IDapDebugSession interface
|
||||
- [x] **Phase 2:** Update DapDebugSession implementation
|
||||
- [x] **Phase 3:** Update StepsRunner to pass cancellation token
|
||||
- [x] **Phase 4:** Update JobRunner to register cancellation handler
|
||||
- [ ] **Phase 5:** Testing
|
||||
|
||||
## Files to Modify
|
||||
|
||||
| File | Changes |
|
||||
|------|---------|
|
||||
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Add cancellation support to `OnStepStartingAsync`, `WaitForCommandAsync`, `ExecuteShellCommandAsync`, add `CancelSession` method |
|
||||
| `src/Runner.Worker/StepsRunner.cs` | Pass `jobContext.CancellationToken` to `OnStepStartingAsync` |
|
||||
| `src/Runner.Worker/JobRunner.cs` | Register cancellation callback to call `CancelSession` on the debug session |
|
||||
|
||||
## Detailed Implementation
|
||||
|
||||
### Phase 1: Update IDapDebugSession Interface
|
||||
|
||||
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs` (lines ~144-242)
|
||||
|
||||
Add new method to interface:
|
||||
|
||||
```csharp
|
||||
/// <summary>
|
||||
/// Cancels the debug session externally (e.g., job cancellation).
|
||||
/// Sends terminated event to debugger and releases any blocking waits.
|
||||
/// </summary>
|
||||
void CancelSession();
|
||||
```
|
||||
|
||||
Update existing method signature:
|
||||
|
||||
```csharp
|
||||
// Change from:
|
||||
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, bool isFirstStep);
|
||||
|
||||
// Change to:
|
||||
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, bool isFirstStep, CancellationToken cancellationToken);
|
||||
```
|
||||
|
||||
### Phase 2: Update DapDebugSession Implementation
|
||||
|
||||
#### 2.1 Add cancellation token field
|
||||
|
||||
**Location:** Around line 260-300 (field declarations section)
|
||||
|
||||
```csharp
|
||||
// Add field to store the job cancellation token for use by REPL commands
|
||||
private CancellationToken _jobCancellationToken;
|
||||
```
|
||||
|
||||
#### 2.2 Update OnStepStartingAsync
|
||||
|
||||
**Location:** Line 1159
|
||||
|
||||
```csharp
|
||||
public async Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, bool isFirstStep, CancellationToken cancellationToken)
|
||||
{
|
||||
if (!IsActive)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
_currentStep = step;
|
||||
_jobContext = jobContext;
|
||||
_jobCancellationToken = cancellationToken; // Store for REPL commands
|
||||
|
||||
// ... rest of existing implementation ...
|
||||
|
||||
// Update the WaitForCommandAsync call at line 1212:
|
||||
await WaitForCommandAsync(cancellationToken);
|
||||
}
|
||||
```
|
||||
|
||||
#### 2.3 Update WaitForCommandAsync
|
||||
|
||||
**Location:** Line 1288
|
||||
|
||||
```csharp
|
||||
private async Task WaitForCommandAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
lock (_stateLock)
|
||||
{
|
||||
_state = DapSessionState.Paused;
|
||||
_commandTcs = new TaskCompletionSource<DapCommand>(TaskCreationOptions.RunContinuationsAsynchronously);
|
||||
}
|
||||
|
||||
Trace.Info("Waiting for debugger command...");
|
||||
|
||||
// Register cancellation to release the wait
|
||||
using (cancellationToken.Register(() =>
|
||||
{
|
||||
Trace.Info("Job cancellation detected, releasing debugger wait");
|
||||
_commandTcs?.TrySetResult(DapCommand.Disconnect);
|
||||
}))
|
||||
{
|
||||
var command = await _commandTcs.Task;
|
||||
|
||||
Trace.Info($"Received command: {command}");
|
||||
|
||||
lock (_stateLock)
|
||||
{
|
||||
if (_state == DapSessionState.Paused)
|
||||
{
|
||||
_state = DapSessionState.Running;
|
||||
}
|
||||
}
|
||||
|
||||
// Send continued event (only for normal commands, not cancellation)
|
||||
if (!cancellationToken.IsCancellationRequested &&
|
||||
(command == DapCommand.Continue || command == DapCommand.Next))
|
||||
{
|
||||
_server?.SendEvent(new Event
|
||||
{
|
||||
EventType = "continued",
|
||||
Body = new ContinuedEventBody
|
||||
{
|
||||
ThreadId = JobThreadId,
|
||||
AllThreadsContinued = true
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### 2.4 Add CancelSession method
|
||||
|
||||
**Location:** After `OnJobCompleted()` method, around line 1286
|
||||
|
||||
```csharp
|
||||
/// <summary>
|
||||
/// Cancels the debug session externally (e.g., job cancellation).
|
||||
/// Sends terminated/exited events to debugger and releases any blocking waits.
|
||||
/// </summary>
|
||||
public void CancelSession()
|
||||
{
|
||||
Trace.Info("CancelSession called - terminating debug session");
|
||||
|
||||
lock (_stateLock)
|
||||
{
|
||||
if (_state == DapSessionState.Terminated)
|
||||
{
|
||||
Trace.Info("Session already terminated, ignoring CancelSession");
|
||||
return;
|
||||
}
|
||||
_state = DapSessionState.Terminated;
|
||||
}
|
||||
|
||||
// Send terminated event to debugger so it updates its UI
|
||||
_server?.SendEvent(new Event
|
||||
{
|
||||
EventType = "terminated",
|
||||
Body = new TerminatedEventBody()
|
||||
});
|
||||
|
||||
// Send exited event with cancellation exit code (130 = SIGINT convention)
|
||||
_server?.SendEvent(new Event
|
||||
{
|
||||
EventType = "exited",
|
||||
Body = new ExitedEventBody { ExitCode = 130 }
|
||||
});
|
||||
|
||||
// Release any pending command waits
|
||||
_commandTcs?.TrySetResult(DapCommand.Disconnect);
|
||||
|
||||
Trace.Info("Debug session cancelled");
|
||||
}
|
||||
```
|
||||
|
||||
#### 2.5 Update ExecuteShellCommandAsync
|
||||
|
||||
**Location:** Line 889-895
|
||||
|
||||
Change the `ExecuteAsync` call to use the stored cancellation token:
|
||||
|
||||
```csharp
|
||||
int exitCode;
|
||||
try
|
||||
{
|
||||
exitCode = await processInvoker.ExecuteAsync(
|
||||
workingDirectory: workingDirectory,
|
||||
fileName: shell,
|
||||
arguments: string.Format(shellArgs, command),
|
||||
environment: env,
|
||||
requireExitCodeZero: false,
|
||||
cancellationToken: _jobCancellationToken); // Changed from CancellationToken.None
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Trace.Info("Shell command cancelled due to job cancellation");
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = "(cancelled)",
|
||||
Type = "error",
|
||||
VariablesReference = 0
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Shell execution failed: {ex}");
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = $"Error: {ex.Message}",
|
||||
Type = "error",
|
||||
VariablesReference = 0
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### Phase 3: Update StepsRunner
|
||||
|
||||
**File:** `src/Runner.Worker/StepsRunner.cs`
|
||||
**Location:** Line 204
|
||||
|
||||
Change:
|
||||
```csharp
|
||||
await debugSession.OnStepStartingAsync(step, jobContext, isFirstStep);
|
||||
```
|
||||
|
||||
To:
|
||||
```csharp
|
||||
await debugSession.OnStepStartingAsync(step, jobContext, isFirstStep, jobContext.CancellationToken);
|
||||
```
|
||||
|
||||
### Phase 4: Update JobRunner
|
||||
|
||||
**File:** `src/Runner.Worker/JobRunner.cs`
|
||||
|
||||
#### 4.1 Add cancellation registration
|
||||
|
||||
**Location:** After line 191 (after "Debugger connected" output), inside the debug mode block:
|
||||
|
||||
```csharp
|
||||
// Register cancellation handler to properly terminate DAP session on job cancellation
|
||||
CancellationTokenRegistration? dapCancellationRegistration = null;
|
||||
try
|
||||
{
|
||||
dapCancellationRegistration = jobRequestCancellationToken.Register(() =>
|
||||
{
|
||||
Trace.Info("Job cancelled - terminating DAP session");
|
||||
debugSession.CancelSession();
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Warning($"Failed to register DAP cancellation handler: {ex.Message}");
|
||||
}
|
||||
```
|
||||
|
||||
Note: The `dapCancellationRegistration` variable should be declared at a higher scope (around line 116 with other declarations) so it can be disposed in the finally block.
|
||||
|
||||
#### 4.2 Dispose the registration
|
||||
|
||||
**Location:** In the finally block (after line 316, alongside dapServer cleanup):
|
||||
|
||||
```csharp
|
||||
// Dispose DAP cancellation registration
|
||||
dapCancellationRegistration?.Dispose();
|
||||
```
|
||||
|
||||
## Behavior Summary
|
||||
|
||||
| Scenario | Before | After |
|
||||
|----------|--------|-------|
|
||||
| Paused at breakpoint, job cancelled | **Stuck forever** | DAP terminated event sent, wait released, job cancels normally |
|
||||
| REPL command running, job cancelled | Command runs forever | Command cancelled, job cancels normally |
|
||||
| Waiting for debugger connection, job cancelled | Already handled | No change (already works) |
|
||||
| Debugger disconnects voluntarily | Works | No change |
|
||||
| Normal step execution, job cancelled | Works | No change (existing cancellation logic handles this) |
|
||||
|
||||
## Exit Code Semantics
|
||||
|
||||
The `exited` event uses these exit codes:
|
||||
- `0` = job succeeded
|
||||
- `1` = job failed
|
||||
- `130` = job cancelled (standard Unix convention for SIGINT/Ctrl+C)
|
||||
|
||||
## Testing Scenarios
|
||||
|
||||
1. **Basic cancellation while paused:**
|
||||
- Start a debug job, let it pause at first step
|
||||
- Cancel the job from GitHub UI
|
||||
- Verify: DAP client receives terminated event, runner exits cleanly
|
||||
|
||||
2. **Cancellation during REPL command:**
|
||||
- Pause at a step, run `!sleep 60` in REPL
|
||||
- Cancel the job from GitHub UI
|
||||
- Verify: Sleep command terminates, DAP client receives terminated event, runner exits cleanly
|
||||
|
||||
3. **Cancellation before debugger connects:**
|
||||
- Start a debug job (it waits for connection)
|
||||
- Cancel the job before connecting a debugger
|
||||
- Verify: Runner exits cleanly (this already works, just verify no regression)
|
||||
|
||||
4. **Normal operation (no cancellation):**
|
||||
- Run through a debug session normally with step/continue
|
||||
- Verify: No change in behavior
|
||||
|
||||
5. **Debugger disconnect:**
|
||||
- Connect debugger, then disconnect it manually
|
||||
- Verify: Job continues to completion (existing behavior preserved)
|
||||
|
||||
## Estimated Effort
|
||||
|
||||
| Phase | Effort |
|
||||
|-------|--------|
|
||||
| Phase 1: Interface update | 15 min |
|
||||
| Phase 2: DapDebugSession implementation | 45 min |
|
||||
| Phase 3: StepsRunner update | 5 min |
|
||||
| Phase 4: JobRunner update | 15 min |
|
||||
| Phase 5: Testing | 30 min |
|
||||
| **Total** | **~2 hours** |
|
||||
|
||||
## References
|
||||
|
||||
- DAP Specification: https://microsoft.github.io/debug-adapter-protocol/specification
|
||||
- Related plan: `dap-debugging.md` (original DAP implementation)
|
||||
511
.opencode/plans/dap-debug-logging.md
Normal file
511
.opencode/plans/dap-debug-logging.md
Normal file
@@ -0,0 +1,511 @@
|
||||
# DAP Debug Logging Feature
|
||||
|
||||
**Status:** Implemented
|
||||
**Date:** January 2026
|
||||
**Related:** [dap-debugging.md](./dap-debugging.md), [dap-step-backwards.md](./dap-step-backwards.md)
|
||||
|
||||
## Overview
|
||||
|
||||
Add comprehensive debug logging to the DAP debugging infrastructure that can be toggled from the DAP client. This helps diagnose issues like step conclusions not updating correctly after step-back operations.
|
||||
|
||||
## Features
|
||||
|
||||
### 1. Debug Log Levels
|
||||
|
||||
| Level | Value | What Gets Logged |
|
||||
|-------|-------|------------------|
|
||||
| `Off` | 0 | Nothing |
|
||||
| `Minimal` | 1 | Errors, critical state changes |
|
||||
| `Normal` | 2 | Step lifecycle, checkpoint operations |
|
||||
| `Verbose` | 3 | Everything including outputs, expressions |
|
||||
|
||||
### 2. Enabling Debug Logging
|
||||
|
||||
#### Via Attach Arguments (nvim-dap config)
|
||||
|
||||
```lua
|
||||
{
|
||||
type = "runner",
|
||||
request = "attach",
|
||||
debugLogging = true, -- Enable debug logging (defaults to "normal" level)
|
||||
debugLogLevel = "verbose", -- Optional: "off", "minimal", "normal", "verbose"
|
||||
}
|
||||
```
|
||||
|
||||
#### Via REPL Commands (runtime toggle)
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `!debug on` | Enable debug logging (level: normal) |
|
||||
| `!debug off` | Disable debug logging |
|
||||
| `!debug minimal` | Set level to minimal |
|
||||
| `!debug normal` | Set level to normal |
|
||||
| `!debug verbose` | Set level to verbose |
|
||||
| `!debug status` | Show current debug settings |
|
||||
|
||||
### 3. Log Output Format
|
||||
|
||||
All debug logs are sent to the DAP console with the format:
|
||||
|
||||
```
|
||||
[DEBUG] [Category] Message
|
||||
```
|
||||
|
||||
Categories include:
|
||||
- `[Step]` - Step lifecycle events
|
||||
- `[Checkpoint]` - Checkpoint creation/restoration
|
||||
- `[StepsContext]` - Steps context mutations (SetOutcome, SetConclusion, SetOutput, ClearScope)
|
||||
|
||||
### 4. Example Output
|
||||
|
||||
With `!debug verbose` enabled:
|
||||
|
||||
```
|
||||
[DEBUG] [Step] Starting: 'cat doesnotexist' (index=2)
|
||||
[DEBUG] [Step] Checkpoints available: 2
|
||||
[DEBUG] [StepsContext] SetOutcome: step='thecat', outcome=failure
|
||||
[DEBUG] [StepsContext] SetConclusion: step='thecat', conclusion=failure
|
||||
[DEBUG] [Step] Completed: 'cat doesnotexist', result=Failed
|
||||
[DEBUG] [Step] Context state: outcome=failure, conclusion=failure
|
||||
|
||||
# After step-back:
|
||||
[DEBUG] [Checkpoint] Restoring checkpoint [1] for step 'cat doesnotexist'
|
||||
[DEBUG] [StepsContext] ClearScope: scope='(root)'
|
||||
[DEBUG] [StepsContext] Restoring: clearing scope '(root)', restoring 2 step(s)
|
||||
[DEBUG] [StepsContext] Restored: step='thefoo', outcome=success, conclusion=success
|
||||
|
||||
# After re-running with file created:
|
||||
[DEBUG] [Step] Starting: 'cat doesnotexist' (index=2)
|
||||
[DEBUG] [StepsContext] SetOutcome: step='thecat', outcome=success
|
||||
[DEBUG] [StepsContext] SetConclusion: step='thecat', conclusion=success
|
||||
[DEBUG] [Step] Completed: 'cat doesnotexist', result=Succeeded
|
||||
[DEBUG] [Step] Context state: outcome=success, conclusion=success
|
||||
```
|
||||
|
||||
## Implementation
|
||||
|
||||
### Progress Checklist
|
||||
|
||||
- [x] **Phase 1:** Add debug logging infrastructure to DapDebugSession
|
||||
- [x] **Phase 2:** Add REPL `!debug` command handling
|
||||
- [x] **Phase 3:** Add OnDebugLog callback to StepsContext
|
||||
- [x] **Phase 4:** Add debug logging calls throughout DapDebugSession
|
||||
- [x] **Phase 5:** Hook up StepsContext logging to DapDebugSession
|
||||
- [ ] **Phase 6:** Testing
|
||||
|
||||
---
|
||||
|
||||
### Phase 1: Debug Logging Infrastructure
|
||||
|
||||
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||
|
||||
Add enum and helper method:
|
||||
|
||||
```csharp
|
||||
// Add enum for debug log levels (near top of file with other enums)
|
||||
public enum DebugLogLevel
|
||||
{
|
||||
Off = 0,
|
||||
Minimal = 1, // Errors, critical state changes
|
||||
Normal = 2, // Step lifecycle, checkpoints
|
||||
Verbose = 3 // Everything including outputs, expressions
|
||||
}
|
||||
|
||||
// Add field (with other private fields)
|
||||
private DebugLogLevel _debugLogLevel = DebugLogLevel.Off;
|
||||
|
||||
// Add helper method (in a #region Debug Logging)
|
||||
private void DebugLog(string message, DebugLogLevel minLevel = DebugLogLevel.Normal)
|
||||
{
|
||||
if (_debugLogLevel >= minLevel)
|
||||
{
|
||||
_server?.SendEvent(new Event
|
||||
{
|
||||
EventType = "output",
|
||||
Body = new OutputEventBody
|
||||
{
|
||||
Category = "console",
|
||||
Output = $"[DEBUG] {message}\n"
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Update `HandleAttach` to parse debug logging arguments:
|
||||
|
||||
```csharp
|
||||
private Response HandleAttach(Request request)
|
||||
{
|
||||
Trace.Info("Attach request handled");
|
||||
|
||||
// Parse debug logging from attach args
|
||||
if (request.Arguments is JsonElement args)
|
||||
{
|
||||
if (args.TryGetProperty("debugLogging", out var debugLogging))
|
||||
{
|
||||
if (debugLogging.ValueKind == JsonValueKind.True)
|
||||
{
|
||||
_debugLogLevel = DebugLogLevel.Normal;
|
||||
Trace.Info("Debug logging enabled via attach args (level: normal)");
|
||||
}
|
||||
}
|
||||
if (args.TryGetProperty("debugLogLevel", out var level) && level.ValueKind == JsonValueKind.String)
|
||||
{
|
||||
_debugLogLevel = level.GetString()?.ToLower() switch
|
||||
{
|
||||
"minimal" => DebugLogLevel.Minimal,
|
||||
"normal" => DebugLogLevel.Normal,
|
||||
"verbose" => DebugLogLevel.Verbose,
|
||||
"off" => DebugLogLevel.Off,
|
||||
_ => _debugLogLevel
|
||||
};
|
||||
Trace.Info($"Debug log level set via attach args: {_debugLogLevel}");
|
||||
}
|
||||
}
|
||||
|
||||
return CreateSuccessResponse(null);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Phase 2: REPL `!debug` Command
|
||||
|
||||
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||
|
||||
In `HandleEvaluateAsync`, add handling for `!debug` command before other shell command handling:
|
||||
|
||||
```csharp
|
||||
// Near the start of HandleEvaluateAsync, after getting the expression:
|
||||
|
||||
// Check for debug command
|
||||
if (expression.StartsWith("!debug", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return HandleDebugCommand(expression);
|
||||
}
|
||||
|
||||
// ... rest of existing HandleEvaluateAsync code
|
||||
```
|
||||
|
||||
Add the handler method:
|
||||
|
||||
```csharp
|
||||
private Response HandleDebugCommand(string command)
|
||||
{
|
||||
var parts = command.Split(' ', StringSplitOptions.RemoveEmptyEntries);
|
||||
var arg = parts.Length > 1 ? parts[1].ToLower() : "status";
|
||||
|
||||
string result;
|
||||
switch (arg)
|
||||
{
|
||||
case "on":
|
||||
_debugLogLevel = DebugLogLevel.Normal;
|
||||
result = "Debug logging enabled (level: normal)";
|
||||
break;
|
||||
case "off":
|
||||
_debugLogLevel = DebugLogLevel.Off;
|
||||
result = "Debug logging disabled";
|
||||
break;
|
||||
case "minimal":
|
||||
_debugLogLevel = DebugLogLevel.Minimal;
|
||||
result = "Debug logging set to minimal";
|
||||
break;
|
||||
case "normal":
|
||||
_debugLogLevel = DebugLogLevel.Normal;
|
||||
result = "Debug logging set to normal";
|
||||
break;
|
||||
case "verbose":
|
||||
_debugLogLevel = DebugLogLevel.Verbose;
|
||||
result = "Debug logging set to verbose";
|
||||
break;
|
||||
case "status":
|
||||
default:
|
||||
result = $"Debug logging: {_debugLogLevel}";
|
||||
break;
|
||||
}
|
||||
|
||||
return CreateSuccessResponse(new EvaluateResponseBody
|
||||
{
|
||||
Result = result,
|
||||
VariablesReference = 0
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Phase 3: StepsContext OnDebugLog Callback
|
||||
|
||||
**File:** `src/Runner.Worker/StepsContext.cs`
|
||||
|
||||
Add callback property and helper:
|
||||
|
||||
```csharp
|
||||
public sealed class StepsContext
|
||||
{
|
||||
private static readonly Regex _propertyRegex = new("^[a-zA-Z_][a-zA-Z0-9_]*$", RegexOptions.Compiled);
|
||||
private readonly DictionaryContextData _contextData = new();
|
||||
|
||||
/// <summary>
|
||||
/// Optional callback for debug logging. When set, will be called with debug messages
|
||||
/// for all StepsContext mutations.
|
||||
/// </summary>
|
||||
public Action<string> OnDebugLog { get; set; }
|
||||
|
||||
private void DebugLog(string message)
|
||||
{
|
||||
OnDebugLog?.Invoke(message);
|
||||
}
|
||||
|
||||
// ... rest of class
|
||||
}
|
||||
```
|
||||
|
||||
Update `ClearScope`:
|
||||
|
||||
```csharp
|
||||
public void ClearScope(string scopeName)
|
||||
{
|
||||
DebugLog($"[StepsContext] ClearScope: scope='{scopeName ?? "(root)"}'");
|
||||
if (_contextData.TryGetValue(scopeName, out _))
|
||||
{
|
||||
_contextData[scopeName] = new DictionaryContextData();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Update `SetOutput`:
|
||||
|
||||
```csharp
|
||||
public void SetOutput(
|
||||
string scopeName,
|
||||
string stepName,
|
||||
string outputName,
|
||||
string value,
|
||||
out string reference)
|
||||
{
|
||||
var step = GetStep(scopeName, stepName);
|
||||
var outputs = step["outputs"].AssertDictionary("outputs");
|
||||
outputs[outputName] = new StringContextData(value);
|
||||
if (_propertyRegex.IsMatch(outputName))
|
||||
{
|
||||
reference = $"steps.{stepName}.outputs.{outputName}";
|
||||
}
|
||||
else
|
||||
{
|
||||
reference = $"steps['{stepName}']['outputs']['{outputName}']";
|
||||
}
|
||||
DebugLog($"[StepsContext] SetOutput: step='{stepName}', output='{outputName}', value='{TruncateValue(value)}'");
|
||||
}
|
||||
|
||||
private static string TruncateValue(string value, int maxLength = 50)
|
||||
{
|
||||
if (string.IsNullOrEmpty(value)) return "(empty)";
|
||||
if (value.Length <= maxLength) return value;
|
||||
return value.Substring(0, maxLength) + "...";
|
||||
}
|
||||
```
|
||||
|
||||
Update `SetConclusion`:
|
||||
|
||||
```csharp
|
||||
public void SetConclusion(
|
||||
string scopeName,
|
||||
string stepName,
|
||||
ActionResult conclusion)
|
||||
{
|
||||
var step = GetStep(scopeName, stepName);
|
||||
var conclusionStr = conclusion.ToString().ToLowerInvariant();
|
||||
step["conclusion"] = new StringContextData(conclusionStr);
|
||||
DebugLog($"[StepsContext] SetConclusion: step='{stepName}', conclusion={conclusionStr}");
|
||||
}
|
||||
```
|
||||
|
||||
Update `SetOutcome`:
|
||||
|
||||
```csharp
|
||||
public void SetOutcome(
|
||||
string scopeName,
|
||||
string stepName,
|
||||
ActionResult outcome)
|
||||
{
|
||||
var step = GetStep(scopeName, stepName);
|
||||
var outcomeStr = outcome.ToString().ToLowerInvariant();
|
||||
step["outcome"] = new StringContextData(outcomeStr);
|
||||
DebugLog($"[StepsContext] SetOutcome: step='{stepName}', outcome={outcomeStr}");
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Phase 4: DapDebugSession Logging Calls
|
||||
|
||||
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||
|
||||
#### In `OnStepStartingAsync` (after setting `_currentStep` and `_jobContext`):
|
||||
|
||||
```csharp
|
||||
DebugLog($"[Step] Starting: '{step.DisplayName}' (index={stepIndex})");
|
||||
DebugLog($"[Step] Checkpoints available: {_checkpoints.Count}");
|
||||
```
|
||||
|
||||
#### In `OnStepCompleted` (after logging to Trace):
|
||||
|
||||
```csharp
|
||||
DebugLog($"[Step] Completed: '{step.DisplayName}', result={result}");
|
||||
|
||||
// Log current steps context state for this step
|
||||
if (_debugLogLevel >= DebugLogLevel.Normal)
|
||||
{
|
||||
var stepsScope = step.ExecutionContext?.Global?.StepsContext?.GetScope(step.ExecutionContext.ScopeName);
|
||||
if (stepsScope != null && !string.IsNullOrEmpty(step.ExecutionContext?.ContextName))
|
||||
{
|
||||
if (stepsScope.TryGetValue(step.ExecutionContext.ContextName, out var stepData) && stepData is DictionaryContextData sd)
|
||||
{
|
||||
var outcome = sd.TryGetValue("outcome", out var o) && o is StringContextData os ? os.Value : "null";
|
||||
var conclusion = sd.TryGetValue("conclusion", out var c) && c is StringContextData cs ? cs.Value : "null";
|
||||
DebugLog($"[Step] Context state: outcome={outcome}, conclusion={conclusion}");
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### In `CreateCheckpointForPendingStep` (after creating checkpoint):
|
||||
|
||||
```csharp
|
||||
DebugLog($"[Checkpoint] Created [{_checkpoints.Count - 1}] for step '{_pendingStep.DisplayName}'");
|
||||
if (_debugLogLevel >= DebugLogLevel.Verbose)
|
||||
{
|
||||
DebugLog($"[Checkpoint] Snapshot contains {checkpoint.StepsSnapshot.Count} step(s)", DebugLogLevel.Verbose);
|
||||
foreach (var entry in checkpoint.StepsSnapshot)
|
||||
{
|
||||
DebugLog($"[Checkpoint] {entry.Key}: outcome={entry.Value.Outcome}, conclusion={entry.Value.Conclusion}", DebugLogLevel.Verbose);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### In `RestoreCheckpoint` (at start of method):
|
||||
|
||||
```csharp
|
||||
DebugLog($"[Checkpoint] Restoring [{checkpointIndex}] for step '{checkpoint.StepDisplayName}'");
|
||||
if (_debugLogLevel >= DebugLogLevel.Verbose)
|
||||
{
|
||||
DebugLog($"[Checkpoint] Snapshot has {checkpoint.StepsSnapshot.Count} step(s)", DebugLogLevel.Verbose);
|
||||
}
|
||||
```
|
||||
|
||||
#### In `RestoreStepsContext` (update existing method):
|
||||
|
||||
```csharp
|
||||
private void RestoreStepsContext(StepsContext stepsContext, Dictionary<string, StepStateSnapshot> snapshot, string scopeName)
|
||||
{
|
||||
scopeName = scopeName ?? string.Empty;
|
||||
|
||||
DebugLog($"[StepsContext] Restoring: clearing scope '{(string.IsNullOrEmpty(scopeName) ? "(root)" : scopeName)}', will restore {snapshot.Count} step(s)");
|
||||
|
||||
stepsContext.ClearScope(scopeName);
|
||||
|
||||
foreach (var entry in snapshot)
|
||||
{
|
||||
var key = entry.Key;
|
||||
var slashIndex = key.IndexOf('/');
|
||||
|
||||
if (slashIndex >= 0)
|
||||
{
|
||||
var snapshotScopeName = slashIndex > 0 ? key.Substring(0, slashIndex) : string.Empty;
|
||||
var stepName = key.Substring(slashIndex + 1);
|
||||
|
||||
if (snapshotScopeName == scopeName)
|
||||
{
|
||||
var state = entry.Value;
|
||||
|
||||
if (state.Outcome.HasValue)
|
||||
{
|
||||
stepsContext.SetOutcome(scopeName, stepName, state.Outcome.Value);
|
||||
}
|
||||
if (state.Conclusion.HasValue)
|
||||
{
|
||||
stepsContext.SetConclusion(scopeName, stepName, state.Conclusion.Value);
|
||||
}
|
||||
|
||||
if (state.Outputs != null)
|
||||
{
|
||||
foreach (var output in state.Outputs)
|
||||
{
|
||||
stepsContext.SetOutput(scopeName, stepName, output.Key, output.Value, out _);
|
||||
}
|
||||
}
|
||||
|
||||
DebugLog($"[StepsContext] Restored: step='{stepName}', outcome={state.Outcome}, conclusion={state.Conclusion}", DebugLogLevel.Verbose);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Steps context restored: cleared scope '{scopeName}' and restored {snapshot.Count} step(s) from snapshot");
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Phase 5: Hook Up StepsContext Logging
|
||||
|
||||
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||
|
||||
In `OnStepStartingAsync`, after setting `_jobContext`, hook up the callback (only once):
|
||||
|
||||
```csharp
|
||||
// Hook up StepsContext debug logging (do this once when we first get jobContext)
|
||||
if (jobContext.Global.StepsContext.OnDebugLog == null)
|
||||
{
|
||||
jobContext.Global.StepsContext.OnDebugLog = (msg) => DebugLog(msg, DebugLogLevel.Verbose);
|
||||
}
|
||||
```
|
||||
|
||||
**Note:** StepsContext logging is routed at the `Verbose` level since `SetOutput` can be noisy. As a result, `SetConclusion` and `SetOutcome` only appear at `Verbose` level too, but the important state changes are additionally logged directly in `OnStepCompleted` at `Normal` level.
|
||||
|
||||
---
|
||||
|
||||
### Phase 6: Testing
|
||||
|
||||
#### Manual Testing Checklist
|
||||
|
||||
- [ ] `!debug status` shows "Off" by default
|
||||
- [ ] `!debug on` enables logging, shows step lifecycle
|
||||
- [ ] `!debug verbose` shows StepsContext mutations
|
||||
- [ ] `!debug off` disables logging
|
||||
- [ ] Attach with `debugLogging: true` enables logging on connect
|
||||
- [ ] Attach with `debugLogLevel: "verbose"` sets correct level
|
||||
- [ ] Step-back scenario shows restoration logs
|
||||
- [ ] Logs help identify why conclusion might not update
|
||||
|
||||
#### Test Workflow
|
||||
|
||||
Use the test workflow with the `thecat` step:
|
||||
1. Run workflow, let `thecat` fail
|
||||
2. Enable `!debug verbose`
|
||||
3. Step back
|
||||
4. Create the missing file
|
||||
5. Step forward
|
||||
6. Observe logs to see if `SetConclusion` is called with `success`
|
||||
|
||||
---
|
||||
|
||||
## Files Summary
|
||||
|
||||
### Modified Files
|
||||
|
||||
| File | Changes |
|
||||
|------|---------|
|
||||
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Add `DebugLogLevel` enum, `_debugLogLevel` field, `DebugLog()` helper, `HandleDebugCommand()`, update `HandleAttach`, add logging calls throughout, hook up StepsContext callback |
|
||||
| `src/Runner.Worker/StepsContext.cs` | Add `OnDebugLog` callback, `DebugLog()` helper, `TruncateValue()` helper, add logging to `ClearScope`, `SetOutput`, `SetConclusion`, `SetOutcome` |
|
||||
|
||||
---
|
||||
|
||||
## Future Enhancements (Out of Scope)
|
||||
|
||||
- Additional debug commands (`!debug checkpoints`, `!debug steps`, `!debug env`)
|
||||
- Log to file option
|
||||
- Structured logging with timestamps
|
||||
- Category-based filtering (e.g., only show `[StepsContext]` logs)
|
||||
- Integration with nvim-dap's virtual text for inline debug info
|
||||
299
.opencode/plans/dap-debugging-fixes.md
Normal file
299
.opencode/plans/dap-debugging-fixes.md
Normal file
@@ -0,0 +1,299 @@
|
||||
# DAP Debugging - Bug Fixes and Enhancements
|
||||
|
||||
**Status:** Planned
|
||||
**Date:** January 2026
|
||||
**Related:** [dap-debugging.md](./dap-debugging.md)
|
||||
|
||||
## Overview
|
||||
|
||||
This document tracks bug fixes and enhancements for the DAP debugging implementation after the initial phases were completed.
|
||||
|
||||
## Issues
|
||||
|
||||
### Bug 1: Double Output in REPL Shell Commands
|
||||
|
||||
**Symptom:** Running commands in the REPL shell produces double output - the first one unmasked, the second one with secrets masked.
|
||||
|
||||
**Root Cause:** In `DapDebugSession.ExecuteShellCommandAsync()` (lines 670-773), output is sent to the debugger twice:
|
||||
|
||||
1. **Real-time streaming (unmasked):** Lines 678-712 stream output via DAP `output` events as data arrives from the process - but this output is NOT masked
|
||||
2. **Final result (masked):** Lines 765-769 return the combined output as `EvaluateResponseBody.Result` with secrets masked
|
||||
|
||||
The DAP client displays both the streamed events AND the evaluate response result, causing duplication.
|
||||
|
||||
**Fix:**
|
||||
1. Mask secrets in the real-time streaming output (add `HostContext.SecretMasker.MaskSecrets()` to lines ~690 and ~708)
|
||||
2. Change the final `Result` to only show an exit code summary instead of the full output
|
||||
|
||||
---
|
||||
|
||||
### Bug 2: Expressions Interpreted as Shell Commands
|
||||
|
||||
**Symptom:** Evaluating expressions like `${{github.event_name}} == 'push'` in the Watch/Expressions pane results in them being executed as shell commands instead of being evaluated as GitHub Actions expressions.
|
||||
|
||||
**Root Cause:** In `DapDebugSession.HandleEvaluateAsync()` (line 514), the condition to detect shell commands is too broad:
|
||||
|
||||
```csharp
|
||||
if (evalContext == "repl" || expression.StartsWith("!") || expression.StartsWith("$"))
|
||||
```
|
||||
|
||||
Since `${{github.event_name}}` starts with `$`, it gets routed to shell execution instead of expression evaluation.
|
||||
|
||||
**Fix:**
|
||||
1. Check for `${{` prefix first - these are always GitHub Actions expressions
|
||||
2. Remove the `expression.StartsWith("$")` condition entirely (ambiguous and unnecessary since REPL context handles shell commands)
|
||||
3. Keep `expression.StartsWith("!")` for explicit shell override in non-REPL contexts
|
||||
|
||||
---
|
||||
|
||||
### Enhancement: Expression Interpolation in REPL Commands
|
||||
|
||||
**Request:** When running REPL commands like `echo ${{github.event_name}}`, the `${{ }}` expressions should be expanded before shell execution, similar to how `run:` steps work.
|
||||
|
||||
**Approach:** Add a helper method that uses the existing `PipelineTemplateEvaluator` infrastructure to expand expressions in the command string before passing it to the shell.
|
||||
|
||||
---
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### File: `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||
|
||||
#### Change 1: Mask Real-Time Streaming Output
|
||||
|
||||
**Location:** Lines ~678-712 (OutputDataReceived and ErrorDataReceived handlers)
|
||||
|
||||
**Before:**
|
||||
```csharp
|
||||
processInvoker.OutputDataReceived += (sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
output.AppendLine(args.Data);
|
||||
_server?.SendEvent(new Event
|
||||
{
|
||||
EventType = "output",
|
||||
Body = new OutputEventBody
|
||||
{
|
||||
Category = "stdout",
|
||||
Output = args.Data + "\n" // NOT MASKED
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
**After:**
|
||||
```csharp
|
||||
processInvoker.OutputDataReceived += (sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
output.AppendLine(args.Data);
|
||||
var maskedData = HostContext.SecretMasker.MaskSecrets(args.Data);
|
||||
_server?.SendEvent(new Event
|
||||
{
|
||||
EventType = "output",
|
||||
Body = new OutputEventBody
|
||||
{
|
||||
Category = "stdout",
|
||||
Output = maskedData + "\n"
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
Apply the same change to `ErrorDataReceived` handler (~lines 696-712).
|
||||
|
||||
---
|
||||
|
||||
#### Change 2: Return Only Exit Code in Result
|
||||
|
||||
**Location:** Lines ~767-772 (return statement in ExecuteShellCommandAsync)
|
||||
|
||||
**Before:**
|
||||
```csharp
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = result.TrimEnd('\r', '\n'),
|
||||
Type = exitCode == 0 ? "string" : "error",
|
||||
VariablesReference = 0
|
||||
};
|
||||
```
|
||||
|
||||
**After:**
|
||||
```csharp
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = $"(exit code: {exitCode})",
|
||||
Type = exitCode == 0 ? "string" : "error",
|
||||
VariablesReference = 0
|
||||
};
|
||||
```
|
||||
|
||||
Also remove the result combination logic (lines ~747-762) since we no longer need to build the full result string for the response.
|
||||
|
||||
---
|
||||
|
||||
#### Change 3: Fix Expression vs Shell Routing
|
||||
|
||||
**Location:** Lines ~511-536 (HandleEvaluateAsync method)
|
||||
|
||||
**Before:**
|
||||
```csharp
|
||||
try
|
||||
{
|
||||
// Check if this is a REPL/shell command (context: "repl") or starts with shell prefix
|
||||
if (evalContext == "repl" || expression.StartsWith("!") || expression.StartsWith("$"))
|
||||
{
|
||||
// Shell execution mode
|
||||
var command = expression.TrimStart('!', '$').Trim();
|
||||
// ...
|
||||
}
|
||||
else
|
||||
{
|
||||
// Expression evaluation mode
|
||||
var result = EvaluateExpression(expression, executionContext);
|
||||
return CreateSuccessResponse(result);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**After:**
|
||||
```csharp
|
||||
try
|
||||
{
|
||||
// GitHub Actions expressions start with "${{" - always evaluate as expressions
|
||||
if (expression.StartsWith("${{"))
|
||||
{
|
||||
var result = EvaluateExpression(expression, executionContext);
|
||||
return CreateSuccessResponse(result);
|
||||
}
|
||||
|
||||
// Check if this is a REPL/shell command:
|
||||
// - context is "repl" (from Debug Console pane)
|
||||
// - expression starts with "!" (explicit shell prefix for Watch pane)
|
||||
if (evalContext == "repl" || expression.StartsWith("!"))
|
||||
{
|
||||
// Shell execution mode
|
||||
var command = expression.TrimStart('!').Trim();
|
||||
if (string.IsNullOrEmpty(command))
|
||||
{
|
||||
return CreateSuccessResponse(new EvaluateResponseBody
|
||||
{
|
||||
Result = "(empty command)",
|
||||
Type = "string",
|
||||
VariablesReference = 0
|
||||
});
|
||||
}
|
||||
|
||||
var result = await ExecuteShellCommandAsync(command, executionContext);
|
||||
return CreateSuccessResponse(result);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Expression evaluation mode (Watch pane, hover, etc.)
|
||||
var result = EvaluateExpression(expression, executionContext);
|
||||
return CreateSuccessResponse(result);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### Change 4: Add Expression Expansion Helper Method
|
||||
|
||||
**Location:** Add new method before `ExecuteShellCommandAsync` (~line 667)
|
||||
|
||||
```csharp
|
||||
/// <summary>
|
||||
/// Expands ${{ }} expressions within a command string.
|
||||
/// For example: "echo ${{github.event_name}}" -> "echo push"
|
||||
/// </summary>
|
||||
private string ExpandExpressionsInCommand(string command, IExecutionContext context)
|
||||
{
|
||||
if (string.IsNullOrEmpty(command) || !command.Contains("${{"))
|
||||
{
|
||||
return command;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
// Create a StringToken with the command
|
||||
var token = new StringToken(null, null, null, command);
|
||||
|
||||
// Use the template evaluator to expand expressions
|
||||
var templateEvaluator = context.ToPipelineTemplateEvaluator();
|
||||
var result = templateEvaluator.EvaluateStepDisplayName(
|
||||
token,
|
||||
context.ExpressionValues,
|
||||
context.ExpressionFunctions);
|
||||
|
||||
// Mask secrets in the expanded command
|
||||
result = HostContext.SecretMasker.MaskSecrets(result ?? command);
|
||||
|
||||
Trace.Info($"Expanded command: {result}");
|
||||
return result;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Info($"Expression expansion failed, using original command: {ex.Message}");
|
||||
return command;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Required import:** Add `using GitHub.DistributedTask.ObjectTemplating.Tokens;` at the top of the file if not already present.
|
||||
|
||||
---
|
||||
|
||||
#### Change 5: Use Expression Expansion in Shell Execution
|
||||
|
||||
**Location:** Beginning of `ExecuteShellCommandAsync` method (~line 670)
|
||||
|
||||
**Before:**
|
||||
```csharp
|
||||
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
|
||||
{
|
||||
Trace.Info($"Executing shell command: {command}");
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
**After:**
|
||||
```csharp
|
||||
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
|
||||
{
|
||||
// Expand ${{ }} expressions in the command first
|
||||
command = ExpandExpressionsInCommand(command, context);
|
||||
|
||||
Trace.Info($"Executing shell command: {command}");
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## DAP Context Reference
|
||||
|
||||
For future reference, these are the DAP evaluate context values:
|
||||
|
||||
| DAP Context | Source UI | Behavior |
|
||||
|-------------|-----------|----------|
|
||||
| `"repl"` | Debug Console / REPL pane | Shell execution (with expression expansion) |
|
||||
| `"watch"` | Watch / Expressions pane | Expression evaluation |
|
||||
| `"hover"` | Editor hover (default) | Expression evaluation |
|
||||
| `"variables"` | Variables pane | Expression evaluation |
|
||||
| `"clipboard"` | Copy to clipboard | Expression evaluation |
|
||||
|
||||
---
|
||||
|
||||
## Testing Checklist
|
||||
|
||||
- [ ] REPL command output is masked and appears only once
|
||||
- [ ] REPL command shows exit code in result field
|
||||
- [ ] Expression `${{github.event_name}}` evaluates correctly in Watch pane
|
||||
- [ ] Expression `${{github.event_name}} == 'push'` evaluates correctly
|
||||
- [ ] REPL command `echo ${{github.event_name}}` expands and executes correctly
|
||||
- [ ] REPL command `!ls -la` from Watch pane works (explicit shell prefix)
|
||||
- [ ] Secrets are masked in all outputs (streaming and expanded commands)
|
||||
536
.opencode/plans/dap-debugging.md
Normal file
536
.opencode/plans/dap-debugging.md
Normal file
@@ -0,0 +1,536 @@
|
||||
# DAP-Based Debugging for GitHub Actions Runner
|
||||
|
||||
**Status:** Draft
|
||||
**Author:** GitHub Actions Team
|
||||
**Date:** January 2026
|
||||
|
||||
## Progress Checklist
|
||||
|
||||
- [x] **Phase 1:** DAP Protocol Infrastructure (DapMessages.cs, DapServer.cs, basic DapDebugSession.cs)
|
||||
- [x] **Phase 2:** Debug Session Logic (DapVariableProvider.cs, variable inspection, step history tracking)
|
||||
- [x] **Phase 3:** StepsRunner Integration (pause hooks before/after step execution)
|
||||
- [x] **Phase 4:** Expression Evaluation & Shell (REPL)
|
||||
- [x] **Phase 5:** Startup Integration (JobRunner.cs modifications)
|
||||
|
||||
## Overview
|
||||
|
||||
This document describes the implementation of Debug Adapter Protocol (DAP) support in the GitHub Actions runner, enabling rich debugging of workflow jobs from any DAP-compatible editor (nvim-dap, VS Code, etc.).
|
||||
|
||||
## Goals
|
||||
|
||||
- **Primary:** Create a working demo to demonstrate the feasibility of DAP-based workflow debugging
|
||||
- **Non-goal:** Production-ready, polished implementation (this is a proof-of-concept)
|
||||
|
||||
## User Experience
|
||||
|
||||
1. User re-runs a failed job with "Enable debug logging" checked in GitHub UI
|
||||
2. Runner (running locally) detects debug mode and starts DAP server on port 4711
|
||||
3. Runner prints "Waiting for debugger on port 4711..." and pauses
|
||||
4. User opens editor (nvim with nvim-dap), connects to debugger
|
||||
5. Job execution begins, pausing before the first step
|
||||
6. User can:
|
||||
- **Inspect variables:** View `github`, `env`, `inputs`, `steps`, `secrets` (redacted), `runner`, `job` contexts
|
||||
- **Evaluate expressions:** `${{ github.event.pull_request.title }}`
|
||||
- **Execute shell commands:** Run arbitrary commands in the job's environment (REPL)
|
||||
- **Step through job:** `next` moves to next step, `continue` runs to end
|
||||
- **Pause after steps:** Inspect step outputs before continuing
|
||||
|
||||
## Activation
|
||||
|
||||
DAP debugging activates automatically when the job is in debug mode:
|
||||
|
||||
- User enables "Enable debug logging" when re-running a job in GitHub UI
|
||||
- Server sends `ACTIONS_STEP_DEBUG=true` in job variables
|
||||
- Runner sets `Global.WriteDebug = true` and `runner.debug = "1"`
|
||||
- DAP server starts on port 4711
|
||||
|
||||
**No additional configuration required.**
|
||||
|
||||
### Optional Configuration
|
||||
|
||||
| Environment Variable | Default | Description |
|
||||
|---------------------|---------|-------------|
|
||||
| `ACTIONS_DAP_PORT` | `4711` | TCP port for DAP server (optional override) |
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────┐ ┌─────────────────────────────────────────┐
|
||||
│ nvim-dap │ │ Runner.Worker │
|
||||
│ (DAP Client) │◄───TCP:4711───────►│ ┌─────────────────────────────────┐ │
|
||||
│ │ │ │ DapServer │ │
|
||||
└─────────────────────┘ │ │ - TCP listener │ │
|
||||
│ │ - DAP JSON protocol │ │
|
||||
│ └──────────────┬──────────────────┘ │
|
||||
│ │ │
|
||||
│ ┌──────────────▼──────────────────┐ │
|
||||
│ │ DapDebugSession │ │
|
||||
│ │ - Debug state management │ │
|
||||
│ │ - Step coordination │ │
|
||||
│ │ - Variable exposure │ │
|
||||
│ │ - Expression evaluation │ │
|
||||
│ │ - Shell execution (REPL) │ │
|
||||
│ └──────────────┬──────────────────┘ │
|
||||
│ │ │
|
||||
│ ┌──────────────▼──────────────────┐ │
|
||||
│ │ StepsRunner (modified) │ │
|
||||
│ │ - Pause before/after steps │ │
|
||||
│ │ - Notify debug session │ │
|
||||
│ └─────────────────────────────────┘ │
|
||||
└─────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## DAP Concept Mapping
|
||||
|
||||
| DAP Concept | Actions Runner Equivalent |
|
||||
|-------------|---------------------------|
|
||||
| Thread | Single job execution |
|
||||
| Stack Frame | Current step + completed steps (step history) |
|
||||
| Scope | Context category: `github`, `env`, `inputs`, `steps`, `secrets`, `runner`, `job` |
|
||||
| Variable | Individual context values |
|
||||
| Breakpoint | Pause before specific step (future enhancement) |
|
||||
| Step Over (Next) | Execute current step, pause before next |
|
||||
| Continue | Run until job end |
|
||||
| Evaluate | Evaluate `${{ }}` expressions OR execute shell commands (REPL) |
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
src/Runner.Worker/
|
||||
├── Dap/
|
||||
│ ├── DapServer.cs # TCP listener, JSON protocol handling
|
||||
│ ├── DapDebugSession.cs # Debug state, step coordination
|
||||
│ ├── DapMessages.cs # DAP protocol message types
|
||||
│ └── DapVariableProvider.cs # Converts ExecutionContext to DAP variables
|
||||
```
|
||||
|
||||
## Implementation Phases
|
||||
|
||||
### Phase 1: DAP Protocol Infrastructure
|
||||
|
||||
#### 1.1 Protocol Messages (`Dap/DapMessages.cs`)
|
||||
|
||||
Base message types following DAP spec:
|
||||
|
||||
```csharp
|
||||
public abstract class ProtocolMessage
|
||||
{
|
||||
public int seq { get; set; }
|
||||
public string type { get; set; } // "request", "response", "event"
|
||||
}
|
||||
|
||||
public class Request : ProtocolMessage
|
||||
{
|
||||
public string command { get; set; }
|
||||
public object arguments { get; set; }
|
||||
}
|
||||
|
||||
public class Response : ProtocolMessage
|
||||
{
|
||||
public int request_seq { get; set; }
|
||||
public bool success { get; set; }
|
||||
public string command { get; set; }
|
||||
public string message { get; set; }
|
||||
public object body { get; set; }
|
||||
}
|
||||
|
||||
public class Event : ProtocolMessage
|
||||
{
|
||||
public string @event { get; set; }
|
||||
public object body { get; set; }
|
||||
}
|
||||
```
|
||||
|
||||
Message framing: `Content-Length: N\r\n\r\n{json}`
|
||||
|
||||
#### 1.2 DAP Server (`Dap/DapServer.cs`)
|
||||
|
||||
```csharp
|
||||
[ServiceLocator(Default = typeof(DapServer))]
|
||||
public interface IDapServer : IRunnerService
|
||||
{
|
||||
Task StartAsync(int port);
|
||||
Task WaitForConnectionAsync();
|
||||
Task StopAsync();
|
||||
void SendEvent(Event evt);
|
||||
}
|
||||
|
||||
public sealed class DapServer : RunnerService, IDapServer
|
||||
{
|
||||
private TcpListener _listener;
|
||||
private TcpClient _client;
|
||||
private IDapDebugSession _session;
|
||||
|
||||
// TCP listener on configurable port
|
||||
// Single-client connection
|
||||
// Async read/write loop
|
||||
// Dispatch requests to DapDebugSession
|
||||
}
|
||||
```
|
||||
|
||||
### Phase 2: Debug Session Logic
|
||||
|
||||
#### 2.1 Debug Session (`Dap/DapDebugSession.cs`)
|
||||
|
||||
```csharp
|
||||
public enum DapCommand { Continue, Next, Pause, Disconnect }
|
||||
public enum PauseReason { Entry, Step, Breakpoint, Pause }
|
||||
|
||||
[ServiceLocator(Default = typeof(DapDebugSession))]
|
||||
public interface IDapDebugSession : IRunnerService
|
||||
{
|
||||
bool IsActive { get; }
|
||||
|
||||
// Called by DapServer
|
||||
void Initialize(InitializeRequestArguments args);
|
||||
void Attach(AttachRequestArguments args);
|
||||
void ConfigurationDone();
|
||||
Task<DapCommand> WaitForCommandAsync();
|
||||
|
||||
// Called by StepsRunner
|
||||
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext);
|
||||
void OnStepCompleted(IStep step);
|
||||
|
||||
// DAP requests
|
||||
ThreadsResponse GetThreads();
|
||||
StackTraceResponse GetStackTrace(int threadId);
|
||||
ScopesResponse GetScopes(int frameId);
|
||||
VariablesResponse GetVariables(int variablesReference);
|
||||
EvaluateResponse Evaluate(string expression, string context);
|
||||
}
|
||||
|
||||
public sealed class DapDebugSession : RunnerService, IDapDebugSession
|
||||
{
|
||||
private IExecutionContext _jobContext;
|
||||
private IStep _currentStep;
|
||||
private readonly List<IStep> _completedSteps = new();
|
||||
private TaskCompletionSource<DapCommand> _commandTcs;
|
||||
private bool _pauseAfterStep = false;
|
||||
|
||||
// Object reference management for nested variables
|
||||
private int _nextVariableReference = 1;
|
||||
private readonly Dictionary<int, object> _variableReferences = new();
|
||||
}
|
||||
```
|
||||
|
||||
Core state machine:
|
||||
1. **Waiting for client:** Server started, no client connected
|
||||
2. **Initializing:** Client connected, exchanging capabilities
|
||||
3. **Ready:** `configurationDone` received, waiting to start
|
||||
4. **Paused (before step):** Stopped before step execution, waiting for command
|
||||
5. **Running:** Executing a step
|
||||
6. **Paused (after step):** Stopped after step execution, waiting for command
|
||||
|
||||
#### 2.2 Variable Provider (`Dap/DapVariableProvider.cs`)
|
||||
|
||||
Maps `ExecutionContext.ExpressionValues` to DAP scopes and variables:
|
||||
|
||||
| Scope | Source | Notes |
|
||||
|-------|--------|-------|
|
||||
| `github` | `ExpressionValues["github"]` | Full github context |
|
||||
| `env` | `ExpressionValues["env"]` | Environment variables |
|
||||
| `inputs` | `ExpressionValues["inputs"]` | Step inputs (when available) |
|
||||
| `steps` | `Global.StepsContext.GetScope()` | Completed step outputs |
|
||||
| `secrets` | `ExpressionValues["secrets"]` | Keys shown, values = `[REDACTED]` |
|
||||
| `runner` | `ExpressionValues["runner"]` | Runner context |
|
||||
| `job` | `ExpressionValues["job"]` | Job status |
|
||||
|
||||
Nested objects (e.g., `github.event.pull_request`) become expandable variables with child references.
|
||||
|
||||
### Phase 3: StepsRunner Integration
|
||||
|
||||
#### 3.1 Modify `StepsRunner.cs`
|
||||
|
||||
Add debug hooks at step boundaries:
|
||||
|
||||
```csharp
|
||||
public async Task RunAsync(IExecutionContext jobContext)
|
||||
{
|
||||
// Get debug session if available
|
||||
var debugSession = HostContext.TryGetService<IDapDebugSession>();
|
||||
bool isFirstStep = true;
|
||||
|
||||
while (jobContext.JobSteps.Count > 0 || !checkPostJobActions)
|
||||
{
|
||||
// ... existing dequeue logic ...
|
||||
|
||||
var step = jobContext.JobSteps.Dequeue();
|
||||
|
||||
// Pause BEFORE step execution
|
||||
if (debugSession?.IsActive == true)
|
||||
{
|
||||
var reason = isFirstStep ? PauseReason.Entry : PauseReason.Step;
|
||||
await debugSession.OnStepStartingAsync(step, jobContext, reason);
|
||||
isFirstStep = false;
|
||||
}
|
||||
|
||||
// ... existing step execution (condition eval, RunStepAsync, etc.) ...
|
||||
|
||||
// Pause AFTER step execution (if requested)
|
||||
if (debugSession?.IsActive == true)
|
||||
{
|
||||
debugSession.OnStepCompleted(step);
|
||||
// Session may pause here to let user inspect outputs
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Phase 4: Expression Evaluation & Shell (REPL)
|
||||
|
||||
#### 4.1 Expression Evaluation
|
||||
|
||||
Reuse existing `PipelineTemplateEvaluator`:
|
||||
|
||||
```csharp
|
||||
private EvaluateResponseBody EvaluateExpression(string expression, IExecutionContext context)
|
||||
{
|
||||
// Strip ${{ }} wrapper if present
|
||||
var expr = expression.Trim();
|
||||
if (expr.StartsWith("${{") && expr.EndsWith("}}"))
|
||||
{
|
||||
expr = expr.Substring(3, expr.Length - 5).Trim();
|
||||
}
|
||||
|
||||
var expressionToken = new BasicExpressionToken(fileId: null, line: null, column: null, expression: expr);
|
||||
var templateEvaluator = context.ToPipelineTemplateEvaluator();
|
||||
|
||||
var result = templateEvaluator.EvaluateStepDisplayName(
|
||||
expressionToken,
|
||||
context.ExpressionValues,
|
||||
context.ExpressionFunctions
|
||||
);
|
||||
|
||||
// Mask secrets and determine type
|
||||
result = HostContext.SecretMasker.MaskSecrets(result ?? "null");
|
||||
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = result,
|
||||
Type = DetermineResultType(result),
|
||||
VariablesReference = 0
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
**Supported expression formats:**
|
||||
- Plain expression: `github.ref`, `steps.build.outputs.result`
|
||||
- Wrapped expression: `${{ github.event.pull_request.title }}`
|
||||
|
||||
#### 4.2 Shell Execution (REPL)
|
||||
|
||||
Shell execution is triggered when:
|
||||
1. The evaluate request has `context: "repl"`, OR
|
||||
2. The expression starts with `!` (e.g., `!ls -la`), OR
|
||||
3. The expression starts with `$` followed by a shell command (e.g., `$env`)
|
||||
|
||||
**Usage examples in debug console:**
|
||||
```
|
||||
!ls -la # List files in workspace
|
||||
!env | grep GITHUB # Show GitHub environment variables
|
||||
!cat $GITHUB_EVENT_PATH # View the event payload
|
||||
!echo ${{ github.ref }} # Mix shell and expression (evaluated first)
|
||||
```
|
||||
|
||||
**Implementation:**
|
||||
|
||||
```csharp
|
||||
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
|
||||
{
|
||||
var processInvoker = HostContext.CreateService<IProcessInvoker>();
|
||||
var output = new StringBuilder();
|
||||
|
||||
processInvoker.OutputDataReceived += (sender, args) =>
|
||||
{
|
||||
output.AppendLine(args.Data);
|
||||
// Stream to client in real-time via DAP output event
|
||||
_server?.SendEvent(new Event
|
||||
{
|
||||
EventType = "output",
|
||||
Body = new OutputEventBody { Category = "stdout", Output = args.Data + "\n" }
|
||||
});
|
||||
};
|
||||
|
||||
processInvoker.ErrorDataReceived += (sender, args) =>
|
||||
{
|
||||
_server?.SendEvent(new Event
|
||||
{
|
||||
EventType = "output",
|
||||
Body = new OutputEventBody { Category = "stderr", Output = args.Data + "\n" }
|
||||
});
|
||||
};
|
||||
|
||||
// Build environment from job context (includes GITHUB_*, env context, prepend path)
|
||||
var env = BuildShellEnvironment(context);
|
||||
var workDir = GetWorkingDirectory(context); // Uses github.workspace
|
||||
var (shell, shellArgs) = GetDefaultShell(); // Platform-specific detection
|
||||
|
||||
int exitCode = await processInvoker.ExecuteAsync(
|
||||
workingDirectory: workDir,
|
||||
fileName: shell,
|
||||
arguments: string.Format(shellArgs, command),
|
||||
environment: env,
|
||||
requireExitCodeZero: false,
|
||||
cancellationToken: CancellationToken.None
|
||||
);
|
||||
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = HostContext.SecretMasker.MaskSecrets(output.ToString()),
|
||||
Type = exitCode == 0 ? "string" : "error",
|
||||
VariablesReference = 0
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
**Shell detection by platform:**
|
||||
|
||||
| Platform | Priority | Shell | Arguments |
|
||||
|----------|----------|-------|-----------|
|
||||
| Windows | 1 | `pwsh` | `-NoProfile -NonInteractive -Command "{0}"` |
|
||||
| Windows | 2 | `powershell` | `-NoProfile -NonInteractive -Command "{0}"` |
|
||||
| Windows | 3 | `cmd.exe` | `/C "{0}"` |
|
||||
| Unix | 1 | `bash` | `-c "{0}"` |
|
||||
| Unix | 2 | `sh` | `-c "{0}"` |
|
||||
|
||||
**Environment built for shell commands:**
|
||||
- Current system environment variables
|
||||
- GitHub Actions context variables (from `IEnvironmentContextData.GetRuntimeEnvironmentVariables()`)
|
||||
- Prepend path from job context added to `PATH`
|
||||
|
||||
### Phase 5: Startup Integration
|
||||
|
||||
#### 5.1 Modify `JobRunner.cs`
|
||||
|
||||
Add DAP server startup after debug mode is detected (around line 159):
|
||||
|
||||
```csharp
|
||||
if (jobContext.Global.WriteDebug)
|
||||
{
|
||||
jobContext.SetRunnerContext("debug", "1");
|
||||
|
||||
// Start DAP server for interactive debugging
|
||||
var dapServer = HostContext.GetService<IDapServer>();
|
||||
var port = int.Parse(
|
||||
Environment.GetEnvironmentVariable("ACTIONS_DAP_PORT") ?? "4711");
|
||||
|
||||
await dapServer.StartAsync(port);
|
||||
Trace.Info($"DAP server listening on port {port}");
|
||||
jobContext.Output($"DAP debugger waiting for connection on port {port}...");
|
||||
|
||||
// Block until debugger connects
|
||||
await dapServer.WaitForConnectionAsync();
|
||||
Trace.Info("DAP client connected, continuing job execution");
|
||||
}
|
||||
```
|
||||
|
||||
## DAP Capabilities
|
||||
|
||||
Capabilities to advertise in `InitializeResponse`:
|
||||
|
||||
```json
|
||||
{
|
||||
"supportsConfigurationDoneRequest": true,
|
||||
"supportsEvaluateForHovers": true,
|
||||
"supportsTerminateDebuggee": true,
|
||||
"supportsStepBack": false,
|
||||
"supportsSetVariable": false,
|
||||
"supportsRestartFrame": false,
|
||||
"supportsGotoTargetsRequest": false,
|
||||
"supportsStepInTargetsRequest": false,
|
||||
"supportsCompletionsRequest": false,
|
||||
"supportsModulesRequest": false,
|
||||
"supportsExceptionOptions": false,
|
||||
"supportsValueFormattingOptions": false,
|
||||
"supportsExceptionInfoRequest": false,
|
||||
"supportsDelayedStackTraceLoading": false,
|
||||
"supportsLoadedSourcesRequest": false,
|
||||
"supportsProgressReporting": false,
|
||||
"supportsRunInTerminalRequest": false
|
||||
}
|
||||
```
|
||||
|
||||
## Client Configuration (nvim-dap)
|
||||
|
||||
Example configuration for nvim-dap:
|
||||
|
||||
```lua
|
||||
local dap = require('dap')
|
||||
|
||||
dap.adapters.actions = {
|
||||
type = 'server',
|
||||
host = '127.0.0.1',
|
||||
port = 4711,
|
||||
}
|
||||
|
||||
dap.configurations.yaml = {
|
||||
{
|
||||
type = 'actions',
|
||||
request = 'attach',
|
||||
name = 'Attach to Actions Runner',
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Demo Flow
|
||||
|
||||
1. Trigger job re-run with "Enable debug logging" checked in GitHub UI
|
||||
2. Runner starts, detects debug mode (`Global.WriteDebug == true`)
|
||||
3. DAP server starts, console shows: `DAP debugger waiting for connection on port 4711...`
|
||||
4. In nvim: `:lua require('dap').continue()`
|
||||
5. Connection established, capabilities exchanged
|
||||
6. Job begins, pauses before first step
|
||||
7. nvim shows "stopped" state, variables panel shows contexts
|
||||
8. User explores variables, evaluates expressions, runs shell commands
|
||||
9. User presses `n` (next) to advance to next step
|
||||
10. After step completes, user can inspect outputs before continuing
|
||||
11. Repeat until job completes
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
1. **Unit tests:** DAP protocol serialization, variable provider mapping
|
||||
2. **Integration tests:** Mock DAP client verifying request/response sequences
|
||||
3. **Manual testing:** Real job with nvim-dap attached
|
||||
|
||||
## Future Enhancements (Out of Scope for Demo)
|
||||
|
||||
- Composite action step-in (expand into sub-steps)
|
||||
- Breakpoints on specific step names
|
||||
- Watch expressions
|
||||
- Conditional breakpoints
|
||||
- Remote debugging (runner not on localhost)
|
||||
- VS Code extension
|
||||
|
||||
## Estimated Effort
|
||||
|
||||
| Phase | Effort |
|
||||
|-------|--------|
|
||||
| Phase 1: Protocol Infrastructure | 4-6 hours |
|
||||
| Phase 2: Debug Session Logic | 4-6 hours |
|
||||
| Phase 3: StepsRunner Integration | 2-3 hours |
|
||||
| Phase 4: Expression & Shell | 3-4 hours |
|
||||
| Phase 5: Startup & Polish | 2-3 hours |
|
||||
| **Total** | **~2-3 days** |
|
||||
|
||||
## Key Files to Modify
|
||||
|
||||
| File | Changes |
|
||||
|------|---------|
|
||||
| `src/Runner.Worker/JobRunner.cs` | Start DAP server when debug mode enabled |
|
||||
| `src/Runner.Worker/StepsRunner.cs` | Add pause hooks before/after step execution |
|
||||
| `src/Runner.Worker/Runner.Worker.csproj` | Add new Dap/ folder files |
|
||||
|
||||
## Key Files to Create
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `src/Runner.Worker/Dap/DapServer.cs` | TCP server, protocol framing |
|
||||
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Debug state machine, command handling |
|
||||
| `src/Runner.Worker/Dap/DapMessages.cs` | Protocol message types |
|
||||
| `src/Runner.Worker/Dap/DapVariableProvider.cs` | Context → DAP variable conversion |
|
||||
|
||||
## Reference Links
|
||||
|
||||
- [DAP Overview](https://microsoft.github.io/debug-adapter-protocol/overview)
|
||||
- [DAP Specification](https://microsoft.github.io/debug-adapter-protocol/specification)
|
||||
- [Enable Debug Logging (GitHub Docs)](https://docs.github.com/en/actions/how-tos/monitor-workflows/enable-debug-logging)
|
||||
1116
.opencode/plans/dap-step-backwards.md
Normal file
1116
.opencode/plans/dap-step-backwards.md
Normal file
File diff suppressed because it is too large
Load Diff
18
README.md
18
README.md
@@ -20,6 +20,20 @@ Runner releases:
|
||||
|
||||
 [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases)
|
||||
|
||||
## Contribute
|
||||
### Note
|
||||
|
||||
We accept contributions in the form of issues and pull requests. [Read more here](docs/contribute.md) before contributing.
|
||||
Thank you for your interest in this GitHub repo, however, right now we are not taking contributions.
|
||||
|
||||
We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we’re working on and what stage they’re in.
|
||||
|
||||
We are taking the following steps to better direct requests related to GitHub Actions, including:
|
||||
|
||||
1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)
|
||||
|
||||
2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.
|
||||
|
||||
3. Security Issues should be handled as per our [security.md](security.md)
|
||||
|
||||
We will still provide security updates for this project and fix major breaking changes during this time.
|
||||
|
||||
You are welcome to still raise bugs in this repo.
|
||||
|
||||
176
browser-ext/README.md
Normal file
176
browser-ext/README.md
Normal file
@@ -0,0 +1,176 @@
|
||||
# Actions DAP Debugger - Browser Extension
|
||||
|
||||
A Chrome extension that enables interactive debugging of GitHub Actions workflows directly in the browser. Connects to the runner's DAP server via a WebSocket proxy.
|
||||
|
||||
## Features
|
||||
|
||||
- **Variable Inspection**: Browse workflow context variables (`github`, `env`, `steps`, etc.)
|
||||
- **REPL Console**: Evaluate expressions and run shell commands
|
||||
- **Step Control**: Step forward, step back, continue, and reverse continue
|
||||
- **GitHub Integration**: Debugger pane injects directly into the job page
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Start the WebSocket Proxy
|
||||
|
||||
The proxy bridges WebSocket connections from the browser to the DAP TCP server.
|
||||
|
||||
```bash
|
||||
cd browser-ext/proxy
|
||||
npm install
|
||||
node proxy.js
|
||||
```
|
||||
|
||||
The proxy listens on `ws://localhost:4712` and connects to the DAP server at `tcp://localhost:4711`.
|
||||
|
||||
### 2. Load the Extension in Chrome
|
||||
|
||||
1. Open Chrome and navigate to `chrome://extensions/`
|
||||
2. Enable "Developer mode" (toggle in top right)
|
||||
3. Click "Load unpacked"
|
||||
4. Select the `browser-ext` directory
|
||||
|
||||
### 3. Start a Debug Session
|
||||
|
||||
1. Go to your GitHub repository
|
||||
2. Navigate to Actions and select a workflow run
|
||||
3. Click "Re-run jobs" → check "Enable debug logging"
|
||||
4. Wait for the runner to display "DAP debugger waiting for connection..."
|
||||
|
||||
### 4. Connect the Extension
|
||||
|
||||
1. Navigate to the job page (`github.com/.../actions/runs/.../job/...`)
|
||||
2. Click the extension icon in Chrome toolbar
|
||||
3. Click "Connect"
|
||||
4. The debugger pane will appear above the first workflow step
|
||||
|
||||
## Usage
|
||||
|
||||
### Variable Browser (Left Panel)
|
||||
|
||||
Click on scope names to expand and view variables:
|
||||
- **Globals**: `github`, `env`, `runner` contexts
|
||||
- **Job Outputs**: Outputs from previous jobs
|
||||
- **Step Outputs**: Outputs from previous steps
|
||||
|
||||
### Console (Right Panel)
|
||||
|
||||
Enter expressions or commands:
|
||||
|
||||
```bash
|
||||
# Evaluate expressions
|
||||
${{ github.ref }}
|
||||
${{ github.event_name }}
|
||||
${{ env.MY_VAR }}
|
||||
|
||||
# Run shell commands (prefix with !)
|
||||
!ls -la
|
||||
!cat package.json
|
||||
!env | grep GITHUB
|
||||
|
||||
# Modify variables
|
||||
!export MY_VAR=new_value
|
||||
```
|
||||
|
||||
### Control Buttons
|
||||
|
||||
| Button | Action | Description |
|
||||
|--------|--------|-------------|
|
||||
| ⏮ | Reverse Continue | Go back to first checkpoint |
|
||||
| ◀ | Step Back | Go to previous checkpoint |
|
||||
| ▶ | Continue | Run until next breakpoint/end |
|
||||
| ⏭ | Step (Next) | Step to next workflow step |
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
Browser Extension ──WebSocket──► Proxy ──TCP──► Runner DAP Server
|
||||
(port 4712) (port 4711)
|
||||
```
|
||||
|
||||
The WebSocket proxy handles DAP message framing (Content-Length headers) and provides a browser-compatible connection.
|
||||
|
||||
## Configuration
|
||||
|
||||
### Proxy Settings
|
||||
|
||||
| Environment Variable | Default | Description |
|
||||
|---------------------|---------|-------------|
|
||||
| `WS_PORT` | 4712 | WebSocket server port |
|
||||
| `DAP_HOST` | 127.0.0.1 | DAP server host |
|
||||
| `DAP_PORT` | 4711 | DAP server port |
|
||||
|
||||
Or use CLI arguments:
|
||||
```bash
|
||||
node proxy.js --ws-port 4712 --dap-host 127.0.0.1 --dap-port 4711
|
||||
```
|
||||
|
||||
### Extension Settings
|
||||
|
||||
Click the extension popup to configure:
|
||||
- **Proxy Host**: Default `localhost`
|
||||
- **Proxy Port**: Default `4712`
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
browser-ext/
|
||||
├── manifest.json # Extension configuration
|
||||
├── background/
|
||||
│ └── background.js # Service worker - DAP client
|
||||
├── content/
|
||||
│ ├── content.js # UI injection and interaction
|
||||
│ └── content.css # Debugger pane styling
|
||||
├── popup/
|
||||
│ ├── popup.html # Extension popup UI
|
||||
│ ├── popup.js # Popup logic
|
||||
│ └── popup.css # Popup styling
|
||||
├── lib/
|
||||
│ └── dap-protocol.js # DAP message helpers
|
||||
├── proxy/
|
||||
│ ├── proxy.js # WebSocket-to-TCP bridge
|
||||
│ └── package.json # Proxy dependencies
|
||||
└── icons/
|
||||
├── icon16.png
|
||||
├── icon48.png
|
||||
└── icon128.png
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "Failed to connect to DAP server"
|
||||
|
||||
1. Ensure the proxy is running: `node proxy.js`
|
||||
2. Ensure the runner is waiting for a debugger connection
|
||||
3. Check that debug logging is enabled for the job
|
||||
|
||||
### Debugger pane doesn't appear
|
||||
|
||||
1. Verify you're on a job page (`/actions/runs/*/job/*`)
|
||||
2. Open DevTools and check for console errors
|
||||
3. Reload the page after loading the extension
|
||||
|
||||
### Variables don't load
|
||||
|
||||
1. Wait for the "stopped" event (status shows PAUSED)
|
||||
2. Click on a scope to expand it
|
||||
3. Check the console for error messages
|
||||
|
||||
## Development
|
||||
|
||||
### Modifying the Extension
|
||||
|
||||
After making changes:
|
||||
1. Go to `chrome://extensions/`
|
||||
2. Click the refresh icon on the extension card
|
||||
3. Reload the GitHub job page
|
||||
|
||||
### Debugging
|
||||
|
||||
- **Background script**: Inspect via `chrome://extensions/` → "Inspect views: service worker"
|
||||
- **Content script**: Use DevTools on the GitHub page
|
||||
- **Proxy**: Watch terminal output for message logs
|
||||
|
||||
## Security Note
|
||||
|
||||
The proxy and extension are designed for local development. The proxy only accepts connections from localhost. Do not expose the proxy to the network without additional security measures.
|
||||
528
browser-ext/background/background.js
Normal file
528
browser-ext/background/background.js
Normal file
@@ -0,0 +1,528 @@
|
||||
/**
|
||||
* Background Script - DAP Client
|
||||
*
|
||||
* Service worker that manages WebSocket connection to the proxy
|
||||
* and handles DAP protocol communication.
|
||||
*
|
||||
* NOTE: Chrome MV3 service workers can be terminated after ~30s of inactivity.
|
||||
* We handle this with:
|
||||
* 1. Keepalive pings to keep the WebSocket active
|
||||
* 2. Automatic reconnection when the service worker restarts
|
||||
* 3. Storing connection state in chrome.storage.session
|
||||
*/
|
||||
|
||||
// Connection state
|
||||
let ws = null;
|
||||
let connectionStatus = 'disconnected'; // disconnected, connecting, connected, paused, running, error
|
||||
let sequenceNumber = 1;
|
||||
const pendingRequests = new Map(); // seq -> { resolve, reject, command, timeout }
|
||||
|
||||
// Reconnection state
|
||||
let reconnectAttempts = 0;
|
||||
const MAX_RECONNECT_ATTEMPTS = 10;
|
||||
const RECONNECT_BASE_DELAY = 1000; // Start with 1s, exponential backoff
|
||||
let reconnectTimer = null;
|
||||
let lastConnectedUrl = null;
|
||||
let wasConnectedBeforeIdle = false;
|
||||
|
||||
// Keepalive interval - send ping every 15s to keep service worker AND WebSocket alive
|
||||
// Chrome MV3 service workers get suspended after ~30s of inactivity
|
||||
// We need to send actual WebSocket messages to keep both alive
|
||||
const KEEPALIVE_INTERVAL = 15000;
|
||||
let keepaliveTimer = null;
|
||||
|
||||
// Default configuration
|
||||
const DEFAULT_URL = 'ws://localhost:4712';
|
||||
|
||||
/**
|
||||
* Initialize on service worker startup - check if we should reconnect
|
||||
*/
|
||||
async function initializeOnStartup() {
|
||||
console.log('[Background] Service worker starting up...');
|
||||
|
||||
try {
|
||||
// Restore state from session storage
|
||||
const data = await chrome.storage.session.get(['connectionUrl', 'shouldBeConnected', 'lastStatus']);
|
||||
|
||||
if (data.shouldBeConnected && data.connectionUrl) {
|
||||
console.log('[Background] Restoring connection after service worker restart');
|
||||
lastConnectedUrl = data.connectionUrl;
|
||||
wasConnectedBeforeIdle = true;
|
||||
|
||||
// Small delay to let things settle
|
||||
setTimeout(() => {
|
||||
connect(data.connectionUrl);
|
||||
}, 500);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log('[Background] No session state to restore');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save connection state to session storage (survives service worker restart)
|
||||
*/
|
||||
async function saveConnectionState() {
|
||||
try {
|
||||
await chrome.storage.session.set({
|
||||
connectionUrl: lastConnectedUrl,
|
||||
shouldBeConnected: connectionStatus !== 'disconnected' && connectionStatus !== 'error',
|
||||
lastStatus: connectionStatus,
|
||||
});
|
||||
} catch (e) {
|
||||
console.warn('[Background] Failed to save connection state:', e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear connection state from session storage
|
||||
*/
|
||||
async function clearConnectionState() {
|
||||
try {
|
||||
await chrome.storage.session.remove(['connectionUrl', 'shouldBeConnected', 'lastStatus']);
|
||||
} catch (e) {
|
||||
console.warn('[Background] Failed to clear connection state:', e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start keepalive ping to prevent service worker termination
|
||||
* CRITICAL: We must send actual WebSocket messages to keep the connection alive.
|
||||
* Just having a timer is not enough - Chrome will suspend the service worker
|
||||
* and close the WebSocket with code 1001 after ~30s of inactivity.
|
||||
*/
|
||||
function startKeepalive() {
|
||||
stopKeepalive();
|
||||
|
||||
keepaliveTimer = setInterval(() => {
|
||||
if (ws && ws.readyState === WebSocket.OPEN) {
|
||||
try {
|
||||
// Send a lightweight keepalive message over WebSocket
|
||||
// This does two things:
|
||||
// 1. Keeps the WebSocket connection active (prevents proxy timeout)
|
||||
// 2. Creates activity that keeps the Chrome service worker alive
|
||||
const keepaliveMsg = JSON.stringify({ type: 'keepalive', timestamp: Date.now() });
|
||||
ws.send(keepaliveMsg);
|
||||
console.log('[Background] Keepalive sent');
|
||||
} catch (e) {
|
||||
console.error('[Background] Keepalive error:', e);
|
||||
handleUnexpectedClose();
|
||||
}
|
||||
} else if (wasConnectedBeforeIdle || lastConnectedUrl) {
|
||||
// Connection was lost, try to reconnect
|
||||
console.log('[Background] Connection lost during keepalive check');
|
||||
handleUnexpectedClose();
|
||||
}
|
||||
}, KEEPALIVE_INTERVAL);
|
||||
|
||||
console.log('[Background] Keepalive timer started (interval: ' + KEEPALIVE_INTERVAL + 'ms)');
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop keepalive ping
|
||||
*/
|
||||
function stopKeepalive() {
|
||||
if (keepaliveTimer) {
|
||||
clearInterval(keepaliveTimer);
|
||||
keepaliveTimer = null;
|
||||
console.log('[Background] Keepalive timer stopped');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle unexpected connection close - attempt reconnection
|
||||
*/
|
||||
function handleUnexpectedClose() {
|
||||
if (reconnectTimer) {
|
||||
return; // Already trying to reconnect
|
||||
}
|
||||
|
||||
if (!lastConnectedUrl) {
|
||||
console.log('[Background] No URL to reconnect to');
|
||||
return;
|
||||
}
|
||||
|
||||
if (reconnectAttempts >= MAX_RECONNECT_ATTEMPTS) {
|
||||
console.error('[Background] Max reconnection attempts reached');
|
||||
connectionStatus = 'error';
|
||||
broadcastStatus();
|
||||
clearConnectionState();
|
||||
return;
|
||||
}
|
||||
|
||||
const delay = Math.min(RECONNECT_BASE_DELAY * Math.pow(2, reconnectAttempts), 30000);
|
||||
reconnectAttempts++;
|
||||
|
||||
console.log(`[Background] Scheduling reconnect attempt ${reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS} in ${delay}ms`);
|
||||
connectionStatus = 'connecting';
|
||||
broadcastStatus();
|
||||
|
||||
reconnectTimer = setTimeout(() => {
|
||||
reconnectTimer = null;
|
||||
if (connectionStatus !== 'connected' && connectionStatus !== 'paused' && connectionStatus !== 'running') {
|
||||
connect(lastConnectedUrl);
|
||||
}
|
||||
}, delay);
|
||||
}
|
||||
|
||||
/**
|
||||
* Connect to the WebSocket proxy
|
||||
*/
|
||||
function connect(url) {
|
||||
// Clean up existing connection
|
||||
if (ws) {
|
||||
try {
|
||||
ws.onclose = null; // Prevent triggering reconnect
|
||||
ws.close(1000, 'Reconnecting');
|
||||
} catch (e) {
|
||||
// Ignore
|
||||
}
|
||||
ws = null;
|
||||
}
|
||||
|
||||
// Clear any pending reconnect
|
||||
if (reconnectTimer) {
|
||||
clearTimeout(reconnectTimer);
|
||||
reconnectTimer = null;
|
||||
}
|
||||
|
||||
connectionStatus = 'connecting';
|
||||
broadcastStatus();
|
||||
|
||||
// Use provided URL or default
|
||||
const wsUrl = url || DEFAULT_URL;
|
||||
lastConnectedUrl = wsUrl;
|
||||
console.log(`[Background] Connecting to ${wsUrl}`);
|
||||
|
||||
try {
|
||||
ws = new WebSocket(wsUrl);
|
||||
} catch (e) {
|
||||
console.error('[Background] Failed to create WebSocket:', e);
|
||||
connectionStatus = 'error';
|
||||
broadcastStatus();
|
||||
handleUnexpectedClose();
|
||||
return;
|
||||
}
|
||||
|
||||
ws.onopen = async () => {
|
||||
console.log('[Background] WebSocket connected');
|
||||
connectionStatus = 'connected';
|
||||
reconnectAttempts = 0; // Reset on successful connection
|
||||
wasConnectedBeforeIdle = true;
|
||||
broadcastStatus();
|
||||
saveConnectionState();
|
||||
startKeepalive();
|
||||
|
||||
// Initialize DAP session
|
||||
try {
|
||||
await initializeDapSession();
|
||||
} catch (error) {
|
||||
console.error('[Background] Failed to initialize DAP session:', error);
|
||||
// Don't set error status - the connection might still be usable
|
||||
// The DAP server might just need the job to progress
|
||||
}
|
||||
};
|
||||
|
||||
ws.onmessage = (event) => {
|
||||
try {
|
||||
const message = JSON.parse(event.data);
|
||||
handleDapMessage(message);
|
||||
} catch (error) {
|
||||
console.error('[Background] Failed to parse message:', error);
|
||||
}
|
||||
};
|
||||
|
||||
ws.onclose = (event) => {
|
||||
console.log(`[Background] WebSocket closed: ${event.code} ${event.reason || '(no reason)'}`);
|
||||
ws = null;
|
||||
stopKeepalive();
|
||||
|
||||
// Reject any pending requests
|
||||
for (const [seq, pending] of pendingRequests) {
|
||||
if (pending.timeout) clearTimeout(pending.timeout);
|
||||
pending.reject(new Error('Connection closed'));
|
||||
}
|
||||
pendingRequests.clear();
|
||||
|
||||
// Determine if we should reconnect
|
||||
// Code 1000 = normal closure (user initiated)
|
||||
// Code 1001 = going away (service worker idle, browser closing, etc.)
|
||||
// Code 1006 = abnormal closure (connection lost)
|
||||
// Code 1011 = server error
|
||||
const shouldReconnect = event.code !== 1000;
|
||||
|
||||
if (shouldReconnect && wasConnectedBeforeIdle) {
|
||||
console.log('[Background] Unexpected close, will attempt reconnect');
|
||||
connectionStatus = 'connecting';
|
||||
broadcastStatus();
|
||||
handleUnexpectedClose();
|
||||
} else {
|
||||
connectionStatus = 'disconnected';
|
||||
wasConnectedBeforeIdle = false;
|
||||
broadcastStatus();
|
||||
clearConnectionState();
|
||||
}
|
||||
};
|
||||
|
||||
ws.onerror = (event) => {
|
||||
console.error('[Background] WebSocket error:', event);
|
||||
// onclose will be called after onerror, so we handle reconnection there
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Disconnect from the WebSocket proxy
|
||||
*/
|
||||
function disconnect() {
|
||||
// Stop any reconnection attempts
|
||||
if (reconnectTimer) {
|
||||
clearTimeout(reconnectTimer);
|
||||
reconnectTimer = null;
|
||||
}
|
||||
reconnectAttempts = 0;
|
||||
wasConnectedBeforeIdle = false;
|
||||
stopKeepalive();
|
||||
|
||||
if (ws) {
|
||||
// Send disconnect request to DAP server first
|
||||
sendDapRequest('disconnect', {}).catch(() => {});
|
||||
|
||||
// Prevent reconnection on this close
|
||||
const socket = ws;
|
||||
ws = null;
|
||||
socket.onclose = null;
|
||||
|
||||
try {
|
||||
socket.close(1000, 'User disconnected');
|
||||
} catch (e) {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
connectionStatus = 'disconnected';
|
||||
broadcastStatus();
|
||||
clearConnectionState();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize DAP session (initialize + attach + configurationDone)
|
||||
*/
|
||||
async function initializeDapSession() {
|
||||
// 1. Initialize
|
||||
const initResponse = await sendDapRequest('initialize', {
|
||||
clientID: 'browser-extension',
|
||||
clientName: 'Actions DAP Debugger',
|
||||
adapterID: 'github-actions-runner',
|
||||
pathFormat: 'path',
|
||||
linesStartAt1: true,
|
||||
columnsStartAt1: true,
|
||||
supportsVariableType: true,
|
||||
supportsVariablePaging: true,
|
||||
supportsRunInTerminalRequest: false,
|
||||
supportsProgressReporting: false,
|
||||
supportsInvalidatedEvent: true,
|
||||
});
|
||||
|
||||
console.log('[Background] Initialize response:', initResponse);
|
||||
|
||||
// 2. Attach to running session
|
||||
const attachResponse = await sendDapRequest('attach', {});
|
||||
console.log('[Background] Attach response:', attachResponse);
|
||||
|
||||
// 3. Configuration done
|
||||
const configResponse = await sendDapRequest('configurationDone', {});
|
||||
console.log('[Background] ConfigurationDone response:', configResponse);
|
||||
}
|
||||
|
||||
/**
 * Send a DAP request over the WebSocket and return a promise for its response.
 *
 * Each request gets a fresh sequence number and a 30-second timeout; the
 * resolver is parked in `pendingRequests` keyed by that sequence number so
 * the response handler can complete it later.
 *
 * @param {string} command DAP command name.
 * @param {object} [args] DAP `arguments` payload.
 * @returns {Promise<object>} Resolves with the response body; rejects when
 *   disconnected, on send failure, or on timeout.
 */
function sendDapRequest(command, args = {}) {
  return new Promise((resolve, reject) => {
    const socketReady = ws && ws.readyState === WebSocket.OPEN;
    if (!socketReady) {
      reject(new Error('Not connected'));
      return;
    }

    const seq = sequenceNumber++;
    console.log(`[Background] Sending DAP request: ${command} (seq: ${seq})`);

    // Fail the request if no response arrives within 30 seconds.
    const timeout = setTimeout(() => {
      if (pendingRequests.has(seq)) {
        pendingRequests.delete(seq);
        reject(new Error(`Request timed out: ${command}`));
      }
    }, 30000);

    pendingRequests.set(seq, { resolve, reject, command, timeout });

    try {
      ws.send(JSON.stringify({ seq, type: 'request', command, arguments: args }));
    } catch (e) {
      // Undo the bookkeeping so the map does not leak a dead entry.
      pendingRequests.delete(seq);
      clearTimeout(timeout);
      reject(new Error(`Failed to send request: ${e.message}`));
    }
  });
}
|
||||
|
||||
/**
 * Dispatch an incoming proxy message by its `type` field.
 * Responses and events go to their dedicated handlers; proxy errors are
 * logged without changing status (they may be transient); keepalive acks
 * simply confirm the connection is healthy.
 */
function handleDapMessage(message) {
  switch (message.type) {
    case 'response':
      handleDapResponse(message);
      break;
    case 'event':
      handleDapEvent(message);
      break;
    case 'proxy-error':
      console.error('[Background] Proxy error:', message.message);
      // Don't immediately set error status - might be transient
      break;
    case 'keepalive-ack':
      // Keepalive acknowledged by proxy - connection is healthy
      console.log('[Background] Keepalive acknowledged');
      break;
  }
}
|
||||
|
||||
/**
 * Complete the pending request promise matching a DAP response.
 * Unknown sequence numbers (e.g. after a timeout already fired) are logged
 * and ignored.
 */
function handleDapResponse(response) {
  const entry = pendingRequests.get(response.request_seq);
  if (!entry) {
    console.warn(`[Background] No pending request for seq ${response.request_seq}`);
    return;
  }

  // Remove bookkeeping before settling the promise.
  pendingRequests.delete(response.request_seq);
  if (entry.timeout) clearTimeout(entry.timeout);

  if (!response.success) {
    console.error(`[Background] DAP response error: ${response.command} - ${response.message}`);
    entry.reject(new Error(response.message || 'Unknown error'));
    return;
  }

  console.log(`[Background] DAP response success: ${response.command}`);
  entry.resolve(response.body || {});
}
|
||||
|
||||
/**
 * React to a DAP event: update the tracked connection status for lifecycle
 * events, then forward every event to the content scripts.
 */
function handleDapEvent(event) {
  console.log(`[Background] DAP event: ${event.event}`, event.body);

  if (event.event === 'stopped') {
    connectionStatus = 'paused';
    broadcastStatus();
    saveConnectionState();
  } else if (event.event === 'continued') {
    connectionStatus = 'running';
    broadcastStatus();
    saveConnectionState();
  } else if (event.event === 'terminated') {
    connectionStatus = 'disconnected';
    wasConnectedBeforeIdle = false;
    broadcastStatus();
    clearConnectionState();
  }
  // 'initialized' and 'output' need no status change here; output text
  // reaches the page through the broadcast below.

  // Broadcast event to all content scripts regardless of type.
  broadcastEvent(event);
}
|
||||
|
||||
/**
 * Push the current connection status to the popup and to every open
 * GitHub Actions job tab.
 */
function broadcastStatus() {
  const payload = { type: 'status-changed', status: connectionStatus };

  // Popup (and any other extension page) listens via runtime messaging;
  // a rejection just means nothing was listening.
  chrome.runtime.sendMessage(payload).catch(() => {});

  // Content scripts on job pages receive the same payload via tab messaging.
  chrome.tabs.query({ url: 'https://github.com/*/*/actions/runs/*/job/*' }, (tabs) => {
    if (chrome.runtime.lastError) return;
    for (const tab of tabs) {
      chrome.tabs.sendMessage(tab.id, payload).catch(() => {});
    }
  });
}
|
||||
|
||||
/**
 * Forward a DAP event to the content script of every GitHub Actions job tab.
 */
function broadcastEvent(event) {
  chrome.tabs.query({ url: 'https://github.com/*/*/actions/runs/*/job/*' }, (tabs) => {
    if (chrome.runtime.lastError) return;
    for (const tab of tabs) {
      // Tabs without a listening content script reject; that is fine.
      chrome.tabs.sendMessage(tab.id, { type: 'dap-event', event }).catch(() => {});
    }
  });
}
|
||||
|
||||
/**
 * Message handler for requests from popup and content scripts.
 *
 * Synchronous cases return false (the response channel closes immediately
 * after sendResponse); 'dap-request' returns true to keep sendResponse
 * valid until the async DAP round-trip settles.
 */
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  console.log('[Background] Received message:', message.type);

  switch (message.type) {
    case 'get-status':
      // `reconnecting` reports whether a reconnect timer is currently armed.
      sendResponse({ status: connectionStatus, reconnecting: reconnectTimer !== null });
      return false;

    case 'connect':
      reconnectAttempts = 0; // Reset attempts on manual connect
      connect(message.url || DEFAULT_URL);
      sendResponse({ status: connectionStatus });
      return false;

    case 'disconnect':
      disconnect();
      sendResponse({ status: connectionStatus });
      return false;

    case 'dap-request':
      // Handle DAP request from content script; the response is delivered
      // once the background's own DAP round-trip resolves or rejects.
      sendDapRequest(message.command, message.args || {})
        .then((body) => {
          sendResponse({ success: true, body });
        })
        .catch((error) => {
          sendResponse({ success: false, error: error.message });
        });
      return true; // Will respond asynchronously

    default:
      console.warn('[Background] Unknown message type:', message.type);
      return false;
  }
});
|
||||
|
||||
// Initialize on startup — runs each time the background script is (re)loaded.
initializeOnStartup();

// Log startup so the service-worker console confirms the script loaded.
console.log('[Background] Actions DAP Debugger background script loaded');
|
||||
337
browser-ext/content/content.css
Normal file
337
browser-ext/content/content.css
Normal file
@@ -0,0 +1,337 @@
|
||||
/**
 * Content Script Styles
 *
 * Matches GitHub's Primer design system for seamless integration.
 * Uses CSS custom properties for light/dark mode support.
 *
 * NOTE: the hex fallbacks after each var() are the values used when
 * GitHub's custom properties are unavailable; dark-scheme fallbacks are
 * the defaults, with light-mode overrides at the bottom of the file.
 */

/* Debugger Pane Container */
.dap-debugger-pane {
  background-color: var(--bgColor-default, #0d1117);
  border-color: var(--borderColor-default, #30363d) !important;
  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif;
  font-size: 14px;
}

/* Header */
.dap-header {
  background-color: var(--bgColor-muted, #161b22);
}

.dap-header .octicon {
  color: var(--fgColor-muted, #8b949e);
}

.dap-step-info {
  flex: 1;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

/* Status Labels */
.dap-status-label {
  flex-shrink: 0;
}

.Label--attention {
  background-color: #9e6a03 !important;
  color: #ffffff !important;
  border: none !important;
}

.Label--success {
  background-color: #238636 !important;
  color: #ffffff !important;
  border: none !important;
}

.Label--danger {
  background-color: #da3633 !important;
  color: #ffffff !important;
  border: none !important;
}

.Label--secondary {
  background-color: #30363d !important;
  color: #8b949e !important;
  border: none !important;
}

/* Content Area */
.dap-content {
  min-height: 200px;
  max-height: 400px;
}

/* Scopes Panel */
.dap-scopes {
  border-color: var(--borderColor-default, #30363d) !important;
  min-width: 150px;
}

.dap-scope-header {
  background-color: var(--bgColor-muted, #161b22);
  font-size: 12px;
}

.dap-scope-tree {
  font-size: 12px;
  line-height: 1.6;
}

/* Tree Nodes */
.dap-tree-node {
  padding: 1px 0;
}

.dap-tree-content {
  display: flex;
  align-items: flex-start;
  padding: 2px 4px;
  border-radius: 3px;
}

.dap-tree-content:hover {
  background-color: var(--bgColor-muted, #161b22);
}

.dap-tree-children {
  margin-left: 16px;
  border-left: 1px solid var(--borderColor-muted, #21262d);
  padding-left: 8px;
}

.dap-expand-icon {
  display: inline-block;
  width: 16px;
  text-align: center;
  color: var(--fgColor-muted, #8b949e);
  font-size: 10px;
  flex-shrink: 0;
  user-select: none;
}

.dap-tree-node .text-bold {
  color: var(--fgColor-default, #e6edf3);
  font-weight: 600;
  word-break: break-word;
}

.dap-tree-node .color-fg-muted {
  color: var(--fgColor-muted, #8b949e);
  word-break: break-word;
}

/* REPL Console */
.dap-repl {
  display: flex;
  flex-direction: column;
}

.dap-repl-header {
  background-color: var(--bgColor-muted, #161b22);
  font-size: 12px;
  flex-shrink: 0;
}

.dap-repl-output {
  background-color: var(--bgColor-inset, #010409);
  font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, "Liberation Mono", monospace;
  font-size: 12px;
  line-height: 1.5;
  padding: 8px;
  flex: 1;
  overflow-y: auto;
  min-height: 100px;
}

.dap-output-input {
  color: var(--fgColor-muted, #8b949e);
}

.dap-output-result {
  color: var(--fgColor-default, #e6edf3);
}

.dap-output-stdout {
  color: var(--fgColor-default, #e6edf3);
}

.dap-output-error {
  color: var(--fgColor-danger, #f85149);
}

/* REPL Input */
.dap-repl-input {
  flex-shrink: 0;
}

.dap-repl-input input {
  font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, "Liberation Mono", monospace;
  font-size: 12px;
  background-color: var(--bgColor-inset, #010409) !important;
  border-color: var(--borderColor-default, #30363d) !important;
  color: var(--fgColor-default, #e6edf3) !important;
  width: 100%;
}

.dap-repl-input input:focus {
  border-color: var(--focus-outlineColor, #1f6feb) !important;
  outline: none;
  box-shadow: 0 0 0 3px rgba(31, 111, 235, 0.3);
}

.dap-repl-input input:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

.dap-repl-input input::placeholder {
  color: var(--fgColor-muted, #8b949e);
}

/* Control Buttons */
.dap-controls {
  background-color: var(--bgColor-muted, #161b22);
}

.dap-controls button {
  min-width: 32px;
  height: 28px;
  display: inline-flex;
  align-items: center;
  justify-content: center;
  padding: 0 8px;
}

.dap-controls button svg {
  width: 14px;
  height: 14px;
}

.dap-controls button:disabled {
  opacity: 0.4;
  cursor: not-allowed;
}

.dap-controls button:not(:disabled):hover {
  background-color: var(--bgColor-accent-muted, #388bfd26);
}

.dap-step-counter {
  flex-shrink: 0;
}

/* Utility Classes (in case GitHub's aren't loaded) */
.d-flex { display: flex; }
.flex-column { flex-direction: column; }
.flex-items-center { align-items: center; }
.flex-auto { flex: 1 1 auto; }

.p-2 { padding: 8px; }
.px-2 { padding-left: 8px; padding-right: 8px; }
.mx-2 { margin-left: 8px; margin-right: 8px; }
.mb-2 { margin-bottom: 8px; }
.ml-2 { margin-left: 8px; }
.ml-3 { margin-left: 16px; }
.mr-2 { margin-right: 8px; }
.ml-auto { margin-left: auto; }

.border { border: 1px solid var(--borderColor-default, #30363d); }
.border-bottom { border-bottom: 1px solid var(--borderColor-default, #30363d); }
.border-top { border-top: 1px solid var(--borderColor-default, #30363d); }
.border-right { border-right: 1px solid var(--borderColor-default, #30363d); }
.rounded-2 { border-radius: 6px; }

.overflow-auto { overflow: auto; }
.text-bold { font-weight: 600; }
.text-mono { font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace; }
.text-small { font-size: 12px; }

.color-fg-muted { color: var(--fgColor-muted, #8b949e); }
.color-fg-danger { color: var(--fgColor-danger, #f85149); }
.color-fg-default { color: var(--fgColor-default, #e6edf3); }

/* Light mode overrides */
@media (prefers-color-scheme: light) {
  .dap-debugger-pane {
    background-color: var(--bgColor-default, #ffffff);
    border-color: var(--borderColor-default, #d0d7de) !important;
  }

  .dap-header,
  .dap-scope-header,
  .dap-repl-header,
  .dap-controls {
    background-color: var(--bgColor-muted, #f6f8fa);
  }

  .dap-repl-output,
  .dap-repl-input input {
    background-color: var(--bgColor-inset, #f6f8fa) !important;
  }

  .dap-tree-node .text-bold {
    color: var(--fgColor-default, #1f2328);
  }

  .color-fg-muted {
    color: var(--fgColor-muted, #656d76);
  }
}

/* Respect GitHub's color mode data attribute */
[data-color-mode="light"] .dap-debugger-pane,
html[data-color-mode="light"] .dap-debugger-pane {
  background-color: #ffffff;
  border-color: #d0d7de !important;
}

[data-color-mode="light"] .dap-header,
[data-color-mode="light"] .dap-scope-header,
[data-color-mode="light"] .dap-repl-header,
[data-color-mode="light"] .dap-controls,
html[data-color-mode="light"] .dap-header,
html[data-color-mode="light"] .dap-scope-header,
html[data-color-mode="light"] .dap-repl-header,
html[data-color-mode="light"] .dap-controls {
  background-color: #f6f8fa;
}

[data-color-mode="light"] .dap-repl-output,
[data-color-mode="light"] .dap-repl-input input,
html[data-color-mode="light"] .dap-repl-output,
html[data-color-mode="light"] .dap-repl-input input {
  background-color: #f6f8fa !important;
}

/* Debug Button in Header */
.dap-debug-btn-container {
  display: flex;
  align-items: center;
}

.dap-debug-btn {
  display: inline-flex;
  align-items: center;
  gap: 4px;
  font-size: 14px;
  font-weight: 500;
}

.dap-debug-btn.selected {
  background-color: var(--bgColor-accent-muted, #388bfd26);
  border-color: var(--borderColor-accent-emphasis, #388bfd);
}

.dap-debug-btn:hover:not(:disabled) {
  background-color: var(--bgColor-neutral-muted, #6e768166);
}

/* Light mode for debug button */
[data-color-mode="light"] .dap-debug-btn.selected,
html[data-color-mode="light"] .dap-debug-btn.selected {
  background-color: #ddf4ff;
  border-color: #54aeff;
}
|
||||
767
browser-ext/content/content.js
Normal file
767
browser-ext/content/content.js
Normal file
@@ -0,0 +1,767 @@
|
||||
/**
 * Content Script - Debugger UI
 *
 * Injects the debugger pane into GitHub Actions job pages and handles
 * all UI interactions.
 */

// State
let debuggerPane = null;     // Root element of the injected pane; null until injected
let currentFrameId = 0;      // Frame id taken from the most recent stackTrace response
let isConnected = false;     // Set true once a 'stopped' event has been handled
let replHistory = [];        // Commands entered into the REPL, oldest first
let replHistoryIndex = -1;   // Cursor into replHistory for ArrowUp/ArrowDown navigation
|
||||
|
||||
/**
 * Escape text for safe interpolation into innerHTML by round-tripping it
 * through a detached element's textContent.
 */
function escapeHtml(text) {
  const scratch = document.createElement('div');
  scratch.textContent = text;
  return scratch.innerHTML;
}
|
||||
|
||||
/**
 * Remove a trailing result-indicator suffix from a step name,
 * e.g. "Run tests [running]" -> "Run tests". Matching is case-insensitive
 * and only strips a suffix at the very end of the string.
 */
function stripResultIndicator(name) {
  const resultSuffix = /\s*\[(running|success|failure|skipped|cancelled)\]$/i;
  return name.replace(resultSuffix, '');
}
|
||||
|
||||
/**
 * Forward a DAP request to the background script and resolve with the
 * response body. Rejects when messaging fails or the background reports
 * an unsuccessful request.
 */
function sendDapRequest(command, args = {}) {
  return new Promise((resolve, reject) => {
    chrome.runtime.sendMessage({ type: 'dap-request', command, args }, (reply) => {
      if (chrome.runtime.lastError) {
        reject(new Error(chrome.runtime.lastError.message));
        return;
      }
      if (reply && reply.success) {
        resolve(reply.body);
      } else {
        reject(new Error(reply?.error || 'Unknown error'));
      }
    });
  });
}
|
||||
|
||||
/**
 * Build a map of workflow steps from the DOM.
 *
 * @returns {Map<number, object>} Keyed by DOM order index; each value holds
 *   the <check-step> element plus its data-* attributes (number, name,
 *   conclusion, externalId).
 */
function buildStepMap() {
  const map = new Map();
  document.querySelectorAll('check-step').forEach((el, idx) => {
    map.set(idx, {
      element: el,
      // Fix: always pass an explicit radix — parseInt without one can
      // misinterpret some inputs (e.g. leading-zero strings in old engines).
      number: parseInt(el.dataset.number, 10),
      name: el.dataset.name,
      conclusion: el.dataset.conclusion,
      externalId: el.dataset.externalId,
    });
  });
  return map;
}
|
||||
|
||||
/**
 * Locate the <check-step> element whose data-name equals stepName.
 * CSS.escape prevents arbitrary step names from breaking the selector.
 */
function findStepByName(stepName) {
  const selector = `check-step[data-name="${CSS.escape(stepName)}"]`;
  return document.querySelector(selector);
}
|
||||
|
||||
/**
 * Locate the <check-step> element whose data-number equals stepNumber.
 */
function findStepByNumber(stepNumber) {
  const selector = `check-step[data-number="${stepNumber}"]`;
  return document.querySelector(selector);
}
|
||||
|
||||
/**
 * Get all step elements.
 *
 * @returns {NodeList} Every <check-step> element on the page, in DOM order
 *   (index 0 is the synthetic "Set up job" entry — see injectDebuggerPane).
 */
function getAllSteps() {
  return document.querySelectorAll('check-step');
}
|
||||
|
||||
/**
 * Create the debugger pane HTML.
 *
 * Returns the inner markup for the pane: a header row (bug icon, step info,
 * status label), a split content area (variables tree + REPL console), and
 * a bottom row of transport control buttons. Markup uses GitHub Primer
 * utility classes so it blends into the Actions job page; the `\${{ ... }}`
 * below is escaped so the template literal emits a literal "${{ github.ref }}".
 */
function createDebuggerPaneHTML() {
  return `
    <div class="dap-header d-flex flex-items-center p-2 border-bottom">
      <svg class="octicon mr-2" viewBox="0 0 16 16" width="16" height="16">
        <path fill="currentColor" d="M4.72.22a.75.75 0 0 1 1.06 0l1 1a.75.75 0 0 1-1.06 1.06l-.22-.22-.22.22a.75.75 0 0 1-1.06-1.06l1-1Z"/>
        <path fill="currentColor" d="M11.28.22a.75.75 0 0 0-1.06 0l-1 1a.75.75 0 0 0 1.06 1.06l.22-.22.22.22a.75.75 0 0 0 1.06-1.06l-1-1Z"/>
        <path fill="currentColor" d="M8 4a4 4 0 0 0-4 4v1h1v2.5a2.5 2.5 0 0 0 2.5 2.5h1a2.5 2.5 0 0 0 2.5-2.5V9h1V8a4 4 0 0 0-4-4Z"/>
        <path fill="currentColor" d="M5 9H3.5a.5.5 0 0 0-.5.5v2a.5.5 0 0 0 .5.5H5V9ZM11 9h1.5a.5.5 0 0 1 .5.5v2a.5.5 0 0 1-.5.5H11V9Z"/>
      </svg>
      <span class="text-bold">Debugger</span>
      <span class="dap-step-info color-fg-muted ml-2">Connecting...</span>
      <span class="Label dap-status-label ml-auto">CONNECTING</span>
    </div>

    <div class="dap-content d-flex" style="height: 300px;">
      <!-- Scopes Panel -->
      <div class="dap-scopes border-right overflow-auto" style="width: 33%;">
        <div class="dap-scope-header p-2 text-bold border-bottom">Variables</div>
        <div class="dap-scope-tree p-2">
          <div class="color-fg-muted">Connect to view variables</div>
        </div>
      </div>

      <!-- REPL Console -->
      <div class="dap-repl d-flex flex-column" style="width: 67%;">
        <div class="dap-repl-header p-2 text-bold border-bottom">Console</div>
        <div class="dap-repl-output overflow-auto flex-auto p-2 text-mono text-small">
          <div class="color-fg-muted">Welcome to Actions DAP Debugger</div>
          <div class="color-fg-muted">Enter expressions like: \${{ github.ref }}</div>
          <div class="color-fg-muted">Or shell commands: !ls -la</div>
        </div>
        <div class="dap-repl-input border-top p-2">
          <input type="text" class="form-control input-sm text-mono"
                 placeholder="Enter expression or !command" disabled>
        </div>
      </div>
    </div>

    <!-- Control buttons -->
    <div class="dap-controls d-flex flex-items-center p-2 border-top">
      <button class="btn btn-sm mr-2" data-action="reverseContinue" title="Reverse Continue (go to first checkpoint)" disabled>
        <svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M2 2v12h2V8.5l5 4V8.5l5 4V2.5l-5 4V2.5l-5 4V2z"/></svg>
      </button>
      <button class="btn btn-sm mr-2" data-action="stepBack" title="Step Back" disabled>
        <svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M2 2v12h2V2H2zm3 6 7 5V3L5 8z"/></svg>
      </button>
      <button class="btn btn-sm btn-primary mr-2" data-action="continue" title="Continue" disabled>
        <svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M4 2l10 6-10 6z"/></svg>
      </button>
      <button class="btn btn-sm mr-2" data-action="next" title="Step to Next" disabled>
        <svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M2 3l7 5-7 5V3zm7 5l5 0V2h2v12h-2V8.5l-5 0z"/></svg>
      </button>
      <span class="dap-step-counter color-fg-muted ml-auto text-small">
        Not connected
      </span>
    </div>
  `;
}
|
||||
|
||||
/**
 * Inject the debugger pane into the job page's step list.
 *
 * @returns {Element|null} The pane element, or null when the steps
 *   container is missing from the page.
 */
function injectDebuggerPane() {
  // Replace any pane left over from a previous injection.
  document.querySelector('.dap-debugger-pane')?.remove();

  const stepsContainer = document.querySelector('check-steps');
  if (!stepsContainer) {
    console.warn('[Content] No check-steps container found');
    return null;
  }

  const pane = document.createElement('div');
  pane.className = 'dap-debugger-pane mx-2 mb-2 border rounded-2';
  pane.innerHTML = createDebuggerPaneHTML();

  // Place the pane before the first real workflow step; index 0 is the
  // synthetic "Set up job" entry.
  const steps = stepsContainer.querySelectorAll('check-step');
  const anchor = steps.length > 1 ? steps[1] : stepsContainer.firstChild;
  stepsContainer.insertBefore(pane, anchor);

  setupPaneEventHandlers(pane);

  debuggerPane = pane;
  return pane;
}
|
||||
|
||||
/**
 * Reposition the debugger pane directly before the given step element and
 * refresh the "Paused before" text in the pane header.
 */
function moveDebuggerPane(stepElement, stepName) {
  if (!(debuggerPane && stepElement)) {
    return;
  }

  stepElement.parentNode.insertBefore(debuggerPane, stepElement);

  const info = debuggerPane.querySelector('.dap-step-info');
  if (info) info.textContent = `Paused before: ${stepName}`;
}
|
||||
|
||||
/**
 * Wire up the pane's control buttons and REPL input.
 */
function setupPaneEventHandlers(pane) {
  // Every element carrying data-action is a debugger transport button.
  for (const btn of pane.querySelectorAll('[data-action]')) {
    btn.addEventListener('click', async () => {
      const action = btn.dataset.action;
      // Lock the UI while the request is in flight; a subsequent
      // 'stopped' event re-enables it on success.
      enableControls(false);
      updateStatus('RUNNING');

      try {
        await sendDapRequest(action, { threadId: 1 });
      } catch (error) {
        console.error(`[Content] DAP ${action} failed:`, error);
        appendOutput(`Error: ${error.message}`, 'error');
        enableControls(true);
        updateStatus('ERROR');
      }
    });
  }

  // REPL input handles Enter / ArrowUp / ArrowDown itself.
  const input = pane.querySelector('.dap-repl-input input');
  input?.addEventListener('keydown', handleReplKeydown);
}
|
||||
|
||||
/**
 * Handle REPL input keydown.
 *
 * Enter evaluates the current expression via the DAP 'evaluate' request;
 * ArrowUp/ArrowDown walk the command history (replHistoryIndex is a cursor
 * into replHistory, with replHistory.length meaning "past the newest
 * entry", i.e. an empty input line).
 */
async function handleReplKeydown(e) {
  const input = e.target;

  if (e.key === 'Enter') {
    const command = input.value.trim();
    if (!command) return;

    // Record in history and reset the cursor past the newest entry.
    replHistory.push(command);
    replHistoryIndex = replHistory.length;
    input.value = '';

    // Show command
    appendOutput(`> ${command}`, 'input');

    // Send to DAP — '!'-prefixed input is a shell command ('repl' context),
    // anything else is an expression ('watch' context).
    try {
      const response = await sendDapRequest('evaluate', {
        expression: command,
        frameId: currentFrameId,
        context: command.startsWith('!') ? 'repl' : 'watch',
      });
      // Only show result if it's NOT an exit code summary
      // (shell command output is already streamed via output events)
      if (response.result && !/^\(exit code: -?\d+\)$/.test(response.result)) {
        appendOutput(response.result, 'result');
      }
    } catch (error) {
      appendOutput(error.message, 'error');
    }
  } else if (e.key === 'ArrowUp') {
    // Walk backwards through history; stop at the oldest entry.
    if (replHistoryIndex > 0) {
      replHistoryIndex--;
      input.value = replHistory[replHistoryIndex];
    }
    e.preventDefault();
  } else if (e.key === 'ArrowDown') {
    // Walk forwards; past the newest entry, show an empty input line.
    if (replHistoryIndex < replHistory.length - 1) {
      replHistoryIndex++;
      input.value = replHistory[replHistoryIndex];
    } else {
      replHistoryIndex = replHistory.length;
      input.value = '';
    }
    e.preventDefault();
  }
}
|
||||
|
||||
/**
 * Append text to the REPL console, one div per line.
 *
 * @param {string} text Possibly multi-line output.
 * @param {string} type Output category ('input' | 'result' | 'stdout' |
 *   'error') — becomes the dap-output-* class; 'error' and 'input' also
 *   get Primer color classes.
 */
function appendOutput(text, type) {
  const output = document.querySelector('.dap-repl-output');
  if (!output) return;

  for (const line of text.split('\n')) {
    const row = document.createElement('div');
    row.className = `dap-output-${type}`;
    if (type === 'error') row.classList.add('color-fg-danger');
    if (type === 'input') row.classList.add('color-fg-muted');
    row.textContent = line;
    output.appendChild(row);
  }

  // Keep the newest line scrolled into view.
  output.scrollTop = output.scrollHeight;
}
|
||||
|
||||
/**
 * Enable or disable every control button and the REPL input in the pane.
 */
function enableControls(enabled) {
  if (!debuggerPane) return;

  const disabled = !enabled;
  for (const btn of debuggerPane.querySelectorAll('.dap-controls button')) {
    btn.disabled = disabled;
  }

  const input = debuggerPane.querySelector('.dap-repl-input input');
  if (input) input.disabled = disabled;
}
|
||||
|
||||
/**
 * Update the pane's status label and, when `extra` is given, the step
 * counter text.
 *
 * @param {string} status Uppercase status keyword (PAUSED/RUNNING/...).
 * @param {string} [extra] Optional text for the step counter.
 */
function updateStatus(status, extra) {
  if (!debuggerPane) return;

  const label = debuggerPane.querySelector('.dap-status-label');
  if (label) {
    label.textContent = status;
    label.className = 'Label dap-status-label ml-auto ';

    // Map each status to its Primer label color; anything unrecognized
    // falls back to the neutral secondary style.
    const labelClass = {
      PAUSED: 'Label--attention',
      RUNNING: 'Label--success',
      TERMINATED: 'Label--secondary',
      DISCONNECTED: 'Label--secondary',
      ERROR: 'Label--danger',
    }[status] || 'Label--secondary';
    label.classList.add(labelClass);
  }

  // Update step counter if extra info provided.
  if (extra) {
    const counter = debuggerPane.querySelector('.dap-step-counter');
    if (counter) counter.textContent = extra;
  }
}
|
||||
|
||||
/**
 * Fetch the scopes for a stack frame and render them into the variables
 * panel. Errors are rendered inline (HTML-escaped) instead of thrown.
 */
async function loadScopes(frameId) {
  const scopesContainer = document.querySelector('.dap-scope-tree');
  if (!scopesContainer) return;

  scopesContainer.innerHTML = '<div class="color-fg-muted">Loading...</div>';

  try {
    console.log('[Content] Loading scopes for frame:', frameId);
    const response = await sendDapRequest('scopes', { frameId });
    console.log('[Content] Scopes response:', response);

    scopesContainer.innerHTML = '';

    const scopes = response.scopes || [];
    if (scopes.length === 0) {
      scopesContainer.innerHTML = '<div class="color-fg-muted">No scopes available</div>';
      return;
    }

    for (const scope of scopes) {
      console.log('[Content] Creating tree node for scope:', scope.name, 'variablesRef:', scope.variablesReference);
      // A variablesReference of 0 means the scope has no fetchable children.
      scopesContainer.appendChild(
        createTreeNode(scope.name, scope.variablesReference, scope.variablesReference > 0)
      );
    }
  } catch (error) {
    console.error('[Content] Failed to load scopes:', error);
    scopesContainer.innerHTML = `<div class="color-fg-danger">Error: ${escapeHtml(error.message)}</div>`;
  }
}
|
||||
|
||||
/**
 * Build a tree node element for a scope or variable.
 *
 * @param {string} name Display name.
 * @param {number} variablesReference DAP handle for fetching children (0 = leaf).
 * @param {boolean} isExpandable Whether to show an expand arrow and click handler.
 * @param {*} [value] Optional value text appended after the name.
 */
function createTreeNode(name, variablesReference, isExpandable, value) {
  const makeSpan = (className, text) => {
    const span = document.createElement('span');
    span.className = className;
    span.textContent = text;
    return span;
  };

  const node = document.createElement('div');
  node.className = 'dap-tree-node';
  node.dataset.variablesRef = variablesReference;

  const content = document.createElement('div');
  content.className = 'dap-tree-content';

  content.appendChild(makeSpan('dap-expand-icon', isExpandable ? '\u25B6' : ' ')); // ▶ or space
  content.appendChild(makeSpan('text-bold', name));
  if (value !== undefined) {
    content.appendChild(makeSpan('color-fg-muted', `: ${value}`));
  }

  node.appendChild(content);

  if (isExpandable && variablesReference > 0) {
    content.style.cursor = 'pointer';
    content.addEventListener('click', () => toggleTreeNode(node));
  }

  return node;
}
|
||||
|
||||
/**
 * Expand or collapse a variables tree node, lazily fetching children via
 * the DAP 'variables' request on first expansion. On fetch failure the
 * expand arrow is restored so the user can retry.
 */
async function toggleTreeNode(node) {
  const children = node.querySelector('.dap-tree-children');
  const expandIcon = node.querySelector('.dap-expand-icon');

  if (children) {
    // Children already fetched: just toggle visibility.
    children.hidden = !children.hidden;
    expandIcon.textContent = children.hidden ? '\u25B6' : '\u25BC'; // ▶ or ▼
    return;
  }

  // First expansion — fetch children.
  // Fix: explicit radix 10; parseInt without a radix is a classic JS pitfall.
  const variablesRef = parseInt(node.dataset.variablesRef, 10);
  if (!variablesRef) return; // 0 or NaN: nothing to fetch

  expandIcon.textContent = '...';

  try {
    const response = await sendDapRequest('variables', { variablesReference: variablesRef });

    const childContainer = document.createElement('div');
    childContainer.className = 'dap-tree-children ml-3';

    for (const variable of response.variables) {
      const hasChildren = variable.variablesReference > 0;
      childContainer.appendChild(
        createTreeNode(
          variable.name,
          variable.variablesReference,
          hasChildren,
          variable.value
        )
      );
    }

    node.appendChild(childContainer);
    expandIcon.textContent = '\u25BC'; // ▼
  } catch (error) {
    console.error('[Content] Failed to load variables:', error);
    expandIcon.textContent = '\u25B6'; // ▶ — restore so the user can retry
  }
}
|
||||
|
||||
/**
 * Handle stopped event from DAP.
 *
 * Marks the session connected, flips the UI into the PAUSED state, locates
 * the step the debugger is paused before (by name first, by index as a
 * fallback), moves the pane there, updates the step counter, and loads the
 * variable scopes for the top stack frame.
 */
async function handleStoppedEvent(body) {
  console.log('[Content] Stopped event:', body);

  isConnected = true;
  updateStatus('PAUSED', body.reason || 'paused');
  enableControls(true);

  // Get current location
  try {
    const stackTrace = await sendDapRequest('stackTrace', { threadId: 1 });

    if (stackTrace.stackFrames && stackTrace.stackFrames.length > 0) {
      const currentFrame = stackTrace.stackFrames[0];
      currentFrameId = currentFrame.id;

      // Strip result indicator from step name for DOM lookup
      // e.g., "Run tests [running]" -> "Run tests"
      const rawStepName = stripResultIndicator(currentFrame.name);
      let stepElement = findStepByName(rawStepName);

      if (!stepElement) {
        // Fallback: use step index
        // Note: GitHub Actions UI shows "Set up job" at index 0, which is not a real workflow step
        // DAP uses 1-based frame IDs, so frame ID 1 maps to UI step index 1 (skipping "Set up job")
        const steps = getAllSteps();
        const adjustedIndex = currentFrame.id; // 1-based, happens to match after skipping "Set up job"
        if (adjustedIndex > 0 && adjustedIndex < steps.length) {
          stepElement = steps[adjustedIndex];
        }
      }

      if (stepElement) {
        moveDebuggerPane(stepElement, rawStepName);
      }

      // Update step counter
      const counter = debuggerPane?.querySelector('.dap-step-counter');
      if (counter) {
        counter.textContent = `Step ${currentFrame.id} of ${stackTrace.stackFrames.length}`;
      }

      // Load scopes for the top frame so the variables panel reflects it.
      await loadScopes(currentFrame.id);
    }
  } catch (error) {
    console.error('[Content] Failed to get stack trace:', error);
    appendOutput(`Error: ${error.message}`, 'error');
  }
}
|
||||
|
||||
/**
 * Handle output event from DAP: append the emitted text to the output
 * panel, routing stderr to the error style and everything else to stdout.
 */
function handleOutputEvent(body) {
  if (!body.output) return;

  const style = body.category === 'stderr' ? 'error' : 'stdout';
  appendOutput(body.output.trimEnd(), style);
}
|
||||
|
||||
/**
 * Handle terminated event from DAP: the session is over, so flip the UI
 * back to its inactive state and tell the user.
 */
function handleTerminatedEvent() {
  isConnected = false;
  updateStatus('TERMINATED');
  enableControls(false);

  const stepInfo = debuggerPane?.querySelector('.dap-step-info');
  if (stepInfo) stepInfo.textContent = 'Session ended';
}
|
||||
|
||||
/**
 * Load current debug state (used when page loads while already paused).
 * Mirrors handleStoppedEvent: finds the paused step, moves the pane,
 * updates the step counter, and loads variable scopes.
 */
async function loadCurrentDebugState() {
  if (!debuggerPane) return;

  try {
    const stackTrace = await sendDapRequest('stackTrace', { threadId: 1 });
    if (stackTrace.stackFrames && stackTrace.stackFrames.length > 0) {
      const currentFrame = stackTrace.stackFrames[0];
      currentFrameId = currentFrame.id;

      // Move pane to current step.
      // Strip result indicator from step name for DOM lookup.
      const rawStepName = stripResultIndicator(currentFrame.name);
      let stepElement = findStepByName(rawStepName);

      if (!stepElement) {
        // Fallback: use step index (skip "Set up job" at index 0)
        const steps = getAllSteps();
        const adjustedIndex = currentFrame.id; // 1-based, matches after skipping "Set up job"
        if (adjustedIndex > 0 && adjustedIndex < steps.length) {
          stepElement = steps[adjustedIndex];
        }
      }

      if (stepElement) {
        moveDebuggerPane(stepElement, rawStepName);
      }

      // Update step counter.
      // Fix: frame IDs are already 1-based (see the fallback above and the
      // identical counter in handleStoppedEvent), so the previous
      // `currentFrame.id + 1` over-counted by one and disagreed with the
      // counter rendered by handleStoppedEvent for the same frame.
      const counter = debuggerPane.querySelector('.dap-step-counter');
      if (counter) {
        counter.textContent = `Step ${currentFrame.id} of ${stackTrace.stackFrames.length}`;
      }

      // Load scopes
      await loadScopes(currentFrame.id);
    }
  } catch (error) {
    console.error('[Content] Failed to load current debug state:', error);
  }
}
|
||||
|
||||
/**
 * Handle status change from background: sync the local connection flag,
 * the status badge, and the control buttons with the reported state.
 * Unknown status strings are ignored.
 */
function handleStatusChange(status) {
  console.log('[Content] Status changed:', status);

  if (status === 'connected') {
    isConnected = true;
    updateStatus('CONNECTED');
    const stepInfo = debuggerPane?.querySelector('.dap-step-info');
    if (stepInfo) {
      stepInfo.textContent = 'Waiting for debug event...';
    }
  } else if (status === 'paused') {
    isConnected = true;
    updateStatus('PAUSED');
    enableControls(true);
    loadCurrentDebugState();
  } else if (status === 'running') {
    isConnected = true;
    updateStatus('RUNNING');
    enableControls(false);
  } else if (status === 'disconnected') {
    isConnected = false;
    updateStatus('DISCONNECTED');
    enableControls(false);
  } else if (status === 'error') {
    isConnected = false;
    updateStatus('ERROR');
    enableControls(false);
  }
}
|
||||
|
||||
/**
 * Listen for messages from background script, dispatching DAP events and
 * status updates to their dedicated handlers.
 */
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  console.log('[Content] Received message:', message.type);

  if (message.type === 'dap-event') {
    const { event } = message;
    if (event.event === 'stopped') {
      handleStoppedEvent(event.body);
    } else if (event.event === 'output') {
      handleOutputEvent(event.body);
    } else if (event.event === 'terminated') {
      handleTerminatedEvent();
    }
  } else if (message.type === 'status-changed') {
    handleStatusChange(message.status);
  }
});
|
||||
|
||||
/**
 * Inject debug button into GitHub Actions UI header. No-op when the
 * header container is missing or the button was already injected.
 */
function injectDebugButton() {
  const container = document.querySelector('.js-check-run-search');
  if (!container || container.querySelector('.dap-debug-btn-container')) {
    return; // Already injected or container not found
  }

  const buttonContainer = document.createElement('div');
  buttonContainer.className = 'ml-2 dap-debug-btn-container';
  buttonContainer.innerHTML = `
    <button type="button" class="btn btn-sm dap-debug-btn" title="Toggle DAP Debugger">
      <svg viewBox="0 0 16 16" width="16" height="16" class="octicon mr-1" style="vertical-align: text-bottom;">
        <path fill="currentColor" d="M4.72.22a.75.75 0 0 1 1.06 0l1 1a.75.75 0 0 1-1.06 1.06l-.22-.22-.22.22a.75.75 0 0 1-1.06-1.06l1-1Z"/>
        <path fill="currentColor" d="M11.28.22a.75.75 0 0 0-1.06 0l-1 1a.75.75 0 0 0 1.06 1.06l.22-.22.22.22a.75.75 0 0 0 1.06-1.06l-1-1Z"/>
        <path fill="currentColor" d="M8 4a4 4 0 0 0-4 4v1h1v2.5a2.5 2.5 0 0 0 2.5 2.5h1a2.5 2.5 0 0 0 2.5-2.5V9h1V8a4 4 0 0 0-4-4Z"/>
        <path fill="currentColor" d="M5 9H3.5a.5.5 0 0 0-.5.5v2a.5.5 0 0 0 .5.5H5V9ZM11 9h1.5a.5.5 0 0 1 .5.5v2a.5.5 0 0 1-.5.5H11V9Z"/>
      </svg>
      Debug
    </button>
  `;

  const button = buttonContainer.querySelector('button');
  button.addEventListener('click', () => {
    const existing = document.querySelector('.dap-debugger-pane');

    if (existing) {
      // Pane exists: toggle its visibility and mirror that on the button.
      existing.hidden = !existing.hidden;
      button.classList.toggle('selected', !existing.hidden);
      return;
    }

    // First click: create the pane, then sync it with the background status.
    const pane = injectDebuggerPane();
    if (!pane) return;

    button.classList.add('selected');
    chrome.runtime.sendMessage({ type: 'get-status' }, (response) => {
      if (response && response.status) {
        handleStatusChange(response.status);
      }
    });
  });

  // Insert at the beginning of the container
  container.insertBefore(buttonContainer, container.firstChild);
  console.log('[Content] Debug button injected');
}
|
||||
|
||||
/**
 * Initialize content script: inject the Debug button once workflow steps
 * are present in the DOM, then sync with the background connection status.
 */
function init() {
  console.log('[Content] Actions DAP Debugger content script loaded');

  // Check if we're on a job page; if steps aren't rendered yet, watch the
  // DOM until they appear before injecting the button.
  if (getAllSteps().length === 0) {
    console.log('[Content] No steps found, waiting for DOM...');
    const observer = new MutationObserver(() => {
      if (getAllSteps().length > 0) {
        observer.disconnect();
        console.log('[Content] Steps found, injecting debug button');
        injectDebugButton();
      }
    });
    observer.observe(document.body, { childList: true, subtree: true });
    return;
  }

  // Inject debug button in header (user can click to show debugger pane)
  injectDebugButton();

  // Check current connection status
  chrome.runtime.sendMessage({ type: 'get-status' }, async (response) => {
    if (!response || !response.status) return;

    handleStatusChange(response.status);

    // If already connected/paused, auto-show the debugger pane
    if (response.status !== 'paused' && response.status !== 'connected') return;

    if (!document.querySelector('.dap-debugger-pane')) {
      injectDebuggerPane();
      const btn = document.querySelector('.dap-debug-btn');
      if (btn) btn.classList.add('selected');
    }

    // If already paused, load the current debug state
    if (response.status === 'paused') {
      await loadCurrentDebugState();
    }
  });
}
|
||||
|
||||
// Bootstrap: run init() immediately if the DOM is already parsed,
// otherwise wait for DOMContentLoaded.
if (document.readyState !== 'loading') {
  init();
} else {
  document.addEventListener('DOMContentLoaded', init);
}
|
||||
135
browser-ext/icons/generate.js
Normal file
135
browser-ext/icons/generate.js
Normal file
@@ -0,0 +1,135 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Create simple green circle PNG icons
|
||||
* No dependencies required - uses pure JavaScript to create valid PNG files
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const zlib = require('zlib');
|
||||
|
||||
/**
 * Build a complete PNG file (as a Buffer) for a size×size icon: a solid
 * green circle with a white "bug" silhouette drawn on top.
 *
 * Pixels are emitted as 8-bit RGBA scanlines, each prefixed with a
 * filter-type byte of 0 (no filtering), deflate-compressed into a single
 * IDAT chunk.
 *
 * @param {number} size  Icon width/height in pixels.
 * @returns {Buffer} Bytes of a valid PNG file.
 */
function createPNG(size) {
  // PNG uses RGBA format, one pixel = 4 bytes.
  // Flat buffer layout per scanline: [filter byte][R,G,B,A × size].
  const pixelData = [];

  const centerX = size / 2;
  const centerY = size / 2;
  const radius = size / 2 - 1; // circle nearly touches the icon edges
  const innerRadius = radius * 0.4; // scale of the bug silhouette

  // Pass 1: opaque green circle on a transparent background.
  for (let y = 0; y < size; y++) {
    pixelData.push(0); // Filter byte for each row
    for (let x = 0; x < size; x++) {
      const dx = x - centerX;
      const dy = y - centerY;
      const dist = Math.sqrt(dx * dx + dy * dy);

      if (dist <= radius) {
        // Green circle (#238636)
        pixelData.push(35, 134, 54, 255);
      } else {
        // Transparent
        pixelData.push(0, 0, 0, 0);
      }
    }
  }

  // Pass 2: overwrite pixels inside the "bug" silhouette with opaque white.
  // (Fix: dropped a dead `dist` computation that this pass never used.)
  for (let y = 0; y < size; y++) {
    for (let x = 0; x < size; x++) {
      const dx = x - centerX;
      const dy = y - centerY;

      // Bug body (oval, slightly below center, stretched vertically)
      const bodyDx = dx;
      const bodyDy = (dy - size * 0.05) / 1.3;
      const bodyDist = Math.sqrt(bodyDx * bodyDx + bodyDy * bodyDy);

      // Bug head (circle above body)
      const headDx = dx;
      const headDy = dy + size * 0.15;
      const headDist = Math.sqrt(headDx * headDx + headDy * headDy);

      if (bodyDist < innerRadius || headDist < innerRadius * 0.6) {
        // Index into the flat scanline buffer: skip y full rows of
        // (1 filter byte + size*4 pixel bytes), then this row's filter
        // byte, then x pixels.
        const idx = 1 + y * (1 + size * 4) + x * 4;
        pixelData[idx] = 255;
        pixelData[idx + 1] = 255;
        pixelData[idx + 2] = 255;
        pixelData[idx + 3] = 255;
      }
    }
  }

  const rawData = Buffer.from(pixelData);
  const compressed = zlib.deflateSync(rawData);

  // Assemble the PNG container: signature + IHDR + IDAT + IEND.
  const chunks = [];

  // PNG signature
  chunks.push(Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]));

  // IHDR chunk
  const ihdr = Buffer.alloc(13);
  ihdr.writeUInt32BE(size, 0); // width
  ihdr.writeUInt32BE(size, 4); // height
  ihdr.writeUInt8(8, 8); // bit depth
  ihdr.writeUInt8(6, 9); // color type (RGBA)
  ihdr.writeUInt8(0, 10); // compression
  ihdr.writeUInt8(0, 11); // filter
  ihdr.writeUInt8(0, 12); // interlace
  chunks.push(createChunk('IHDR', ihdr));

  // IDAT chunk
  chunks.push(createChunk('IDAT', compressed));

  // IEND chunk
  chunks.push(createChunk('IEND', Buffer.alloc(0)));

  return Buffer.concat(chunks);
}
|
||||
|
||||
/**
 * Wrap payload bytes in standard PNG chunk framing: 4-byte big-endian
 * length, 4-byte chunk type, the data, then a CRC-32 over type + data.
 *
 * @param {string} type  Four-character chunk type, e.g. 'IHDR'.
 * @param {Buffer} data  Chunk payload (may be empty).
 * @returns {Buffer} The framed chunk.
 */
function createChunk(type, data) {
  const typeBytes = Buffer.from(type);

  const lengthBytes = Buffer.alloc(4);
  lengthBytes.writeUInt32BE(data.length, 0);

  // The CRC covers the chunk type and data, but not the length field.
  const crcBytes = Buffer.alloc(4);
  crcBytes.writeUInt32BE(crc32(Buffer.concat([typeBytes, data])), 0);

  return Buffer.concat([lengthBytes, typeBytes, data, crcBytes]);
}

// CRC-32 lookup table for the reflected polynomial 0xEDB88320, built once
// at module load.
const crc32Table = new Uint32Array(256);
for (let n = 0; n < 256; n++) {
  let entry = n;
  for (let bit = 0; bit < 8; bit++) {
    entry = entry & 1 ? 0xedb88320 ^ (entry >>> 1) : entry >>> 1;
  }
  crc32Table[n] = entry;
}

/**
 * Standard CRC-32 (the variant used by PNG chunks and zlib).
 *
 * @param {Buffer} buf  Bytes to checksum.
 * @returns {number} Unsigned 32-bit checksum.
 */
function crc32(buf) {
  let crc = 0xffffffff;
  for (const byte of buf) {
    crc = crc32Table[(crc ^ byte) & 0xff] ^ (crc >>> 8);
  }
  return (crc ^ 0xffffffff) >>> 0;
}
|
||||
|
||||
// Generate icons at each size the extension manifest requires, written
// next to this script.
const iconsDir = path.join(__dirname);
const sizes = [16, 48, 128];

sizes.forEach((size) => {
  const png = createPNG(size);
  const filename = `icon${size}.png`;
  fs.writeFileSync(path.join(iconsDir, filename), png);
  // Fix: log the actual filename — the template read `$(unknown)` instead
  // of interpolating `${filename}`.
  console.log(`Created ${filename} (${size}x${size})`);
});

console.log('Done!');
|
||||
BIN
browser-ext/icons/icon128.png
Normal file
BIN
browser-ext/icons/icon128.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 872 B |
BIN
browser-ext/icons/icon16.png
Normal file
BIN
browser-ext/icons/icon16.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 126 B |
BIN
browser-ext/icons/icon48.png
Normal file
BIN
browser-ext/icons/icon48.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 258 B |
32
browser-ext/manifest.json
Normal file
32
browser-ext/manifest.json
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"manifest_version": 3,
|
||||
"name": "Actions DAP Debugger",
|
||||
"version": "0.1.0",
|
||||
"description": "Debug GitHub Actions workflows with DAP - interactive debugging directly in the browser",
|
||||
"permissions": ["activeTab", "storage"],
|
||||
"host_permissions": ["https://github.com/*"],
|
||||
"background": {
|
||||
"service_worker": "background/background.js"
|
||||
},
|
||||
"content_scripts": [
|
||||
{
|
||||
"matches": ["https://github.com/*/*/actions/runs/*/job/*"],
|
||||
"js": ["lib/dap-protocol.js", "content/content.js"],
|
||||
"css": ["content/content.css"],
|
||||
"run_at": "document_idle"
|
||||
}
|
||||
],
|
||||
"action": {
|
||||
"default_popup": "popup/popup.html",
|
||||
"default_icon": {
|
||||
"16": "icons/icon16.png",
|
||||
"48": "icons/icon48.png",
|
||||
"128": "icons/icon128.png"
|
||||
}
|
||||
},
|
||||
"icons": {
|
||||
"16": "icons/icon16.png",
|
||||
"48": "icons/icon48.png",
|
||||
"128": "icons/icon128.png"
|
||||
}
|
||||
}
|
||||
228
browser-ext/popup/popup.css
Normal file
228
browser-ext/popup/popup.css
Normal file
@@ -0,0 +1,228 @@
|
||||
/**
|
||||
* Popup Styles
|
||||
*
|
||||
* GitHub-inspired dark theme for the extension popup.
|
||||
*/
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
width: 320px;
|
||||
padding: 16px;
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif;
|
||||
font-size: 14px;
|
||||
background-color: #0d1117;
|
||||
color: #e6edf3;
|
||||
}
|
||||
|
||||
h3 {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
margin: 0 0 16px 0;
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
h3 .icon {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
/* Status Section */
|
||||
.status-section {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
margin-bottom: 16px;
|
||||
padding: 12px;
|
||||
background-color: #161b22;
|
||||
border-radius: 6px;
|
||||
border: 1px solid #30363d;
|
||||
}
|
||||
|
||||
.status-indicator {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
border-radius: 50%;
|
||||
margin-right: 10px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.status-disconnected {
|
||||
background-color: #6e7681;
|
||||
}
|
||||
|
||||
.status-connecting {
|
||||
background-color: #9e6a03;
|
||||
animation: pulse 1.5s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.status-connected {
|
||||
background-color: #238636;
|
||||
}
|
||||
|
||||
.status-paused {
|
||||
background-color: #9e6a03;
|
||||
}
|
||||
|
||||
.status-running {
|
||||
background-color: #238636;
|
||||
animation: pulse 1.5s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.status-error {
|
||||
background-color: #da3633;
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0%, 100% {
|
||||
opacity: 1;
|
||||
}
|
||||
50% {
|
||||
opacity: 0.5;
|
||||
}
|
||||
}
|
||||
|
||||
#status-text {
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
/* Config Section */
|
||||
.config-section {
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.config-section label {
|
||||
display: block;
|
||||
margin-bottom: 12px;
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
color: #8b949e;
|
||||
}
|
||||
|
||||
.config-section input {
|
||||
display: block;
|
||||
width: 100%;
|
||||
padding: 8px 12px;
|
||||
margin-top: 6px;
|
||||
background-color: #0d1117;
|
||||
border: 1px solid #30363d;
|
||||
border-radius: 6px;
|
||||
color: #e6edf3;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.config-section input:focus {
|
||||
border-color: #1f6feb;
|
||||
outline: none;
|
||||
box-shadow: 0 0 0 3px rgba(31, 111, 235, 0.3);
|
||||
}
|
||||
|
||||
.config-section input:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.config-hint {
|
||||
font-size: 11px;
|
||||
color: #6e7681;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
/* Actions Section */
|
||||
.actions-section {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
button {
|
||||
flex: 1;
|
||||
padding: 10px 16px;
|
||||
border: none;
|
||||
border-radius: 6px;
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.15s ease;
|
||||
}
|
||||
|
||||
button:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.btn-primary {
|
||||
background-color: #238636;
|
||||
color: white;
|
||||
}
|
||||
|
||||
.btn-primary:hover:not(:disabled) {
|
||||
background-color: #2ea043;
|
||||
}
|
||||
|
||||
.btn-secondary {
|
||||
background-color: #21262d;
|
||||
color: #e6edf3;
|
||||
border: 1px solid #30363d;
|
||||
}
|
||||
|
||||
.btn-secondary:hover:not(:disabled) {
|
||||
background-color: #30363d;
|
||||
}
|
||||
|
||||
/* Help Section */
|
||||
.help-section {
|
||||
font-size: 12px;
|
||||
color: #8b949e;
|
||||
background-color: #161b22;
|
||||
border: 1px solid #30363d;
|
||||
border-radius: 6px;
|
||||
padding: 12px;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.help-section p {
|
||||
margin: 6px 0;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
.help-section p:first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.help-section strong {
|
||||
color: #e6edf3;
|
||||
}
|
||||
|
||||
.help-section code {
|
||||
display: block;
|
||||
background-color: #0d1117;
|
||||
padding: 8px;
|
||||
border-radius: 4px;
|
||||
font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace;
|
||||
font-size: 11px;
|
||||
overflow-x: auto;
|
||||
margin: 8px 0;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
/* Footer */
|
||||
.footer {
|
||||
text-align: center;
|
||||
padding-top: 8px;
|
||||
border-top: 1px solid #21262d;
|
||||
}
|
||||
|
||||
.footer a {
|
||||
color: #58a6ff;
|
||||
text-decoration: none;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.footer a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
52
browser-ext/popup/popup.html
Normal file
52
browser-ext/popup/popup.html
Normal file
@@ -0,0 +1,52 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<link rel="stylesheet" href="popup.css">
|
||||
</head>
|
||||
<body>
|
||||
<div class="popup-container">
|
||||
<h3>
|
||||
<svg class="icon" viewBox="0 0 16 16" width="16" height="16">
|
||||
<path fill="currentColor" d="M4.72.22a.75.75 0 0 1 1.06 0l1 1a.75.75 0 0 1-1.06 1.06l-.22-.22-.22.22a.75.75 0 0 1-1.06-1.06l1-1Z"/>
|
||||
<path fill="currentColor" d="M11.28.22a.75.75 0 0 0-1.06 0l-1 1a.75.75 0 0 0 1.06 1.06l.22-.22.22.22a.75.75 0 0 0 1.06-1.06l-1-1Z"/>
|
||||
<path fill="currentColor" d="M8 4a4 4 0 0 0-4 4v1h1v2.5a2.5 2.5 0 0 0 2.5 2.5h1a2.5 2.5 0 0 0 2.5-2.5V9h1V8a4 4 0 0 0-4-4Z"/>
|
||||
<path fill="currentColor" d="M5 9H3.5a.5.5 0 0 0-.5.5v2a.5.5 0 0 0 .5.5H5V9ZM11 9h1.5a.5.5 0 0 1 .5.5v2a.5.5 0 0 1-.5.5H11V9Z"/>
|
||||
</svg>
|
||||
Actions DAP Debugger
|
||||
</h3>
|
||||
|
||||
<div class="status-section">
|
||||
<div class="status-indicator" id="status-indicator"></div>
|
||||
<span id="status-text">Disconnected</span>
|
||||
</div>
|
||||
|
||||
<div class="config-section">
|
||||
<label>
|
||||
Proxy URL
|
||||
<input type="text" id="proxy-url" value="ws://localhost:4712"
|
||||
placeholder="ws://localhost:4712 or wss://...">
|
||||
</label>
|
||||
<p class="config-hint">For codespaces, use the forwarded URL (wss://...)</p>
|
||||
</div>
|
||||
|
||||
<div class="actions-section">
|
||||
<button id="connect-btn" class="btn-primary">Connect</button>
|
||||
<button id="disconnect-btn" class="btn-secondary" disabled>Disconnect</button>
|
||||
</div>
|
||||
|
||||
<div class="help-section">
|
||||
<p><strong>Quick Start:</strong></p>
|
||||
<p>1. Start the proxy:</p>
|
||||
<code>cd browser-ext/proxy && npm install && node proxy.js</code>
|
||||
<p>2. Re-run your GitHub Actions job with "Enable debug logging"</p>
|
||||
<p>3. Click Connect when the job is waiting for debugger</p>
|
||||
</div>
|
||||
|
||||
<div class="footer">
|
||||
<a href="https://github.com/actions/runner" target="_blank">Documentation</a>
|
||||
</div>
|
||||
</div>
|
||||
<script src="popup.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
95
browser-ext/popup/popup.js
Normal file
95
browser-ext/popup/popup.js
Normal file
@@ -0,0 +1,95 @@
|
||||
/**
 * Popup Script
 *
 * Handles extension popup UI and connection management.
 */

document.addEventListener('DOMContentLoaded', () => {
  const indicatorEl = document.getElementById('status-indicator');
  const statusTextEl = document.getElementById('status-text');
  const connectButton = document.getElementById('connect-btn');
  const disconnectButton = document.getElementById('disconnect-btn');
  const proxyUrlInput = document.getElementById('proxy-url');

  /**
   * Sync every popup control with the given connection status.
   * @param {string} status - disconnected/connecting/connected/paused/running/error
   * @param {boolean} [reconnecting=false] - show "Reconnecting..." while connecting
   */
  function updateStatusUI(status, reconnecting = false) {
    const statusNames = {
      disconnected: 'Disconnected',
      connecting: reconnecting ? 'Reconnecting...' : 'Connecting...',
      connected: 'Connected',
      paused: 'Paused',
      running: 'Running',
      error: 'Connection Error',
    };
    statusTextEl.textContent = statusNames[status] || status;

    // Indicator color tracks the status via a CSS class.
    indicatorEl.className = 'status-indicator status-' + status;

    const isConnected = ['connected', 'paused', 'running'].includes(status);
    const isConnecting = status === 'connecting';

    connectButton.disabled = isConnected || isConnecting;
    disconnectButton.disabled = status === 'disconnected';

    connectButton.textContent = isConnecting
      ? (reconnecting ? 'Reconnecting...' : 'Connecting...')
      : 'Connect';

    // The URL cannot change while a connection is active or in progress.
    proxyUrlInput.disabled = isConnected || isConnecting;
  }

  // Restore the previously saved proxy URL.
  chrome.storage.local.get(['proxyUrl'], (data) => {
    if (data.proxyUrl) proxyUrlInput.value = data.proxyUrl;
  });

  // Ask the background worker for the current status.
  chrome.runtime.sendMessage({ type: 'get-status' }, (response) => {
    if (response) {
      updateStatusUI(response.status, response.reconnecting);
    }
  });

  // Track status changes pushed from the background worker.
  chrome.runtime.onMessage.addListener((message) => {
    if (message.type === 'status-changed') {
      updateStatusUI(message.status, message.reconnecting);
    }
  });

  // Connect: persist the URL, then ask the background worker to dial out.
  connectButton.addEventListener('click', () => {
    const url = proxyUrlInput.value.trim() || 'ws://localhost:4712';

    chrome.storage.local.set({ proxyUrl: url });

    // Update UI immediately rather than waiting for the round trip.
    updateStatusUI('connecting');

    chrome.runtime.sendMessage({ type: 'connect', url }, (response) => {
      if (response && response.status) {
        updateStatusUI(response.status);
      }
    });
  });

  // Disconnect: tear down the proxy connection.
  disconnectButton.addEventListener('click', () => {
    chrome.runtime.sendMessage({ type: 'disconnect' }, (response) => {
      if (response && response.status) {
        updateStatusUI(response.status);
      }
    });
  });
});
|
||||
36
browser-ext/proxy/package-lock.json
generated
Normal file
36
browser-ext/proxy/package-lock.json
generated
Normal file
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"name": "dap-websocket-proxy",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "dap-websocket-proxy",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"ws": "^8.16.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ws": {
|
||||
"version": "8.19.0",
|
||||
"resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz",
|
||||
"integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"bufferutil": "^4.0.1",
|
||||
"utf-8-validate": ">=5.0.2"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"bufferutil": {
|
||||
"optional": true
|
||||
},
|
||||
"utf-8-validate": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
12
browser-ext/proxy/package.json
Normal file
12
browser-ext/proxy/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "dap-websocket-proxy",
|
||||
"version": "1.0.0",
|
||||
"description": "WebSocket-to-TCP bridge for DAP debugging",
|
||||
"main": "proxy.js",
|
||||
"scripts": {
|
||||
"start": "node proxy.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"ws": "^8.16.0"
|
||||
}
|
||||
}
|
||||
207
browser-ext/proxy/proxy.js
Normal file
207
browser-ext/proxy/proxy.js
Normal file
@@ -0,0 +1,207 @@
|
||||
/**
|
||||
* DAP WebSocket-to-TCP Proxy
|
||||
*
|
||||
* Bridges WebSocket connections from browser extensions to the DAP TCP server.
|
||||
* Handles DAP message framing (Content-Length headers).
|
||||
*
|
||||
* Usage: node proxy.js [--ws-port 4712] [--dap-host 127.0.0.1] [--dap-port 4711]
|
||||
*/
|
||||
|
||||
const WebSocket = require('ws');
|
||||
const net = require('net');
|
||||
|
||||
// Configuration (can be overridden via CLI args). Environment variables
// provide the defaults, falling back to localhost values.
const config = {
  wsPort: parseInt(process.env.WS_PORT) || 4712,
  dapHost: process.env.DAP_HOST || '127.0.0.1',
  dapPort: parseInt(process.env.DAP_PORT) || 4711,
};

// Parse CLI arguments: --ws-port <n>, --dap-host <host>, --dap-port <n>.
// Each flag consumes the following argv entry as its value.
const args = process.argv;
for (let i = 2; i < args.length; i++) {
  const flag = args[i];
  if (flag === '--ws-port') {
    config.wsPort = parseInt(args[++i]);
  } else if (flag === '--dap-host') {
    config.dapHost = args[++i];
  } else if (flag === '--dap-port') {
    config.dapPort = parseInt(args[++i]);
  }
}
|
||||
|
||||
console.log(`[Proxy] Starting WebSocket-to-TCP proxy`);
console.log(`[Proxy] WebSocket: ws://localhost:${config.wsPort}`);
console.log(`[Proxy] DAP Server: tcp://${config.dapHost}:${config.dapPort}`);

const wss = new WebSocket.Server({
  port: config.wsPort,
  // Enable ping/pong for connection health checks
  clientTracking: true,
});

console.log(`[Proxy] WebSocket server listening on port ${config.wsPort}`);

// Ping all clients every 25 seconds to detect dead connections.
// This is shorter than Chrome's service worker timeout (~30s). A client
// that never answered the previous ping is assumed dead and terminated.
const PING_INTERVAL = 25000;
const pingInterval = setInterval(() => {
  wss.clients.forEach((ws) => {
    if (ws.isAlive === false) {
      console.log(`[Proxy] Client failed to respond to ping, terminating`);
      return ws.terminate();
    }
    ws.isAlive = false;
    ws.ping();
  });
}, PING_INTERVAL);

// Fix: clear the health-check timer when the server closes, so the
// interval cannot fire against a closed server or keep the process alive
// after shutdown (previously the interval was never cleared).
wss.on('close', () => clearInterval(pingInterval));
|
||||
|
||||
wss.on('connection', (ws, req) => {
|
||||
const clientId = `${req.socket.remoteAddress}:${req.socket.remotePort}`;
|
||||
console.log(`[Proxy] WebSocket client connected: ${clientId}`);
|
||||
|
||||
// Mark as alive for ping/pong tracking
|
||||
ws.isAlive = true;
|
||||
ws.on('pong', () => {
|
||||
ws.isAlive = true;
|
||||
});
|
||||
|
||||
// Connect to DAP TCP server
|
||||
const tcp = net.createConnection({
|
||||
host: config.dapHost,
|
||||
port: config.dapPort,
|
||||
});
|
||||
|
||||
let tcpBuffer = '';
|
||||
let tcpConnected = false;
|
||||
|
||||
tcp.on('connect', () => {
|
||||
tcpConnected = true;
|
||||
console.log(`[Proxy] Connected to DAP server at ${config.dapHost}:${config.dapPort}`);
|
||||
});
|
||||
|
||||
tcp.on('error', (err) => {
|
||||
console.error(`[Proxy] TCP error: ${err.message}`);
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: 'proxy-error',
|
||||
message: `Failed to connect to DAP server: ${err.message}`,
|
||||
})
|
||||
);
|
||||
ws.close(1011, 'DAP server connection failed');
|
||||
}
|
||||
});
|
||||
|
||||
tcp.on('close', () => {
|
||||
console.log(`[Proxy] TCP connection closed`);
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
ws.close(1000, 'DAP server disconnected');
|
||||
}
|
||||
});
|
||||
|
||||
// WebSocket → TCP: Add Content-Length framing
|
||||
ws.on('message', (data) => {
|
||||
const json = data.toString();
|
||||
try {
|
||||
// Validate it's valid JSON
|
||||
const parsed = JSON.parse(json);
|
||||
|
||||
// Handle keepalive messages from the browser extension - don't forward to DAP server
|
||||
if (parsed.type === 'keepalive') {
|
||||
console.log(`[Proxy] Keepalive received from client`);
|
||||
// Respond with a keepalive-ack to confirm the connection is alive
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
ws.send(JSON.stringify({ type: 'keepalive-ack', timestamp: Date.now() }));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (!tcpConnected) {
|
||||
console.warn(`[Proxy] TCP not connected, dropping message`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`[Proxy] WS→TCP: ${parsed.command || parsed.event || 'message'}`);
|
||||
|
||||
// Add DAP framing
|
||||
const framed = `Content-Length: ${Buffer.byteLength(json)}\r\n\r\n${json}`;
|
||||
tcp.write(framed);
|
||||
} catch (err) {
|
||||
console.error(`[Proxy] Invalid JSON from WebSocket: ${err.message}`);
|
||||
}
|
||||
});
|
||||
|
||||
// TCP → WebSocket: Parse Content-Length framing
|
||||
tcp.on('data', (chunk) => {
|
||||
tcpBuffer += chunk.toString();
|
||||
|
||||
// Process complete DAP messages from buffer
|
||||
while (true) {
|
||||
// Look for Content-Length header
|
||||
const headerEnd = tcpBuffer.indexOf('\r\n\r\n');
|
||||
if (headerEnd === -1) break;
|
||||
|
||||
const header = tcpBuffer.substring(0, headerEnd);
|
||||
const match = header.match(/Content-Length:\s*(\d+)/i);
|
||||
if (!match) {
|
||||
console.error(`[Proxy] Invalid DAP header: ${header}`);
|
||||
tcpBuffer = tcpBuffer.substring(headerEnd + 4);
|
||||
continue;
|
||||
}
|
||||
|
||||
const contentLength = parseInt(match[1]);
|
||||
const messageStart = headerEnd + 4;
|
||||
const messageEnd = messageStart + contentLength;
|
||||
|
||||
// Check if we have the complete message
|
||||
if (tcpBuffer.length < messageEnd) break;
|
||||
|
||||
// Extract the JSON message
|
||||
const json = tcpBuffer.substring(messageStart, messageEnd);
|
||||
tcpBuffer = tcpBuffer.substring(messageEnd);
|
||||
|
||||
// Send to WebSocket
|
||||
try {
|
||||
const parsed = JSON.parse(json);
|
||||
console.log(
|
||||
`[Proxy] TCP→WS: ${parsed.type} ${parsed.command || parsed.event || ''} ${parsed.request_seq ? `(req_seq: ${parsed.request_seq})` : ''}`
|
||||
);
|
||||
|
||||
if (ws.readyState === WebSocket.OPEN) {
|
||||
ws.send(json);
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(`[Proxy] Invalid JSON from TCP: ${err.message}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Handle WebSocket close
|
||||
ws.on('close', (code, reason) => {
|
||||
console.log(`[Proxy] WebSocket closed: ${code} ${reason}`);
|
||||
tcp.end();
|
||||
});
|
||||
|
||||
ws.on('error', (err) => {
|
||||
console.error(`[Proxy] WebSocket error: ${err.message}`);
|
||||
tcp.end();
|
||||
});
|
||||
});
|
||||
|
||||
wss.on('error', (err) => {
|
||||
console.error(`[Proxy] WebSocket server error: ${err.message}`);
|
||||
});
|
||||
|
||||
// Graceful shutdown
|
||||
process.on('SIGINT', () => {
|
||||
console.log(`\n[Proxy] Shutting down...`);
|
||||
clearInterval(pingInterval);
|
||||
wss.clients.forEach((ws) => ws.close(1001, 'Server shutting down'));
|
||||
wss.close(() => {
|
||||
console.log(`[Proxy] Goodbye!`);
|
||||
process.exit(0);
|
||||
});
|
||||
});
|
||||
@@ -250,6 +250,42 @@ Two problem matchers can be used:
|
||||
}
|
||||
```
|
||||
|
||||
#### Default from path
|
||||
|
||||
The problem matcher can specify a `fromPath` property at the top level, which applies when a specific pattern doesn't provide a value for `fromPath`. This is useful for tools that don't include project file information in their output.
|
||||
|
||||
For example, given the following compiler output that doesn't include project file information:
|
||||
|
||||
```
|
||||
ClassLibrary.cs(16,24): warning CS0612: 'ClassLibrary.Helpers.MyHelper.Name' is obsolete
|
||||
```
|
||||
|
||||
A problem matcher with a default from path can be used:
|
||||
|
||||
```json
|
||||
{
|
||||
"problemMatcher": [
|
||||
{
|
||||
"owner": "csc-minimal",
|
||||
"fromPath": "ClassLibrary/ClassLibrary.csproj",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "^(.+)\\((\\d+),(\\d+)\\): (error|warning) (.+): (.*)$",
|
||||
"file": 1,
|
||||
"line": 2,
|
||||
"column": 3,
|
||||
"severity": 4,
|
||||
"code": 5,
|
||||
"message": 6
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
This ensures that the file is rooted to the correct path when there's not enough information in the error messages to extract a `fromPath`.
|
||||
|
||||
#### Mitigate regular expression denial of service (ReDos)
|
||||
|
||||
If a matcher exceeds a 1 second timeout when processing a line, retry up to two three times total.
|
||||
|
||||
@@ -23,7 +23,7 @@ This feature is mainly intended for self hosted runner administrators.
|
||||
- `ACTIONS_RUNNER_HOOK_JOB_STARTED`
|
||||
- `ACTIONS_RUNNER_HOOK_JOB_COMPLETED`
|
||||
|
||||
You can set these variables to the **absolute** path of a a `.sh` or `.ps1` file.
|
||||
You can set these variables to the **absolute** path of a `.sh` or `.ps1` file.
|
||||
|
||||
We will execute `pwsh` (fallback to `powershell`) or `bash` (fallback to `sh`) as appropriate.
|
||||
- `.sh` files will execute with the args `-e {pathtofile}`
|
||||
|
||||
@@ -16,7 +16,7 @@ We should give them that option, and publish examples how how they can create th
|
||||
- For example, the current runner overrides `HOME`, we can do that in the hook, but we shouldn't pass that hook as an ENV with the other env's the user has set, as that is not user input, it is how the runner invokes containers
|
||||
|
||||
## Interface
|
||||
- You will set the variable `ACTIONS_RUNNER_CONTAINER_HOOK=/Users/foo/runner/hooks.js` which is the entrypoint to your hook handler.
|
||||
- You will set the variable `ACTIONS_RUNNER_CONTAINER_HOOKS=/Users/foo/runner/hooks.js` which is the entrypoint to your hook handler.
|
||||
- There is no partial opt in, you must handle every hook
|
||||
- We will pass a command and some args via `stdin`
|
||||
- An exit code of 0 is a success, every other exit code is a failure
|
||||
|
||||
65
docs/adrs/2494-runner-image-tags.md
Normal file
65
docs/adrs/2494-runner-image-tags.md
Normal file
@@ -0,0 +1,65 @@
|
||||
# ADR 2494: Runner Image Tags
|
||||
|
||||
**Date**: 2023-03-17
|
||||
|
||||
**Status**: Accepted<!-- |Accepted|Rejected|Superceded|Deprecated -->
|
||||
|
||||
## Context
|
||||
|
||||
Following the [adoption of actions-runner-controller by GitHub](https://github.com/actions/actions-runner-controller/discussions/2072) and the introduction of the new runner scale set autoscaling mode, we needed to provide a basic runner image that could be used off the shelf without much friction.
|
||||
|
||||
The [current runner image](https://github.com/actions/runner/pkgs/container/actions-runner) is published to GHCR. Each release of this image is tagged with the runner version and the most recent release is also tagged with `latest`.
|
||||
|
||||
While the use of `latest` is common practice, we recommend that users pin a specific version of the runner image for a predictable runtime and improved security posture. However, we still notice that a large number of end users are relying on the `latest` tag & raising issues when they encounter problems.
|
||||
|
||||
Add to that, the community actions-runner-controller maintainers have issued a [deprecation notice](https://github.com/actions/actions-runner-controller/issues/2056) of the `latest` tag for the existing runner images (https://github.com/orgs/actions-runner-controller/packages).
|
||||
|
||||
## Decision
|
||||
|
||||
Proceed with Option 2, keeping the `latest` tag and adding the `NOTES.txt` file to our helm charts with the notice.
|
||||
|
||||
### Option 1: Remove the `latest` tag
|
||||
|
||||
By removing the `latest` tag, we have to proceed with either of these options:
|
||||
|
||||
1. Remove the runner image reference in the `values.yaml` provided with the `gha-runner-scale-set` helm chart and mark these fields as required so that users have to explicitly specify a runner image and a specific tag. This will obviously introduce more friction for users who want to start using actions-runner-controller for the first time.
|
||||
|
||||
```yaml
|
||||
spec:
|
||||
containers:
|
||||
- name: runner
|
||||
image: ""
|
||||
tag: ""
|
||||
command: ["/home/runner/run.sh"]
|
||||
```
|
||||
|
||||
1. Pin a specific runner image tag in the `values.yaml` provided with the `gha-runner-scale-set` helm chart. This will reduce friction for users who want to start using actions-runner-controller for the first time but will require us to update the `values.yaml` with every new runner release.
|
||||
|
||||
```yaml
|
||||
spec:
|
||||
containers:
|
||||
- name: runner
|
||||
image: "ghcr.io/actions/actions-runner"
|
||||
tag: "v2.300.0"
|
||||
command: ["/home/runner/run.sh"]
|
||||
```
|
||||
|
||||
### Option 2: Keep the `latest` tag
|
||||
|
||||
Keeping the `latest` tag is also a reasonable option especially if we don't expect to make any breaking changes to the runner image. We could enhance this by adding a [NOTES.txt](https://helm.sh/docs/chart_template_guide/notes_files/) to the helm chart which will be displayed to the user after a successful helm install/upgrade. This will help users understand the implications of using the `latest` tag and how to pin a specific version of the runner image.
|
||||
|
||||
The runner image release workflow will need to be updated so that the image is pushed to GHCR and tagged only when the runner rollout has reached all scale units.
|
||||
|
||||
## Consequences
|
||||
|
||||
Proceeding with **option 1** means:
|
||||
|
||||
1. We will enhance the runtime predictability and security posture of our end users
|
||||
1. We will have to update the `values.yaml` with every new runner release (that can be automated)
|
||||
1. We will introduce friction for users who want to start using actions-runner-controller for the first time
|
||||
|
||||
Proceeding with **option 2** means:
|
||||
|
||||
1. We will have to continue to maintain the `latest` tag
|
||||
1. We will assume that end users will be able to handle the implications of using the `latest` tag
|
||||
1. Runner image release workflow needs to be updated
|
||||
@@ -7,19 +7,26 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
|
||||
|
||||
- For GitHub.com
|
||||
- The runner needs to access `https://api.github.com` for downloading actions.
|
||||
- The runner needs to access `https://codeload.github.com` for downloading actions tar.gz/zip.
|
||||
- The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
|
||||
- The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
|
||||
- The runner needs to access `https://results-receiver.actions.githubusercontent.com/.../` for reporting progress and uploading logs during a workflow job execution.
|
||||
---
|
||||
**NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).
|
||||
|
||||
These can by tested by running the following `curl` commands from your self-hosted runner machine:
|
||||
|
||||
```
|
||||
curl -v https://api.github.com/api/v3/zen
|
||||
curl -v https://api.github.com/zen
|
||||
curl -v https://codeload.github.com/_ping
|
||||
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
|
||||
curl -v https://pipelines.actions.githubusercontent/_apis/health
|
||||
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
|
||||
curl -v https://results-receiver.actions.githubusercontent.com/health
|
||||
```
|
||||
|
||||
- For GitHub Enterprise Server
|
||||
- The runner needs to access `https://[hostname]/api/v3` for downloading actions.
|
||||
- The runner needs to access `https://codeload.[hostname]/_ping` for downloading actions tar.gz/zip.
|
||||
- The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
|
||||
- The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.
|
||||
|
||||
@@ -27,6 +34,7 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
|
||||
|
||||
```
|
||||
curl -v https://[hostname]/api/v3/zen
|
||||
curl -v https://codeload.[hostname]/_ping
|
||||
curl -v https://[hostname]/_services/vstoken/_apis/health
|
||||
curl -v https://[hostname]/_services/pipelines/_apis/health
|
||||
```
|
||||
@@ -42,6 +50,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
|
||||
- Ping api.github.com or myGHES.com using dotnet
|
||||
- Make HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, check response headers contains `X-GitHub-Request-Id`
|
||||
---
|
||||
- DNS lookup for codeload.github.com or codeload.myGHES.com using dotnet
|
||||
- Ping codeload.github.com or codeload.myGHES.com using dotnet
|
||||
- Make HTTP GET to https://codeload.github.com/_ping or https://codeload.myGHES.com/_ping using dotnet, check response headers contains `X-GitHub-Request-Id`
|
||||
---
|
||||
- DNS lookup for vstoken.actions.githubusercontent.com using dotnet
|
||||
- Ping vstoken.actions.githubusercontent.com using dotnet
|
||||
- Make HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
|
||||
@@ -50,6 +62,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
|
||||
- Ping pipelines.actions.githubusercontent.com using dotnet
|
||||
- Make HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
|
||||
- Make HTTP POST to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
|
||||
---
|
||||
- DNS lookup for results-receiver.actions.githubusercontent.com using dotnet
|
||||
- Ping results-receiver.actions.githubusercontent.com using dotnet
|
||||
- Make HTTP GET to https://results-receiver.actions.githubusercontent.com/health using dotnet, check response headers contains `X-GitHub-Request-Id`
|
||||
|
||||
## How to fix the issue?
|
||||
|
||||
@@ -64,4 +80,4 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
|
||||
|
||||
## Still not working?
|
||||
|
||||
Contact [GitHub Support](https://support.github.com] if you have further questuons, or log an issue at https://github.com/actions/runner if you think it's a runner issue.
|
||||
Contact [GitHub Support](https://support.github.com) if you have further questuons, or log an issue at https://github.com/actions/runner if you think it's a runner issue.
|
||||
|
||||
@@ -20,11 +20,30 @@ The test also set environment variable `GIT_TRACE=1` and `GIT_CURL_VERBOSE=1` be
|
||||
|
||||
## How to fix the issue?
|
||||
|
||||
### 1. Check the common network issue
|
||||
### 1. Check global and system git config
|
||||
|
||||
If you are having issues connecting to the server, check your global and system git config for any unexpected authentication headers. You might be seeing an error like:
|
||||
|
||||
```
|
||||
fatal: unable to access 'https://github.com/actions/checkout/': The requested URL returned error: 400
|
||||
```
|
||||
|
||||
The following commands can be used to check for unexpected authentication headers:
|
||||
|
||||
```
|
||||
$ git config --global --list | grep extraheader
|
||||
http.extraheader=AUTHORIZATION: unexpected_auth_header
|
||||
|
||||
$ git config --system --list | grep extraheader
|
||||
```
|
||||
|
||||
The following command can be used to remove the above value: `git config --global --unset http.extraheader`
|
||||
|
||||
### 2. Check the common network issue
|
||||
|
||||
> Please check the [network doc](./network.md)
|
||||
|
||||
### 2. SSL certificate related issue
|
||||
### 3. SSL certificate related issue
|
||||
|
||||
If you are seeing `SSL Certificate problem:` in the log, it means the `git` can't connect to the GitHub server due to SSL handshake failure.
|
||||
> Please check the [SSL cert doc](./sslcert.md)
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
- A Proxy may try to modify the HTTPS request (like add or change some http headers) and causes the request become incompatible with the Actions Service (ASP.NetCore), Ex: [Nginx](https://github.com/dotnet/aspnetcore/issues/17081)
|
||||
|
||||
- Firewall rules that block action runner from accessing certain hosts, ex: `*.github.com`, `*.actions.githubusercontent.com`, etc
|
||||
- Firewall rules that block action runner from accessing [certain hosts](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github), ex: `*.github.com`, `*.actions.githubusercontent.com`, etc
|
||||
|
||||
|
||||
### Identify and solve these problems
|
||||
@@ -42,6 +42,7 @@ If you are having trouble connecting, try these steps:
|
||||
- https://api.github.com/
|
||||
- https://vstoken.actions.githubusercontent.com/_apis/health
|
||||
- https://pipelines.actions.githubusercontent.com/_apis/health
|
||||
- https://results-receiver.actions.githubusercontent.com/health
|
||||
- For GHES/GHAE
|
||||
- https://myGHES.com/_services/vstoken/_apis/health
|
||||
- https://myGHES.com/_services/pipelines/_apis/health
|
||||
|
||||
@@ -4,9 +4,9 @@
|
||||
|
||||
Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.
|
||||
|
||||
The runner carries its own copy of node.js executable under `<runner_root>/externals/node16/`.
|
||||
The runner carries its own copies of node.js executables under `<runner_root>/externals/node20/` and `<runner_root>/externals/node24/`.
|
||||
|
||||
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node16/`.
|
||||
All javascript base Actions will get executed by the built-in `node` at either `<runner_root>/externals/node20/` or `<runner_root>/externals/node24/` depending on the version specified in the action's metadata.
|
||||
|
||||
> Not the `node` from `$PATH`
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Contributions
|
||||
|
||||
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.
|
||||
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors. Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.
|
||||
|
||||
> IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost.
|
||||
|
||||
@@ -27,6 +27,8 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct
|
||||
|
||||
 Visual Studio 2017 or newer [Install here](https://visualstudio.microsoft.com) (needed for dev sh script)
|
||||
|
||||
 Visual Studio 2022 17.3 Preview or later. [Install here](https://docs.microsoft.com/en-us/visualstudio/releases/2022/release-notes-preview)
|
||||
|
||||
## Quickstart: Run a job from a real repository
|
||||
|
||||
If you just want to get from building the sourcecode to using it to execute an action, you will need:
|
||||
@@ -155,4 +157,12 @@ cat (Runner/Worker)_TIMESTAMP.log # view your log file
|
||||
## Styling
|
||||
|
||||
We use the .NET Foundation and CoreCLR style guidelines [located here](
|
||||
https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)
|
||||
https://github.com/dotnet/runtime/blob/main/docs/coding-guidelines/coding-style.md)
|
||||
|
||||
### Format C# Code
|
||||
|
||||
To format both staged and unstaged .cs files
|
||||
```
|
||||
cd ./src
|
||||
./dev.(cmd|sh) format
|
||||
```
|
||||
|
||||
@@ -35,7 +35,7 @@ All the configs below can be found in `.vscode/launch.json`.
|
||||
If you launch `Run` or `Run [build]`, it starts a process called `Runner.Listener`.
|
||||
This process will receive any job queued on this repository if the job runs on matching labels (e.g `runs-on: self-hosted`).
|
||||
Once a job is received, a `Runner.Listener` starts a new process of `Runner.Worker`.
|
||||
Since this is a diferent process, you can't use the same debugger session debug it.
|
||||
Since this is a different process, you can't use the same debugger session debug it.
|
||||
Instead, a parallel debugging session has to be started, using a different launch config.
|
||||
Luckily, VS Code supports multiple parallel debugging sessions.
|
||||
|
||||
|
||||
217
docs/dependency-management.md
Normal file
217
docs/dependency-management.md
Normal file
@@ -0,0 +1,217 @@
|
||||
# Runner Dependency Management Process
|
||||
|
||||
## Overview
|
||||
|
||||
This document outlines the automated dependency management process for the GitHub Actions Runner, designed to ensure we maintain up-to-date and secure dependencies while providing predictable release cycles.
|
||||
|
||||
## Release Schedule
|
||||
|
||||
- **Monthly Runner Releases**: New runner versions are released monthly
|
||||
- **Weekly Dependency Checks**: Automated workflows check for dependency updates every Monday
|
||||
- **Security Patches**: Critical security vulnerabilities are addressed immediately outside the regular schedule
|
||||
|
||||
## Automated Workflows
|
||||
|
||||
**Note**: These workflows are implemented across separate PRs for easier review and independent deployment. Each workflow includes comprehensive error handling and security-focused vulnerability detection.
|
||||
|
||||
### 1. Foundation Labels
|
||||
|
||||
- **Workflow**: `.github/workflows/setup-labels.yml` (PR #4024)
|
||||
- **Purpose**: Creates consistent dependency labels for all automation workflows
|
||||
- **Labels**: `dependencies`, `security`, `typescript`, `needs-manual-review`
|
||||
- **Prerequisite**: Must be merged before other workflows for proper labeling
|
||||
|
||||
### 2. Node.js Version Updates
|
||||
|
||||
- **Workflow**: `.github/workflows/node-upgrade.yml`
|
||||
- **Schedule**: Mondays at 6:00 AM UTC
|
||||
- **Purpose**: Updates Node.js 20 and 24 versions in `src/Misc/externals.sh`
|
||||
- **Source**: [nodejs.org](https://nodejs.org) and [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
|
||||
- **Priority**: First (NPM depends on current Node.js versions)
|
||||
|
||||
### 3. NPM Security Audit
|
||||
|
||||
- **Primary Workflow**: `.github/workflows/npm-audit.yml` ("NPM Audit Fix")
|
||||
- **Schedule**: Mondays at 7:00 AM UTC
|
||||
- **Purpose**: Automated security vulnerability detection and basic fixes
|
||||
- **Location**: `src/Misc/expressionFunc/hashFiles/`
|
||||
- **Features**: npm audit, security patch application, PR creation
|
||||
- **Dependency**: Runs after Node.js updates for optimal compatibility
|
||||
|
||||
- **Fallback Workflow**: `.github/workflows/npm-audit-typescript.yml` ("NPM Audit Fix with TypeScript Auto-Fix")
|
||||
- **Trigger**: Manual dispatch only
|
||||
- **Purpose**: Manual security audit with TypeScript compatibility fixes
|
||||
- **Use Case**: When scheduled workflow fails or needs custom intervention
|
||||
- **Features**: Enhanced TypeScript auto-repair, graduated security response
|
||||
- **How to Use**:
|
||||
1. If the scheduled "NPM Audit Fix" workflow fails, go to Actions tab
|
||||
2. Select "NPM Audit Fix with TypeScript Auto-Fix" workflow
|
||||
3. Click "Run workflow" and optionally specify fix level (auto/manual)
|
||||
4. Review the generated PR for TypeScript compatibility issues
|
||||
|
||||
### 4. .NET SDK Updates
|
||||
|
||||
- **Workflow**: `.github/workflows/dotnet-upgrade.yml`
|
||||
- **Schedule**: Mondays at midnight UTC
|
||||
- **Purpose**: Updates .NET SDK and package versions with build validation
|
||||
- **Features**: Global.json updates, NuGet package management, compatibility checking
|
||||
- **Independence**: Runs independently of Node.js/NPM updates
|
||||
|
||||
### 5. Docker/Buildx Updates
|
||||
|
||||
- **Workflow**: `.github/workflows/docker-buildx-upgrade.yml` ("Docker/Buildx Version Upgrade")
|
||||
- **Schedule**: Mondays at midnight UTC
|
||||
- **Purpose**: Updates Docker and Docker Buildx versions with multi-platform validation
|
||||
- **Features**: Container security scanning, multi-architecture build testing
|
||||
- **Independence**: Runs independently of other dependency updates
|
||||
|
||||
### 6. Dependency Monitoring
|
||||
|
||||
- **Workflow**: `.github/workflows/dependency-check.yml` ("Dependency Status Check")
|
||||
- **Schedule**: Mondays at 11:00 AM UTC
|
||||
- **Purpose**: Comprehensive status report of all dependencies with security audit
|
||||
- **Features**: Multi-dependency checking, npm audit status, build validation, choice of specific component checks
|
||||
- **Summary**: Runs last to capture results from all morning dependency updates
|
||||
|
||||
## Release Process Integration
|
||||
|
||||
### Pre-Release Checklist
|
||||
|
||||
Before each monthly runner release:
|
||||
|
||||
1. **Check Dependency PRs**:
|
||||
|
||||
```bash
|
||||
# List all open dependency PRs
|
||||
gh pr list --label "dependencies" --state open
|
||||
|
||||
# List only automated weekly dependency updates
|
||||
gh pr list --label "dependencies-weekly-check" --state open
|
||||
|
||||
# List only custom dependency automation (not dependabot)
|
||||
gh pr list --label "dependencies-not-dependabot" --state open
|
||||
```
|
||||
|
||||
2. **Run Manual Dependency Check**:
|
||||
- Go to Actions tab → "Dependency Status Check" → "Run workflow"
|
||||
- Review the summary for any outdated dependencies
|
||||
|
||||
3. **Review and Merge Updates**:
|
||||
- Prioritize security-related updates
|
||||
- Test dependency updates in development environment
|
||||
- Merge approved dependency PRs
|
||||
|
||||
### Vulnerability Response
|
||||
|
||||
#### Critical Security Vulnerabilities
|
||||
|
||||
- **Response Time**: Within 24 hours
|
||||
- **Process**:
|
||||
1. Assess impact on runner security
|
||||
2. Create hotfix branch if runner data security is affected
|
||||
3. Expedite patch release if necessary
|
||||
4. Document in security advisory if applicable
|
||||
|
||||
#### Non-Critical Vulnerabilities
|
||||
|
||||
- **Response Time**: Next monthly release
|
||||
- **Process**:
|
||||
1. Evaluate if vulnerability affects runner functionality
|
||||
2. Include fix in regular dependency update cycle
|
||||
3. Document in release notes
|
||||
|
||||
## Monitoring and Alerts
|
||||
|
||||
### GitHub Actions Workflow Status
|
||||
|
||||
- All dependency workflows create PRs with the `dependencies` label
|
||||
- Failed workflows should be investigated immediately
|
||||
- Weekly dependency status reports are generated automatically
|
||||
|
||||
### Manual Checks
|
||||
|
||||
You can manually trigger dependency checks:
|
||||
|
||||
- **Full Status**: Run "Dependency Status Check" workflow
|
||||
- **Specific Component**: Use the dropdown to check individual dependencies
|
||||
|
||||
## Dependency Labels
|
||||
|
||||
All automated dependency PRs are tagged with labels for easy filtering and management:
|
||||
|
||||
### Primary Labels
|
||||
|
||||
- **`dependencies`**: All automated dependency-related PRs
|
||||
- **`dependencies-weekly-check`**: Automated weekly dependency updates from scheduled workflows
|
||||
- **`dependencies-not-dependabot`**: Custom dependency automation (not created by dependabot)
|
||||
- **`security`**: Security vulnerability fixes and patches
|
||||
- **`typescript`**: TypeScript compatibility and type definition updates
|
||||
- **`needs-manual-review`**: Complex updates requiring human verification
|
||||
|
||||
### Technology-Specific Labels
|
||||
|
||||
- **`node`**: Node.js version updates
|
||||
- **`javascript`**: JavaScript runtime and tooling updates
|
||||
- **`npm`**: NPM package and security updates
|
||||
- **`dotnet`**: .NET SDK and NuGet package updates
|
||||
- **`docker`**: Docker and container tooling updates
|
||||
|
||||
### Workflow-Specific Branches
|
||||
|
||||
- **Node.js updates**: `chore/update-node` branch
|
||||
- **NPM security fixes**: `chore/npm-audit-fix-YYYYMMDD` and `chore/npm-audit-fix-with-ts-repair` branches
|
||||
- **NuGet/.NET updates**: `feature/dotnetsdk-upgrade/{version}` branches
|
||||
- **Docker updates**: `feature/docker-buildx-upgrade` branch
|
||||
|
||||
## Special Considerations
|
||||
|
||||
### Node.js Updates
|
||||
|
||||
When updating Node.js versions, remember to:
|
||||
|
||||
1. Create a corresponding release in [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
|
||||
2. Follow the alpine_nodejs getting started guide
|
||||
3. Test container builds with new Node versions
|
||||
|
||||
### .NET SDK Updates
|
||||
|
||||
- Only patch versions are auto-updated within the same major.minor version
|
||||
- Major/minor version updates require manual review and testing
|
||||
|
||||
### Docker Updates
|
||||
|
||||
- Updates include both Docker Engine and Docker Buildx
|
||||
- Verify compatibility with runner container workflows
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **NPM Audit Workflow Fails**:
|
||||
- Check if `package.json` exists in `src/Misc/expressionFunc/hashFiles/`
|
||||
- Verify Node.js setup step succeeded
|
||||
|
||||
2. **Version Detection Fails**:
|
||||
- Check if upstream APIs are available
|
||||
- Verify parsing logic for version extraction
|
||||
|
||||
3. **PR Creation Fails**:
|
||||
- Ensure `GITHUB_TOKEN` has sufficient permissions
|
||||
- Check if branch already exists
|
||||
|
||||
### Contact
|
||||
|
||||
For questions about the dependency management process:
|
||||
|
||||
- Create an issue with the `dependencies` label
|
||||
- Review existing dependency management workflows
|
||||
- Consult the runner team for security-related concerns
|
||||
|
||||
## Metrics and KPIs
|
||||
|
||||
Track these metrics to measure dependency management effectiveness:
|
||||
|
||||
- Number of open dependency PRs at release time
|
||||
- Time to merge dependency updates
|
||||
- Number of security vulnerabilities by severity
|
||||
- Release cycle adherence (monthly target)
|
||||
@@ -4,16 +4,7 @@
|
||||
|
||||
## Supported Distributions and Versions
|
||||
|
||||
x64
|
||||
- Red Hat Enterprise Linux 7
|
||||
- CentOS 7
|
||||
- Oracle Linux 7
|
||||
- Fedora 29+
|
||||
- Debian 9+
|
||||
- Ubuntu 16.04+
|
||||
- Linux Mint 18+
|
||||
- openSUSE 15+
|
||||
- SUSE Enterprise Linux (SLES) 12 SP2+
|
||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#linux)."
|
||||
|
||||
## Install .Net Core 3.x Linux Dependencies
|
||||
|
||||
@@ -34,7 +25,7 @@ The `installdependencies.sh` script should install all required dependencies on
|
||||
|
||||
Debian based OS (Debian, Ubuntu, Linux Mint)
|
||||
|
||||
- liblttng-ust0
|
||||
- liblttng-ust1 or liblttng-ust0
|
||||
- libkrb5-3
|
||||
- zlib1g
|
||||
- libssl1.1, libssl1.0.2 or libssl1.0.0
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
|
||||
## Supported Versions
|
||||
|
||||
- macOS High Sierra (10.13) and later versions
|
||||
- x64 and arm64 (Apple Silicon)
|
||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#macos)."
|
||||
|
||||
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)
|
||||
|
||||
@@ -2,11 +2,6 @@
|
||||
|
||||
## Supported Versions
|
||||
|
||||
- Windows 7 64-bit
|
||||
- Windows 8.1 64-bit
|
||||
- Windows 10 64-bit
|
||||
- Windows Server 2012 R2 64-bit
|
||||
- Windows Server 2016 64-bit
|
||||
- Windows Server 2019 64-bit
|
||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#windows)."
|
||||
|
||||
## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)
|
||||
|
||||
72
images/Dockerfile
Normal file
72
images/Dockerfile
Normal file
@@ -0,0 +1,72 @@
|
||||
# Source: https://github.com/dotnet/dotnet-docker
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-noble AS build
|
||||
|
||||
ARG TARGETOS
|
||||
ARG TARGETARCH
|
||||
ARG RUNNER_VERSION
|
||||
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0
|
||||
ARG DOCKER_VERSION=29.0.2
|
||||
ARG BUILDX_VERSION=0.30.1
|
||||
|
||||
RUN apt update -y && apt install curl unzip -y
|
||||
|
||||
WORKDIR /actions-runner
|
||||
RUN export RUNNER_ARCH=${TARGETARCH} \
|
||||
&& if [ "$RUNNER_ARCH" = "amd64" ]; then export RUNNER_ARCH=x64 ; fi \
|
||||
&& curl -f -L -o runner.tar.gz https://github.com/actions/runner/releases/download/v${RUNNER_VERSION}/actions-runner-${TARGETOS}-${RUNNER_ARCH}-${RUNNER_VERSION}.tar.gz \
|
||||
&& tar xzf ./runner.tar.gz \
|
||||
&& rm runner.tar.gz
|
||||
|
||||
RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-container-hooks/releases/download/v${RUNNER_CONTAINER_HOOKS_VERSION}/actions-runner-hooks-k8s-${RUNNER_CONTAINER_HOOKS_VERSION}.zip \
|
||||
&& unzip ./runner-container-hooks.zip -d ./k8s \
|
||||
&& rm runner-container-hooks.zip
|
||||
|
||||
RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-container-hooks/releases/download/v0.8.0/actions-runner-hooks-k8s-0.8.0.zip \
|
||||
&& unzip ./runner-container-hooks.zip -d ./k8s-novolume \
|
||||
&& rm runner-container-hooks.zip
|
||||
|
||||
RUN export RUNNER_ARCH=${TARGETARCH} \
|
||||
&& if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
|
||||
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
|
||||
&& curl -fLo docker.tgz https://download.docker.com/${TARGETOS}/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
|
||||
&& tar zxvf docker.tgz \
|
||||
&& rm -rf docker.tgz \
|
||||
&& mkdir -p /usr/local/lib/docker/cli-plugins \
|
||||
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
|
||||
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
|
||||
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
|
||||
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-noble
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
ENV RUNNER_MANUALLY_TRAP_SIG=1
|
||||
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
|
||||
ENV ImageOS=ubuntu24
|
||||
|
||||
# 'gpg-agent' and 'software-properties-common' are needed for the 'add-apt-repository' command that follows
|
||||
RUN apt update -y \
|
||||
&& apt install -y --no-install-recommends sudo lsb-release gpg-agent software-properties-common curl jq unzip \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Configure git-core/ppa based on guidance here: https://git-scm.com/download/linux
|
||||
RUN add-apt-repository ppa:git-core/ppa \
|
||||
&& apt update -y \
|
||||
&& apt install -y git \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN adduser --disabled-password --gecos "" --uid 1001 runner \
|
||||
&& groupadd docker --gid 123 \
|
||||
&& usermod -aG sudo runner \
|
||||
&& usermod -aG docker runner \
|
||||
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
|
||||
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers \
|
||||
&& chmod 777 /home/runner
|
||||
|
||||
WORKDIR /home/runner
|
||||
|
||||
COPY --chown=runner:docker --from=build /actions-runner .
|
||||
COPY --from=build /usr/local/lib/docker/cli-plugins/docker-buildx /usr/local/lib/docker/cli-plugins/docker-buildx
|
||||
|
||||
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
|
||||
|
||||
USER runner
|
||||
@@ -1,17 +1,39 @@
|
||||
## Features
|
||||
- Allow self-hosted runner admins to fail jobs that don't have a job container (#1895)
|
||||
- Experimental: Self-hosted runner admins can now use scripts to customize the container invocation in the runner (#1853)
|
||||
## Bugs
|
||||
- Fixed an issue where a Job Hook would fail to execute if the shell path contains a space on Windows (#1826)
|
||||
## What's Changed
|
||||
* Fix owner of /home/runner directory by @nikola-jokic in https://github.com/actions/runner/pull/4132
|
||||
* Update Docker to v29.0.2 and Buildx to v0.30.1 by @github-actions[bot] in https://github.com/actions/runner/pull/4135
|
||||
* Update workflow around runner docker image. by @TingluoHuang in https://github.com/actions/runner/pull/4133
|
||||
* Fix regex for validating runner version format by @TingluoHuang in https://github.com/actions/runner/pull/4136
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4144
|
||||
* Ensure safe_sleep tries alternative approaches by @TingluoHuang in https://github.com/actions/runner/pull/4146
|
||||
* Bump actions/github-script from 7 to 8 by @dependabot[bot] in https://github.com/actions/runner/pull/4137
|
||||
* Bump actions/checkout from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4130
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4149
|
||||
* Bump docker image to use ubuntu 24.04 by @TingluoHuang in https://github.com/actions/runner/pull/4018
|
||||
* Add support for case function by @AllanGuigou in https://github.com/actions/runner/pull/4147
|
||||
* Cleanup feature flag actions_container_action_runner_temp by @ericsciple in https://github.com/actions/runner/pull/4163
|
||||
* Bump actions/download-artifact from 6 to 7 by @dependabot[bot] in https://github.com/actions/runner/pull/4155
|
||||
* Bump actions/upload-artifact from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4157
|
||||
* Set ACTIONS_ORCHESTRATION_ID as env to actions. by @TingluoHuang in https://github.com/actions/runner/pull/4178
|
||||
* Allow hosted VM report job telemetry via .setup_info file. by @TingluoHuang in https://github.com/actions/runner/pull/4186
|
||||
* Bump typescript from 5.9.2 to 5.9.3 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4184
|
||||
* Bump Azure.Storage.Blobs from 12.26.0 to 12.27.0 by @dependabot[bot] in https://github.com/actions/runner/pull/4189
|
||||
|
||||
## Misc
|
||||
- Handle new `HostedRunnerShutdownMessage` to shutdown hosted runners faster (#1922)
|
||||
## New Contributors
|
||||
* @AllanGuigou made their first contribution in https://github.com/actions/runner/pull/4147
|
||||
|
||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.330.0...v2.331.0
|
||||
|
||||
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
||||
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
||||
See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_
|
||||
|
||||
## Windows x64
|
||||
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
|
||||
|
||||
The following snipped needs to be run on `powershell`:
|
||||
``` powershell
|
||||
|
||||
```powershell
|
||||
# Create a folder under the drive root
|
||||
mkdir \actions-runner ; cd \actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -21,9 +43,25 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
|
||||
```
|
||||
|
||||
## Windows arm64
|
||||
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
|
||||
|
||||
The following snipped needs to be run on `powershell`:
|
||||
|
||||
```powershell
|
||||
# Create a folder under the drive root
|
||||
mkdir \actions-runner ; cd \actions-runner
|
||||
# Download the latest runner package
|
||||
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-arm64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-arm64-<RUNNER_VERSION>.zip
|
||||
# Extract the installer
|
||||
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-arm64-<RUNNER_VERSION>.zip", "$PWD")
|
||||
```
|
||||
|
||||
## OSX x64
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -32,9 +70,9 @@ curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>
|
||||
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
|
||||
## [Pre-release] OSX arm64 (Apple silicon)
|
||||
## OSX arm64 (Apple silicon)
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -45,7 +83,7 @@ tar xzf ./actions-runner-osx-arm64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Linux x64
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -56,7 +94,7 @@ tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Linux arm64
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -67,7 +105,7 @@ tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Linux arm
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -77,6 +115,7 @@ tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
|
||||
## Using your self hosted runner
|
||||
|
||||
For additional details about configuring, running, or shutting down the runner please check out our [product docs.](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners)
|
||||
|
||||
## SHA-256 Checksums
|
||||
@@ -84,29 +123,9 @@ For additional details about configuring, running, or shutting down the runner p
|
||||
The SHA-256 checksums for the packages included in this build are shown below:
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>.zip <!-- BEGIN SHA win-x64 --><WIN_X64_SHA><!-- END SHA win-x64 -->
|
||||
- actions-runner-win-arm64-<RUNNER_VERSION>.zip <!-- BEGIN SHA win-arm64 --><WIN_ARM64_SHA><!-- END SHA win-arm64 -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA osx-x64 --><OSX_X64_SHA><!-- END SHA osx-x64 -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA osx-arm64 --><OSX_ARM64_SHA><!-- END SHA osx-arm64 -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noexternals --><OSX_ARM64_SHA_NOEXTERNALS><!-- END SHA osx-arm64_noexternals -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-arm64_noruntime --><OSX_ARM64_SHA_NORUNTIME><!-- END SHA osx-arm64_noruntime -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noruntime_noexternals --><OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-arm64_noruntime_noexternals -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->
|
||||
|
||||
@@ -8,12 +8,12 @@ set -e
|
||||
# Configures it as a service more secure
|
||||
# Should be used on VMs and not containers
|
||||
# Works on OSX and Linux
|
||||
# Assumes x64 arch
|
||||
# Assumes x64 arch (support arm64)
|
||||
# See EXAMPLES below
|
||||
|
||||
flags_found=false
|
||||
|
||||
while getopts 's:g:n:r:u:l:' opt; do
|
||||
while getopts 's:g:n:r:u:l:df' opt; do
|
||||
flags_found=true
|
||||
|
||||
case $opt in
|
||||
@@ -35,6 +35,12 @@ while getopts 's:g:n:r:u:l:' opt; do
|
||||
l)
|
||||
labels=$OPTARG
|
||||
;;
|
||||
f)
|
||||
replace='true'
|
||||
;;
|
||||
d)
|
||||
disableupdate='true'
|
||||
;;
|
||||
*)
|
||||
echo "
|
||||
Runner Service Installer
|
||||
@@ -49,7 +55,9 @@ Usage:
|
||||
-n optional name of the runner, defaults to hostname
|
||||
-r optional name of the runner group to add the runner to, defaults to the Default group
|
||||
-u optional user svc will run as, defaults to current
|
||||
-l optional list of labels (split by comma) applied on the runner"
|
||||
-l optional list of labels (split by comma) applied on the runner
|
||||
-d optional allow runner to remain on the current version for one month after the release of a newer version
|
||||
-f optional replace any existing runner with the same name"
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
@@ -79,6 +87,9 @@ sudo echo
|
||||
runner_plat=linux
|
||||
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
|
||||
|
||||
runner_arch=x64
|
||||
[ ! -z "$(arch | grep arm64)" ] && runner_arch=arm64
|
||||
|
||||
function fatal()
|
||||
{
|
||||
echo "error: $1" >&2
|
||||
@@ -131,7 +142,7 @@ echo "Downloading latest runner ..."
|
||||
# For the GHES Alpha, download the runner from github.com
|
||||
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
|
||||
latest_version=$(echo ${latest_version_label:1})
|
||||
runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
|
||||
runner_file="actions-runner-${runner_plat}-${runner_arch}-${latest_version}.tar.gz"
|
||||
|
||||
if [ -f "${runner_file}" ]; then
|
||||
echo "${runner_file} exists. skipping download."
|
||||
@@ -169,8 +180,8 @@ fi
|
||||
|
||||
echo
|
||||
echo "Configuring ${runner_name} @ $runner_url"
|
||||
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"}"
|
||||
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"}
|
||||
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"} ${disableupdate:+--disableupdate}"
|
||||
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN ${replace:+--replace} --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"} ${disableupdate:+--disableupdate}
|
||||
|
||||
#---------------------------------------
|
||||
# Configuring as a service
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#/bin/bash
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
@@ -12,7 +12,7 @@ set -e
|
||||
#
|
||||
# Usage:
|
||||
# export RUNNER_CFG_PAT=<yourPAT>
|
||||
# ./delete.sh scope name
|
||||
# ./delete.sh <scope> [<name>]
|
||||
#
|
||||
# scope required repo (:owner/:repo) or org (:organization)
|
||||
# name optional defaults to hostname. name to delete
|
||||
@@ -26,17 +26,17 @@ set -e
|
||||
runner_scope=${1}
|
||||
runner_name=${2}
|
||||
|
||||
echo "Deleting runner ${runner_name} @ ${runner_scope}"
|
||||
|
||||
function fatal()
|
||||
function fatal()
|
||||
{
|
||||
echo "error: $1" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
|
||||
if [ -z "${runner_name}" ]; then fatal "supply name as argument 2"; fi
|
||||
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
|
||||
if [ -z "${runner_name}" ]; then runner_name=`hostname`; fi
|
||||
|
||||
echo "Deleting runner ${runner_name} @ ${runner_scope}"
|
||||
|
||||
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
|
||||
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
[*.cs]
|
||||
charset = utf-8
|
||||
charset = utf-8-bom
|
||||
insert_final_newline = true
|
||||
|
||||
csharp_new_line_before_else = true
|
||||
|
||||
@@ -24,6 +24,9 @@
|
||||
<PropertyGroup Condition="'$(BUILD_OS)' == 'Windows' AND '$(PackageRuntime)' == 'win-x86'">
|
||||
<DefineConstants>$(DefineConstants);X86</DefineConstants>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(BUILD_OS)' == 'Windows' AND '$(PackageRuntime)' == 'win-arm64'">
|
||||
<DefineConstants>$(DefineConstants);ARM64</DefineConstants>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(BUILD_OS)' == 'OSX' AND '$(PackageRuntime)' == 'osx-x64'">
|
||||
<DefineConstants>$(DefineConstants);X64</DefineConstants>
|
||||
@@ -54,4 +57,13 @@
|
||||
<PropertyGroup>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<!-- Enable NuGet package auditing -->
|
||||
<NuGetAudit>true</NuGetAudit>
|
||||
<!-- Audit direct and transitive packages -->
|
||||
<NuGetAuditMode>all</NuGetAuditMode>
|
||||
<!-- Report low, moderate, high and critical advisories -->
|
||||
<NuGetAuditLevel>moderate</NuGetAuditLevel>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
1d709d93e5d3c6c6c656a61aa6c1781050224788a05b0e6ecc4c3c0408bdf89c
|
||||
@@ -1 +0,0 @@
|
||||
b92a47cfeaad02255b1f7a377060651b73ae5e5db22a188dbbcb4183ab03a03d
|
||||
@@ -1 +0,0 @@
|
||||
68a9a8ef0843a8bb74241894f6f63fd76241a82295c5337d3cc7a940a314c78e
|
||||
@@ -1 +0,0 @@
|
||||
02c7126ff4d63ee2a0ae390c81434c125630522aadf35903bbeebb1a99d8af99
|
||||
@@ -1 +0,0 @@
|
||||
c9d5a542f8d765168855a89e83ae0a8970d00869041c4f9a766651c04c72b212
|
||||
@@ -1 +0,0 @@
|
||||
d94f2fbaf210297162bc9f3add819d73682c3aa6899e321c3872412b924d5504
|
||||
1
src/Misc/contentHash/externals/linux-arm
vendored
1
src/Misc/contentHash/externals/linux-arm
vendored
@@ -1 +0,0 @@
|
||||
6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51
|
||||
1
src/Misc/contentHash/externals/linux-arm64
vendored
1
src/Misc/contentHash/externals/linux-arm64
vendored
@@ -1 +0,0 @@
|
||||
711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78
|
||||
1
src/Misc/contentHash/externals/linux-x64
vendored
1
src/Misc/contentHash/externals/linux-x64
vendored
@@ -1 +0,0 @@
|
||||
a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a
|
||||
1
src/Misc/contentHash/externals/osx-arm64
vendored
1
src/Misc/contentHash/externals/osx-arm64
vendored
@@ -1 +0,0 @@
|
||||
cc4708962a80325de0baa5ae8484e0cb9ae976ac6a4178c1c0d448b8c52bd7f7
|
||||
1
src/Misc/contentHash/externals/osx-x64
vendored
1
src/Misc/contentHash/externals/osx-x64
vendored
@@ -1 +0,0 @@
|
||||
8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b
|
||||
1
src/Misc/contentHash/externals/win-x64
vendored
1
src/Misc/contentHash/externals/win-x64
vendored
@@ -1 +0,0 @@
|
||||
f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18
|
||||
1488
src/Misc/dotnet-install.ps1
vendored
1488
src/Misc/dotnet-install.ps1
vendored
File diff suppressed because it is too large
Load Diff
1256
src/Misc/dotnet-install.sh
vendored
1256
src/Misc/dotnet-install.sh
vendored
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"plugins": ["@typescript-eslint"],
|
||||
"plugins": ["@typescript-eslint", "@stylistic"],
|
||||
"extends": ["plugin:github/recommended"],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
@@ -26,7 +26,7 @@
|
||||
],
|
||||
"camelcase": "off",
|
||||
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
|
||||
"@typescript-eslint/func-call-spacing": ["error", "never"],
|
||||
"@stylistic/func-call-spacing": ["error", "never"],
|
||||
"@typescript-eslint/no-array-constructor": "error",
|
||||
"@typescript-eslint/no-empty-interface": "error",
|
||||
"@typescript-eslint/no-explicit-any": "error",
|
||||
@@ -47,8 +47,8 @@
|
||||
"@typescript-eslint/promise-function-async": "error",
|
||||
"@typescript-eslint/require-array-sort-compare": "error",
|
||||
"@typescript-eslint/restrict-plus-operands": "error",
|
||||
"@typescript-eslint/semi": ["error", "never"],
|
||||
"@typescript-eslint/type-annotation-spacing": "error",
|
||||
"@stylistic/semi": ["error", "never"],
|
||||
"@stylistic/type-annotation-spacing": "error",
|
||||
"@typescript-eslint/unbound-method": "error",
|
||||
"filenames/match-regex" : "off",
|
||||
"github/no-then" : 1, // warning
|
||||
|
||||
@@ -1,11 +1,19 @@
|
||||
|
||||
{
|
||||
"printWidth": 80,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"semi": false,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"bracketSpacing": false,
|
||||
"arrowParens": "avoid",
|
||||
"parser": "typescript"
|
||||
}
|
||||
"printWidth": 80,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"semi": false,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"bracketSpacing": false,
|
||||
"arrowParens": "avoid",
|
||||
"overrides": [
|
||||
{
|
||||
"files": "*.{js,ts,json}",
|
||||
"options": {
|
||||
"tabWidth": 2
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1 +1,3 @@
|
||||
To update hashFiles under `Misc/layoutbin` run `npm install && npm run all`
|
||||
To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run prepare && npm run all`.
|
||||
|
||||
When you commit changes to the JSON or Typescript file, the javascript binary will be automatically re-compiled and added to the latest commit.
|
||||
|
||||
5772
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
5772
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -9,7 +9,8 @@
|
||||
"format-check": "prettier --check **/*.ts",
|
||||
"lint": "eslint src/**/*.ts",
|
||||
"pack": "ncc build -o ../../layoutbin/hashFiles",
|
||||
"all": "npm run build && npm run format && npm run lint && npm run pack"
|
||||
"all": "npm run format && npm run lint && npm run build && npm run pack",
|
||||
"prepare": "cd ../../../../ && husky"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -18,18 +19,33 @@
|
||||
"keywords": [
|
||||
"actions"
|
||||
],
|
||||
"lint-staged": {
|
||||
"*.md": [
|
||||
"prettier --write",
|
||||
"git add ."
|
||||
],
|
||||
"*.{ts,json}": [
|
||||
"sh -c 'npm run all'",
|
||||
"git add ."
|
||||
]
|
||||
},
|
||||
"author": "GitHub Actions",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/glob": "^0.1.0"
|
||||
"@actions/glob": "^0.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^12.7.12",
|
||||
"@typescript-eslint/parser": "^5.15.0",
|
||||
"@zeit/ncc": "^0.20.5",
|
||||
"eslint": "^8.11.0",
|
||||
"eslint-plugin-github": "^4.3.5",
|
||||
"prettier": "^1.19.1",
|
||||
"typescript": "^3.6.4"
|
||||
"@stylistic/eslint-plugin": "^3.1.0",
|
||||
"@types/node": "^22.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
||||
"@typescript-eslint/parser": "^8.0.0",
|
||||
"@vercel/ncc": "^0.38.3",
|
||||
"eslint": "^8.47.0",
|
||||
"eslint-plugin-github": "^4.10.2",
|
||||
"eslint-plugin-prettier": "^5.0.0",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^15.5.0",
|
||||
"prettier": "^3.0.3",
|
||||
"typescript": "^5.9.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -52,12 +52,13 @@ async function run(): Promise<void> {
|
||||
}
|
||||
}
|
||||
|
||||
run()
|
||||
.then(out => {
|
||||
;(async () => {
|
||||
try {
|
||||
const out = await run()
|
||||
console.log(out)
|
||||
process.exit(0)
|
||||
})
|
||||
.catch(err => {
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
}
|
||||
})()
|
||||
|
||||
@@ -3,8 +3,11 @@ PACKAGERUNTIME=$1
|
||||
PRECACHE=$2
|
||||
|
||||
NODE_URL=https://nodejs.org/dist
|
||||
NODE12_VERSION="12.22.7"
|
||||
NODE16_VERSION="16.13.0"
|
||||
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
|
||||
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
|
||||
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
|
||||
NODE20_VERSION="20.19.6"
|
||||
NODE24_VERSION="24.12.0"
|
||||
|
||||
get_abs_path() {
|
||||
# exploits the fact that pwd will print abs path when no args
|
||||
@@ -54,12 +57,22 @@ function acquireExternalTool() {
|
||||
# Download from source to the partial file.
|
||||
echo "Downloading $download_source"
|
||||
mkdir -p "$(dirname "$download_target")" || checkRC 'mkdir'
|
||||
|
||||
CURL_VERSION=$(curl --version | awk 'NR==1{print $2}')
|
||||
echo "Curl version: $CURL_VERSION"
|
||||
|
||||
# curl -f Fail silently (no output at all) on HTTP errors (H)
|
||||
# -k Allow connections to SSL sites without certs (H)
|
||||
# -S Show error. With -s, make curl show errors when they occur
|
||||
# -L Follow redirects (H)
|
||||
# -o FILE Write to FILE instead of stdout
|
||||
curl -fkSL -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
# --retry 3 Retries transient errors 3 times (timeouts, 5xx)
|
||||
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
|
||||
# Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag
|
||||
curl -fSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
else
|
||||
# Curl version is greater than 7.71.0, running curl with --retry-all-errors flag
|
||||
curl -fSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
fi
|
||||
|
||||
# Move the partial file to the download target.
|
||||
mv "$partial_target" "$download_target" || checkRC 'mv'
|
||||
@@ -125,10 +138,22 @@ function acquireExternalTool() {
|
||||
|
||||
# Download the external tools only for Windows.
|
||||
if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.exe" node12/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.lib" node12/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin
|
||||
if [[ "$PRECACHE" != "" ]]; then
|
||||
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
||||
fi
|
||||
fi
|
||||
|
||||
# Download the external tools only for Windows.
|
||||
if [[ "$PACKAGERUNTIME" == "win-arm64" ]]; then
|
||||
# todo: replace these with official release when available
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin
|
||||
if [[ "$PRECACHE" != "" ]]; then
|
||||
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
||||
fi
|
||||
@@ -136,29 +161,29 @@ fi
|
||||
|
||||
# Download the external tools only for OSX.
|
||||
if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-darwin-x64.tar.gz" node12 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-x64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-x64.tar.gz" node24 fix_nested_dir
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then
|
||||
# node.js v12 doesn't support macOS on arm64.
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-arm64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-arm64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-arm64.tar.gz" node24 fix_nested_dir
|
||||
fi
|
||||
|
||||
# Download the external tools for Linux PACKAGERUNTIMEs.
|
||||
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
|
||||
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-x64.tar.gz" node24 fix_nested_dir
|
||||
acquireExternalTool "$NODE_ALPINE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-alpine-x64.tar.gz" node24_alpine
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-arm64.tar.gz" node12 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-arm64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-arm64.tar.gz" node24 fix_nested_dir
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-armv7l.tar.gz" node12 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-armv7l.tar.gz" node20 fix_nested_dir
|
||||
fi
|
||||
|
||||
@@ -24,7 +24,8 @@ if (exitServiceAfterNFailures <= 0) {
|
||||
exitServiceAfterNFailures = NaN;
|
||||
}
|
||||
|
||||
var consecutiveFailureCount = 0;
|
||||
var unknownFailureRetryCount = 0;
|
||||
var retriableFailureRetryCount = 0;
|
||||
|
||||
var gracefulShutdown = function () {
|
||||
console.log("Shutting down runner listener");
|
||||
@@ -62,7 +63,8 @@ var runService = function () {
|
||||
|
||||
listener.stdout.on("data", (data) => {
|
||||
if (data.toString("utf8").includes("Listening for Jobs")) {
|
||||
consecutiveFailureCount = 0;
|
||||
unknownFailureRetryCount = 0;
|
||||
retriableFailureRetryCount = 0;
|
||||
}
|
||||
process.stdout.write(data.toString("utf8"));
|
||||
});
|
||||
@@ -92,24 +94,43 @@ var runService = function () {
|
||||
console.log(
|
||||
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
|
||||
);
|
||||
consecutiveFailureCount = 0;
|
||||
unknownFailureRetryCount = 0;
|
||||
retriableFailureRetryCount++;
|
||||
if (retriableFailureRetryCount >= 10) {
|
||||
console.error(
|
||||
"Stopping the runner after 10 consecutive re-tryable failures"
|
||||
);
|
||||
stopping = true;
|
||||
}
|
||||
} else if (code === 3 || code === 4) {
|
||||
console.log(
|
||||
"Runner listener exit because of updating, re-launch runner in 5 seconds."
|
||||
);
|
||||
consecutiveFailureCount = 0;
|
||||
unknownFailureRetryCount = 0;
|
||||
retriableFailureRetryCount++;
|
||||
if (retriableFailureRetryCount >= 10) {
|
||||
console.error(
|
||||
"Stopping the runner after 10 consecutive re-tryable failures"
|
||||
);
|
||||
stopping = true;
|
||||
}
|
||||
} else if (code === 5) {
|
||||
console.log(
|
||||
"Runner listener exit with Session Conflict error, stop the service, no retry needed."
|
||||
);
|
||||
stopping = true;
|
||||
} else {
|
||||
var messagePrefix = "Runner listener exit with undefined return code";
|
||||
consecutiveFailureCount++;
|
||||
unknownFailureRetryCount++;
|
||||
retriableFailureRetryCount = 0;
|
||||
if (
|
||||
!isNaN(exitServiceAfterNFailures) &&
|
||||
consecutiveFailureCount >= exitServiceAfterNFailures
|
||||
unknownFailureRetryCount >= exitServiceAfterNFailures
|
||||
) {
|
||||
console.error(
|
||||
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
|
||||
`${messagePrefix}, exiting service after ${unknownFailureRetryCount} consecutive failures`
|
||||
);
|
||||
gracefulShutdown();
|
||||
return;
|
||||
stopping = true
|
||||
} else {
|
||||
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[Unit]
|
||||
Description={{Description}}
|
||||
After=network.target
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
ExecStart={{RunnerRoot}}/runsvc.sh
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -66,7 +66,7 @@ then
|
||||
fi
|
||||
fi
|
||||
|
||||
$apt_get update && $apt_get install -y liblttng-ust0 libkrb5-3 zlib1g
|
||||
$apt_get update && $apt_get install -y libkrb5-3 zlib1g
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'$apt_get' failed with exit code '$?'"
|
||||
@@ -94,6 +94,14 @@ then
|
||||
fi
|
||||
}
|
||||
|
||||
apt_get_with_fallbacks liblttng-ust1 liblttng-ust0
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'$apt_get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
apt_get_with_fallbacks libssl1.1$ libssl1.0.2$ libssl1.0.0$
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
@@ -102,7 +110,7 @@ then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
apt_get_with_fallbacks libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
|
||||
apt_get_with_fallbacks libicu76 libicu75 libicu74 libicu73 libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'$apt_get' failed with exit code '$?'"
|
||||
|
||||
@@ -10,7 +10,7 @@ if [ -f ".path" ]; then
|
||||
echo ".path=${PATH}"
|
||||
fi
|
||||
|
||||
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}
|
||||
nodever="node20"
|
||||
|
||||
# insert anything to setup env when running as a service
|
||||
# run the host process which keep the listener alive
|
||||
|
||||
@@ -120,6 +120,9 @@ if ERRORLEVEL 1 (
|
||||
|
||||
echo [%date% %time%] Update succeed >> "%logfile%" 2>&1
|
||||
|
||||
type nul > update.finished
|
||||
echo [%date% %time%] update.finished file creation succeed >> "%logfile%" 2>&1
|
||||
|
||||
rem rename the update log file with %logfile%.succeed/.failed/succeedneedrestart
|
||||
rem runner service host can base on the log file name determin the result of the runner update
|
||||
echo [%date% %time%] Rename "%logfile%" to be "%logfile%.succeed" >> "%logfile%" 2>&1
|
||||
|
||||
@@ -123,7 +123,7 @@ fi
|
||||
# fix upgrade issue with macOS when running as a service
|
||||
attemptedtargetedfix=0
|
||||
currentplatform=$(uname | awk '{print tolower($0)}')
|
||||
if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
||||
if [[ "$currentplatform" == 'darwin' && $restartinteractiverunner -eq 0 ]]; then
|
||||
# We needed a fix for https://github.com/actions/runner/issues/743
|
||||
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service
|
||||
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file
|
||||
@@ -135,12 +135,23 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
||||
then
|
||||
# inspect the open file handles to find the node process
|
||||
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
|
||||
nodever="node16"
|
||||
# Try finding node24 first, then fallback to earlier versions if needed
|
||||
nodever="node24"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
|
||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node20
|
||||
then
|
||||
nodever="node12"
|
||||
nodever="node20"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node16
|
||||
then
|
||||
nodever="node16"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
|
||||
then
|
||||
nodever="node12"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
if [[ $? -eq 0 && -n "$path" ]]
|
||||
then
|
||||
@@ -178,8 +189,24 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
||||
fi
|
||||
fi
|
||||
|
||||
# update runsvc.sh
|
||||
if [ -f "$rootfolder/runsvc.sh" ]
|
||||
then
|
||||
date "+[%F %T-%4N] Update runsvc.sh" >> "$logfile" 2>&1
|
||||
cat "$rootfolder/bin/runsvc.sh" > "$rootfolder/runsvc.sh"
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
date "+[%F %T-%4N] Can't update $rootfolder/runsvc.sh using $rootfolder/bin/runsvc.sh" >> "$logfile" 2>&1
|
||||
mv -fv "$logfile" "$logfile.failed"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
date "+[%F %T-%4N] Update succeed" >> "$logfile"
|
||||
|
||||
touch update.finished
|
||||
date "+[%F %T-%4N] update.finished file creation succeed" >> "$logfile"
|
||||
|
||||
# rename the update log file with %logfile%.succeed/.failed/succeedneedrestart
|
||||
# runner service host can base on the log file name determin the result of the runner update
|
||||
date "+[%F %T-%4N] Rename $logfile to be $logfile.succeed" >> "$logfile" 2>&1
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
@echo off
|
||||
|
||||
SET UPDATEFILE=update.finished
|
||||
"%~dp0\bin\Runner.Listener.exe" run %*
|
||||
|
||||
rem using `if %ERRORLEVEL% EQU N` insterad of `if ERRORLEVEL N`
|
||||
rem using `if %ERRORLEVEL% EQU N` instead of `if ERRORLEVEL N`
|
||||
rem `if ERRORLEVEL N` means: error level is N or MORE
|
||||
|
||||
if %ERRORLEVEL% EQU 0 (
|
||||
@@ -22,18 +22,37 @@ if %ERRORLEVEL% EQU 2 (
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 3 (
|
||||
rem Sleep 5 seconds to wait for the runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
|
||||
ping 127.0.0.1 -n 6 -w 1000 >NUL
|
||||
rem Wait for 30 seconds or for flag file to exists for the ephemeral runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch runner after successful update"
|
||||
FOR /L %%G IN (1,1,30) DO (
|
||||
IF EXIST %UPDATEFILE% (
|
||||
echo "Update finished successfully."
|
||||
del %FILE%
|
||||
exit /b 1
|
||||
)
|
||||
ping 127.0.0.1 -n 2 -w 1000 >NUL
|
||||
)
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 4 (
|
||||
rem Sleep 5 seconds to wait for the ephemeral runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
|
||||
ping 127.0.0.1 -n 6 -w 1000 >NUL
|
||||
rem Wait for 30 seconds or for flag file to exists for the runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch runner after successful update"
|
||||
FOR /L %%G IN (1,1,30) DO (
|
||||
IF EXIST %UPDATEFILE% (
|
||||
echo "Update finished successfully."
|
||||
del %FILE%
|
||||
exit /b 1
|
||||
)
|
||||
ping 127.0.0.1 -n 2 -w 1000 >NUL
|
||||
)
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 5 (
|
||||
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
echo "Exiting after unknown error code: %ERRORLEVEL%"
|
||||
exit /b 0
|
||||
@@ -17,6 +17,22 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
|
||||
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
|
||||
done
|
||||
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
|
||||
# Wait for docker to start
|
||||
if [ ! -z "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" ]; then
|
||||
if [ "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" -gt 0 ]; then
|
||||
echo "Waiting for docker to be ready."
|
||||
for i in $(seq "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS"); do
|
||||
if docker ps > /dev/null 2>&1; then
|
||||
echo "Docker is ready."
|
||||
break
|
||||
fi
|
||||
"$DIR"/safe_sleep.sh 1
|
||||
done
|
||||
fi
|
||||
fi
|
||||
|
||||
updateFile="update.finished"
|
||||
"$DIR"/bin/Runner.Listener run $*
|
||||
|
||||
returnCode=$?
|
||||
@@ -31,15 +47,32 @@ elif [[ $returnCode == 2 ]]; then
|
||||
"$DIR"/safe_sleep.sh 5
|
||||
exit 2
|
||||
elif [[ $returnCode == 3 ]]; then
|
||||
# Sleep 5 seconds to wait for the runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
|
||||
"$DIR"/safe_sleep.sh 5
|
||||
# Wait for 30 seconds or for flag file to exists for the runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch runner after successful update"
|
||||
for i in {0..30}; do
|
||||
if test -f "$updateFile"; then
|
||||
echo "Update finished successfully."
|
||||
rm "$updateFile"
|
||||
break
|
||||
fi
|
||||
"$DIR"/safe_sleep.sh 1
|
||||
done
|
||||
exit 2
|
||||
elif [[ $returnCode == 4 ]]; then
|
||||
# Sleep 5 seconds to wait for the ephemeral runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
|
||||
"$DIR"/safe_sleep.sh 5
|
||||
# Wait for 30 seconds or for flag file to exists for the ephemeral runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch runner after successful update"
|
||||
for i in {0..30}; do
|
||||
if test -f "$updateFile"; then
|
||||
echo "Update finished successfully."
|
||||
rm "$updateFile"
|
||||
break
|
||||
fi
|
||||
"$DIR"/safe_sleep.sh 1
|
||||
done
|
||||
exit 2
|
||||
elif [[ $returnCode == 5 ]]; then
|
||||
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
|
||||
exit 0
|
||||
else
|
||||
echo "Exiting with unknown error code: ${returnCode}"
|
||||
exit 0
|
||||
|
||||
@@ -9,16 +9,79 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
|
||||
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
|
||||
done
|
||||
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
|
||||
# run the helper process which keep the listener alive
|
||||
while :;
|
||||
do
|
||||
"$DIR"/run-helper.sh $*
|
||||
returnCode=$?
|
||||
if [[ $returnCode -eq 2 ]]; then
|
||||
echo "Restarting runner..."
|
||||
else
|
||||
echo "Exiting runner..."
|
||||
exit 0
|
||||
|
||||
run() {
|
||||
# run the helper process which keep the listener alive
|
||||
while :;
|
||||
do
|
||||
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
|
||||
"$DIR"/run-helper.sh $*
|
||||
returnCode=$?
|
||||
if [[ $returnCode -eq 2 ]]; then
|
||||
echo "Restarting runner..."
|
||||
else
|
||||
echo "Exiting runner..."
|
||||
exit 0
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
runWithManualTrap() {
|
||||
# Set job control
|
||||
set -m
|
||||
|
||||
trap 'kill -INT -$PID' INT TERM
|
||||
|
||||
# run the helper process which keep the listener alive
|
||||
while :;
|
||||
do
|
||||
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
|
||||
"$DIR"/run-helper.sh $* &
|
||||
PID=$!
|
||||
wait $PID
|
||||
returnCode=$?
|
||||
if [[ $returnCode -eq 2 ]]; then
|
||||
echo "Restarting runner..."
|
||||
else
|
||||
echo "Exiting runner..."
|
||||
# Unregister signal handling before exit
|
||||
trap - INT TERM
|
||||
# wait for last parts to be logged
|
||||
wait $PID
|
||||
exit $returnCode
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
function updateCerts() {
|
||||
local sudo_prefix=""
|
||||
local user_id=`id -u`
|
||||
|
||||
if [ $user_id -ne 0 ]; then
|
||||
if [[ ! -x "$(command -v sudo)" ]]; then
|
||||
echo "Warning: failed to update certificate store: sudo is required but not found"
|
||||
return 1
|
||||
else
|
||||
sudo_prefix="sudo"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ -x "$(command -v update-ca-certificates)" ]]; then
|
||||
eval $sudo_prefix "update-ca-certificates"
|
||||
elif [[ -x "$(command -v update-ca-trust)" ]]; then
|
||||
eval $sudo_prefix "update-ca-trust"
|
||||
else
|
||||
echo "Warning: failed to update certificate store: update-ca-certificates or update-ca-trust not found. This can happen if you're using a different runner base image."
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
if [[ ! -z "$RUNNER_UPDATE_CA_CERTS" ]]; then
|
||||
updateCerts
|
||||
fi
|
||||
|
||||
if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
|
||||
run $*
|
||||
else
|
||||
runWithManualTrap $*
|
||||
fi
|
||||
|
||||
@@ -1,6 +1,37 @@
|
||||
#!/bin/bash
|
||||
|
||||
# try to use sleep if available
|
||||
if [ -x "$(command -v sleep)" ]; then
|
||||
sleep "$1"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# try to use ping if available
|
||||
if [ -x "$(command -v ping)" ]; then
|
||||
ping -c $(( $1 + 1 )) 127.0.0.1 > /dev/null
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# try to use read -t from stdin/stdout/stderr if we are in bash
|
||||
if [ -n "$BASH_VERSION" ]; then
|
||||
if command -v read >/dev/null 2>&1; then
|
||||
if [ -t 0 ]; then
|
||||
read -t "$1" -u 0 || :;
|
||||
exit 0
|
||||
fi
|
||||
if [ -t 1 ]; then
|
||||
read -t "$1" -u 1 || :;
|
||||
exit 0
|
||||
fi
|
||||
if [ -t 2 ]; then
|
||||
read -t "$1" -u 2 || :;
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# fallback to a busy wait
|
||||
SECONDS=0
|
||||
while [[ $SECONDS != $1 ]]; do
|
||||
while [[ $SECONDS -lt $1 ]]; do
|
||||
:
|
||||
done
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
actions.runner.plist.template
|
||||
actions.runner.service.template
|
||||
checkScripts/downloadCert.js
|
||||
checkScripts/makeWebRequest.js
|
||||
darwin.svc.sh.template
|
||||
hashFiles/index.js
|
||||
installdependencies.sh
|
||||
macos-run-invoker.js
|
||||
Microsoft.IdentityModel.Logging.dll
|
||||
Microsoft.IdentityModel.Tokens.dll
|
||||
Minimatch.dll
|
||||
Newtonsoft.Json.Bson.dll
|
||||
Newtonsoft.Json.dll
|
||||
Runner.Common.deps.json
|
||||
Runner.Common.dll
|
||||
Runner.Common.pdb
|
||||
Runner.Listener
|
||||
Runner.Listener.deps.json
|
||||
Runner.Listener.dll
|
||||
Runner.Listener.exe
|
||||
Runner.Listener.pdb
|
||||
Runner.Listener.runtimeconfig.json
|
||||
Runner.PluginHost
|
||||
Runner.PluginHost.deps.json
|
||||
Runner.PluginHost.dll
|
||||
Runner.PluginHost.exe
|
||||
Runner.PluginHost.pdb
|
||||
Runner.PluginHost.runtimeconfig.json
|
||||
Runner.Plugins.deps.json
|
||||
Runner.Plugins.dll
|
||||
Runner.Plugins.pdb
|
||||
Runner.Sdk.deps.json
|
||||
Runner.Sdk.dll
|
||||
Runner.Sdk.pdb
|
||||
Runner.Worker
|
||||
Runner.Worker.deps.json
|
||||
Runner.Worker.dll
|
||||
Runner.Worker.exe
|
||||
Runner.Worker.pdb
|
||||
Runner.Worker.runtimeconfig.json
|
||||
RunnerService.exe
|
||||
RunnerService.exe.config
|
||||
RunnerService.js
|
||||
RunnerService.pdb
|
||||
runsvc.sh
|
||||
Sdk.deps.json
|
||||
Sdk.dll
|
||||
Sdk.pdb
|
||||
System.IdentityModel.Tokens.Jwt.dll
|
||||
System.Net.Http.Formatting.dll
|
||||
System.Security.Cryptography.Pkcs.dll
|
||||
System.Security.Cryptography.ProtectedData.dll
|
||||
System.ServiceProcess.ServiceController.dll
|
||||
systemd.svc.sh.template
|
||||
update.cmd.template
|
||||
update.sh.template
|
||||
YamlDotNet.dll
|
||||
@@ -1,264 +0,0 @@
|
||||
api-ms-win-core-console-l1-1-0.dll
|
||||
api-ms-win-core-console-l1-2-0.dll
|
||||
api-ms-win-core-datetime-l1-1-0.dll
|
||||
api-ms-win-core-debug-l1-1-0.dll
|
||||
api-ms-win-core-errorhandling-l1-1-0.dll
|
||||
api-ms-win-core-fibers-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-2-0.dll
|
||||
api-ms-win-core-file-l2-1-0.dll
|
||||
api-ms-win-core-handle-l1-1-0.dll
|
||||
api-ms-win-core-heap-l1-1-0.dll
|
||||
api-ms-win-core-interlocked-l1-1-0.dll
|
||||
api-ms-win-core-libraryloader-l1-1-0.dll
|
||||
api-ms-win-core-localization-l1-2-0.dll
|
||||
api-ms-win-core-memory-l1-1-0.dll
|
||||
api-ms-win-core-namedpipe-l1-1-0.dll
|
||||
api-ms-win-core-processenvironment-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-1.dll
|
||||
api-ms-win-core-profile-l1-1-0.dll
|
||||
api-ms-win-core-rtlsupport-l1-1-0.dll
|
||||
api-ms-win-core-string-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-2-0.dll
|
||||
api-ms-win-core-sysinfo-l1-1-0.dll
|
||||
api-ms-win-core-timezone-l1-1-0.dll
|
||||
api-ms-win-core-util-l1-1-0.dll
|
||||
api-ms-win-crt-conio-l1-1-0.dll
|
||||
api-ms-win-crt-convert-l1-1-0.dll
|
||||
api-ms-win-crt-environment-l1-1-0.dll
|
||||
api-ms-win-crt-filesystem-l1-1-0.dll
|
||||
api-ms-win-crt-heap-l1-1-0.dll
|
||||
api-ms-win-crt-locale-l1-1-0.dll
|
||||
api-ms-win-crt-math-l1-1-0.dll
|
||||
api-ms-win-crt-multibyte-l1-1-0.dll
|
||||
api-ms-win-crt-private-l1-1-0.dll
|
||||
api-ms-win-crt-process-l1-1-0.dll
|
||||
api-ms-win-crt-runtime-l1-1-0.dll
|
||||
api-ms-win-crt-stdio-l1-1-0.dll
|
||||
api-ms-win-crt-string-l1-1-0.dll
|
||||
api-ms-win-crt-time-l1-1-0.dll
|
||||
api-ms-win-crt-utility-l1-1-0.dll
|
||||
clrcompression.dll
|
||||
clretwrc.dll
|
||||
clrjit.dll
|
||||
coreclr.dll
|
||||
createdump
|
||||
createdump.exe
|
||||
dbgshim.dll
|
||||
hostfxr.dll
|
||||
hostpolicy.dll
|
||||
libclrjit.dylib
|
||||
libclrjit.so
|
||||
libcoreclr.dylib
|
||||
libcoreclr.so
|
||||
libcoreclrtraceptprovider.so
|
||||
libdbgshim.dylib
|
||||
libdbgshim.so
|
||||
libhostfxr.dylib
|
||||
libhostfxr.so
|
||||
libhostpolicy.dylib
|
||||
libhostpolicy.so
|
||||
libmscordaccore.dylib
|
||||
libmscordaccore.so
|
||||
libmscordbi.dylib
|
||||
libmscordbi.so
|
||||
Microsoft.CSharp.dll
|
||||
Microsoft.DiaSymReader.Native.amd64.dll
|
||||
Microsoft.VisualBasic.Core.dll
|
||||
Microsoft.VisualBasic.dll
|
||||
Microsoft.Win32.Primitives.dll
|
||||
Microsoft.Win32.Registry.dll
|
||||
mscordaccore.dll
|
||||
mscordaccore_amd64_amd64_6.0.522.21309.dll
|
||||
mscordbi.dll
|
||||
mscorlib.dll
|
||||
mscorrc.debug.dll
|
||||
mscorrc.dll
|
||||
msquic.dll
|
||||
netstandard.dll
|
||||
SOS_README.md
|
||||
System.AppContext.dll
|
||||
System.Buffers.dll
|
||||
System.Collections.Concurrent.dll
|
||||
System.Collections.dll
|
||||
System.Collections.Immutable.dll
|
||||
System.Collections.NonGeneric.dll
|
||||
System.Collections.Specialized.dll
|
||||
System.ComponentModel.Annotations.dll
|
||||
System.ComponentModel.DataAnnotations.dll
|
||||
System.ComponentModel.dll
|
||||
System.ComponentModel.EventBasedAsync.dll
|
||||
System.ComponentModel.Primitives.dll
|
||||
System.ComponentModel.TypeConverter.dll
|
||||
System.Configuration.dll
|
||||
System.Console.dll
|
||||
System.Core.dll
|
||||
System.Data.Common.dll
|
||||
System.Data.DataSetExtensions.dll
|
||||
System.Data.dll
|
||||
System.Diagnostics.Contracts.dll
|
||||
System.Diagnostics.Debug.dll
|
||||
System.Diagnostics.DiagnosticSource.dll
|
||||
System.Diagnostics.FileVersionInfo.dll
|
||||
System.Diagnostics.Process.dll
|
||||
System.Diagnostics.StackTrace.dll
|
||||
System.Diagnostics.TextWriterTraceListener.dll
|
||||
System.Diagnostics.Tools.dll
|
||||
System.Diagnostics.TraceSource.dll
|
||||
System.Diagnostics.Tracing.dll
|
||||
System.dll
|
||||
System.Drawing.dll
|
||||
System.Drawing.Primitives.dll
|
||||
System.Dynamic.Runtime.dll
|
||||
System.Formats.Asn1.dll
|
||||
System.Globalization.Calendars.dll
|
||||
System.Globalization.dll
|
||||
System.Globalization.Extensions.dll
|
||||
System.Globalization.Native.dylib
|
||||
System.Globalization.Native.so
|
||||
System.IO.Compression.Brotli.dll
|
||||
System.IO.Compression.dll
|
||||
System.IO.Compression.FileSystem.dll
|
||||
System.IO.Compression.Native.a
|
||||
System.IO.Compression.Native.dll
|
||||
System.IO.Compression.Native.dylib
|
||||
System.IO.Compression.Native.so
|
||||
System.IO.Compression.ZipFile.dll
|
||||
System.IO.dll
|
||||
System.IO.FileSystem.AccessControl.dll
|
||||
System.IO.FileSystem.dll
|
||||
System.IO.FileSystem.DriveInfo.dll
|
||||
System.IO.FileSystem.Primitives.dll
|
||||
System.IO.FileSystem.Watcher.dll
|
||||
System.IO.IsolatedStorage.dll
|
||||
System.IO.MemoryMappedFiles.dll
|
||||
System.IO.Pipes.AccessControl.dll
|
||||
System.IO.Pipes.dll
|
||||
System.IO.UnmanagedMemoryStream.dll
|
||||
System.Linq.dll
|
||||
System.Linq.Expressions.dll
|
||||
System.Linq.Parallel.dll
|
||||
System.Linq.Queryable.dll
|
||||
System.Memory.dll
|
||||
System.Native.a
|
||||
System.Native.dylib
|
||||
System.Native.so
|
||||
System.Net.dll
|
||||
System.Net.Http.dll
|
||||
System.Net.Http.Json.dll
|
||||
System.Net.Http.Native.a
|
||||
System.Net.Http.Native.dylib
|
||||
System.Net.Http.Native.so
|
||||
System.Net.HttpListener.dll
|
||||
System.Net.Mail.dll
|
||||
System.Net.NameResolution.dll
|
||||
System.Net.NetworkInformation.dll
|
||||
System.Net.Ping.dll
|
||||
System.Net.Primitives.dll
|
||||
System.Net.Quic.dll
|
||||
System.Net.Requests.dll
|
||||
System.Net.Security.dll
|
||||
System.Net.Security.Native.a
|
||||
System.Net.Security.Native.dylib
|
||||
System.Net.Security.Native.so
|
||||
System.Net.ServicePoint.dll
|
||||
System.Net.Sockets.dll
|
||||
System.Net.WebClient.dll
|
||||
System.Net.WebHeaderCollection.dll
|
||||
System.Net.WebProxy.dll
|
||||
System.Net.WebSockets.Client.dll
|
||||
System.Net.WebSockets.dll
|
||||
System.Numerics.dll
|
||||
System.Numerics.Vectors.dll
|
||||
System.ObjectModel.dll
|
||||
System.Private.CoreLib.dll
|
||||
System.Private.DataContractSerialization.dll
|
||||
System.Private.Uri.dll
|
||||
System.Private.Xml.dll
|
||||
System.Private.Xml.Linq.dll
|
||||
System.Reflection.DispatchProxy.dll
|
||||
System.Reflection.dll
|
||||
System.Reflection.Emit.dll
|
||||
System.Reflection.Emit.ILGeneration.dll
|
||||
System.Reflection.Emit.Lightweight.dll
|
||||
System.Reflection.Extensions.dll
|
||||
System.Reflection.Metadata.dll
|
||||
System.Reflection.Primitives.dll
|
||||
System.Reflection.TypeExtensions.dll
|
||||
System.Resources.Reader.dll
|
||||
System.Resources.ResourceManager.dll
|
||||
System.Resources.Writer.dll
|
||||
System.Runtime.CompilerServices.Unsafe.dll
|
||||
System.Runtime.CompilerServices.VisualC.dll
|
||||
System.Runtime.dll
|
||||
System.Runtime.Extensions.dll
|
||||
System.Runtime.Handles.dll
|
||||
System.Runtime.InteropServices.dll
|
||||
System.Runtime.InteropServices.RuntimeInformation.dll
|
||||
System.Runtime.InteropServices.WindowsRuntime.dll
|
||||
System.Runtime.Intrinsics.dll
|
||||
System.Runtime.Loader.dll
|
||||
System.Runtime.Numerics.dll
|
||||
System.Runtime.Serialization.dll
|
||||
System.Runtime.Serialization.Formatters.dll
|
||||
System.Runtime.Serialization.Json.dll
|
||||
System.Runtime.Serialization.Primitives.dll
|
||||
System.Runtime.Serialization.Xml.dll
|
||||
System.Runtime.WindowsRuntime.dll
|
||||
System.Runtime.WindowsRuntime.UI.Xaml.dll
|
||||
System.Security.AccessControl.dll
|
||||
System.Security.Claims.dll
|
||||
System.Security.Cryptography.Algorithms.dll
|
||||
System.Security.Cryptography.Cng.dll
|
||||
System.Security.Cryptography.Csp.dll
|
||||
System.Security.Cryptography.Encoding.dll
|
||||
System.Security.Cryptography.Native.Apple.a
|
||||
System.Security.Cryptography.Native.Apple.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.a
|
||||
System.Security.Cryptography.Native.OpenSsl.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.so
|
||||
System.Security.Cryptography.OpenSsl.dll
|
||||
System.Security.Cryptography.Primitives.dll
|
||||
System.Security.Cryptography.X509Certificates.dll
|
||||
System.Security.Cryptography.XCertificates.dll
|
||||
System.Security.dll
|
||||
System.Security.Principal.dll
|
||||
System.Security.Principal.Windows.dll
|
||||
System.Security.SecureString.dll
|
||||
System.ServiceModel.Web.dll
|
||||
System.ServiceProcess.dll
|
||||
System.Text.Encoding.CodePages.dll
|
||||
System.Text.Encoding.dll
|
||||
System.Text.Encoding.Extensions.dll
|
||||
System.Text.Encodings.Web.dll
|
||||
System.Text.Json.dll
|
||||
System.Text.RegularExpressions.dll
|
||||
System.Threading.Channels.dll
|
||||
System.Threading.dll
|
||||
System.Threading.Overlapped.dll
|
||||
System.Threading.Tasks.Dataflow.dll
|
||||
System.Threading.Tasks.dll
|
||||
System.Threading.Tasks.Extensions.dll
|
||||
System.Threading.Tasks.Parallel.dll
|
||||
System.Threading.Thread.dll
|
||||
System.Threading.ThreadPool.dll
|
||||
System.Threading.Timer.dll
|
||||
System.Transactions.dll
|
||||
System.Transactions.Local.dll
|
||||
System.ValueTuple.dll
|
||||
System.Web.dll
|
||||
System.Web.HttpUtility.dll
|
||||
System.Windows.dll
|
||||
System.Xml.dll
|
||||
System.Xml.Linq.dll
|
||||
System.Xml.ReaderWriter.dll
|
||||
System.Xml.Serialization.dll
|
||||
System.Xml.XDocument.dll
|
||||
System.Xml.XmlDocument.dll
|
||||
System.Xml.XmlSerializer.dll
|
||||
System.Xml.XPath.dll
|
||||
System.Xml.XPath.XDocument.dll
|
||||
ucrtbase.dll
|
||||
WindowsBase.dll
|
||||
@@ -1,24 +0,0 @@
|
||||
[
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>",
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
|
||||
"TrimmedContents": {
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1,24 +0,0 @@
|
||||
[
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>",
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
|
||||
"TrimmedContents": {
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1,5 +1,4 @@
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
@@ -32,7 +31,7 @@ namespace GitHub.Runner.Common
|
||||
new EscapeMapping(token: "%", replacement: "%25"),
|
||||
};
|
||||
|
||||
private readonly Dictionary<string, string> _properties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
private readonly Dictionary<string, string> _properties = new(StringComparer.OrdinalIgnoreCase);
|
||||
public const string Prefix = "##[";
|
||||
public const string _commandKey = "::";
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user