Mirror of https://github.com/actions/runner.git (synced 2025-12-10 12:36:23 +00:00)

Compare commits: v2.308.0 ... chore/npm- (340 commits)
[Commit table (Author | SHA1 | Date): 340 entries, 26b72c041a through d88823c634; only the SHA1 column survived extraction, so per-commit authors, dates, and messages are omitted here.]
.devcontainer/devcontainer.json

@@ -1,27 +1,27 @@
-// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
 {
     "name": "Actions Runner Devcontainer",
     "image": "mcr.microsoft.com/devcontainers/base:focal",
     "features": {
         "ghcr.io/devcontainers/features/docker-in-docker:1": {},
         "ghcr.io/devcontainers/features/dotnet": {
-            "version": "6.0.412"
+            "version": "8.0.415"
         },
         "ghcr.io/devcontainers/features/node:1": {
-            "version": "16"
-        }
+            "version": "20"
+        },
+        "ghcr.io/devcontainers/features/sshd:1": {
+            "version": "latest"
+        }
     },
     "customizations": {
         "vscode": {
             "extensions": [
                 "ms-azuretools.vscode-docker",
                 "ms-dotnettools.csharp",
                 "eamodio.gitlens"
             ]
         }
     },
-    // dotnet restore to install dependencies so OmniSharp works out of the box
-    // src/Test restores all other projects it references, src/Runner.PluginHost is not one of them
     "postCreateCommand": "dotnet restore src/Test && dotnet restore src/Runner.PluginHost",
     "remoteUser": "vscode"
 }
.github/ISSUE_TEMPLATE/config.yml (vendored): 6 lines changed
@@ -1,13 +1,13 @@
 blank_issues_enabled: false
 contact_links:
   - name: 🛑 Request a feature in the runner application
-    url: https://github.com/orgs/community/discussions/categories/actions-and-packages
-    about: If you have feature requests for GitHub Actions, please use the Actions and Packages section on the Github Product Feedback page.
+    url: https://github.com/orgs/community/discussions/categories/actions
+    about: If you have feature requests for GitHub Actions, please use the Actions section on the Github Product Feedback page.
   - name: ✅ Support for GitHub Actions
     url: https://github.community/c/code-to-cloud/52
     about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.
   - name: ✅ Feedback and suggestions for GitHub Actions
-    url: https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback
+    url: https://github.com/github/feedback/discussions/categories/actions
     about: If you have feedback or suggestions about GitHub Actions, please open a discussion (or add to an existing one) in the GitHub Actions Feedback. GitHub Actions Product Managers and Engineers monitor the feedback forum.
   - name: ‼️ GitHub Security Bug Bounty
     url: https://bounty.github.com/
.github/copilot-instructions.md (vendored, new file): 25 lines
@@ -0,0 +1,25 @@
+## Making changes
+
+### Tests
+
+Whenever possible, changes should be accompanied by non-trivial tests that meaningfully exercise the core functionality of the new code being introduced.
+
+All tests are in the `Test/` directory at the repo root. Fast unit tests are in the `Test/L0` directory and by convention have the suffix `L0.cs`. For example: unit tests for a hypothetical `src/Runner.Worker/Foo.cs` would go in `src/Test/L0/Worker/FooL0.cs`.
+
+Run tests using this command:
+
+```sh
+cd src && ./dev.sh test
+```
+
+### Formatting
+
+After editing .cs files, always format the code using this command:
+
+```sh
+cd src && ./dev.sh format
+```
+
+### Feature Flags
+
+Wherever possible, all changes should be safeguarded by a feature flag; `Features` are declared in [Constants.cs](src/Runner.Common/Constants.cs).
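For context on the feature-flag guidance in the new copilot-instructions.md above: declaring flag names in one central place and branching on them at runtime can be sketched, very roughly, as the self-contained C# program below. Every name in it (the `MyNewBehavior` flag, the `Variables` store, the `IsEnabled` helper) is an illustrative assumption rather than code from this changeset or from the real Constants.cs.

```csharp
using System;
using System.Collections.Generic;

// Illustrative stand-in only: in the real runner, feature names are declared in
// src/Runner.Common/Constants.cs and looked up through the worker's variable store.
// The flag name and lookup helper below are hypothetical.
static class Features
{
    public const string MyNewBehavior = "DistributedTask.Runner.MyNewBehavior"; // hypothetical flag name
}

class FeatureFlagDemo
{
    // Hypothetical stand-in for the runner's variable store.
    static readonly Dictionary<string, string> Variables = new()
    {
        [Features.MyNewBehavior] = "true"
    };

    static bool IsEnabled(string feature) =>
        Variables.TryGetValue(feature, out var value)
        && bool.TryParse(value, out var enabled)
        && enabled;

    static void Main()
    {
        if (IsEnabled(Features.MyNewBehavior))
        {
            Console.WriteLine("new, flag-guarded code path");
        }
        else
        {
            Console.WriteLine("existing behavior");
        }
    }
}
```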
.github/dependabot.yml (vendored): 5 lines changed
@@ -5,6 +5,11 @@ updates:
     schedule:
       interval: "daily"
+    target-branch: "main"
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
       interval: "daily"
+    target-branch: "main"
+  - package-ecosystem: "nuget"
+    directory: "/src"
+    schedule:
.github/workflows/build.yml (vendored): 34 lines changed
@@ -41,7 +41,7 @@ jobs:
          devScript: ./dev.sh

        - runtime: win-x64
-         os: windows-2019
+         os: windows-latest
          devScript: ./dev

        - runtime: win-arm64
@@ -50,7 +50,7 @@

    runs-on: ${{ matrix.os }}
    steps:
-     - uses: actions/checkout@v3
+     - uses: actions/checkout@v5

      # Build runner layout
      - name: Build & Layout Release
@@ -58,29 +58,6 @@
          ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
        working-directory: src

-     # Check runtime/externals hash
-     - name: Compute/Compare runtime and externals Hash
-       shell: bash
-       run: |
-         echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
-         echo "Current Externals hash result: $EXTERNALS_HASH"
-
-         NeedUpdate=0
-         if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
-           echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
-           NeedUpdate=1
-         fi
-
-         if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
-           echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
-           NeedUpdate=1
-         fi
-
-         exit $NeedUpdate
-       env:
-         DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
-         EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
-
      # Run tests
      - name: L0
        run: |
@@ -92,17 +69,14 @@
      - name: Package Release
        if: github.event_name != 'pull_request'
        run: |
-         ${{ matrix.devScript }} package Release
+         ${{ matrix.devScript }} package Release ${{ matrix.runtime }}
        working-directory: src

      # Upload runner package tar.gz/zip as artifact
      - name: Publish Artifact
        if: github.event_name != 'pull_request'
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v4
        with:
          name: runner-package-${{ matrix.runtime }}
          path: |
            _package
-           _package_trims/trim_externals
-           _package_trims/trim_runtime
-           _package_trims/trim_runtime_externals
.github/workflows/close-bugs-bot.yml (vendored, new file): 17 lines
@@ -0,0 +1,17 @@
+name: Close Bugs Bot
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 0 * * *' # every day at midnight
+jobs:
+  stale:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/stale@v10
+        with:
+          close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
+          exempt-issue-labels: "keep"
+          stale-issue-label: "actions-bug"
+          only-labels: "actions-bug"
+          days-before-stale: 0
+          days-before-close: 1
.github/workflows/close-features-bot.yml (vendored, new file): 17 lines
@@ -0,0 +1,17 @@
+name: Close Features Bot
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 0 * * *' # every day at midnight
+jobs:
+  stale:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/stale@v10
+        with:
+          close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
+          exempt-issue-labels: "keep"
+          stale-issue-label: "actions-feature"
+          only-labels: "actions-feature"
+          days-before-stale: 0
+          days-before-close: 1
.github/workflows/codeql.yml (vendored): 6 lines changed
@@ -23,11 +23,11 @@ jobs:

    steps:
      - name: Checkout repository
-       uses: actions/checkout@v3
+       uses: actions/checkout@v5

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
-       uses: github/codeql-action/init@v2
+       uses: github/codeql-action/init@v4
        # Override language selection by uncommenting this and choosing your languages
        # with:
        #   languages: go, javascript, csharp, python, cpp, java
@@ -38,4 +38,4 @@ jobs:
        working-directory: src

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@v2
+       uses: github/codeql-action/analyze@v4
.github/workflows/dependency-check.yml (vendored, new file): 211 lines
@@ -0,0 +1,211 @@
|
||||
name: Dependency Status Check
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
check_type:
|
||||
description: "Type of dependency check"
|
||||
required: false
|
||||
default: "all"
|
||||
type: choice
|
||||
options:
|
||||
- all
|
||||
- node
|
||||
- dotnet
|
||||
- docker
|
||||
- npm
|
||||
schedule:
|
||||
- cron: "0 11 * * 1" # Weekly on Monday at 11 AM
|
||||
|
||||
jobs:
|
||||
dependency-status:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
node20-status: ${{ steps.check-versions.outputs.node20-status }}
|
||||
node24-status: ${{ steps.check-versions.outputs.node24-status }}
|
||||
dotnet-status: ${{ steps.check-versions.outputs.dotnet-status }}
|
||||
docker-status: ${{ steps.check-versions.outputs.docker-status }}
|
||||
buildx-status: ${{ steps.check-versions.outputs.buildx-status }}
|
||||
npm-vulnerabilities: ${{ steps.check-versions.outputs.npm-vulnerabilities }}
|
||||
open-dependency-prs: ${{ steps.check-prs.outputs.open-dependency-prs }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: Check dependency versions
|
||||
id: check-versions
|
||||
run: |
|
||||
echo "## Dependency Status Report" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Generated on: $(date)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# Check Node versions
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "node" ]]; then
|
||||
echo "### Node.js Versions" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
VERSIONS_JSON=$(curl -s https://raw.githubusercontent.com/actions/node-versions/main/versions-manifest.json)
|
||||
LATEST_NODE20=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("20.")) | .version' | head -1)
|
||||
LATEST_NODE24=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("24.")) | .version' | head -1)
|
||||
|
||||
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
|
||||
NODE20_STATUS="✅ up-to-date"
|
||||
NODE24_STATUS="✅ up-to-date"
|
||||
|
||||
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
|
||||
NODE20_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
|
||||
NODE24_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
echo "| Version | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|---------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Node 20 | $CURRENT_NODE20 | $LATEST_NODE20 | $NODE20_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Node 24 | $CURRENT_NODE24 | $LATEST_NODE24 | $NODE24_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "node20-status=$NODE20_STATUS" >> $GITHUB_OUTPUT
|
||||
echo "node24-status=$NODE24_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Check .NET version
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "dotnet" ]]; then
|
||||
echo "### .NET SDK Version" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
current_dotnet_version=$(jq -r .sdk.version ./src/global.json)
|
||||
current_major_minor=$(echo "$current_dotnet_version" | cut -d '.' -f 1,2)
|
||||
latest_dotnet_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/$current_major_minor/latest.version")
|
||||
|
||||
DOTNET_STATUS="✅ up-to-date"
|
||||
if [ "$current_dotnet_version" != "$latest_dotnet_version" ]; then
|
||||
DOTNET_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| .NET SDK | $current_dotnet_version | $latest_dotnet_version | $DOTNET_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "dotnet-status=$DOTNET_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Check Docker versions
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "docker" ]]; then
|
||||
echo "### Docker Versions" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
current_docker=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
current_buildx=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
|
||||
latest_docker=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
|
||||
latest_buildx=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')
|
||||
|
||||
DOCKER_STATUS="✅ up-to-date"
|
||||
BUILDX_STATUS="✅ up-to-date"
|
||||
|
||||
if [ "$current_docker" != "$latest_docker" ]; then
|
||||
DOCKER_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
if [ "$current_buildx" != "$latest_buildx" ]; then
|
||||
BUILDX_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Docker | $current_docker | $latest_docker | $DOCKER_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Docker Buildx | $current_buildx | $latest_buildx | $BUILDX_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "docker-status=$DOCKER_STATUS" >> $GITHUB_OUTPUT
|
||||
echo "buildx-status=$BUILDX_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Check npm vulnerabilities
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "npm" ]]; then
|
||||
echo "### NPM Security Audit" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
cd src/Misc/expressionFunc/hashFiles
|
||||
npm install --silent
|
||||
|
||||
AUDIT_OUTPUT=""
|
||||
AUDIT_EXIT_CODE=0
|
||||
# Run npm audit and capture output and exit code
|
||||
if ! AUDIT_OUTPUT=$(npm audit --json 2>&1); then
|
||||
AUDIT_EXIT_CODE=$?
|
||||
fi
|
||||
|
||||
# Check if output is valid JSON
|
||||
if echo "$AUDIT_OUTPUT" | jq . >/dev/null 2>&1; then
|
||||
VULN_COUNT=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.total // 0')
|
||||
# Ensure VULN_COUNT is a number
|
||||
VULN_COUNT=$(echo "$VULN_COUNT" | grep -o '[0-9]*' | head -1)
|
||||
VULN_COUNT=${VULN_COUNT:-0}
|
||||
|
||||
NPM_STATUS="✅ no vulnerabilities"
|
||||
if [ "$VULN_COUNT" -gt 0 ] 2>/dev/null; then
|
||||
NPM_STATUS="⚠️ $VULN_COUNT vulnerabilities found"
|
||||
|
||||
# Get vulnerability details
|
||||
HIGH_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.high // 0')
|
||||
CRITICAL_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.critical // 0')
|
||||
|
||||
echo "| Severity | Count |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Critical | $CRITICAL_VULNS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| High | $HIGH_VULNS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
else
|
||||
echo "No npm vulnerabilities found ✅" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
else
|
||||
NPM_STATUS="❌ npm audit failed"
|
||||
echo "npm audit failed to run or returned invalid JSON ❌" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Exit code: $AUDIT_EXIT_CODE" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Output: $AUDIT_OUTPUT" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
echo "npm-vulnerabilities=$NPM_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Check for open dependency PRs
|
||||
id: check-prs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
echo "### Open Dependency PRs" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# Get open PRs with dependency label
|
||||
OPEN_PRS=$(gh pr list --label "dependencies" --state open --json number,title,url)
|
||||
PR_COUNT=$(echo "$OPEN_PRS" | jq '. | length')
|
||||
|
||||
if [ "$PR_COUNT" -gt 0 ]; then
|
||||
echo "Found $PR_COUNT open dependency PR(s):" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "$OPEN_PRS" | jq -r '.[] | "- [#\(.number)](\(.url)) \(.title)"' >> $GITHUB_STEP_SUMMARY
|
||||
else
|
||||
echo "No open dependency PRs found ✅" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "open-dependency-prs=$PR_COUNT" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "### Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Check for open PRs with the \`dependency\` label before releases" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Review and merge dependency updates regularly" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Critical vulnerabilities should be addressed immediately" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Automated workflows run weekly to check for updates:**" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Node.js versions (Mondays at 6 AM)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- NPM audit fix (Mondays at 7 AM)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- .NET SDK updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Docker/Buildx updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
|
||||
.github/workflows/docker-buildx-upgrade.yml (vendored, new file): 166 lines
@@ -0,0 +1,166 @@
|
||||
name: "Docker/Buildx Version Upgrade"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * 1" # Run every Monday at midnight
|
||||
workflow_dispatch: # Allow manual triggering
|
||||
|
||||
jobs:
|
||||
check-versions:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
DOCKER_SHOULD_UPDATE: ${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}
|
||||
DOCKER_LATEST_VERSION: ${{ steps.check_docker_version.outputs.LATEST_VERSION }}
|
||||
DOCKER_CURRENT_VERSION: ${{ steps.check_docker_version.outputs.CURRENT_VERSION }}
|
||||
BUILDX_SHOULD_UPDATE: ${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}
|
||||
BUILDX_LATEST_VERSION: ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}
|
||||
BUILDX_CURRENT_VERSION: ${{ steps.check_buildx_version.outputs.CURRENT_VERSION }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Check Docker version
|
||||
id: check_docker_version
|
||||
shell: bash
|
||||
run: |
|
||||
# Extract current Docker version from Dockerfile
|
||||
current_version=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
|
||||
# Fetch latest Docker Engine version from Docker's download site
|
||||
# This gets the latest Linux static binary version which matches what's used in the Dockerfile
|
||||
latest_version=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
|
||||
|
||||
# Extra check to ensure we got a valid version
|
||||
if [[ ! $latest_version =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
echo "Failed to retrieve a valid Docker version"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
should_update=0
|
||||
[ "$current_version" != "$latest_version" ] && should_update=1
|
||||
|
||||
echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT
|
||||
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT
|
||||
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check Buildx version
|
||||
id: check_buildx_version
|
||||
shell: bash
|
||||
run: |
|
||||
# Extract current Buildx version from Dockerfile
|
||||
current_version=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
|
||||
# Fetch latest Buildx version
|
||||
latest_version=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')
|
||||
|
||||
should_update=0
|
||||
[ "$current_version" != "$latest_version" ] && should_update=1
|
||||
|
||||
echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT
|
||||
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT
|
||||
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Create annotations for versions
|
||||
run: |
|
||||
docker_should_update="${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}"
|
||||
buildx_should_update="${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}"
|
||||
|
||||
# Show annotation if only Docker needs update
|
||||
if [[ "$docker_should_update" == "1" && "$buildx_should_update" == "0" ]]; then
|
||||
echo "::warning ::Docker version (${{ steps.check_docker_version.outputs.LATEST_VERSION }}) needs update but Buildx is current. Only updating when both need updates."
|
||||
fi
|
||||
|
||||
# Show annotation if only Buildx needs update
|
||||
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "1" ]]; then
|
||||
echo "::warning ::Buildx version (${{ steps.check_buildx_version.outputs.LATEST_VERSION }}) needs update but Docker is current. Only updating when both need updates."
|
||||
fi
|
||||
|
||||
# Show annotation when both are current
|
||||
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "0" ]]; then
|
||||
echo "::warning ::Latest Docker version is ${{ steps.check_docker_version.outputs.LATEST_VERSION }} and Buildx version is ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}. No updates needed."
|
||||
fi
|
||||
|
||||
update-versions:
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
needs: [check-versions]
|
||||
if: ${{ needs.check-versions.outputs.DOCKER_SHOULD_UPDATE == 1 && needs.check-versions.outputs.BUILDX_SHOULD_UPDATE == 1 }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Update Docker version
|
||||
shell: bash
|
||||
run: |
|
||||
latest_version="${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}"
|
||||
current_version="${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }}"
|
||||
|
||||
# Update version in Dockerfile
|
||||
sed -i "s/ARG DOCKER_VERSION=$current_version/ARG DOCKER_VERSION=$latest_version/g" ./images/Dockerfile
|
||||
|
||||
- name: Update Buildx version
|
||||
shell: bash
|
||||
run: |
|
||||
latest_version="${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||
current_version="${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }}"
|
||||
|
||||
# Update version in Dockerfile
|
||||
sed -i "s/ARG BUILDX_VERSION=$current_version/ARG BUILDX_VERSION=$latest_version/g" ./images/Dockerfile
|
||||
|
||||
- name: Commit changes and create Pull Request
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Setup branch and commit information
|
||||
branch_name="feature/docker-buildx-upgrade"
|
||||
commit_message="Upgrade Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||
pr_title="Update Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
# Create branch or switch to it if it exists
|
||||
if git show-ref --quiet refs/remotes/origin/$branch_name; then
|
||||
git fetch origin
|
||||
git checkout -B "$branch_name" origin/$branch_name
|
||||
else
|
||||
git checkout -b "$branch_name"
|
||||
fi
|
||||
|
||||
# Commit and push changes
|
||||
git commit -a -m "$commit_message"
|
||||
git push --force origin "$branch_name"
|
||||
|
||||
# Create PR body using here-doc for proper formatting
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated Docker and Buildx version update:
|
||||
|
||||
- Docker: ${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}
|
||||
- Buildx: ${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}
|
||||
|
||||
This update ensures we're using the latest stable Docker and Buildx versions for security and performance improvements.
|
||||
|
||||
**Release notes:** https://docs.docker.com/engine/release-notes/
|
||||
|
||||
**Next steps:**
|
||||
- Review the version changes
|
||||
- Verify container builds work as expected
|
||||
- Test multi-platform builds if applicable
|
||||
- Merge when ready
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [Docker/Buildx Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/docker-buildx-upgrade.yml)
|
||||
EOF
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "$pr_title" \
|
||||
--label "dependencies" \
|
||||
--label "dependencies-weekly-check" \
|
||||
--label "dependencies-not-dependabot" \
|
||||
--label "docker" \
|
||||
--body-file pr_body.txt
|
||||
.github/workflows/dotnet-upgrade.yml (vendored, new file): 105 lines
@@ -0,0 +1,105 @@
|
||||
name: "DotNet SDK Upgrade"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 8 * * 1" # Weekly on Monday at 8 AM UTC (independent of Node.js/NPM)
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
dotnet-update:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
SHOULD_UPDATE: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE }}
|
||||
BRANCH_EXISTS: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS }}
|
||||
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
- name: Get current major minor version
|
||||
id: fetch_current_version
|
||||
shell: bash
|
||||
run: |
|
||||
current_major_minor_patch_version=$(jq .sdk.version ./src/global.json | xargs)
|
||||
current_major_minor_version=$(cut -d '.' -f 1,2 <<< "$current_major_minor_patch_version")
|
||||
|
||||
echo "DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION=${current_major_minor_patch_version}" >> $GITHUB_OUTPUT
|
||||
echo "DOTNET_CURRENT_MAJOR_MINOR_VERSION=${current_major_minor_version}" >> $GITHUB_OUTPUT
|
||||
- name: Check patch version
|
||||
id: fetch_latest_version
|
||||
shell: bash
|
||||
run: |
|
||||
latest_patch_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version")
|
||||
current_patch_version=${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}
|
||||
|
||||
should_update=0
|
||||
[ "$current_patch_version" != "$latest_patch_version" ] && should_update=1
|
||||
|
||||
# check if git branch already exists for the upgrade
|
||||
branch_already_exists=0
|
||||
|
||||
if git ls-remote --heads --exit-code origin refs/heads/feature/dotnetsdk-upgrade/${latest_patch_version};
|
||||
then
|
||||
branch_already_exists=1
|
||||
should_update=0
|
||||
fi
|
||||
echo "DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION=${latest_patch_version}" >> $GITHUB_OUTPUT
|
||||
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||
echo "BRANCH_EXISTS=${branch_already_exists}" >> $GITHUB_OUTPUT
|
||||
- name: Create an error annotation if branch exists
|
||||
if: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS == 1 }}
|
||||
run: echo "::error links::feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} https://github.com/actions/runner/tree/feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}::Branch feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} already exists. Please take a look and delete that branch if you wish to recreate"
|
||||
- name: Create a warning annotation if no need to update
|
||||
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 0 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
|
||||
run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update"
|
||||
- name: Update patch version
|
||||
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
|
||||
shell: bash
|
||||
run: |
|
||||
patch_version="${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
|
||||
current_version="${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}"
|
||||
|
||||
# Update globals
|
||||
echo Updating globals
|
||||
globals_temp=$(mktemp)
|
||||
jq --unbuffered --arg patch_version "$patch_version" '.sdk.version = $patch_version' ./src/global.json > "$globals_temp" && mv "$globals_temp" ./src/global.json
|
||||
|
||||
# Update devcontainer
|
||||
echo Updating devcontainer
|
||||
devcontainer_temp=$(mktemp)
|
||||
jq --unbuffered --arg patch_version "$patch_version" '.features."ghcr.io/devcontainers/features/dotnet".version = $patch_version' ./.devcontainer/devcontainer.json > "$devcontainer_temp" && mv "$devcontainer_temp" ./.devcontainer/devcontainer.json
|
||||
|
||||
# Update dev.sh
|
||||
echo Updating start script
|
||||
sed -i "s/DOTNETSDK_VERSION=\"$current_version\"/DOTNETSDK_VERSION=\"$patch_version\"/g" ./src/dev.sh
|
||||
- name: GIT commit and push all changed files
|
||||
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
|
||||
id: create_branch
|
||||
run: |
|
||||
branch_name="feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
git checkout -b $branch_name
|
||||
git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
|
||||
git push --set-upstream origin $branch_name
|
||||
|
||||
create-pr:
|
||||
needs: [dotnet-update]
|
||||
if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
- name: Create Pull Request
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --label "dependencies" --label "dependencies-weekly-check" --label "dependencies-not-dependabot" --label "dotnet" --body "
|
||||
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
|
||||
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
|
||||
.github/workflows/lint.yml (vendored, deleted): 24 lines
@@ -1,24 +0,0 @@
-name: Lint
-
-on:
-  pull_request:
-    branches: [ main ]
-
-jobs:
-  build:
-    name: Lint
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v3
-        with:
-          # Ensure full list of changed files within `super-linter`
-          fetch-depth: 0
-      - name: Run linters
-        uses: github/super-linter@v4
-        env:
-          DEFAULT_BRANCH: ${{ github.base_ref }}
-          EDITORCONFIG_FILE_NAME: .editorconfig
-          LINTER_RULES_PATH: /src/
-          VALIDATE_ALL_CODEBASE: false
-          VALIDATE_CSHARP: true
.github/workflows/node-upgrade.yml (vendored, new file): 194 lines
@@ -0,0 +1,194 @@
|
||||
name: Auto Update Node Version
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 6 * * 1" # Weekly, every Monday
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-node:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- name: Get latest Node versions
|
||||
id: node-versions
|
||||
run: |
|
||||
# Get latest Node.js releases from official GitHub releases
|
||||
echo "Fetching latest Node.js releases..."
|
||||
|
||||
# Get latest v20.x release
|
||||
LATEST_NODE20=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
|
||||
jq -r '.[] | select(.tag_name | startswith("v20.")) | .tag_name' | \
|
||||
head -1 | sed 's/^v//')
|
||||
|
||||
# Get latest v24.x release
|
||||
LATEST_NODE24=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
|
||||
jq -r '.[] | select(.tag_name | startswith("v24.")) | .tag_name' | \
|
||||
head -1 | sed 's/^v//')
|
||||
|
||||
echo "Found Node.js releases: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
|
||||
|
||||
# Verify these versions are available in alpine_nodejs releases
|
||||
echo "Verifying availability in alpine_nodejs..."
|
||||
ALPINE_RELEASES=$(curl -s https://api.github.com/repos/actions/alpine_nodejs/releases | jq -r '.[].tag_name')
|
||||
|
||||
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE20$"; then
|
||||
echo "::warning title=Node 20 Fallback::Node 20 version $LATEST_NODE20 not found in alpine_nodejs releases, using fallback"
|
||||
# Fall back to latest available alpine_nodejs v20 release
|
||||
LATEST_NODE20=$(echo "$ALPINE_RELEASES" | grep "^v20\." | head -1 | sed 's/^v//')
|
||||
echo "Using latest available alpine_nodejs Node 20: $LATEST_NODE20"
|
||||
fi
|
||||
|
||||
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE24$"; then
|
||||
echo "::warning title=Node 24 Fallback::Node 24 version $LATEST_NODE24 not found in alpine_nodejs releases, using fallback"
|
||||
# Fall back to latest available alpine_nodejs v24 release
|
||||
LATEST_NODE24=$(echo "$ALPINE_RELEASES" | grep "^v24\." | head -1 | sed 's/^v//')
|
||||
echo "Using latest available alpine_nodejs Node 24: $LATEST_NODE24"
|
||||
fi
|
||||
|
||||
# Validate that we have non-empty version numbers
|
||||
if [ -z "$LATEST_NODE20" ] || [ "$LATEST_NODE20" = "" ]; then
|
||||
echo "::error title=Invalid Node 20 Version::Failed to determine valid Node 20 version. Got: '$LATEST_NODE20'"
|
||||
echo "Available alpine_nodejs releases:"
|
||||
echo "$ALPINE_RELEASES" | head -10
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$LATEST_NODE24" ] || [ "$LATEST_NODE24" = "" ]; then
|
||||
echo "::error title=Invalid Node 24 Version::Failed to determine valid Node 24 version. Got: '$LATEST_NODE24'"
|
||||
echo "Available alpine_nodejs releases:"
|
||||
echo "$ALPINE_RELEASES" | head -10
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Additional validation: ensure versions match expected format (x.y.z)
|
||||
if ! echo "$LATEST_NODE20" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 20 Format::Node 20 version '$LATEST_NODE20' does not match expected format (x.y.z)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! echo "$LATEST_NODE24" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 24 Format::Node 24 version '$LATEST_NODE24' does not match expected format (x.y.z)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Validated Node versions: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
|
||||
echo "latest_node20=$LATEST_NODE20" >> $GITHUB_OUTPUT
|
||||
echo "latest_node24=$LATEST_NODE24" >> $GITHUB_OUTPUT
|
||||
|
||||
# Check current versions in externals.sh
|
||||
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
|
||||
echo "current_node20=$CURRENT_NODE20" >> $GITHUB_OUTPUT
|
||||
echo "current_node24=$CURRENT_NODE24" >> $GITHUB_OUTPUT
|
||||
|
||||
# Determine if updates are needed
|
||||
NEEDS_UPDATE20="false"
|
||||
NEEDS_UPDATE24="false"
|
||||
|
||||
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
|
||||
NEEDS_UPDATE20="true"
|
||||
echo "::notice title=Node 20 Update Available::Current: $CURRENT_NODE20 → Latest: $LATEST_NODE20"
|
||||
fi
|
||||
|
||||
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
|
||||
NEEDS_UPDATE24="true"
|
||||
echo "::notice title=Node 24 Update Available::Current: $CURRENT_NODE24 → Latest: $LATEST_NODE24"
|
||||
fi
|
||||
|
||||
if [ "$NEEDS_UPDATE20" == "false" ] && [ "$NEEDS_UPDATE24" == "false" ]; then
|
||||
echo "::notice title=No Updates Needed::All Node.js versions are up to date"
|
||||
fi
|
||||
|
||||
echo "needs_update20=$NEEDS_UPDATE20" >> $GITHUB_OUTPUT
|
||||
echo "needs_update24=$NEEDS_UPDATE24" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update externals.sh and create PR
|
||||
if: steps.node-versions.outputs.needs_update20 == 'true' || steps.node-versions.outputs.needs_update24 == 'true'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Final validation before making changes
|
||||
NODE20_VERSION="${{ steps.node-versions.outputs.latest_node20 }}"
|
||||
NODE24_VERSION="${{ steps.node-versions.outputs.latest_node24 }}"
|
||||
|
||||
echo "Final validation of versions before PR creation:"
|
||||
echo "Node 20: '$NODE20_VERSION'"
|
||||
echo "Node 24: '$NODE24_VERSION'"
|
||||
|
||||
# Validate versions are not empty and match expected format
|
||||
if [ -z "$NODE20_VERSION" ] || ! echo "$NODE20_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 20 Version::Refusing to create PR with invalid Node 20 version: '$NODE20_VERSION'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$NODE24_VERSION" ] || ! echo "$NODE24_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 24 Version::Refusing to create PR with invalid Node 24 version: '$NODE24_VERSION'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ All versions validated successfully"
|
||||
|
||||
# Update the files
|
||||
if [ "${{ steps.node-versions.outputs.needs_update20 }}" == "true" ]; then
|
||||
sed -i 's/NODE20_VERSION="[^"]*"/NODE20_VERSION="'"$NODE20_VERSION"'"/' src/Misc/externals.sh
|
||||
fi
|
||||
|
||||
if [ "${{ steps.node-versions.outputs.needs_update24 }}" == "true" ]; then
|
||||
sed -i 's/NODE24_VERSION="[^"]*"/NODE24_VERSION="'"$NODE24_VERSION"'"/' src/Misc/externals.sh
|
||||
fi
|
||||
|
||||
# Verify the changes were applied correctly
|
||||
echo "Verifying changes in externals.sh:"
|
||||
grep "NODE20_VERSION=" src/Misc/externals.sh
|
||||
grep "NODE24_VERSION=" src/Misc/externals.sh
|
||||
|
||||
# Ensure we actually have valid versions in the file
|
||||
UPDATED_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
UPDATED_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
|
||||
if [ -z "$UPDATED_NODE20" ] || [ -z "$UPDATED_NODE24" ]; then
|
||||
echo "::error title=Update Failed::Failed to properly update externals.sh"
|
||||
echo "Updated Node 20: '$UPDATED_NODE20'"
|
||||
echo "Updated Node 24: '$UPDATED_NODE24'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
# Create branch and commit changes
|
||||
branch_name="chore/update-node"
|
||||
git checkout -b "$branch_name"
|
||||
git commit -a -m "chore: update Node versions (20: $NODE20_VERSION, 24: $NODE24_VERSION)"
|
||||
git push --force origin "$branch_name"
|
||||
|
||||
# Create PR body using here-doc for proper formatting
|
||||
cat > pr_body.txt << EOF
|
||||
Automated Node.js version update:
|
||||
|
||||
- Node 20: ${{ steps.node-versions.outputs.current_node20 }} → $NODE20_VERSION
|
||||
- Node 24: ${{ steps.node-versions.outputs.current_node24 }} → $NODE24_VERSION
|
||||
|
||||
This update ensures we're using the latest stable Node.js versions for security and performance improvements.
|
||||
|
||||
**Note**: When updating Node versions, remember to also create a new release of alpine_nodejs at the updated version following the instructions at: https://github.com/actions/alpine_nodejs
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [Node Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/node-upgrade.yml)
|
||||
EOF
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "chore: update Node versions" \
|
||||
--label "dependencies" \
|
||||
--label "dependencies-weekly-check" \
|
||||
--label "dependencies-not-dependabot" \
|
||||
--label "node" \
|
||||
--label "javascript" \
|
||||
--body-file pr_body.txt
|
||||
|
||||
echo "::notice title=PR Created::Successfully created Node.js version update PR on branch $branch_name"
|
||||
.github/workflows/npm-audit-typescript.yml (vendored, new file): 235 lines
@@ -0,0 +1,235 @@
|
||||
name: NPM Audit Fix with TypeScript Auto-Fix
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
npm-audit-with-ts-fix:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: NPM install and audit fix with TypeScript auto-repair
|
||||
working-directory: src/Misc/expressionFunc/hashFiles
|
||||
run: |
|
||||
npm install
|
||||
|
||||
# Check for vulnerabilities first
|
||||
echo "Checking for npm vulnerabilities..."
|
||||
if npm audit --audit-level=moderate; then
|
||||
echo "✅ No moderate or higher vulnerabilities found"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "⚠️ Vulnerabilities found, attempting npm audit fix..."
|
||||
|
||||
# Attempt audit fix and capture the result
|
||||
if npm audit fix; then
|
||||
echo "✅ npm audit fix completed successfully"
|
||||
AUDIT_FIX_STATUS="success"
|
||||
else
|
||||
echo "⚠️ npm audit fix failed or had issues"
|
||||
AUDIT_FIX_STATUS="failed"
|
||||
|
||||
# Try audit fix with --force as a last resort for critical/high vulns only
|
||||
echo "Checking if critical/high vulnerabilities remain..."
|
||||
if ! npm audit --audit-level=high; then
|
||||
echo "🚨 Critical/high vulnerabilities remain, attempting --force fix..."
|
||||
if npm audit fix --force; then
|
||||
echo "⚠️ npm audit fix --force completed (may have breaking changes)"
|
||||
AUDIT_FIX_STATUS="force-fixed"
|
||||
else
|
||||
echo "❌ npm audit fix --force also failed"
|
||||
AUDIT_FIX_STATUS="force-failed"
|
||||
fi
|
||||
else
|
||||
echo "✅ Only moderate/low vulnerabilities remain after failed fix"
|
||||
AUDIT_FIX_STATUS="partial-success"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "AUDIT_FIX_STATUS=$AUDIT_FIX_STATUS" >> $GITHUB_ENV
|
||||
|
||||
# Try to fix TypeScript issues automatically
|
||||
echo "Attempting to fix TypeScript compatibility issues..."
|
||||
|
||||
# Check if build fails
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Build failed, attempting automated fixes..."
|
||||
|
||||
# Common fix 1: Update @types/node to latest compatible version
|
||||
echo "Trying to update @types/node to latest version..."
|
||||
npm update @types/node
|
||||
|
||||
# Common fix 2: If that doesn't work, try installing a specific known-good version
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Trying specific @types/node version..."
|
||||
# Try Node 20 compatible version
|
||||
npm install --save-dev @types/node@^20.0.0
|
||||
fi
|
||||
|
||||
# Common fix 3: Clear node_modules and reinstall if still failing
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Clearing node_modules and reinstalling..."
|
||||
rm -rf node_modules package-lock.json
|
||||
npm install
|
||||
|
||||
# Re-run audit fix after clean install if it was successful before
|
||||
if [[ "$AUDIT_FIX_STATUS" == "success" || "$AUDIT_FIX_STATUS" == "force-fixed" ]]; then
|
||||
echo "Re-running npm audit fix after clean install..."
|
||||
npm audit fix || echo "Audit fix failed on second attempt"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Common fix 4: Try updating TypeScript itself
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Trying to update TypeScript..."
|
||||
npm update typescript
|
||||
fi
|
||||
|
||||
# Final check
|
||||
if npm run build 2>/dev/null; then
|
||||
echo "✅ Successfully fixed TypeScript issues automatically"
|
||||
else
|
||||
echo "⚠️ Could not automatically fix TypeScript issues"
|
||||
fi
|
||||
else
|
||||
echo "✅ Build passes after audit fix"
|
||||
fi
|
||||
|
||||
- name: Create PR if changes exist
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
HUSKY: 0 # Disable husky hooks for automated commits
|
||||
run: |
|
||||
# Check if there are any changes
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
# Create branch and commit changes
|
||||
branch_name="chore/npm-audit-fix-with-ts-repair"
|
||||
git checkout -b "$branch_name"
|
||||
|
||||
# Commit with --no-verify to skip husky hooks
|
||||
git commit -a -m "chore: npm audit fix with automated TypeScript compatibility fixes" --no-verify
|
||||
git push --force origin "$branch_name"
|
||||
|
||||
# Check final build status and gather info about what was changed
|
||||
build_status="✅ Build passes"
|
||||
fixes_applied=""
|
||||
cd src/Misc/expressionFunc/hashFiles
|
||||
|
||||
# Check what packages were updated
|
||||
if git diff HEAD~1 package.json | grep -q "@types/node"; then
|
||||
fixes_applied+="\n- Updated @types/node version for TypeScript compatibility"
|
||||
fi
|
||||
if git diff HEAD~1 package.json | grep -q "typescript"; then
|
||||
fixes_applied+="\n- Updated TypeScript version"
|
||||
fi
|
||||
if git diff HEAD~1 package-lock.json | grep -q "resolved"; then
|
||||
fixes_applied+="\n- Updated package dependencies via npm audit fix"
|
||||
fi
|
||||
|
||||
if ! npm run build 2>/dev/null; then
|
||||
build_status="⚠️ Build fails - manual review required"
|
||||
fi
|
||||
cd - > /dev/null
|
||||
|
||||
# Create enhanced PR body using here-doc for proper formatting
|
||||
audit_status_msg=""
|
||||
case "$AUDIT_FIX_STATUS" in
|
||||
"success")
|
||||
audit_status_msg="✅ **Audit Fix**: Completed successfully"
|
||||
;;
|
||||
"partial-success")
|
||||
audit_status_msg="⚠️ **Audit Fix**: Partial success (only moderate/low vulnerabilities remain)"
|
||||
;;
|
||||
"force-fixed")
|
||||
audit_status_msg="⚠️ **Audit Fix**: Completed with --force (may have breaking changes)"
|
||||
;;
|
||||
"failed"|"force-failed")
|
||||
audit_status_msg="❌ **Audit Fix**: Failed to resolve vulnerabilities"
|
||||
;;
|
||||
*)
|
||||
audit_status_msg="❓ **Audit Fix**: Status unknown"
|
||||
;;
|
||||
esac
|
||||
|
||||
if [[ "$build_status" == *"fails"* ]]; then
|
||||
cat > pr_body.txt << EOF
|
||||
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.
|
||||
|
||||
**Build Status**: ⚠️ Build fails - manual review required
|
||||
$audit_status_msg
|
||||
|
||||
This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.
|
||||
|
||||
⚠️ **Manual Review Required**: The build is currently failing after automated fixes were attempted.
|
||||
|
||||
Common issues and solutions:
|
||||
- Check for TypeScript version compatibility with Node.js types
|
||||
- Review breaking changes in updated dependencies
|
||||
- Consider pinning problematic dependency versions temporarily
|
||||
- Review tsconfig.json for compatibility settings
|
||||
|
||||
**Automated Fix Strategy**:
|
||||
1. Run npm audit fix with proper error handling
|
||||
2. Update @types/node to latest compatible version
|
||||
3. Try Node 20 specific @types/node version if needed
|
||||
4. Clean reinstall dependencies if conflicts persist
|
||||
5. Update TypeScript compiler if necessary
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
|
||||
EOF
|
||||
else
|
||||
cat > pr_body.txt << EOF
|
||||
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.
|
||||
|
||||
**Build Status**: ✅ Build passes
|
||||
$audit_status_msg
|
||||
|
||||
This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.
|
||||
|
||||
✅ **Ready to Merge**: All automated fixes were successful and the build passes.
|
||||
|
||||
**Automated Fix Strategy**:
|
||||
1. Run npm audit fix with proper error handling
|
||||
2. Update @types/node to latest compatible version
|
||||
3. Try Node 20 specific @types/node version if needed
|
||||
4. Clean reinstall dependencies if conflicts persist
|
||||
5. Update TypeScript compiler if necessary
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
|
||||
EOF
|
||||
fi
|
||||
|
||||
if [ -n "$fixes_applied" ]; then
|
||||
# Append the "fixes applied" section after the "This workflow attempts" line (sed's a\ command inserts text after each matching line)
|
||||
sed -i "/This workflow attempts/a\\
|
||||
\\
|
||||
**Automated Fixes Applied**:$fixes_applied" pr_body.txt
|
||||
fi
|
||||
|
||||
# Create PR with appropriate labels
|
||||
labels="dependencies,dependencies-not-dependabot,typescript,npm,security"
|
||||
if [[ "$build_status" == *"fails"* ]]; then
|
||||
labels="dependencies,dependencies-not-dependabot,typescript,npm,security,needs-manual-review"
|
||||
fi
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "chore: npm audit fix with TypeScript auto-repair" \
|
||||
--label "$labels" \
|
||||
--body-file pr_body.txt
|
||||
else
|
||||
echo "No changes to commit"
|
||||
fi
|
||||
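The PR body assembly in the step above, an expanding here-doc plus an optional `sed` append, can be sanity-checked locally; a minimal sketch with illustrative values:

```bash
# Minimal local sketch of the PR body assembly above (values are illustrative).
audit_status_msg="✅ **Audit Fix**: Completed successfully"
fixes_applied="\n- Updated @types/node version for TypeScript compatibility"

# An unquoted EOF delimiter lets $audit_status_msg expand inside the here-doc.
cat > pr_body.txt << EOF
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.

$audit_status_msg

This workflow attempts to automatically fix TypeScript compatibility issues.
EOF

# sed's a\ command appends the optional "fixes applied" section after the matching line.
if [ -n "$fixes_applied" ]; then
  sed -i "/This workflow attempts/a\\
\\
**Automated Fixes Applied**:$fixes_applied" pr_body.txt
fi

cat pr_body.txt
```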
.github/workflows/npm-audit.yml (vendored, new file, 137 lines)
@@ -0,0 +1,137 @@
|
||||
name: NPM Audit Fix
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 7 * * 1" # Weekly on Monday at 7 AM UTC
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
npm-audit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: NPM install and audit fix
|
||||
working-directory: src/Misc/expressionFunc/hashFiles
|
||||
run: |
|
||||
npm install
|
||||
|
||||
# Check what vulnerabilities exist
|
||||
echo "=== Checking current vulnerabilities ==="
|
||||
npm audit || true
|
||||
|
||||
# Apply audit fix --force to get security updates
|
||||
echo "=== Applying npm audit fix --force ==="
|
||||
npm audit fix --force
|
||||
|
||||
# Test if build still works and set status
|
||||
echo "=== Testing build compatibility ==="
|
||||
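# AUDIT_FIX_STATUS is written to $GITHUB_ENV so the "Create PR if changes exist" step below can branch on it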
if npm run all; then
|
||||
echo "✅ Build successful after audit fix"
|
||||
echo "AUDIT_FIX_STATUS=success" >> $GITHUB_ENV
|
||||
else
|
||||
echo "❌ Build failed after audit fix - will create PR with fix instructions"
|
||||
echo "AUDIT_FIX_STATUS=build_failed" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Create PR if changes exist
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Check if there are any changes
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Create branch and commit changes
|
||||
branch_name="chore/npm-audit-fix-$(date +%Y%m%d)"
|
||||
git checkout -b "$branch_name"
|
||||
git add .
|
||||
git commit -m "chore: npm audit fix for hashFiles dependencies" --no-verify
|
||||
git push origin "$branch_name"
|
||||
|
||||
# Create PR body based on what actually happened
|
||||
if [ "$AUDIT_FIX_STATUS" = "success" ]; then
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
|
||||
|
||||
**✅ Full Fix Applied Successfully**
|
||||
This update addresses npm security advisories and ensures dependencies are secure and up-to-date.
|
||||
|
||||
**Changes made:**
|
||||
- Applied `npm audit fix --force` to resolve security vulnerabilities
|
||||
- Updated package-lock.json with security patches
|
||||
- Verified build compatibility with `npm run all`
|
||||
|
||||
**Next steps:**
|
||||
- Review the dependency changes
|
||||
- Verify the hashFiles functionality still works as expected
|
||||
- Merge when ready
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||
EOF
|
||||
elif [ "$AUDIT_FIX_STATUS" = "build_failed" ]; then
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
|
||||
|
||||
**⚠️ Security Fixes Applied - Build Issues Need Manual Resolution**
|
||||
This update applies important security patches but causes build failures that require manual fixes.
|
||||
|
||||
**Changes made:**
|
||||
- Applied `npm audit fix --force` to resolve security vulnerabilities
|
||||
- Updated package-lock.json with security patches
|
||||
|
||||
**⚠️ Build Issues Detected:**
|
||||
The build fails after applying security fixes, likely due to TypeScript compatibility issues with updated `@types/node`.
|
||||
|
||||
**Required Manual Fixes:**
|
||||
1. Review TypeScript compilation errors in the build output
|
||||
2. Update TypeScript configuration if needed
|
||||
3. Consider pinning `@types/node` to a compatible version
|
||||
4. Run `npm run all` locally to verify fixes
|
||||
|
||||
**Next steps:**
|
||||
- **DO NOT merge until build issues are resolved**
|
||||
- Apply manual fixes for TypeScript compatibility
|
||||
- Test the hashFiles functionality still works as expected
|
||||
- Merge when build passes
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||
EOF
|
||||
else
|
||||
# Fallback case
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated npm audit attempted for security vulnerabilities in hashFiles dependencies.
|
||||
|
||||
**ℹ️ No Changes Applied**
|
||||
No security vulnerabilities were found or no changes were needed.
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "chore: npm audit fix for hashFiles dependencies" \
|
||||
--label "dependencies" \
|
||||
--label "dependencies-weekly-check" \
|
||||
--label "dependencies-not-dependabot" \
|
||||
--label "npm" \
|
||||
--label "typescript" \
|
||||
--label "security" \
|
||||
--body-file pr_body.txt
|
||||
else
|
||||
echo "✅ No changes to commit - npm audit fix did not modify any files"
|
||||
fi
|
||||
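The audit portion of this job can be rehearsed locally before the scheduled run; a minimal sketch that mirrors the commands and working directory used above:

```bash
# Local rehearsal of the scheduled audit, run from a clone of the repository root.
cd src/Misc/expressionFunc/hashFiles
npm install
npm audit || true        # report current vulnerabilities without failing the shell
npm audit fix --force    # apply fixes, possibly with breaking changes
npm run all              # verify the result the same way the workflow does
```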
.github/workflows/publish-image.yml (vendored, 68 lines removed)
@@ -1,68 +0,0 @@
|
||||
name: Publish Runner Image
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
runnerVersion:
|
||||
type: string
|
||||
description: Version of the runner being installed
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Compute image version
|
||||
id: image
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
const inputRunnerVersion = "${{ github.event.inputs.runnerVersion }}"
|
||||
if (inputRunnerVersion) {
|
||||
console.log(`Using input runner version ${inputRunnerVersion}`)
|
||||
core.setOutput('version', inputRunnerVersion);
|
||||
return
|
||||
}
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
console.log(`Using runner version ${runnerVersion}`)
|
||||
core.setOutput('version', runnerVersion);
|
||||
|
||||
- name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: ./images
|
||||
platforms: |
|
||||
linux/amd64
|
||||
linux/arm64
|
||||
tags: |
|
||||
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
|
||||
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
|
||||
build-args: |
|
||||
RUNNER_VERSION=${{ steps.image.outputs.version }}
|
||||
push: true
|
||||
labels: |
|
||||
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
||||
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
||||
org.opencontainers.image.licenses=MIT
|
||||
.github/workflows/release.yml (vendored, 509 lines changed)
@@ -11,16 +11,15 @@ jobs:
|
||||
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
# Make sure ./releaseVersion match ./src/runnerversion
|
||||
# Query GitHub releases to ensure the version is not already used
|
||||
- name: Check version
|
||||
uses: actions/github-script@0.3.0
|
||||
uses: actions/github-script@v8.0.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
const releaseVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '')
|
||||
@@ -30,7 +29,7 @@ jobs:
|
||||
return
|
||||
}
|
||||
try {
|
||||
const release = await github.repos.getReleaseByTag({
|
||||
const release = await github.rest.repos.getReleaseByTag({
|
||||
owner: '${{ github.event.repository.owner.name }}',
|
||||
repo: '${{ github.event.repository.name }}',
|
||||
tag: 'v' + runnerVersion
|
||||
@@ -53,27 +52,6 @@ jobs:
|
||||
win-arm64-sha: ${{ steps.sha.outputs.win-arm64-sha256 }}
|
||||
osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }}
|
||||
linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
|
||||
linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
|
||||
win-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-arm64-sha256 }}
|
||||
osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }}
|
||||
linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
|
||||
linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
|
||||
win-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-arm64-sha256 }}
|
||||
osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }}
|
||||
linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
|
||||
linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
|
||||
win-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-arm64-sha256 }}
|
||||
osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }}
|
||||
strategy:
|
||||
matrix:
|
||||
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64, win-arm64 ]
|
||||
@@ -99,7 +77,7 @@ jobs:
|
||||
devScript: ./dev.sh
|
||||
|
||||
- runtime: win-x64
|
||||
os: windows-2019
|
||||
os: windows-latest
|
||||
devScript: ./dev
|
||||
|
||||
- runtime: win-arm64
|
||||
@@ -108,7 +86,7 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
# Build runner layout
|
||||
- name: Build & Layout Release
|
||||
@@ -136,113 +114,67 @@ jobs:
|
||||
id: sha
|
||||
name: Compute SHA256
|
||||
working-directory: _package
|
||||
- run: |
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
|
||||
echo "sha256=$sha" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
id: sha_noexternals
|
||||
name: Compute SHA256
|
||||
working-directory: _package_trims/trim_externals
|
||||
- run: |
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
|
||||
echo "sha256=$sha" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
id: sha_noruntime
|
||||
name: Compute SHA256
|
||||
working-directory: _package_trims/trim_runtime
|
||||
- run: |
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
|
||||
echo "sha256=$sha" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
id: sha_noruntime_noexternals
|
||||
name: Compute SHA256
|
||||
working-directory: _package_trims/trim_runtime_externals
|
||||
|
||||
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
|
||||
if: matrix.runtime == 'win-x64' || matrix.runtime == 'win-arm64'
|
||||
uses: actions/github-script@0.3.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
|
||||
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
|
||||
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
|
||||
|
||||
console.log(trimmedPackages)
|
||||
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
|
||||
|
||||
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
|
||||
if: matrix.runtime != 'win-x64' && matrix.runtime != 'win-arm64'
|
||||
uses: actions/github-script@0.3.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
|
||||
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
|
||||
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
|
||||
|
||||
console.log(trimmedPackages)
|
||||
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
|
||||
|
||||
# Upload runner package tar.gz/zip as artifact.
|
||||
# Since each package name is unique, we don't need to put ${{matrix}} info into the artifact name
|
||||
- name: Publish Artifact
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: runner-packages
|
||||
name: runner-packages-${{ matrix.runtime }}
|
||||
path: |
|
||||
_package
|
||||
_package_trims/trim_externals
|
||||
_package_trims/trim_runtime
|
||||
_package_trims/trim_runtime_externals
|
||||
${{ matrix.runtime }}-trimmedpackages.json
|
||||
|
||||
release:
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
# Download runner package tar.gz/zip produced by 'build' job
|
||||
- name: Download Artifact
|
||||
uses: actions/download-artifact@v1
|
||||
- name: Download Artifact (win-x64)
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: runner-packages
|
||||
name: runner-packages-win-x64
|
||||
path: ./
|
||||
- name: Download Artifact (win-arm64)
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: runner-packages-win-arm64
|
||||
path: ./
|
||||
- name: Download Artifact (osx-x64)
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: runner-packages-osx-x64
|
||||
path: ./
|
||||
- name: Download Artifact (osx-arm64)
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: runner-packages-osx-arm64
|
||||
path: ./
|
||||
- name: Download Artifact (linux-x64)
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: runner-packages-linux-x64
|
||||
path: ./
|
||||
- name: Download Artifact (linux-arm)
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: runner-packages-linux-arm
|
||||
path: ./
|
||||
- name: Download Artifact (linux-arm64)
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
name: runner-packages-linux-arm64
|
||||
path: ./
|
||||
|
||||
# Create ReleaseNote file
|
||||
- name: Create ReleaseNote
|
||||
id: releaseNote
|
||||
uses: actions/github-script@0.3.0
|
||||
uses: actions/github-script@v8.0.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
var releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
|
||||
@@ -253,33 +185,11 @@ jobs:
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-arm64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
|
||||
console.log(releaseNote)
|
||||
core.setOutput('version', runnerVersion);
|
||||
core.setOutput('note', releaseNote);
|
||||
|
||||
- name: Validate Packages HASH
|
||||
working-directory: _package
|
||||
run: |
|
||||
ls -l
|
||||
echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
|
||||
@@ -304,379 +214,93 @@ jobs:
|
||||
|
||||
# Upload release assets (full runner packages)
|
||||
- name: Upload Release Asset (win-x64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (win-arm64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_path: ${{ github.workspace }}/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim externals)
|
||||
- name: Upload Release Asset (win-x64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim externals)
|
||||
- name: Upload Release Asset (win-arm64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime)
|
||||
- name: Upload Release Asset (win-x64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime)
|
||||
- name: Upload Release Asset (win-arm64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime and externals)
|
||||
- name: Upload Release Asset (win-x64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime and externals)
|
||||
- name: Upload Release Asset (win-arm64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trimmedpackages.json)
|
||||
- name: Upload Release Asset (win-x64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trimmedpackages.json)
|
||||
- name: Upload Release Asset (win-arm64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/win-arm64-trimmedpackages.json
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
publish-image:
|
||||
needs: release
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
attestations: write
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Compute image version
|
||||
id: image
|
||||
uses: actions/github-script@v6
|
||||
uses: actions/github-script@v8.0.0
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
@@ -685,10 +309,10 @@ jobs:
|
||||
core.setOutput('version', runnerVersion);
|
||||
|
||||
- name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -696,7 +320,7 @@ jobs:
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v3
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ./images
|
||||
platforms: |
|
||||
@@ -712,3 +336,10 @@ jobs:
|
||||
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
||||
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
||||
org.opencontainers.image.licenses=MIT
|
||||
|
||||
- name: Generate attestation
|
||||
uses: actions/attest-build-provenance@v3
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
|
||||
.github/workflows/stale-bot.yml (vendored, 13 lines changed)
@@ -1,4 +1,4 @@
name: ‘Close stale Runner issues’
name: Stale Bot
on:
workflow_dispatch:
schedule:
@@ -7,11 +7,10 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
- uses: actions/stale@v10
with:
stale-issue-message: ‘This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days.’
close-issue-message: ‘This issue was closed because it has been stalled for 15 days with no activity.’
exempt-issue-labels: ‘keep’
stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."
exempt-issue-labels: "keep"
days-before-stale: 365
days-before-close: 15
debug-only: true
days-before-close: 15
.gitignore (vendored, 1 line changed)
@@ -26,4 +26,5 @@ _dotnetsdk
TestResults
TestLogs
.DS_Store
.mono
**/*.DotSettings.user
.husky/pre-commit (new executable file, 1 line)
@@ -0,0 +1 @@
cd src/Misc/expressionFunc/hashFiles && npx lint-staged
README.md (18 lines changed)
@@ -20,6 +20,20 @@ Runner releases:

 [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases)

## Contribute
### Note

We accept contributions in the form of issues and pull requests. The runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page. [Read more about our guidelines here](docs/contribute.md) before contributing.
Thank you for your interest in this GitHub repo, however, right now we are not taking contributions.

We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we’re working on and what stage they’re in.

We are taking the following steps to better direct requests related to GitHub Actions, including:

1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)

2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.

3. Security Issues should be handled as per our [security.md](security.md)

We will still provide security updates for this project and fix major breaking changes during this time.

You are welcome to still raise bugs in this repo.

@@ -250,6 +250,42 @@ Two problem matchers can be used:
}
```

#### Default from path

The problem matcher can specify a `fromPath` property at the top level, which applies when a specific pattern doesn't provide a value for `fromPath`. This is useful for tools that don't include project file information in their output.

For example, given the following compiler output that doesn't include project file information:

```
ClassLibrary.cs(16,24): warning CS0612: 'ClassLibrary.Helpers.MyHelper.Name' is obsolete
```

A problem matcher with a default from path can be used:

```json
{
  "problemMatcher": [
    {
      "owner": "csc-minimal",
      "fromPath": "ClassLibrary/ClassLibrary.csproj",
      "pattern": [
        {
          "regexp": "^(.+)\\((\\d+),(\\d+)\\): (error|warning) (.+): (.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "severity": 4,
          "code": 5,
          "message": 6
        }
      ]
    }
  ]
}
```

This ensures that the file is rooted to the correct path when there's not enough information in the error messages to extract a `fromPath`.

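In a workflow, a matcher file like the one above is registered with the `add-matcher` workflow command before the step whose output should be scanned; a minimal sketch (the JSON path is an assumed location):

```bash
# Register the matcher for subsequent log lines (path is illustrative).
echo "::add-matcher::.github/problem-matchers/csc-minimal.json"
# ... run the build whose output should be annotated ...
# Unregister it by owner when it is no longer needed.
echo "::remove-matcher owner=csc-minimal::"
```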
#### Mitigate regular expression denial of service (ReDos)

If a matcher exceeds a 1 second timeout when processing a line, it is retried up to two more times (three attempts in total).

@@ -23,7 +23,7 @@ This feature is mainly intended for self hosted runner administrators.
- `ACTIONS_RUNNER_HOOK_JOB_STARTED`
- `ACTIONS_RUNNER_HOOK_JOB_COMPLETED`

You can set these variables to the **absolute** path of a a `.sh` or `.ps1` file.
You can set these variables to the **absolute** path of a `.sh` or `.ps1` file.

We will execute `pwsh` (fallback to `powershell`) or `bash` (fallback to `sh`) as appropriate.
- `.sh` files will execute with the args `-e {pathtofile}`

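For example, a minimal `.sh` hook could look like the sketch below; the path and the way the variable is set are illustrative, the runner only requires that the variable holds an absolute path to the script:

```bash
#!/bin/bash
# Illustrative hook script, e.g. saved as /opt/runner-hooks/job-started.sh, with
# ACTIONS_RUNNER_HOOK_JOB_STARTED=/opt/runner-hooks/job-started.sh set in the runner's environment.
echo "Job starting on $(hostname) at $(date -u)"
```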
@@ -7,19 +7,26 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

- For GitHub.com
- The runner needs to access `https://api.github.com` for downloading actions.
- The runner needs to access `https://codeload.github.com` for downloading actions tar.gz/zip.
- The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
- The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
- The runner needs to access `https://results-receiver.actions.githubusercontent.com/.../` for reporting progress and uploading logs during a workflow job execution.
---
**NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).

These can be tested by running the following `curl` commands from your self-hosted runner machine:

```
curl -v https://api.github.com/api/v3/zen
curl -v https://api.github.com/zen
curl -v https://codeload.github.com/_ping
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
curl -v https://results-receiver.actions.githubusercontent.com/health
```

- For GitHub Enterprise Server
- The runner needs to access `https://[hostname]/api/v3` for downloading actions.
- The runner needs to access `https://codeload.[hostname]/_ping` for downloading actions tar.gz/zip.
- The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
- The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.

@@ -27,6 +34,7 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

```
curl -v https://[hostname]/api/v3/zen
curl -v https://codeload.[hostname]/_ping
curl -v https://[hostname]/_services/vstoken/_apis/health
curl -v https://[hostname]/_services/pipelines/_apis/health
```
@@ -42,6 +50,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
|
||||
- Ping api.github.com or myGHES.com using dotnet
|
||||
- Make HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, check response headers contains `X-GitHub-Request-Id`
|
||||
---
|
||||
- DNS lookup for codeload.github.com or codeload.myGHES.com using dotnet
|
||||
- Ping codeload.github.com or codeload.myGHES.com using dotnet
|
||||
- Make HTTP GET to https://codeload.github.com/_ping or https://codeload.myGHES.com/_ping using dotnet, check response headers contains `X-GitHub-Request-Id`
|
||||
---
|
||||
- DNS lookup for vstoken.actions.githubusercontent.com using dotnet
|
||||
- Ping vstoken.actions.githubusercontent.com using dotnet
|
||||
- Make HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
|
||||
@@ -50,6 +62,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
|
||||
- Ping pipelines.actions.githubusercontent.com using dotnet
- Make an HTTP GET request to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`
- Make an HTTP POST request to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`

---

- DNS lookup for results-receiver.actions.githubusercontent.com using dotnet
- Ping results-receiver.actions.githubusercontent.com using dotnet
- Make an HTTP GET request to https://results-receiver.actions.githubusercontent.com/health using dotnet, and check that the response headers contain `X-GitHub-Request-Id`
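These checks can also be approximated from a shell before reaching for the dotnet-based diagnostics. A minimal sketch using `curl` against the same hostnames listed above (header names as documented here; this is a spot check, not the runner's own diagnostic):

```bash
# Dump response headers only and confirm the expected diagnostic header is present.
curl -s -D - -o /dev/null https://api.github.com | grep -i "x-github-request-id"
curl -s -D - -o /dev/null https://codeload.github.com/_ping | grep -i "x-github-request-id"
curl -s -D - -o /dev/null https://vstoken.actions.githubusercontent.com/_apis/health | grep -i "x-vss-e2eid"
curl -s -D - -o /dev/null https://pipelines.actions.githubusercontent.com/_apis/health | grep -i "x-vss-e2eid"
curl -s -D - -o /dev/null https://results-receiver.actions.githubusercontent.com/health | grep -i "x-github-request-id"
```

For GitHub Enterprise Server, replace the hostnames with the `https://myGHES.com/_services/...` equivalents shown above.
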
## How to fix the issue?
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
|
||||
- A proxy may try to modify the HTTPS request (for example, adding or changing some HTTP headers), which can make the request incompatible with the Actions Service (ASP.NET Core), e.g. [Nginx](https://github.com/dotnet/aspnetcore/issues/17081)
|
||||
|
||||
- Firewall rules that block the action runner from accessing certain hosts, e.g. `*.github.com`, `*.actions.githubusercontent.com`, etc.
- Firewall rules that block the action runner from accessing [certain hosts](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github), e.g. `*.github.com`, `*.actions.githubusercontent.com`, etc.
|
||||
|
||||
|
||||
### Identify and solve these problems
|
||||
@@ -42,6 +42,7 @@ If you are having trouble connecting, try these steps:
|
||||
- https://api.github.com/
|
||||
- https://vstoken.actions.githubusercontent.com/_apis/health
|
||||
- https://pipelines.actions.githubusercontent.com/_apis/health
|
||||
- https://results-receiver.actions.githubusercontent.com/health
|
||||
- For GHES/GHAE
|
||||
- https://myGHES.com/_services/vstoken/_apis/health
|
||||
- https://myGHES.com/_services/pipelines/_apis/health
|
||||
|
||||
@@ -4,9 +4,9 @@
|
||||
|
||||
Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.
|
||||
|
||||
The runner carries its own copy of the node.js executable under `<runner_root>/externals/node16/`.
The runner carries its own copies of the node.js executables under `<runner_root>/externals/node20/` and `<runner_root>/externals/node24/`.

All JavaScript-based Actions are executed by the built-in `node` at `<runner_root>/externals/node16/`.
All JavaScript-based Actions are executed by the built-in `node` at either `<runner_root>/externals/node20/` or `<runner_root>/externals/node24/`, depending on the version specified in the action's metadata.
|
||||
|
||||
> Not the `node` from `$PATH`
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Contributions
|
||||
|
||||
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.
|
||||
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors. Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.
|
||||
|
||||
> IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost.
|
||||
|
||||
|
||||
217
docs/dependency-management.md
Normal file
@@ -0,0 +1,217 @@
|
||||
# Runner Dependency Management Process
|
||||
|
||||
## Overview
|
||||
|
||||
This document outlines the automated dependency management process for the GitHub Actions Runner, designed to ensure we maintain up-to-date and secure dependencies while providing predictable release cycles.
|
||||
|
||||
## Release Schedule
|
||||
|
||||
- **Monthly Runner Releases**: New runner versions are released monthly
|
||||
- **Weekly Dependency Checks**: Automated workflows check for dependency updates every Monday
|
||||
- **Security Patches**: Critical security vulnerabilities are addressed immediately outside the regular schedule
|
||||
|
||||
## Automated Workflows
|
||||
|
||||
**Note**: These workflows are implemented across separate PRs for easier review and independent deployment. Each workflow includes comprehensive error handling and security-focused vulnerability detection.
|
||||
|
||||
### 1. Foundation Labels
|
||||
|
||||
- **Workflow**: `.github/workflows/setup-labels.yml` (PR #4024)
|
||||
- **Purpose**: Creates consistent dependency labels for all automation workflows
|
||||
- **Labels**: `dependencies`, `security`, `typescript`, `needs-manual-review`
|
||||
- **Prerequisite**: Must be merged before other workflows for proper labeling
|
||||
|
||||
### 2. Node.js Version Updates
|
||||
|
||||
- **Workflow**: `.github/workflows/node-upgrade.yml`
|
||||
- **Schedule**: Mondays at 6:00 AM UTC
|
||||
- **Purpose**: Updates Node.js 20 and 24 versions in `src/Misc/externals.sh`
|
||||
- **Source**: [nodejs.org](https://nodejs.org) and [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
|
||||
- **Priority**: First (NPM depends on current Node.js versions)
|
||||
|
||||
### 3. NPM Security Audit
|
||||
|
||||
- **Primary Workflow**: `.github/workflows/npm-audit.yml` ("NPM Audit Fix")
|
||||
- **Schedule**: Mondays at 7:00 AM UTC
|
||||
- **Purpose**: Automated security vulnerability detection and basic fixes
|
||||
- **Location**: `src/Misc/expressionFunc/hashFiles/`
|
||||
- **Features**: npm audit, security patch application, PR creation
|
||||
- **Dependency**: Runs after Node.js updates for optimal compatibility
|
||||
|
||||
- **Fallback Workflow**: `.github/workflows/npm-audit-typescript.yml` ("NPM Audit Fix with TypeScript Auto-Fix")
|
||||
- **Trigger**: Manual dispatch only
|
||||
- **Purpose**: Manual security audit with TypeScript compatibility fixes
|
||||
- **Use Case**: When scheduled workflow fails or needs custom intervention
|
||||
- **Features**: Enhanced TypeScript auto-repair, graduated security response
|
||||
- **How to Use**:
|
||||
1. If the scheduled "NPM Audit Fix" workflow fails, go to Actions tab
|
||||
2. Select "NPM Audit Fix with TypeScript Auto-Fix" workflow
|
||||
3. Click "Run workflow" and optionally specify fix level (auto/manual)
|
||||
4. Review the generated PR for TypeScript compatibility issues
|
||||
|
||||
### 4. .NET SDK Updates
|
||||
|
||||
- **Workflow**: `.github/workflows/dotnet-upgrade.yml`
|
||||
- **Schedule**: Mondays at midnight UTC
|
||||
- **Purpose**: Updates .NET SDK and package versions with build validation
|
||||
- **Features**: Global.json updates, NuGet package management, compatibility checking
|
||||
- **Independence**: Runs independently of Node.js/NPM updates
|
||||
|
||||
### 5. Docker/Buildx Updates
|
||||
|
||||
- **Workflow**: `.github/workflows/docker-buildx-upgrade.yml` ("Docker/Buildx Version Upgrade")
|
||||
- **Schedule**: Mondays at midnight UTC
|
||||
- **Purpose**: Updates Docker and Docker Buildx versions with multi-platform validation
|
||||
- **Features**: Container security scanning, multi-architecture build testing
|
||||
- **Independence**: Runs independently of other dependency updates
|
||||
|
||||
### 6. Dependency Monitoring
|
||||
|
||||
- **Workflow**: `.github/workflows/dependency-check.yml` ("Dependency Status Check")
|
||||
- **Schedule**: Mondays at 11:00 AM UTC
|
||||
- **Purpose**: Comprehensive status report of all dependencies with security audit
|
||||
- **Features**: Multi-dependency checking, npm audit status, build validation, choice of specific component checks
|
||||
- **Summary**: Runs last to capture results from all morning dependency updates
|
||||
|
||||
## Release Process Integration
|
||||
|
||||
### Pre-Release Checklist
|
||||
|
||||
Before each monthly runner release:
|
||||
|
||||
1. **Check Dependency PRs**:
|
||||
|
||||
```bash
|
||||
# List all open dependency PRs
|
||||
gh pr list --label "dependencies" --state open
|
||||
|
||||
# List only automated weekly dependency updates
|
||||
gh pr list --label "dependencies-weekly-check" --state open
|
||||
|
||||
# List only custom dependency automation (not dependabot)
|
||||
gh pr list --label "dependencies-not-dependabot" --state open
|
||||
```
|
||||
|
||||
2. **Run Manual Dependency Check**:
|
||||
- Go to Actions tab → "Dependency Status Check" → "Run workflow"
|
||||
- Review the summary for any outdated dependencies
|
||||
|
||||
3. **Review and Merge Updates**:
|
||||
- Prioritize security-related updates
|
||||
- Test dependency updates in development environment
|
||||
- Merge approved dependency PRs
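For the review-and-merge step, a hedged `gh` CLI sketch (the PR number is a placeholder, and `--squash` is just one possible merge strategy, not a policy stated here):

```bash
# Surface security-related dependency PRs first.
gh pr list --label "dependencies" --label "security" --state open

# For an approved PR, confirm CI is green and then merge it.
gh pr checks 4000                          # 4000 is a placeholder PR number
gh pr merge 4000 --squash --delete-branch  # placeholder number; use the merge strategy your team prefers
```
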
|
||||
|
||||
### Vulnerability Response
|
||||
|
||||
#### Critical Security Vulnerabilities
|
||||
|
||||
- **Response Time**: Within 24 hours
|
||||
- **Process**:
|
||||
1. Assess impact on runner security
|
||||
2. Create hotfix branch if runner data security is affected
|
||||
3. Expedite patch release if necessary
|
||||
4. Document in security advisory if applicable
|
||||
|
||||
#### Non-Critical Vulnerabilities
|
||||
|
||||
- **Response Time**: Next monthly release
|
||||
- **Process**:
|
||||
1. Evaluate if vulnerability affects runner functionality
|
||||
2. Include fix in regular dependency update cycle
|
||||
3. Document in release notes
|
||||
|
||||
## Monitoring and Alerts
|
||||
|
||||
### GitHub Actions Workflow Status
|
||||
|
||||
- All dependency workflows create PRs with the `dependencies` label
|
||||
- Failed workflows should be investigated immediately
|
||||
- Weekly dependency status reports are generated automatically
|
||||
|
||||
### Manual Checks
|
||||
|
||||
You can manually trigger dependency checks:
|
||||
|
||||
- **Full Status**: Run "Dependency Status Check" workflow
|
||||
- **Specific Component**: Use the dropdown to check individual dependencies
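If you prefer the command line to the Actions tab, the same checks can be triggered with `gh` (a sketch; the workflow name is the one referenced above):

```bash
# Kick off the full dependency status report.
gh workflow run "Dependency Status Check"

# Watch the most recent run of that workflow.
gh run list --workflow "Dependency Status Check" --limit 1
```
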
|
||||
|
||||
## Dependency Labels
|
||||
|
||||
All automated dependency PRs are tagged with labels for easy filtering and management:
|
||||
|
||||
### Primary Labels
|
||||
|
||||
- **`dependencies`**: All automated dependency-related PRs
|
||||
- **`dependencies-weekly-check`**: Automated weekly dependency updates from scheduled workflows
|
||||
- **`dependencies-not-dependabot`**: Custom dependency automation (not created by dependabot)
|
||||
- **`security`**: Security vulnerability fixes and patches
|
||||
- **`typescript`**: TypeScript compatibility and type definition updates
|
||||
- **`needs-manual-review`**: Complex updates requiring human verification
|
||||
|
||||
### Technology-Specific Labels
|
||||
|
||||
- **`node`**: Node.js version updates
|
||||
- **`javascript`**: JavaScript runtime and tooling updates
|
||||
- **`npm`**: NPM package and security updates
|
||||
- **`dotnet`**: .NET SDK and NuGet package updates
|
||||
- **`docker`**: Docker and container tooling updates
|
||||
|
||||
### Workflow-Specific Branches
|
||||
|
||||
- **Node.js updates**: `chore/update-node` branch
|
||||
- **NPM security fixes**: `chore/npm-audit-fix-YYYYMMDD` and `chore/npm-audit-fix-with-ts-repair` branches
|
||||
- **NuGet/.NET updates**: `feature/dotnetsdk-upgrade/{version}` branches
|
||||
- **Docker updates**: `feature/docker-buildx-upgrade` branch
|
||||
|
||||
## Special Considerations
|
||||
|
||||
### Node.js Updates
|
||||
|
||||
When updating Node.js versions, remember to:
|
||||
|
||||
1. Create a corresponding release in [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
|
||||
2. Follow the alpine_nodejs getting started guide
|
||||
3. Test container builds with new Node versions
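A quick sketch of how these steps can be sanity-checked from a shell (the version shown is the NODE20 value pinned in `src/Misc/externals.sh` in this changeset and will drift over time):

```bash
# Confirm which Node versions the runner pins.
grep -E 'NODE(20|24)_VERSION=' src/Misc/externals.sh

# Verify the matching alpine_nodejs release asset exists (URL pattern taken from externals.sh).
# A 302 redirect means the asset is published; a 404 means the alpine_nodejs release is missing.
curl -sI "https://github.com/actions/alpine_nodejs/releases/download/v20.19.5/node-v20.19.5-alpine-x64.tar.gz" | head -n 1
```
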
|
||||
|
||||
### .NET SDK Updates
|
||||
|
||||
- Only patch versions are auto-updated within the same major.minor version
|
||||
- Major/minor version updates require manual review and testing
|
||||
|
||||
### Docker Updates
|
||||
|
||||
- Updates include both Docker Engine and Docker Buildx
|
||||
- Verify compatibility with runner container workflows
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **NPM Audit Workflow Fails**:
|
||||
- Check if `package.json` exists in `src/Misc/expressionFunc/hashFiles/`
|
||||
- Verify Node.js setup step succeeded
|
||||
|
||||
2. **Version Detection Fails**:
|
||||
- Check if upstream APIs are available
|
||||
- Verify parsing logic for version extraction
|
||||
|
||||
3. **PR Creation Fails**:
|
||||
- Ensure `GITHUB_TOKEN` has sufficient permissions
|
||||
- Check if branch already exists
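To reproduce the NPM audit step locally before debugging the workflow itself (a minimal sketch; the path is the package location named above):

```bash
cd src/Misc/expressionFunc/hashFiles
npm ci                     # install the locked dependency tree
npm audit                  # report known vulnerabilities
npm audit fix --dry-run    # preview what the automated fix would change
```
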
|
||||
|
||||
### Contact
|
||||
|
||||
For questions about the dependency management process:
|
||||
|
||||
- Create an issue with the `dependencies` label
|
||||
- Review existing dependency management workflows
|
||||
- Consult the runner team for security-related concerns
|
||||
|
||||
## Metrics and KPIs
|
||||
|
||||
Track these metrics to measure dependency management effectiveness:
|
||||
|
||||
- Number of open dependency PRs at release time
|
||||
- Time to merge dependency updates
|
||||
- Number of security vulnerabilities by severity
|
||||
- Release cycle adherence (monthly target)
|
||||
@@ -4,16 +4,7 @@
|
||||
|
||||
## Supported Distributions and Versions
|
||||
|
||||
x64
|
||||
- Red Hat Enterprise Linux 7
|
||||
- CentOS 7
|
||||
- Oracle Linux 7
|
||||
- Fedora 29+
|
||||
- Debian 9+
|
||||
- Ubuntu 16.04+
|
||||
- Linux Mint 18+
|
||||
- openSUSE 15+
|
||||
- SUSE Enterprise Linux (SLES) 12 SP2+
|
||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#linux)."
|
||||
|
||||
## Install .Net Core 3.x Linux Dependencies
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
|
||||
## Supported Versions
|
||||
|
||||
- macOS High Sierra (10.13) and later versions
|
||||
- x64 and arm64 (Apple Silicon)
|
||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#macos)."
|
||||
|
||||
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)
|
||||
|
||||
@@ -2,11 +2,6 @@
|
||||
|
||||
## Supported Versions
|
||||
|
||||
- Windows 7 64-bit
|
||||
- Windows 8.1 64-bit
|
||||
- Windows 10 64-bit
|
||||
- Windows Server 2012 R2 64-bit
|
||||
- Windows Server 2016 64-bit
|
||||
- Windows Server 2019 64-bit
|
||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#windows)."
|
||||
|
||||
## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
# Source: https://github.com/dotnet/dotnet-docker
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy AS build
|
||||
|
||||
ARG TARGETOS
|
||||
ARG TARGETARCH
|
||||
ARG RUNNER_VERSION
|
||||
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.2
|
||||
ARG DOCKER_VERSION=20.10.23
|
||||
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0
|
||||
ARG DOCKER_VERSION=28.5.1
|
||||
ARG BUILDX_VERSION=0.29.1
|
||||
|
||||
RUN apt update -y && apt install curl unzip -y
|
||||
|
||||
@@ -20,23 +21,37 @@ RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-c
|
||||
&& unzip ./runner-container-hooks.zip -d ./k8s \
|
||||
&& rm runner-container-hooks.zip
|
||||
|
||||
RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-container-hooks/releases/download/v0.8.0/actions-runner-hooks-k8s-0.8.0.zip \
|
||||
&& unzip ./runner-container-hooks.zip -d ./k8s-novolume \
|
||||
&& rm runner-container-hooks.zip
|
||||
|
||||
RUN export RUNNER_ARCH=${TARGETARCH} \
|
||||
&& if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
|
||||
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
|
||||
&& curl -fLo docker.tgz https://download.docker.com/${TARGETOS}/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
|
||||
&& tar zxvf docker.tgz \
|
||||
&& rm -rf docker.tgz
|
||||
&& rm -rf docker.tgz \
|
||||
&& mkdir -p /usr/local/lib/docker/cli-plugins \
|
||||
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
|
||||
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
|
||||
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
|
||||
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
ENV RUNNER_MANUALLY_TRAP_SIG=1
|
||||
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
|
||||
ENV ImageOS=ubuntu22
|
||||
|
||||
RUN apt-get update -y \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
sudo \
|
||||
lsb-release \
|
||||
# 'gpg-agent' and 'software-properties-common' are needed for the 'add-apt-repository' command that follows
|
||||
RUN apt update -y \
|
||||
&& apt install -y --no-install-recommends sudo lsb-release gpg-agent software-properties-common curl jq unzip \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Configure git-core/ppa based on guidance here: https://git-scm.com/download/linux
|
||||
RUN add-apt-repository ppa:git-core/ppa \
|
||||
&& apt update -y \
|
||||
&& apt install -y git \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN adduser --disabled-password --gecos "" --uid 1001 runner \
|
||||
@@ -49,6 +64,7 @@ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
|
||||
WORKDIR /home/runner
|
||||
|
||||
COPY --chown=runner:docker --from=build /actions-runner .
|
||||
COPY --from=build /usr/local/lib/docker/cli-plugins/docker-buildx /usr/local/lib/docker/cli-plugins/docker-buildx
|
||||
|
||||
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
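A hedged sketch of how this image might be built with the arguments declared above (the image tag and registry are placeholders, and the runner version is taken from the release notes later in this changeset):

```bash
# Multi-arch build using the ARGs from the Dockerfile above.
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  --build-arg RUNNER_VERSION=2.329.0 \
  -t registry.example.com/actions-runner:2.329.0 \
  --push .
```
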
|
||||
|
||||
|
||||
103
releaseNote.md
@@ -1,29 +1,55 @@
|
||||
## Features
|
||||
- Support linux/arm64 docker build (#2601)
|
||||
- Add node20 to runner (#2732)
|
||||
- Update node16 to latest version (#2736)
|
||||
- Remove node12 from runner (#2717)
|
||||
## What's Changed
|
||||
* Update safe_sleep.sh for bug when scheduler is paused for more than 1 second by @horner in https://github.com/actions/runner/pull/3157
|
||||
* Acknowledge runner request by @ericsciple in https://github.com/actions/runner/pull/3996
|
||||
* Update Docker to v28.3.3 and Buildx to v0.27.0 by @github-actions[bot] in https://github.com/actions/runner/pull/3999
|
||||
* Update dotnet sdk to latest version @8.0.413 by @github-actions[bot] in https://github.com/actions/runner/pull/4000
|
||||
* Bump actions/attest-build-provenance from 2 to 3 by @dependabot[bot] in https://github.com/actions/runner/pull/4002
|
||||
* Bump @typescript-eslint/eslint-plugin from 6.7.2 to 8.35.0 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/3920
|
||||
* Bump husky from 8.0.3 to 9.1.7 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/3842
|
||||
* Bump @vercel/ncc from 0.38.0 to 0.38.3 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/3841
|
||||
* Bump eslint-plugin-github from 4.10.0 to 4.10.2 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/3180
|
||||
* Bump typescript from 5.2.2 to 5.9.2 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4007
|
||||
* chore: migrate Husky config from v8 to v9 format by @salmanmkc in https://github.com/actions/runner/pull/4003
|
||||
* Map RUNNER_TEMP for container action by @ericsciple in https://github.com/actions/runner/pull/4011
|
||||
* Break UseV2Flow into UseV2Flow and UseRunnerAdminFlow. by @TingluoHuang in https://github.com/actions/runner/pull/4013
|
||||
* Update Docker to v28.4.0 and Buildx to v0.28.0 by @github-actions[bot] in https://github.com/actions/runner/pull/4020
|
||||
* Bump node.js to latest version in runner. by @TingluoHuang in https://github.com/actions/runner/pull/4022
|
||||
* feat: add automated .NET dependency management workflow by @salmanmkc in https://github.com/actions/runner/pull/4028
|
||||
* feat: add automated Docker BuildX dependency management workflow by @salmanmkc in https://github.com/actions/runner/pull/4029
|
||||
* feat: add automated Node.js version management workflow by @salmanmkc in https://github.com/actions/runner/pull/4026
|
||||
* feat: add comprehensive NPM security management workflow by @salmanmkc in https://github.com/actions/runner/pull/4027
|
||||
* feat: add comprehensive dependency monitoring system by @salmanmkc in https://github.com/actions/runner/pull/4025
|
||||
* Use BrokerURL when using RunnerAdmin by @luketomlinson in https://github.com/actions/runner/pull/4044
|
||||
* Bump actions/github-script from 7.0.1 to 8.0.0 by @dependabot[bot] in https://github.com/actions/runner/pull/4016
|
||||
* Bump actions/stale from 9 to 10 by @dependabot[bot] in https://github.com/actions/runner/pull/4015
|
||||
* fix: prevent Node.js upgrade workflow from creating PRs with empty versions by @salmanmkc in https://github.com/actions/runner/pull/4055
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4057
|
||||
* Bump actions/setup-node from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/4037
|
||||
* Bump Azure.Storage.Blobs from 12.25.0 to 12.25.1 by @dependabot[bot] in https://github.com/actions/runner/pull/4058
|
||||
* Update Docker to v28.5.0 and Buildx to v0.29.1 by @github-actions[bot] in https://github.com/actions/runner/pull/4069
|
||||
* Bump github/codeql-action from 3 to 4 by @dependabot[bot] in https://github.com/actions/runner/pull/4072
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4075
|
||||
* Include k8s novolume (version v0.8.0) by @nikola-jokic in https://github.com/actions/runner/pull/4063
|
||||
* Make sure runner-admin has both auth_url and auth_url_v2. by @TingluoHuang in https://github.com/actions/runner/pull/4066
|
||||
* Report job has infra failure to run-service by @TingluoHuang in https://github.com/actions/runner/pull/4073
|
||||
* Bump actions/setup-node from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4078
|
||||
|
||||
## Misc
|
||||
- Pass timeout in ExecutionContext instead of StepsRunner (#2714)
|
||||
- Return early on invalid_client OAuth exception (#2721)
|
||||
- Expose results service endpoint as environment variable (#2726)
|
||||
- Update HTTPEventSourceListener to trace the right events (#2727)
|
||||
- Change RunnerId/AgentId from int32 to uint64 (#2661)
|
||||
- Configure stale bot for Runner (#2729)
|
||||
- Add in dependabot security scanning/updates (#2743)
|
||||
- Bump dotnet sdk to latest version (#2733)
|
||||
- Switch from InnerException to ErrorCode on disableupdate check (#2718)
|
||||
## New Contributors
|
||||
* @horner made their first contribution in https://github.com/actions/runner/pull/3157
|
||||
|
||||
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
||||
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.328.0...v2.329.0
|
||||
|
||||
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
||||
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
||||
See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_
|
||||
|
||||
## Windows x64
|
||||
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
|
||||
|
||||
The following snippet needs to be run in `powershell`:
|
||||
``` powershell
|
||||
|
||||
```powershell
|
||||
# Create a folder under the drive root
|
||||
mkdir \actions-runner ; cd \actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -33,13 +59,13 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
|
||||
```
|
||||
|
||||
## [Pre-release] Windows arm64
|
||||
**Warning:** Windows arm64 runners are currently in preview status and use [unofficial versions of nodejs](https://unofficial-builds.nodejs.org/). They are not intended for production workflows.
|
||||
## Windows arm64
|
||||
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
|
||||
|
||||
The following snippet needs to be run in `powershell`:
|
||||
``` powershell
|
||||
|
||||
```powershell
|
||||
# Create a folder under the drive root
|
||||
mkdir \actions-runner ; cd \actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -51,7 +77,7 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
|
||||
## OSX x64
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -62,7 +88,7 @@ tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## OSX arm64 (Apple silicon)
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -73,7 +99,7 @@ tar xzf ./actions-runner-osx-arm64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Linux x64
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -84,7 +110,7 @@ tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Linux arm64
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -95,7 +121,7 @@ tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Linux arm
|
||||
|
||||
``` bash
|
||||
```bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -105,6 +131,7 @@ tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
|
||||
## Using your self hosted runner
|
||||
|
||||
For additional details about configuring, running, or shutting down the runner please check out our [product docs.](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners)
|
||||
|
||||
## SHA-256 Checksums
|
||||
@@ -118,27 +145,3 @@ The SHA-256 checksums for the packages included in this build are shown below:
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
|
||||
- actions-runner-win-arm64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-arm64_noexternals --><WIN_ARM64_SHA_NOEXTERNALS><!-- END SHA win-arm64_noexternals -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noexternals --><OSX_ARM64_SHA_NOEXTERNALS><!-- END SHA osx-arm64_noexternals -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
|
||||
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-arm64_noruntime --><WIN_ARM64_SHA_NORUNTIME><!-- END SHA win-arm64_noruntime -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-arm64_noruntime --><OSX_ARM64_SHA_NORUNTIME><!-- END SHA osx-arm64_noruntime -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
|
||||
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-arm64_noruntime_noexternals --><WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-arm64_noruntime_noexternals -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noruntime_noexternals --><OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-arm64_noruntime_noexternals -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->
|
||||
|
||||
@@ -1 +1 @@
|
||||
2.308.0
|
||||
<Update to ./src/runnerversion when creating release>
|
||||
|
||||
@@ -8,7 +8,7 @@ set -e
|
||||
# Configures it as a service more secure
|
||||
# Should be used on VMs and not containers
|
||||
# Works on OSX and Linux
|
||||
# Assumes x64 arch
|
||||
# Assumes x64 arch (arm64 is also supported)
|
||||
# See EXAMPLES below
|
||||
|
||||
flags_found=false
|
||||
@@ -87,6 +87,9 @@ sudo echo
|
||||
runner_plat=linux
|
||||
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
|
||||
|
||||
runner_arch=x64
|
||||
[ ! -z "$(arch | grep arm64)" ] && runner_arch=arm64
|
||||
|
||||
function fatal()
|
||||
{
|
||||
echo "error: $1" >&2
|
||||
@@ -139,7 +142,7 @@ echo "Downloading latest runner ..."
|
||||
# For the GHES Alpha, download the runner from github.com
|
||||
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
|
||||
latest_version=$(echo ${latest_version_label:1})
|
||||
runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
|
||||
runner_file="actions-runner-${runner_plat}-${runner_arch}-${latest_version}.tar.gz"
|
||||
|
||||
if [ -f "${runner_file}" ]; then
|
||||
echo "${runner_file} exists. skipping download."
|
||||
|
||||
@@ -57,4 +57,13 @@
|
||||
<PropertyGroup>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<!-- Enable NuGet package auditing -->
|
||||
<NuGetAudit>true</NuGetAudit>
|
||||
<!-- Audit direct and transitive packages -->
|
||||
<NuGetAuditMode>all</NuGetAuditMode>
|
||||
<!-- Report low, moderate, high and critical advisories -->
|
||||
<NuGetAuditLevel>moderate</NuGetAuditLevel>
|
||||
</PropertyGroup>
|
||||
</Project>
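To see locally what these audit settings surface (a sketch; `dotnet list package --vulnerable` is a standard SDK command, and the solution path is an assumption):

```bash
# Restore, then list vulnerable direct and transitive packages at the configured audit level.
dotnet restore src/ActionsRunner.sln
dotnet list src/ActionsRunner.sln package --vulnerable --include-transitive
```
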
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
7b78ca2997fbe048642d3717ab7321cdd359752b97158f3c67eb3df8786e21d3
|
||||
@@ -1 +0,0 @@
|
||||
6f34c1d501c87c2e22c2278df7152999aca628c66ee4176d32325773487da6d7
|
||||
@@ -1 +0,0 @@
|
||||
921ca58050be56e0b84af05e544cab4a151cb66405e815e19c0e0928ef7313f5
|
||||
@@ -1 +0,0 @@
|
||||
50f5c147074fc4943b4198b2d9b57c5e94344ab21350b0880ec8e2b85d27152b
|
||||
@@ -1 +0,0 @@
|
||||
16269548335b1f2add41a409aa3558c56581b63f280a9a26956707b6370558bd
|
||||
@@ -1 +0,0 @@
|
||||
e4aa6003ec77a2b21f3021927fed48727bde379fafff300f39565ff2fff4dd87
|
||||
@@ -1 +0,0 @@
|
||||
16ab4c166c58bc4c5600ff055be7ce0a9bb0dd993388114a76efea51e4ea14cb
|
||||
1
src/Misc/contentHash/externals/linux-arm
vendored
@@ -1 +0,0 @@
|
||||
5bdddd32bab1e57af252b470579083049496e9e39b6e4f50de01232581f9a2d8
|
||||
1
src/Misc/contentHash/externals/linux-arm64
vendored
@@ -1 +0,0 @@
|
||||
54b3b3a72da93db0fa38708c759fceadddb70cacdd3620a079084a242126dd78
|
||||
1
src/Misc/contentHash/externals/linux-x64
vendored
@@ -1 +0,0 @@
|
||||
e7f2da271abb174285c3a757503538b3e9792e9d731b0382b6d1f21bb59a79ba
|
||||
1
src/Misc/contentHash/externals/osx-arm64
vendored
@@ -1 +0,0 @@
|
||||
2481c5b0d06b2b5621635f2568b86a43b0e5b259fed1298167ba4f33d4c464c7
|
||||
1
src/Misc/contentHash/externals/osx-x64
vendored
@@ -1 +0,0 @@
|
||||
85de7677165e65ec69b8a9e344c0811efa51b7fe5376a1aa083505c560ea6f57
|
||||
1
src/Misc/contentHash/externals/win-arm64
vendored
@@ -1 +0,0 @@
|
||||
763d18de11c11fd299c0e75e98fefc8a0e6605ae0ad6aba3bbc110db2262ab41
|
||||
1
src/Misc/contentHash/externals/win-x64
vendored
@@ -1 +0,0 @@
|
||||
16f3cc545dfe10e84df43746073fc64d3c44d1891782532805aeb2118869a55d
|
||||
1488
src/Misc/dotnet-install.ps1
vendored
File diff suppressed because it is too large
Load Diff
1256
src/Misc/dotnet-install.sh
vendored
File diff suppressed because it is too large
Load Diff
@@ -1,11 +1,19 @@
|
||||
|
||||
{
|
||||
"printWidth": 80,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"semi": false,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"bracketSpacing": false,
|
||||
"arrowParens": "avoid",
|
||||
"parser": "typescript"
|
||||
}
|
||||
"printWidth": 80,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"semi": false,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"bracketSpacing": false,
|
||||
"arrowParens": "avoid",
|
||||
"overrides": [
|
||||
{
|
||||
"files": "*.{js,ts,json}",
|
||||
"options": {
|
||||
"tabWidth": 2
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run all`.
|
||||
To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run prepare && npm run all`.
|
||||
|
||||
> Note: this package also needs to be recompiled for dependabot PRs updating one of
|
||||
> its dependencies.
|
||||
When you commit changes to the JSON or TypeScript files, the JavaScript binary is automatically re-compiled and added to the latest commit.
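A sketch of the full local loop described above (the commands are the ones quoted in this README, run from the package directory referenced elsewhere in this changeset):

```bash
cd src/Misc/expressionFunc/hashFiles
npm install        # install dependencies
npm run prepare    # set up the husky git hooks from the repo root
npm run all        # format, lint, build, and pack into Misc/layoutbin
```
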
|
||||
|
||||
3277
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -9,7 +9,8 @@
|
||||
"format-check": "prettier --check **/*.ts",
|
||||
"lint": "eslint src/**/*.ts",
|
||||
"pack": "ncc build -o ../../layoutbin/hashFiles",
|
||||
"all": "npm run build && npm run format && npm run lint && npm run pack"
|
||||
"all": "npm run format && npm run lint && npm run build && npm run pack",
|
||||
"prepare": "cd ../../../../ && husky"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -18,20 +19,32 @@
|
||||
"keywords": [
|
||||
"actions"
|
||||
],
|
||||
"lint-staged": {
|
||||
"*.md": [
|
||||
"prettier --write",
|
||||
"git add ."
|
||||
],
|
||||
"*.{ts,json}": [
|
||||
"sh -c 'npm run all'",
|
||||
"git add ."
|
||||
]
|
||||
},
|
||||
"author": "GitHub Actions",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/glob": "^0.1.0"
|
||||
"@actions/glob": "^0.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^12.7.12",
|
||||
"@typescript-eslint/eslint-plugin": "^6.3.0",
|
||||
"@typescript-eslint/parser": "^6.3.0",
|
||||
"@vercel/ncc": "^0.36.1",
|
||||
"eslint": "^8.11.0",
|
||||
"eslint-plugin-github": "^4.9.2",
|
||||
"@types/node": "^20.6.2",
|
||||
"@typescript-eslint/eslint-plugin": "^6.21.0",
|
||||
"@typescript-eslint/parser": "^6.7.2",
|
||||
"@vercel/ncc": "^0.38.3",
|
||||
"eslint": "^8.47.0",
|
||||
"eslint-plugin-github": "^4.10.2",
|
||||
"eslint-plugin-prettier": "^5.0.0",
|
||||
"prettier": "^3.0.1",
|
||||
"typescript": "^5.1.6"
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^15.5.0",
|
||||
"prettier": "^3.0.3",
|
||||
"typescript": "^5.9.2"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -52,12 +52,13 @@ async function run(): Promise<void> {
|
||||
}
|
||||
}
|
||||
|
||||
run()
|
||||
.then(out => {
|
||||
;(async () => {
|
||||
try {
|
||||
const out = await run()
|
||||
console.log(out)
|
||||
process.exit(0)
|
||||
})
|
||||
.catch(err => {
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
}
|
||||
})()
|
||||
|
||||
@@ -3,11 +3,11 @@ PACKAGERUNTIME=$1
|
||||
PRECACHE=$2
|
||||
|
||||
NODE_URL=https://nodejs.org/dist
|
||||
UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release
|
||||
NODE16_VERSION="16.20.1"
|
||||
NODE20_VERSION="20.5.0"
|
||||
# used only for win-arm64, remove node16 unofficial version when official version is available
|
||||
NODE16_UNOFFICIAL_VERSION="16.20.0"
|
||||
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
|
||||
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
|
||||
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
|
||||
NODE20_VERSION="20.19.5"
|
||||
NODE24_VERSION="24.10.0"
|
||||
|
||||
get_abs_path() {
|
||||
# exploits the fact that pwd will print abs path when no args
|
||||
@@ -62,17 +62,16 @@ function acquireExternalTool() {
|
||||
echo "Curl version: $CURL_VERSION"
|
||||
|
||||
# curl -f Fail silently (no output at all) on HTTP errors (H)
|
||||
# -k Allow connections to SSL sites without certs (H)
|
||||
# -S Show error. With -s, make curl show errors when they occur
|
||||
# -L Follow redirects (H)
|
||||
# -o FILE Write to FILE instead of stdout
|
||||
# --retry 3 Retries transient errors 3 times (timeouts, 5xx)
|
||||
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
|
||||
# Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag
|
||||
curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
curl -fSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
else
|
||||
# Curl version is greater than 7.71.0, running curl with --retry-all-errors flag
|
||||
curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
curl -fSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
fi
|
||||
|
||||
# Move the partial file to the download target.
|
||||
@@ -139,10 +138,10 @@ function acquireExternalTool() {
|
||||
|
||||
# Download the external tools only for Windows.
|
||||
if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin
|
||||
if [[ "$PRECACHE" != "" ]]; then
|
||||
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
||||
fi
|
||||
@@ -151,10 +150,10 @@ fi
|
||||
# Download the external tools only for Windows.
|
||||
if [[ "$PACKAGERUNTIME" == "win-arm64" ]]; then
|
||||
# todo: replace these with official release when available
|
||||
acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
|
||||
acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin
|
||||
if [[ "$PRECACHE" != "" ]]; then
|
||||
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
||||
fi
|
||||
@@ -162,30 +161,29 @@ fi
|
||||
|
||||
# Download the external tools only for OSX.
|
||||
if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-x64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-x64.tar.gz" node24 fix_nested_dir
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then
|
||||
# node.js v12 doesn't support macOS on arm64.
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-arm64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-arm64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-arm64.tar.gz" node24 fix_nested_dir
|
||||
fi
|
||||
|
||||
# Download the external tools for Linux PACKAGERUNTIMEs.
|
||||
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE20_VERSION}/alpine/x64/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
|
||||
acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-x64.tar.gz" node24 fix_nested_dir
|
||||
acquireExternalTool "$NODE_ALPINE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-alpine-x64.tar.gz" node24_alpine
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-arm64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-arm64.tar.gz" node24 fix_nested_dir
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-armv7l.tar.gz" node20 fix_nested_dir
|
||||
fi
|
||||
|
||||
@@ -114,6 +114,11 @@ var runService = function () {
|
||||
);
|
||||
stopping = true;
|
||||
}
|
||||
} else if (code === 5) {
|
||||
console.log(
|
||||
"Runner listener exit with Session Conflict error, stop the service, no retry needed."
|
||||
);
|
||||
stopping = true;
|
||||
} else {
|
||||
var messagePrefix = "Runner listener exit with undefined return code";
|
||||
unknownFailureRetryCount++;
|
||||
|
||||
@@ -6,6 +6,29 @@
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
@@ -22,13 +45,6 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
const crypto = __importStar(__nccwpck_require__(6113));
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
@@ -37,7 +53,7 @@ const path = __importStar(__nccwpck_require__(1017));
|
||||
const stream = __importStar(__nccwpck_require__(2781));
|
||||
const util = __importStar(__nccwpck_require__(3837));
|
||||
function run() {
|
||||
var e_1, _a;
|
||||
var _a, e_1, _b, _c;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// arg0 -> node
|
||||
// arg1 -> hashFiles.js
|
||||
@@ -56,8 +72,10 @@ function run() {
|
||||
let count = 0;
|
||||
const globber = yield glob.create(matchPatterns, { followSymbolicLinks });
|
||||
try {
|
||||
for (var _b = __asyncValues(globber.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
|
||||
const file = _c.value;
|
||||
for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
|
||||
_c = _f.value;
|
||||
_d = false;
|
||||
const file = _c;
|
||||
console.log(file);
|
||||
if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
|
||||
console.log(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
|
||||
@@ -80,7 +98,7 @@ function run() {
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
|
||||
if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
@@ -94,15 +112,18 @@ function run() {
|
||||
}
|
||||
});
|
||||
}
|
||||
run()
|
||||
.then(out => {
|
||||
console.log(out);
|
||||
process.exit(0);
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
;
|
||||
(() => __awaiter(void 0, void 0, void 0, function* () {
|
||||
try {
|
||||
const out = yield run();
|
||||
console.log(out);
|
||||
process.exit(0);
|
||||
}
|
||||
catch (err) {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}))();
|
||||
|
||||
|
||||
/***/ }),
|
||||
@@ -246,7 +267,6 @@ const file_command_1 = __nccwpck_require__(717);
|
||||
const utils_1 = __nccwpck_require__(5278);
|
||||
const os = __importStar(__nccwpck_require__(2037));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const uuid_1 = __nccwpck_require__(5840);
|
||||
const oidc_utils_1 = __nccwpck_require__(8041);
|
||||
/**
|
||||
* The code to exit an action
|
||||
@@ -276,20 +296,9 @@ function exportVariable(name, val) {
|
||||
process.env[name] = convertedVal;
|
||||
const filePath = process.env['GITHUB_ENV'] || '';
|
||||
if (filePath) {
|
||||
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
|
||||
// These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.
|
||||
if (name.includes(delimiter)) {
|
||||
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
|
||||
}
|
||||
if (convertedVal.includes(delimiter)) {
|
||||
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
|
||||
}
|
||||
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
|
||||
file_command_1.issueCommand('ENV', commandValue);
|
||||
}
|
||||
else {
|
||||
command_1.issueCommand('set-env', { name }, convertedVal);
|
||||
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
|
||||
}
|
||||
command_1.issueCommand('set-env', { name }, convertedVal);
|
||||
}
|
||||
exports.exportVariable = exportVariable;
|
||||
/**
|
||||
@@ -307,7 +316,7 @@ exports.setSecret = setSecret;
|
||||
function addPath(inputPath) {
|
||||
const filePath = process.env['GITHUB_PATH'] || '';
|
||||
if (filePath) {
|
||||
file_command_1.issueCommand('PATH', inputPath);
|
||||
file_command_1.issueFileCommand('PATH', inputPath);
|
||||
}
|
||||
else {
|
||||
command_1.issueCommand('add-path', {}, inputPath);
|
||||
@@ -347,7 +356,10 @@ function getMultilineInput(name, options) {
|
||||
const inputs = getInput(name, options)
|
||||
.split('\n')
|
||||
.filter(x => x !== '');
|
||||
return inputs;
|
||||
if (options && options.trimWhitespace === false) {
|
||||
return inputs;
|
||||
}
|
||||
return inputs.map(input => input.trim());
|
||||
}
|
||||
exports.getMultilineInput = getMultilineInput;
|
||||
/**
|
||||
@@ -380,8 +392,12 @@ exports.getBooleanInput = getBooleanInput;
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function setOutput(name, value) {
|
||||
const filePath = process.env['GITHUB_OUTPUT'] || '';
|
||||
if (filePath) {
|
||||
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
|
||||
}
|
||||
process.stdout.write(os.EOL);
|
||||
command_1.issueCommand('set-output', { name }, value);
|
||||
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
|
||||
}
|
||||
exports.setOutput = setOutput;
|
||||
/**
|
||||
@@ -510,7 +526,11 @@ exports.group = group;
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function saveState(name, value) {
|
||||
command_1.issueCommand('save-state', { name }, value);
|
||||
const filePath = process.env['GITHUB_STATE'] || '';
|
||||
if (filePath) {
|
||||
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
|
||||
}
|
||||
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
|
||||
}
|
||||
exports.saveState = saveState;
|
||||
/**
|
||||
@@ -576,13 +596,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.issueCommand = void 0;
|
||||
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
|
||||
// We use any as a valid input type
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
const os = __importStar(__nccwpck_require__(2037));
|
||||
const uuid_1 = __nccwpck_require__(5840);
|
||||
const utils_1 = __nccwpck_require__(5278);
|
||||
function issueCommand(command, message) {
|
||||
function issueFileCommand(command, message) {
|
||||
const filePath = process.env[`GITHUB_${command}`];
|
||||
if (!filePath) {
|
||||
throw new Error(`Unable to find environment variable for file command ${command}`);
|
||||
@@ -594,7 +615,22 @@ function issueCommand(command, message) {
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
exports.issueCommand = issueCommand;
|
||||
exports.issueFileCommand = issueFileCommand;
|
||||
function prepareKeyValueMessage(key, value) {
|
||||
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
|
||||
const convertedValue = utils_1.toCommandValue(value);
|
||||
// These should realistically never happen, but just in case someone finds a
|
||||
// way to exploit uuid generation let's not allow keys or values that contain
|
||||
// the delimiter.
|
||||
if (key.includes(delimiter)) {
|
||||
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
|
||||
}
|
||||
if (convertedValue.includes(delimiter)) {
|
||||
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
|
||||
}
|
||||
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
|
||||
}
|
||||
exports.prepareKeyValueMessage = prepareKeyValueMessage;
|
||||
//# sourceMappingURL=file-command.js.map
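The rewritten `setOutput` and `saveState` route values through `issueFileCommand`, appending a delimiter-wrapped key/value message to the file named by `GITHUB_OUTPUT` or `GITHUB_STATE`, and only fall back to the legacy `set-output`/`save-state` workflow commands when those variables are absent. A minimal sketch of the message format produced by `prepareKeyValueMessage`, written against plain Node APIs rather than the toolkit internals:

```typescript
import * as os from 'os';
import { randomUUID } from 'crypto';

// Produces "<key><<ghadelimiter_<uuid>\n<value>\nghadelimiter_<uuid>",
// mirroring prepareKeyValueMessage above.
function keyValueMessage(key: string, value: string): string {
  const delimiter = `ghadelimiter_${randomUUID()}`;
  if (key.includes(delimiter) || value.includes(delimiter)) {
    throw new Error(`key/value must not contain the delimiter "${delimiter}"`);
  }
  return `${key}<<${delimiter}${os.EOL}${value}${os.EOL}${delimiter}`;
}

// e.g. fs.appendFileSync(process.env.GITHUB_OUTPUT!, keyValueMessage('answer', '42') + os.EOL)
```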
|
||||
|
||||
/***/ }),
|
||||
@@ -1100,7 +1136,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.hashFiles = exports.create = void 0;
|
||||
const internal_globber_1 = __nccwpck_require__(8298);
|
||||
const internal_hash_files_1 = __nccwpck_require__(2448);
|
||||
/**
|
||||
* Constructs a globber
|
||||
*
|
||||
@@ -1113,17 +1151,56 @@ function create(patterns, options) {
|
||||
});
|
||||
}
|
||||
exports.create = create;
|
||||
/**
|
||||
* Computes the sha256 hash of a glob
|
||||
*
|
||||
* @param patterns Patterns separated by newlines
|
||||
* @param currentWorkspace Workspace used when matching files
|
||||
* @param options Glob options
|
||||
* @param verbose Enables verbose logging
|
||||
*/
|
||||
function hashFiles(patterns, currentWorkspace = '', options, verbose = false) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let followSymbolicLinks = true;
|
||||
if (options && typeof options.followSymbolicLinks === 'boolean') {
|
||||
followSymbolicLinks = options.followSymbolicLinks;
|
||||
}
|
||||
const globber = yield create(patterns, { followSymbolicLinks });
|
||||
return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose);
|
||||
});
|
||||
}
|
||||
exports.hashFiles = hashFiles;
|
||||
//# sourceMappingURL=glob.js.map
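The new `hashFiles(patterns, currentWorkspace, options, verbose)` wrapper builds a globber (honouring `followSymbolicLinks`) and delegates to the internal hasher shown further below. A hedged usage sketch against the bundled `@actions/glob` API; the pattern and option value are examples:

```typescript
import * as glob from '@actions/glob';

// Combined SHA-256 over all lock files in the workspace; empty string when nothing matches.
async function lockFileHash(): Promise<string> {
  return glob.hashFiles('**/package-lock.json', '', { followSymbolicLinks: false });
}
```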
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 1026:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
const core = __nccwpck_require__(2186);
|
||||
exports.getOptions = void 0;
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
/**
|
||||
* Returns a copy with defaults filled in.
|
||||
*/
|
||||
@@ -1131,6 +1208,7 @@ function getOptions(copy) {
|
||||
const result = {
|
||||
followSymbolicLinks: true,
|
||||
implicitDescendants: true,
|
||||
matchDirectories: true,
|
||||
omitBrokenSymbolicLinks: true
|
||||
};
|
||||
if (copy) {
|
||||
@@ -1142,6 +1220,10 @@ function getOptions(copy) {
|
||||
result.implicitDescendants = copy.implicitDescendants;
|
||||
core.debug(`implicitDescendants '${result.implicitDescendants}'`);
|
||||
}
|
||||
if (typeof copy.matchDirectories === 'boolean') {
|
||||
result.matchDirectories = copy.matchDirectories;
|
||||
core.debug(`matchDirectories '${result.matchDirectories}'`);
|
||||
}
|
||||
if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
|
||||
result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
|
||||
core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
|
||||
@@ -1159,6 +1241,25 @@ exports.getOptions = getOptions;
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
@@ -1188,11 +1289,12 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
|
||||
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
const core = __nccwpck_require__(2186);
|
||||
const fs = __nccwpck_require__(7147);
|
||||
const globOptionsHelper = __nccwpck_require__(1026);
|
||||
const path = __nccwpck_require__(1017);
|
||||
const patternHelper = __nccwpck_require__(9005);
|
||||
exports.DefaultGlobber = void 0;
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
const globOptionsHelper = __importStar(__nccwpck_require__(1026));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const patternHelper = __importStar(__nccwpck_require__(9005));
|
||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||
const internal_pattern_1 = __nccwpck_require__(4536);
|
||||
const internal_search_state_1 = __nccwpck_require__(9117);
|
||||
@@ -1238,7 +1340,7 @@ class DefaultGlobber {
|
||||
if (options.implicitDescendants &&
|
||||
(pattern.trailingSeparator ||
|
||||
pattern.segments[pattern.segments.length - 1] !== '**')) {
|
||||
patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
|
||||
patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));
|
||||
}
|
||||
}
|
||||
// Push the search paths
|
||||
@@ -1281,7 +1383,7 @@ class DefaultGlobber {
|
||||
// Directory
|
||||
if (stats.isDirectory()) {
|
||||
// Matched
|
||||
if (match & internal_match_kind_1.MatchKind.Directory) {
|
||||
if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) {
|
||||
yield yield __await(item.path);
|
||||
}
|
||||
// Descend?
|
||||
@@ -1376,12 +1478,117 @@ exports.DefaultGlobber = DefaultGlobber;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2448:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.hashFiles = void 0;
|
||||
const crypto = __importStar(__nccwpck_require__(6113));
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
const stream = __importStar(__nccwpck_require__(2781));
|
||||
const util = __importStar(__nccwpck_require__(3837));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
function hashFiles(globber, currentWorkspace, verbose = false) {
|
||||
var e_1, _a;
|
||||
var _b;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const writeDelegate = verbose ? core.info : core.debug;
|
||||
let hasMatch = false;
|
||||
const githubWorkspace = currentWorkspace
|
||||
? currentWorkspace
|
||||
: (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
|
||||
const result = crypto.createHash('sha256');
|
||||
let count = 0;
|
||||
try {
|
||||
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
|
||||
const file = _d.value;
|
||||
writeDelegate(file);
|
||||
if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
|
||||
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
|
||||
continue;
|
||||
}
|
||||
if (fs.statSync(file).isDirectory()) {
|
||||
writeDelegate(`Skip directory '${file}'.`);
|
||||
continue;
|
||||
}
|
||||
const hash = crypto.createHash('sha256');
|
||||
const pipeline = util.promisify(stream.pipeline);
|
||||
yield pipeline(fs.createReadStream(file), hash);
|
||||
result.write(hash.digest());
|
||||
count++;
|
||||
if (!hasMatch) {
|
||||
hasMatch = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
result.end();
|
||||
if (hasMatch) {
|
||||
writeDelegate(`Found ${count} files to hash.`);
|
||||
return result.digest('hex');
|
||||
}
|
||||
else {
|
||||
writeDelegate(`No matches found for glob`);
|
||||
return '';
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.hashFiles = hashFiles;
|
||||
//# sourceMappingURL=internal-hash-files.js.map
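The hasher above streams each matched file through SHA-256, folds every per-file digest into an outer SHA-256, skips directories and anything outside `GITHUB_WORKSPACE`, and returns the hex digest (or an empty string when nothing matched). The same hash-of-hashes idea as a standalone sketch, independent of the toolkit's globber:

```typescript
import * as crypto from 'crypto';
import * as fs from 'fs';
import * as path from 'path';
import { pipeline } from 'stream/promises';

// Standalone sketch of the hash-of-hashes scheme; not the toolkit API itself.
async function hashOfHashes(files: string[], workspace: string): Promise<string> {
  const outer = crypto.createHash('sha256');
  let matched = 0;
  for (const file of files) {
    if (!file.startsWith(`${workspace}${path.sep}`)) continue; // outside the workspace
    if (fs.statSync(file).isDirectory()) continue;             // directories are skipped
    const inner = crypto.createHash('sha256');
    await pipeline(fs.createReadStream(file), inner);          // stream file -> sha256
    outer.update(inner.digest());                              // fold per-file digest in
    matched++;
  }
  return matched > 0 ? outer.digest('hex') : '';
}
```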
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 1063:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.MatchKind = void 0;
|
||||
/**
|
||||
* Indicates whether a pattern matches a path
|
||||
*/
|
||||
@@ -1401,13 +1608,36 @@ var MatchKind;
|
||||
/***/ }),
|
||||
|
||||
/***/ 1849:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
const assert = __nccwpck_require__(9491);
|
||||
const path = __nccwpck_require__(1017);
|
||||
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
|
||||
@@ -1447,8 +1677,8 @@ exports.dirname = dirname;
|
||||
* or `C:` are expanded based on the current working directory.
|
||||
*/
|
||||
function ensureAbsoluteRoot(root, itemPath) {
|
||||
assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
|
||||
assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||
assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
|
||||
assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||
// Already rooted
|
||||
if (hasAbsoluteRoot(itemPath)) {
|
||||
return itemPath;
|
||||
@@ -1458,7 +1688,7 @@ function ensureAbsoluteRoot(root, itemPath) {
|
||||
// Check for itemPath like C: or C:foo
|
||||
if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
|
||||
let cwd = process.cwd();
|
||||
assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||
// Drive letter matches cwd? Expand to cwd
|
||||
if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
|
||||
// Drive only, e.g. C:
|
||||
@@ -1483,11 +1713,11 @@ function ensureAbsoluteRoot(root, itemPath) {
|
||||
// Check for itemPath like \ or \foo
|
||||
else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
|
||||
const cwd = process.cwd();
|
||||
assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||
return `${cwd[0]}:\\${itemPath.substr(1)}`;
|
||||
}
|
||||
}
|
||||
assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
|
||||
assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
|
||||
// Otherwise ensure root ends with a separator
|
||||
if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
|
||||
// Intentionally empty
|
||||
@@ -1504,7 +1734,7 @@ exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
|
||||
* `\\hello\share` and `C:\hello` (and using alternate separator).
|
||||
*/
|
||||
function hasAbsoluteRoot(itemPath) {
|
||||
assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||
assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||
// Normalize separators
|
||||
itemPath = normalizeSeparators(itemPath);
|
||||
// Windows
|
||||
@@ -1521,7 +1751,7 @@ exports.hasAbsoluteRoot = hasAbsoluteRoot;
|
||||
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
|
||||
*/
|
||||
function hasRoot(itemPath) {
|
||||
assert(itemPath, `isRooted parameter 'itemPath' must not be empty`);
|
||||
assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
|
||||
// Normalize separators
|
||||
itemPath = normalizeSeparators(itemPath);
|
||||
// Windows
|
||||
@@ -1583,14 +1813,37 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
|
||||
/***/ }),
|
||||
|
||||
/***/ 6836:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
const assert = __nccwpck_require__(9491);
|
||||
const path = __nccwpck_require__(1017);
|
||||
const pathHelper = __nccwpck_require__(1849);
|
||||
exports.Path = void 0;
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Helper class for parsing paths into segments
|
||||
@@ -1604,7 +1857,7 @@ class Path {
|
||||
this.segments = [];
|
||||
// String
|
||||
if (typeof itemPath === 'string') {
|
||||
assert(itemPath, `Parameter 'itemPath' must not be empty`);
|
||||
assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
|
||||
// Normalize slashes and trim unnecessary trailing slash
|
||||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||||
// Not rooted
|
||||
@@ -1631,24 +1884,24 @@ class Path {
|
||||
// Array
|
||||
else {
|
||||
// Must not be empty
|
||||
assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
|
||||
assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
|
||||
// Each segment
|
||||
for (let i = 0; i < itemPath.length; i++) {
|
||||
let segment = itemPath[i];
|
||||
// Must not be empty
|
||||
assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
|
||||
assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
|
||||
// Normalize slashes
|
||||
segment = pathHelper.normalizeSeparators(itemPath[i]);
|
||||
// Root segment
|
||||
if (i === 0 && pathHelper.hasRoot(segment)) {
|
||||
segment = pathHelper.safeTrimTrailingSeparator(segment);
|
||||
assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
|
||||
assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
|
||||
this.segments.push(segment);
|
||||
}
|
||||
// All other segments
|
||||
else {
|
||||
// Must not contain slash
|
||||
assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
|
||||
assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
|
||||
this.segments.push(segment);
|
||||
}
|
||||
}
|
||||
@@ -1680,12 +1933,32 @@ exports.Path = Path;
|
||||
/***/ }),
|
||||
|
||||
/***/ 9005:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
const pathHelper = __nccwpck_require__(1849);
|
||||
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
|
||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
@@ -1761,21 +2034,44 @@ exports.partialMatch = partialMatch;
|
||||
/***/ }),
|
||||
|
||||
/***/ 4536:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
const assert = __nccwpck_require__(9491);
|
||||
const os = __nccwpck_require__(2037);
|
||||
const path = __nccwpck_require__(1017);
|
||||
const pathHelper = __nccwpck_require__(1849);
|
||||
exports.Pattern = void 0;
|
||||
const os = __importStar(__nccwpck_require__(2037));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||
const minimatch_1 = __nccwpck_require__(3973);
|
||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||
const internal_path_1 = __nccwpck_require__(6836);
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
class Pattern {
|
||||
constructor(patternOrNegate, segments) {
|
||||
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
|
||||
/**
|
||||
* Indicates whether matches should be excluded from the result set
|
||||
*/
|
||||
@@ -1789,9 +2085,9 @@ class Pattern {
|
||||
else {
|
||||
// Convert to pattern
|
||||
segments = segments || [];
|
||||
assert(segments.length, `Parameter 'segments' must not empty`);
|
||||
assert_1.default(segments.length, `Parameter 'segments' must not empty`);
|
||||
const root = Pattern.getLiteral(segments[0]);
|
||||
assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
|
||||
assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
|
||||
pattern = new internal_path_1.Path(segments).toString().trim();
|
||||
if (patternOrNegate) {
|
||||
pattern = `!${pattern}`;
|
||||
@@ -1803,7 +2099,7 @@ class Pattern {
|
||||
pattern = pattern.substr(1).trim();
|
||||
}
|
||||
// Normalize slashes and ensures absolute root
|
||||
pattern = Pattern.fixupPattern(pattern);
|
||||
pattern = Pattern.fixupPattern(pattern, homedir);
|
||||
// Segments
|
||||
this.segments = new internal_path_1.Path(pattern).segments;
|
||||
// Trailing slash indicates the pattern should only match directories, not regular files
|
||||
@@ -1819,6 +2115,7 @@ class Pattern {
|
||||
this.searchPath = new internal_path_1.Path(searchSegments).toString();
|
||||
// Root RegExp (required when determining partial match)
|
||||
this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
|
||||
this.isImplicitPattern = isImplicitPattern;
|
||||
// Create minimatch
|
||||
const minimatchOptions = {
|
||||
dot: true,
|
||||
@@ -1840,11 +2137,11 @@ class Pattern {
|
||||
// Normalize slashes
|
||||
itemPath = pathHelper.normalizeSeparators(itemPath);
|
||||
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
|
||||
// preceeding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
|
||||
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
|
||||
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
|
||||
if (!itemPath.endsWith(path.sep)) {
|
||||
if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {
|
||||
// Note, this is safe because the constructor ensures the pattern has an absolute root.
|
||||
// For example, formats like C: and C:foo on Windows are resolved to an aboslute root.
|
||||
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
|
||||
itemPath = `${itemPath}${path.sep}`;
|
||||
}
|
||||
}
|
||||
@@ -1882,15 +2179,15 @@ class Pattern {
|
||||
/**
|
||||
* Normalizes slashes and ensures absolute root
|
||||
*/
|
||||
static fixupPattern(pattern) {
|
||||
static fixupPattern(pattern, homedir) {
|
||||
// Empty
|
||||
assert(pattern, 'pattern cannot be empty');
|
||||
assert_1.default(pattern, 'pattern cannot be empty');
|
||||
// Must not contain `.` segment, unless first segment
|
||||
// Must not contain `..` segment
|
||||
const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
|
||||
assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
|
||||
assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
|
||||
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
|
||||
assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
|
||||
assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
|
||||
// Normalize slashes
|
||||
pattern = pathHelper.normalizeSeparators(pattern);
|
||||
// Replace leading `.` segment
|
||||
@@ -1899,9 +2196,9 @@ class Pattern {
|
||||
}
|
||||
// Replace leading `~` segment
|
||||
else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
|
||||
const homedir = os.homedir();
|
||||
assert(homedir, 'Unable to determine HOME directory');
|
||||
assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
|
||||
homedir = homedir || os.homedir();
|
||||
assert_1.default(homedir, 'Unable to determine HOME directory');
|
||||
assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
|
||||
pattern = Pattern.globEscape(homedir) + pattern.substr(1);
|
||||
}
|
||||
// Replace relative drive root, e.g. pattern is C: or C:foo
|
||||
@@ -2004,6 +2301,7 @@ exports.Pattern = Pattern;
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.SearchState = void 0;
|
||||
class SearchState {
|
||||
constructor(path, level) {
|
||||
this.path = path;
|
||||
@@ -2232,6 +2530,19 @@ class HttpClientResponse {
|
||||
}));
|
||||
});
|
||||
}
|
||||
readBodyBuffer() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
|
||||
const chunks = [];
|
||||
this.message.on('data', (chunk) => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
this.message.on('end', () => {
|
||||
resolve(Buffer.concat(chunks));
|
||||
});
|
||||
}));
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.HttpClientResponse = HttpClientResponse;
|
||||
function isHttps(requestUrl) {
|
||||
@@ -2736,7 +3047,13 @@ function getProxyUrl(reqUrl) {
|
||||
}
|
||||
})();
|
||||
if (proxyVar) {
|
||||
return new URL(proxyVar);
|
||||
try {
|
||||
return new URL(proxyVar);
|
||||
}
|
||||
catch (_a) {
|
||||
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
|
||||
return new URL(`http://${proxyVar}`);
|
||||
}
|
||||
}
|
||||
else {
|
||||
return undefined;
|
||||
@@ -2747,6 +3064,10 @@ function checkBypass(reqUrl) {
|
||||
if (!reqUrl.hostname) {
|
||||
return false;
|
||||
}
|
||||
const reqHost = reqUrl.hostname;
|
||||
if (isLoopbackAddress(reqHost)) {
|
||||
return true;
|
||||
}
|
||||
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
|
||||
if (!noProxy) {
|
||||
return false;
|
||||
@@ -2772,13 +3093,24 @@ function checkBypass(reqUrl) {
|
||||
.split(',')
|
||||
.map(x => x.trim().toUpperCase())
|
||||
.filter(x => x)) {
|
||||
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
|
||||
if (upperNoProxyItem === '*' ||
|
||||
upperReqHosts.some(x => x === upperNoProxyItem ||
|
||||
x.endsWith(`.${upperNoProxyItem}`) ||
|
||||
(upperNoProxyItem.startsWith('.') &&
|
||||
x.endsWith(`${upperNoProxyItem}`)))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
exports.checkBypass = checkBypass;
|
||||
function isLoopbackAddress(host) {
|
||||
const hostLower = host.toLowerCase();
|
||||
return (hostLower === 'localhost' ||
|
||||
hostLower.startsWith('127.') ||
|
||||
hostLower.startsWith('[::1]') ||
|
||||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
|
||||
}
|
||||
//# sourceMappingURL=proxy.js.map
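Two behavioural changes land here: `getProxyUrl` now tolerates proxy variables without a scheme by retrying with an `http://` prefix, and `checkBypass` short-circuits loopback hosts and accepts `*`, exact hosts, and domain-suffix entries in `no_proxy`. A simplified sketch of the matching rule (port handling omitted; hosts below are examples):

```typescript
// Simplified no_proxy matcher mirroring checkBypass above (ports ignored).
function bypassesProxy(host: string, noProxy: string): boolean {
  const upperHost = host.toUpperCase();
  return noProxy
    .split(',')
    .map(entry => entry.trim().toUpperCase())
    .filter(entry => entry.length > 0)
    .some(entry =>
      entry === '*' ||
      upperHost === entry ||
      upperHost.endsWith(`.${entry}`) ||                     // EXAMPLE.COM matches API.EXAMPLE.COM
      (entry.startsWith('.') && upperHost.endsWith(entry))); // .EXAMPLE.COM matches API.EXAMPLE.COM
}

// bypassesProxy('api.example.com', 'localhost,example.com') === true
```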
|
||||
|
||||
/***/ }),
|
||||
@@ -2817,6 +3149,9 @@ function range(a, b, str) {
|
||||
var i = ai;
|
||||
|
||||
if (ai >= 0 && bi > 0) {
|
||||
if(a===b) {
|
||||
return [ai, bi];
|
||||
}
|
||||
begs = [];
|
||||
left = str.length;
|
||||
|
||||
@@ -2964,7 +3299,7 @@ function expand(str, isTop) {
|
||||
var isOptions = m.body.indexOf(',') >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
// {a},b}
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str = m.pre + '{' + m.body + escClose + m.post;
|
||||
return expand(str);
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ if [ -f ".path" ]; then
|
||||
echo ".path=${PATH}"
|
||||
fi
|
||||
|
||||
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}
|
||||
nodever="node20"
|
||||
|
||||
# insert anything to setup env when running as a service
|
||||
# run the host process which keep the listener alive
|
||||
|
||||
@@ -123,7 +123,7 @@ fi
|
||||
# fix upgrade issue with macOS when running as a service
|
||||
attemptedtargetedfix=0
|
||||
currentplatform=$(uname | awk '{print tolower($0)}')
|
||||
if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
||||
if [[ "$currentplatform" == 'darwin' && $restartinteractiverunner -eq 0 ]]; then
|
||||
# We needed a fix for https://github.com/actions/runner/issues/743
|
||||
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service
|
||||
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file
|
||||
@@ -135,12 +135,23 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
||||
then
|
||||
# inspect the open file handles to find the node process
|
||||
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
|
||||
nodever="node16"
|
||||
# Try finding node24 first, then fallback to earlier versions if needed
|
||||
nodever="node24"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
|
||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node20
|
||||
then
|
||||
nodever="node12"
|
||||
nodever="node20"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node16
|
||||
then
|
||||
nodever="node16"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
|
||||
then
|
||||
nodever="node12"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
if [[ $? -eq 0 && -n "$path" ]]
|
||||
then
|
||||
@@ -178,6 +189,19 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
||||
fi
|
||||
fi
|
||||
|
||||
# update runsvc.sh
|
||||
if [ -f "$rootfolder/runsvc.sh" ]
|
||||
then
|
||||
date "+[%F %T-%4N] Update runsvc.sh" >> "$logfile" 2>&1
|
||||
cat "$rootfolder/bin/runsvc.sh" > "$rootfolder/runsvc.sh"
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
date "+[%F %T-%4N] Can't update $rootfolder/runsvc.sh using $rootfolder/bin/runsvc.sh" >> "$logfile" 2>&1
|
||||
mv -fv "$logfile" "$logfile.failed"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
date "+[%F %T-%4N] Update succeed" >> "$logfile"
|
||||
|
||||
touch update.finished
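The macOS service-upgrade path now probes, via `lsof`, for the node binary the running service was launched with, preferring node24 and falling back through node20 and node16 to node12. For consistency with the toolkit code elsewhere in this compare, here is the same first-match fallback as a TypeScript sketch; `findOpenNodePath` stands in for the `lsof` probe and is an assumption, not a real helper:

```typescript
// Ordered candidates mirror update.sh.template above.
const NODE_VERSIONS = ['node24', 'node20', 'node16', 'node12'];

function resolveRunnerNodePath(
  findOpenNodePath: (version: string) => string | undefined
): string | undefined {
  for (const version of NODE_VERSIONS) {
    const nodePath = findOpenNodePath(version); // e.g. ".../externals/node20/bin/node"
    if (nodePath) {
      return nodePath;
    }
  }
  return undefined; // no externals node found; the script handles that case separately
}
```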
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
SET UPDATEFILE=update.finished
|
||||
"%~dp0\bin\Runner.Listener.exe" run %*
|
||||
|
||||
rem using `if %ERRORLEVEL% EQU N` insterad of `if ERRORLEVEL N`
|
||||
rem using `if %ERRORLEVEL% EQU N` instead of `if ERRORLEVEL N`
|
||||
rem `if ERRORLEVEL N` means: error level is N or MORE
|
||||
|
||||
if %ERRORLEVEL% EQU 0 (
|
||||
@@ -49,5 +49,10 @@ if %ERRORLEVEL% EQU 4 (
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 5 (
|
||||
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
echo "Exiting after unknown error code: %ERRORLEVEL%"
|
||||
exit /b 0
|
||||
@@ -70,6 +70,9 @@ elif [[ $returnCode == 4 ]]; then
|
||||
"$DIR"/safe_sleep.sh 1
|
||||
done
|
||||
exit 2
|
||||
elif [[ $returnCode == 5 ]]; then
|
||||
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
|
||||
exit 0
|
||||
else
|
||||
echo "Exiting with unknown error code: ${returnCode}"
|
||||
exit 0
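Both run-helper variants now treat listener exit code 5 (session conflict) as a clean stop with no retry, alongside the existing restart-on-2 behaviour visible in the next hunk. A small sketch of that dispatch, limited to the codes shown in this diff:

```typescript
// Only the return codes visible in this compare are mapped; everything else stops.
type HelperAction = 'restart' | 'stop';

function actionForListenerExit(code: number): HelperAction {
  if (code === 2) return 'restart'; // "Restarting runner..."
  if (code === 5) return 'stop';    // session conflict: stop the service, no retry needed
  return 'stop';                    // unknown codes exit without retrying
}
```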
|
||||
|
||||
@@ -38,7 +38,7 @@ runWithManualTrap() {
|
||||
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
|
||||
"$DIR"/run-helper.sh $* &
|
||||
PID=$!
|
||||
wait -f $PID
|
||||
wait $PID
|
||||
returnCode=$?
|
||||
if [[ $returnCode -eq 2 ]]; then
|
||||
echo "Restarting runner..."
|
||||
@@ -84,4 +84,4 @@ if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
|
||||
run $*
|
||||
else
|
||||
runWithManualTrap $*
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SECONDS=0
|
||||
while [[ $SECONDS != $1 ]]; do
|
||||
while [[ $SECONDS -lt $1 ]]; do
|
||||
:
|
||||
done
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
actions.runner.plist.template
|
||||
actions.runner.service.template
|
||||
checkScripts/downloadCert.js
|
||||
checkScripts/makeWebRequest.js
|
||||
darwin.svc.sh.template
|
||||
hashFiles/index.js
|
||||
installdependencies.sh
|
||||
macos-run-invoker.js
|
||||
Microsoft.IdentityModel.Logging.dll
|
||||
Microsoft.IdentityModel.Tokens.dll
|
||||
Minimatch.dll
|
||||
Newtonsoft.Json.Bson.dll
|
||||
Newtonsoft.Json.dll
|
||||
Runner.Common.deps.json
|
||||
Runner.Common.dll
|
||||
Runner.Common.pdb
|
||||
Runner.Listener
|
||||
Runner.Listener.deps.json
|
||||
Runner.Listener.dll
|
||||
Runner.Listener.exe
|
||||
Runner.Listener.pdb
|
||||
Runner.Listener.runtimeconfig.json
|
||||
Runner.PluginHost
|
||||
Runner.PluginHost.deps.json
|
||||
Runner.PluginHost.dll
|
||||
Runner.PluginHost.exe
|
||||
Runner.PluginHost.pdb
|
||||
Runner.PluginHost.runtimeconfig.json
|
||||
Runner.Plugins.deps.json
|
||||
Runner.Plugins.dll
|
||||
Runner.Plugins.pdb
|
||||
Runner.Sdk.deps.json
|
||||
Runner.Sdk.dll
|
||||
Runner.Sdk.pdb
|
||||
Runner.Worker
|
||||
Runner.Worker.deps.json
|
||||
Runner.Worker.dll
|
||||
Runner.Worker.exe
|
||||
Runner.Worker.pdb
|
||||
Runner.Worker.runtimeconfig.json
|
||||
RunnerService.exe
|
||||
RunnerService.exe.config
|
||||
RunnerService.js
|
||||
RunnerService.pdb
|
||||
runsvc.sh
|
||||
Sdk.deps.json
|
||||
Sdk.dll
|
||||
Sdk.pdb
|
||||
System.IdentityModel.Tokens.Jwt.dll
|
||||
System.Net.Http.Formatting.dll
|
||||
System.Security.Cryptography.Pkcs.dll
|
||||
System.Security.Cryptography.ProtectedData.dll
|
||||
System.ServiceProcess.ServiceController.dll
|
||||
systemd.svc.sh.template
|
||||
update.cmd.template
|
||||
update.sh.template
|
||||
YamlDotNet.dll
|
||||
@@ -1,268 +0,0 @@
|
||||
api-ms-win-core-console-l1-1-0.dll
|
||||
api-ms-win-core-console-l1-2-0.dll
|
||||
api-ms-win-core-datetime-l1-1-0.dll
|
||||
api-ms-win-core-debug-l1-1-0.dll
|
||||
api-ms-win-core-errorhandling-l1-1-0.dll
|
||||
api-ms-win-core-fibers-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-2-0.dll
|
||||
api-ms-win-core-file-l2-1-0.dll
|
||||
api-ms-win-core-handle-l1-1-0.dll
|
||||
api-ms-win-core-heap-l1-1-0.dll
|
||||
api-ms-win-core-interlocked-l1-1-0.dll
|
||||
api-ms-win-core-libraryloader-l1-1-0.dll
|
||||
api-ms-win-core-localization-l1-2-0.dll
|
||||
api-ms-win-core-memory-l1-1-0.dll
|
||||
api-ms-win-core-namedpipe-l1-1-0.dll
|
||||
api-ms-win-core-processenvironment-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-1.dll
|
||||
api-ms-win-core-profile-l1-1-0.dll
|
||||
api-ms-win-core-rtlsupport-l1-1-0.dll
|
||||
api-ms-win-core-string-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-2-0.dll
|
||||
api-ms-win-core-sysinfo-l1-1-0.dll
|
||||
api-ms-win-core-timezone-l1-1-0.dll
|
||||
api-ms-win-core-util-l1-1-0.dll
|
||||
api-ms-win-crt-conio-l1-1-0.dll
|
||||
api-ms-win-crt-convert-l1-1-0.dll
|
||||
api-ms-win-crt-environment-l1-1-0.dll
|
||||
api-ms-win-crt-filesystem-l1-1-0.dll
|
||||
api-ms-win-crt-heap-l1-1-0.dll
|
||||
api-ms-win-crt-locale-l1-1-0.dll
|
||||
api-ms-win-crt-math-l1-1-0.dll
|
||||
api-ms-win-crt-multibyte-l1-1-0.dll
|
||||
api-ms-win-crt-private-l1-1-0.dll
|
||||
api-ms-win-crt-process-l1-1-0.dll
|
||||
api-ms-win-crt-runtime-l1-1-0.dll
|
||||
api-ms-win-crt-stdio-l1-1-0.dll
|
||||
api-ms-win-crt-string-l1-1-0.dll
|
||||
api-ms-win-crt-time-l1-1-0.dll
|
||||
api-ms-win-crt-utility-l1-1-0.dll
|
||||
clrcompression.dll
|
||||
clretwrc.dll
|
||||
clrjit.dll
|
||||
coreclr.dll
|
||||
createdump
|
||||
createdump.exe
|
||||
dbgshim.dll
|
||||
hostfxr.dll
|
||||
hostpolicy.dll
|
||||
libclrjit.dylib
|
||||
libclrjit.so
|
||||
libcoreclr.dylib
|
||||
libcoreclr.so
|
||||
libcoreclrtraceptprovider.so
|
||||
libdbgshim.dylib
|
||||
libdbgshim.so
|
||||
libhostfxr.dylib
|
||||
libhostfxr.so
|
||||
libhostpolicy.dylib
|
||||
libhostpolicy.so
|
||||
libmscordaccore.dylib
|
||||
libmscordaccore.so
|
||||
libmscordbi.dylib
|
||||
libmscordbi.so
|
||||
Microsoft.CSharp.dll
|
||||
Microsoft.DiaSymReader.Native.amd64.dll
|
||||
Microsoft.DiaSymReader.Native.arm64.dll
|
||||
Microsoft.VisualBasic.Core.dll
|
||||
Microsoft.VisualBasic.dll
|
||||
Microsoft.Win32.Primitives.dll
|
||||
Microsoft.Win32.Registry.dll
|
||||
mscordaccore.dll
|
||||
mscordaccore_amd64_amd64_6.0.522.21309.dll
|
||||
mscordaccore_arm64_arm64_6.0.522.21309.dll
|
||||
mscordaccore_amd64_amd64_6.0.1322.58009.dll
|
||||
mscordaccore_amd64_amd64_6.0.2023.32017.dll
|
||||
mscordbi.dll
|
||||
mscorlib.dll
|
||||
mscorrc.debug.dll
|
||||
mscorrc.dll
|
||||
msquic.dll
|
||||
netstandard.dll
|
||||
SOS_README.md
|
||||
System.AppContext.dll
|
||||
System.Buffers.dll
|
||||
System.Collections.Concurrent.dll
|
||||
System.Collections.dll
|
||||
System.Collections.Immutable.dll
|
||||
System.Collections.NonGeneric.dll
|
||||
System.Collections.Specialized.dll
|
||||
System.ComponentModel.Annotations.dll
|
||||
System.ComponentModel.DataAnnotations.dll
|
||||
System.ComponentModel.dll
|
||||
System.ComponentModel.EventBasedAsync.dll
|
||||
System.ComponentModel.Primitives.dll
|
||||
System.ComponentModel.TypeConverter.dll
|
||||
System.Configuration.dll
|
||||
System.Console.dll
|
||||
System.Core.dll
|
||||
System.Data.Common.dll
|
||||
System.Data.DataSetExtensions.dll
|
||||
System.Data.dll
|
||||
System.Diagnostics.Contracts.dll
|
||||
System.Diagnostics.Debug.dll
|
||||
System.Diagnostics.DiagnosticSource.dll
|
||||
System.Diagnostics.FileVersionInfo.dll
|
||||
System.Diagnostics.Process.dll
|
||||
System.Diagnostics.StackTrace.dll
|
||||
System.Diagnostics.TextWriterTraceListener.dll
|
||||
System.Diagnostics.Tools.dll
|
||||
System.Diagnostics.TraceSource.dll
|
||||
System.Diagnostics.Tracing.dll
|
||||
System.dll
|
||||
System.Drawing.dll
|
||||
System.Drawing.Primitives.dll
|
||||
System.Dynamic.Runtime.dll
|
||||
System.Formats.Asn1.dll
|
||||
System.Globalization.Calendars.dll
|
||||
System.Globalization.dll
|
||||
System.Globalization.Extensions.dll
|
||||
System.Globalization.Native.dylib
|
||||
System.Globalization.Native.so
|
||||
System.IO.Compression.Brotli.dll
|
||||
System.IO.Compression.dll
|
||||
System.IO.Compression.FileSystem.dll
|
||||
System.IO.Compression.Native.a
|
||||
System.IO.Compression.Native.dll
|
||||
System.IO.Compression.Native.dylib
|
||||
System.IO.Compression.Native.so
|
||||
System.IO.Compression.ZipFile.dll
|
||||
System.IO.dll
|
||||
System.IO.FileSystem.AccessControl.dll
|
||||
System.IO.FileSystem.dll
|
||||
System.IO.FileSystem.DriveInfo.dll
|
||||
System.IO.FileSystem.Primitives.dll
|
||||
System.IO.FileSystem.Watcher.dll
|
||||
System.IO.IsolatedStorage.dll
|
||||
System.IO.MemoryMappedFiles.dll
|
||||
System.IO.Pipes.AccessControl.dll
|
||||
System.IO.Pipes.dll
|
||||
System.IO.UnmanagedMemoryStream.dll
|
||||
System.Linq.dll
|
||||
System.Linq.Expressions.dll
|
||||
System.Linq.Parallel.dll
|
||||
System.Linq.Queryable.dll
|
||||
System.Memory.dll
|
||||
System.Native.a
|
||||
System.Native.dylib
|
||||
System.Native.so
|
||||
System.Net.dll
|
||||
System.Net.Http.dll
|
||||
System.Net.Http.Json.dll
|
||||
System.Net.Http.Native.a
|
||||
System.Net.Http.Native.dylib
|
||||
System.Net.Http.Native.so
|
||||
System.Net.HttpListener.dll
|
||||
System.Net.Mail.dll
|
||||
System.Net.NameResolution.dll
|
||||
System.Net.NetworkInformation.dll
|
||||
System.Net.Ping.dll
|
||||
System.Net.Primitives.dll
|
||||
System.Net.Quic.dll
|
||||
System.Net.Requests.dll
|
||||
System.Net.Security.dll
|
||||
System.Net.Security.Native.a
|
||||
System.Net.Security.Native.dylib
|
||||
System.Net.Security.Native.so
|
||||
System.Net.ServicePoint.dll
|
||||
System.Net.Sockets.dll
|
||||
System.Net.WebClient.dll
|
||||
System.Net.WebHeaderCollection.dll
|
||||
System.Net.WebProxy.dll
|
||||
System.Net.WebSockets.Client.dll
|
||||
System.Net.WebSockets.dll
|
||||
System.Numerics.dll
|
||||
System.Numerics.Vectors.dll
|
||||
System.ObjectModel.dll
|
||||
System.Private.CoreLib.dll
|
||||
System.Private.DataContractSerialization.dll
|
||||
System.Private.Uri.dll
|
||||
System.Private.Xml.dll
|
||||
System.Private.Xml.Linq.dll
|
||||
System.Reflection.DispatchProxy.dll
|
||||
System.Reflection.dll
|
||||
System.Reflection.Emit.dll
|
||||
System.Reflection.Emit.ILGeneration.dll
|
||||
System.Reflection.Emit.Lightweight.dll
|
||||
System.Reflection.Extensions.dll
|
||||
System.Reflection.Metadata.dll
|
||||
System.Reflection.Primitives.dll
|
||||
System.Reflection.TypeExtensions.dll
|
||||
System.Resources.Reader.dll
|
||||
System.Resources.ResourceManager.dll
|
||||
System.Resources.Writer.dll
|
||||
System.Runtime.CompilerServices.Unsafe.dll
|
||||
System.Runtime.CompilerServices.VisualC.dll
|
||||
System.Runtime.dll
|
||||
System.Runtime.Extensions.dll
|
||||
System.Runtime.Handles.dll
|
||||
System.Runtime.InteropServices.dll
|
||||
System.Runtime.InteropServices.RuntimeInformation.dll
|
||||
System.Runtime.InteropServices.WindowsRuntime.dll
|
||||
System.Runtime.Intrinsics.dll
|
||||
System.Runtime.Loader.dll
|
||||
System.Runtime.Numerics.dll
|
||||
System.Runtime.Serialization.dll
|
||||
System.Runtime.Serialization.Formatters.dll
|
||||
System.Runtime.Serialization.Json.dll
|
||||
System.Runtime.Serialization.Primitives.dll
|
||||
System.Runtime.Serialization.Xml.dll
|
||||
System.Runtime.WindowsRuntime.dll
|
||||
System.Runtime.WindowsRuntime.UI.Xaml.dll
|
||||
System.Security.AccessControl.dll
|
||||
System.Security.Claims.dll
|
||||
System.Security.Cryptography.Algorithms.dll
|
||||
System.Security.Cryptography.Cng.dll
|
||||
System.Security.Cryptography.Csp.dll
|
||||
System.Security.Cryptography.Encoding.dll
|
||||
System.Security.Cryptography.Native.Apple.a
|
||||
System.Security.Cryptography.Native.Apple.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.a
|
||||
System.Security.Cryptography.Native.OpenSsl.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.so
|
||||
System.Security.Cryptography.OpenSsl.dll
|
||||
System.Security.Cryptography.Primitives.dll
|
||||
System.Security.Cryptography.X509Certificates.dll
|
||||
System.Security.Cryptography.XCertificates.dll
|
||||
System.Security.dll
|
||||
System.Security.Principal.dll
|
||||
System.Security.Principal.Windows.dll
|
||||
System.Security.SecureString.dll
|
||||
System.ServiceModel.Web.dll
|
||||
System.ServiceProcess.dll
|
||||
System.Text.Encoding.CodePages.dll
|
||||
System.Text.Encoding.dll
|
||||
System.Text.Encoding.Extensions.dll
|
||||
System.Text.Encodings.Web.dll
|
||||
System.Text.Json.dll
|
||||
System.Text.RegularExpressions.dll
|
||||
System.Threading.Channels.dll
|
||||
System.Threading.dll
|
||||
System.Threading.Overlapped.dll
|
||||
System.Threading.Tasks.Dataflow.dll
|
||||
System.Threading.Tasks.dll
|
||||
System.Threading.Tasks.Extensions.dll
|
||||
System.Threading.Tasks.Parallel.dll
|
||||
System.Threading.Thread.dll
|
||||
System.Threading.ThreadPool.dll
|
||||
System.Threading.Timer.dll
|
||||
System.Transactions.dll
|
||||
System.Transactions.Local.dll
|
||||
System.ValueTuple.dll
|
||||
System.Web.dll
|
||||
System.Web.HttpUtility.dll
|
||||
System.Windows.dll
|
||||
System.Xml.dll
|
||||
System.Xml.Linq.dll
|
||||
System.Xml.ReaderWriter.dll
|
||||
System.Xml.Serialization.dll
|
||||
System.Xml.XDocument.dll
|
||||
System.Xml.XmlDocument.dll
|
||||
System.Xml.XmlSerializer.dll
|
||||
System.Xml.XPath.dll
|
||||
System.Xml.XPath.XDocument.dll
|
||||
ucrtbase.dll
|
||||
WindowsBase.dll
|
||||
@@ -1,24 +0,0 @@
|
||||
[
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>",
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
|
||||
"TrimmedContents": {
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1,24 +0,0 @@
|
||||
[
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>",
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
|
||||
"TrimmedContents": {
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -20,12 +20,12 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
private bool _hasConnection;
|
||||
private VssConnection _connection;
|
||||
private TaskAgentHttpClient _taskAgentClient;
|
||||
private ActionsRunServerHttpClient _actionsRunServerClient;
|
||||
|
||||
public async Task ConnectAsync(Uri serverUrl, VssCredentials credentials)
|
||||
{
|
||||
_connection = await EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(100));
|
||||
_taskAgentClient = _connection.GetClient<TaskAgentHttpClient>();
|
||||
_actionsRunServerClient = _connection.GetClient<ActionsRunServerHttpClient>();
|
||||
_hasConnection = true;
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ namespace GitHub.Runner.Common
|
||||
CheckConnection();
|
||||
var jobMessage = RetryRequest<AgentJobRequestMessage>(async () =>
|
||||
{
|
||||
return await _taskAgentClient.GetJobMessageAsync(id, cancellationToken);
|
||||
return await _actionsRunServerClient.GetJobMessageAsync(id, cancellationToken);
|
||||
}, cancellationToken);
|
||||
|
||||
return jobMessage;
|
||||
|
||||
src/Runner.Common/AuthMigration.cs (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
using System;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
public class AuthMigrationEventArgs : EventArgs
|
||||
{
|
||||
public AuthMigrationEventArgs(string trace)
|
||||
{
|
||||
Trace = trace;
|
||||
}
|
||||
public string Trace { get; private set; }
|
||||
}
|
||||
}
|
||||
@@ -7,6 +7,7 @@ using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using Sdk.RSWebApi.Contracts;
|
||||
using Sdk.WebApi.WebApi.RawClient;
|
||||
|
||||
@@ -17,7 +18,16 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
|
||||
|
||||
Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version);
|
||||
Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken);
|
||||
Task DeleteSessionAsync(CancellationToken cancellationToken);
|
||||
|
||||
Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);
|
||||
|
||||
Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken token);
|
||||
|
||||
Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials);
|
||||
|
||||
Task ForceRefreshConnection(VssCredentials credentials);
|
||||
}
|
||||
|
||||
public sealed class BrokerServer : RunnerService, IBrokerServer
|
||||
@@ -29,6 +39,7 @@ namespace GitHub.Runner.Common
|
||||
|
||||
public async Task ConnectAsync(Uri serverUri, VssCredentials credentials)
|
||||
{
|
||||
Trace.Entering();
|
||||
_brokerUri = serverUri;
|
||||
|
||||
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
|
||||
@@ -44,13 +55,65 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version)
|
||||
public async Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
var jobMessage = RetryRequest<TaskAgentMessage>(
|
||||
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, cancellationToken), cancellationToken);
|
||||
var jobMessage = await _brokerHttpClient.CreateSessionAsync(session, cancellationToken);
|
||||
|
||||
return jobMessage;
|
||||
}
|
||||
|
||||
public Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
var brokerSession = RetryRequest<TaskAgentMessage>(
|
||||
async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException);
|
||||
|
||||
return brokerSession;
|
||||
}
|
||||
|
||||
public async Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
|
||||
// No retries
|
||||
await _brokerHttpClient.AcknowledgeRunnerRequestAsync(runnerRequestId, sessionId, version, status, os, architecture, cancellationToken);
|
||||
}
|
||||
|
||||
public async Task DeleteSessionAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
await _brokerHttpClient.DeleteSessionAsync(cancellationToken);
|
||||
}
|
||||
|
||||
public Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials)
|
||||
{
|
||||
if (_brokerUri != serverUri || !_hasConnection)
|
||||
{
|
||||
return ConnectAsync(serverUri, credentials);
|
||||
}
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task ForceRefreshConnection(VssCredentials credentials)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(_brokerUri?.AbsoluteUri))
|
||||
{
|
||||
return ConnectAsync(_brokerUri, credentials);
|
||||
}
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public bool ShouldRetryException(Exception ex)
|
||||
{
|
||||
if (ex is AccessDeniedException || ex is RunnerNotFoundException || ex is HostedRunnerDeprovisionedException)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
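The BrokerServer change above routes message polling through RetryRequest with a ShouldRetryException predicate, so access-denied, runner-not-found, and deprovisioned errors fail fast while other failures keep retrying. The sketch below shows that retry-with-predicate shape in isolation; the helper name, backoff, and attempt limit are illustrative, not the runner's actual RetryRequest implementation.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

static class RetrySketch
{
    // Hypothetical helper: retry an async operation unless the predicate marks the error as permanent.
    public static async Task<T> RetryAsync<T>(
        Func<Task<T>> operation,
        Func<Exception, bool> shouldRetry,
        int maxAttempts,
        CancellationToken token)
    {
        for (var attempt = 1; ; attempt++)
        {
            try
            {
                return await operation();
            }
            catch (Exception ex) when (attempt < maxAttempts && shouldRetry(ex))
            {
                // Transient failure: back off briefly, then try again.
                await Task.Delay(TimeSpan.FromSeconds(attempt), token);
            }
        }
    }
}
```

A caller passes a predicate with the same shape as ShouldRetryException above, for example `ex => ex is not AccessDeniedException`, so permanent failures surface immediately instead of consuming the retry budget.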
@@ -1,10 +1,10 @@
|
||||
using GitHub.Runner.Sdk;
|
||||
using System;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.Serialization;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
@@ -53,6 +53,9 @@ namespace GitHub.Runner.Common
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool UseV2Flow { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool UseRunnerAdminFlow { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string ServerUrlV2 { get; set; }
|
||||
|
||||
@@ -61,8 +64,20 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
get
|
||||
{
|
||||
// Old runners do not have this property. Hosted runners likely don't have this property either.
|
||||
return _isHostedServer ?? true;
|
||||
// If the value has been explicitly set, return it.
|
||||
if (_isHostedServer.HasValue)
|
||||
{
|
||||
return _isHostedServer.Value;
|
||||
}
|
||||
|
||||
// Otherwise, try to infer it from the GitHubUrl.
|
||||
if (!string.IsNullOrEmpty(GitHubUrl))
|
||||
{
|
||||
return UrlUtil.IsHostedServer(new UriBuilder(GitHubUrl));
|
||||
}
|
||||
|
||||
// Default to true since Hosted runners likely don't have this property set.
|
||||
return true;
|
||||
}
|
||||
|
||||
set
|
||||
@@ -116,11 +131,15 @@ namespace GitHub.Runner.Common
|
||||
bool IsConfigured();
|
||||
bool IsServiceConfigured();
|
||||
bool HasCredentials();
|
||||
bool IsMigratedConfigured();
|
||||
CredentialData GetCredentials();
|
||||
CredentialData GetMigratedCredentials();
|
||||
RunnerSettings GetSettings();
|
||||
RunnerSettings GetMigratedSettings();
|
||||
void SaveCredential(CredentialData credential);
|
||||
void SaveMigratedCredential(CredentialData credential);
|
||||
void SaveSettings(RunnerSettings settings);
|
||||
void SaveMigratedSettings(RunnerSettings settings);
|
||||
void DeleteCredential();
|
||||
void DeleteMigratedCredential();
|
||||
void DeleteSettings();
|
||||
@@ -130,6 +149,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
private string _binPath;
|
||||
private string _configFilePath;
|
||||
private string _migratedConfigFilePath;
|
||||
private string _credFilePath;
|
||||
private string _migratedCredFilePath;
|
||||
private string _serviceConfigFilePath;
|
||||
@@ -137,6 +157,7 @@ namespace GitHub.Runner.Common
|
||||
private CredentialData _creds;
|
||||
private CredentialData _migratedCreds;
|
||||
private RunnerSettings _settings;
|
||||
private RunnerSettings _migratedSettings;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
@@ -154,6 +175,9 @@ namespace GitHub.Runner.Common
|
||||
_configFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Runner);
|
||||
Trace.Info("ConfigFilePath: {0}", _configFilePath);
|
||||
|
||||
_migratedConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedRunner);
|
||||
Trace.Info("MigratedConfigFilePath: {0}", _migratedConfigFilePath);
|
||||
|
||||
_credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
|
||||
Trace.Info("CredFilePath: {0}", _credFilePath);
|
||||
|
||||
@@ -169,7 +193,7 @@ namespace GitHub.Runner.Common
|
||||
public bool HasCredentials()
|
||||
{
|
||||
Trace.Info("HasCredentials()");
|
||||
bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists;
|
||||
bool credsStored = new FileInfo(_credFilePath).Exists || new FileInfo(_migratedCredFilePath).Exists;
|
||||
Trace.Info("stored {0}", credsStored);
|
||||
return credsStored;
|
||||
}
|
||||
@@ -177,7 +201,7 @@ namespace GitHub.Runner.Common
|
||||
public bool IsConfigured()
|
||||
{
|
||||
Trace.Info("IsConfigured()");
|
||||
bool configured = new FileInfo(_configFilePath).Exists;
|
||||
bool configured = new FileInfo(_configFilePath).Exists || new FileInfo(_migratedConfigFilePath).Exists;
|
||||
Trace.Info("IsConfigured: {0}", configured);
|
||||
return configured;
|
||||
}
|
||||
@@ -185,11 +209,19 @@ namespace GitHub.Runner.Common
|
||||
public bool IsServiceConfigured()
|
||||
{
|
||||
Trace.Info("IsServiceConfigured()");
|
||||
bool serviceConfigured = (new FileInfo(_serviceConfigFilePath)).Exists;
|
||||
bool serviceConfigured = new FileInfo(_serviceConfigFilePath).Exists;
|
||||
Trace.Info($"IsServiceConfigured: {serviceConfigured}");
|
||||
return serviceConfigured;
|
||||
}
|
||||
|
||||
public bool IsMigratedConfigured()
|
||||
{
|
||||
Trace.Info("IsMigratedConfigured()");
|
||||
bool configured = new FileInfo(_migratedConfigFilePath).Exists;
|
||||
Trace.Info("IsMigratedConfigured: {0}", configured);
|
||||
return configured;
|
||||
}
|
||||
|
||||
public CredentialData GetCredentials()
|
||||
{
|
||||
if (_creds == null)
|
||||
@@ -229,6 +261,25 @@ namespace GitHub.Runner.Common
|
||||
return _settings;
|
||||
}
|
||||
|
||||
public RunnerSettings GetMigratedSettings()
|
||||
{
|
||||
if (_migratedSettings == null)
|
||||
{
|
||||
RunnerSettings configuredSettings = null;
|
||||
if (File.Exists(_migratedConfigFilePath))
|
||||
{
|
||||
string json = File.ReadAllText(_migratedConfigFilePath, Encoding.UTF8);
|
||||
Trace.Info($"Read migrated setting file: {json.Length} chars");
|
||||
configuredSettings = StringUtil.ConvertFromJson<RunnerSettings>(json);
|
||||
}
|
||||
|
||||
ArgUtil.NotNull(configuredSettings, nameof(configuredSettings));
|
||||
_migratedSettings = configuredSettings;
|
||||
}
|
||||
|
||||
return _migratedSettings;
|
||||
}
|
||||
|
||||
public void SaveCredential(CredentialData credential)
|
||||
{
|
||||
Trace.Info("Saving {0} credential @ {1}", credential.Scheme, _credFilePath);
|
||||
@@ -244,6 +295,21 @@ namespace GitHub.Runner.Common
|
||||
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void SaveMigratedCredential(CredentialData credential)
|
||||
{
|
||||
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
|
||||
if (File.Exists(_migratedCredFilePath))
|
||||
{
|
||||
// Delete existing credential file first, since the file is hidden and not able to overwrite.
|
||||
Trace.Info("Delete exist runner migrated credential file.");
|
||||
IOUtil.DeleteFile(_migratedCredFilePath);
|
||||
}
|
||||
|
||||
IOUtil.SaveObject(credential, _migratedCredFilePath);
|
||||
Trace.Info("Migrated Credentials Saved.");
|
||||
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void SaveSettings(RunnerSettings settings)
|
||||
{
|
||||
Trace.Info("Saving runner settings.");
|
||||
@@ -259,6 +325,21 @@ namespace GitHub.Runner.Common
|
||||
File.SetAttributes(_configFilePath, File.GetAttributes(_configFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void SaveMigratedSettings(RunnerSettings settings)
|
||||
{
|
||||
Trace.Info("Saving runner migrated settings");
|
||||
if (File.Exists(_migratedConfigFilePath))
|
||||
{
|
||||
// Delete existing settings file first, since the file is hidden and not able to overwrite.
|
||||
Trace.Info("Delete exist runner migrated settings file.");
|
||||
IOUtil.DeleteFile(_migratedConfigFilePath);
|
||||
}
|
||||
|
||||
IOUtil.SaveObject(settings, _migratedConfigFilePath);
|
||||
Trace.Info("Migrated Settings Saved.");
|
||||
File.SetAttributes(_migratedConfigFilePath, File.GetAttributes(_migratedConfigFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void DeleteCredential()
|
||||
{
|
||||
IOUtil.Delete(_credFilePath, default(CancellationToken));
|
||||
@@ -273,6 +354,12 @@ namespace GitHub.Runner.Common
|
||||
public void DeleteSettings()
|
||||
{
|
||||
IOUtil.Delete(_configFilePath, default(CancellationToken));
|
||||
IOUtil.Delete(_migratedConfigFilePath, default(CancellationToken));
|
||||
}
|
||||
|
||||
public void DeleteMigratedSettings()
|
||||
{
|
||||
IOUtil.Delete(_migratedConfigFilePath, default(CancellationToken));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
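The rewritten IsHostedServer getter resolves in three steps: an explicitly stored value wins, otherwise the answer is inferred from GitHubUrl, otherwise it defaults to true because hosted runners typically never persist the property. The sketch below restates that precedence as a standalone method; the URL check is a simplified stand-in for UrlUtil.IsHostedServer, not its real implementation.

```csharp
using System;

static class HostedServerSketch
{
    // Same precedence as the new getter: explicit value, then URL inference, then default to true.
    public static bool ResolveIsHostedServer(bool? explicitValue, string gitHubUrl)
    {
        if (explicitValue.HasValue)
        {
            return explicitValue.Value;
        }

        if (!string.IsNullOrEmpty(gitHubUrl))
        {
            // Simplified stand-in for UrlUtil.IsHostedServer(new UriBuilder(gitHubUrl)):
            // treat github.com and its subdomains as the hosted service.
            var host = new UriBuilder(gitHubUrl).Host;
            return host.Equals("github.com", StringComparison.OrdinalIgnoreCase)
                || host.EndsWith(".github.com", StringComparison.OrdinalIgnoreCase);
        }

        return true;
    }
}
```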
@@ -18,6 +18,7 @@ namespace GitHub.Runner.Common
|
||||
public enum WellKnownConfigFile
|
||||
{
|
||||
Runner,
|
||||
MigratedRunner,
|
||||
Credentials,
|
||||
MigratedCredentials,
|
||||
RSACredentials,
|
||||
@@ -69,6 +70,8 @@ namespace GitHub.Runner.Common
|
||||
public static readonly OSPlatform Platform = OSPlatform.OSX;
|
||||
#elif OS_WINDOWS
|
||||
public static readonly OSPlatform Platform = OSPlatform.Windows;
|
||||
#else
|
||||
public static readonly OSPlatform Platform = OSPlatform.Linux;
|
||||
#endif
|
||||
|
||||
#if X86
|
||||
@@ -79,6 +82,8 @@ namespace GitHub.Runner.Common
|
||||
public static readonly Architecture PlatformArchitecture = Architecture.Arm;
|
||||
#elif ARM64
|
||||
public static readonly Architecture PlatformArchitecture = Architecture.Arm64;
|
||||
#else
|
||||
public static readonly Architecture PlatformArchitecture = Architecture.X64;
|
||||
#endif
|
||||
|
||||
public static readonly TimeSpan ExitOnUnloadTimeout = TimeSpan.FromSeconds(30);
|
||||
@@ -149,15 +154,40 @@ namespace GitHub.Runner.Common
|
||||
public const int RetryableError = 2;
|
||||
public const int RunnerUpdating = 3;
|
||||
public const int RunOnceRunnerUpdating = 4;
|
||||
public const int SessionConflict = 5;
|
||||
// Temporary error code to indicate that the runner configuration has been refreshed
|
||||
// and the runner should be restarted. This is a temporary code and will be removed in the future after
|
||||
// the runner is migrated to runner admin.
|
||||
public const int RunnerConfigurationRefreshed = 6;
|
||||
}
|
||||
|
||||
public static class Features
|
||||
{
|
||||
public static readonly string DiskSpaceWarning = "runner.diskspace.warning";
|
||||
public static readonly string Node16Warning = "DistributedTask.AddWarningToNode16Action";
|
||||
public static readonly string LogTemplateErrorsAsDebugMessages = "DistributedTask.LogTemplateErrorsAsDebugMessages";
|
||||
public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate";
|
||||
public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
|
||||
public static readonly string AddCheckRunIdToJobContext = "actions_add_check_run_id_to_job_context";
|
||||
public static readonly string DisplayHelpfulActionsDownloadErrors = "actions_display_helpful_actions_download_errors";
|
||||
public static readonly string ContainerActionRunnerTemp = "actions_container_action_runner_temp";
|
||||
public static readonly string SnapshotPreflightHostedRunnerCheck = "actions_snapshot_preflight_hosted_runner_check";
|
||||
public static readonly string SnapshotPreflightImageGenPoolCheck = "actions_snapshot_preflight_image_gen_pool_check";
|
||||
}
|
||||
|
||||
// Node version migration related constants
|
||||
public static class NodeMigration
|
||||
{
|
||||
// Node versions
|
||||
public static readonly string Node20 = "node20";
|
||||
public static readonly string Node24 = "node24";
|
||||
|
||||
// Environment variables for controlling node version selection
|
||||
public static readonly string ForceNode24Variable = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE24";
|
||||
public static readonly string AllowUnsecureNodeVersionVariable = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
|
||||
|
||||
// Feature flags for controlling the migration phases
|
||||
public static readonly string UseNode24ByDefaultFlag = "actions.runner.usenode24bydefault";
|
||||
public static readonly string RequireNode24Flag = "actions.runner.requirenode24";
|
||||
}
|
||||
|
||||
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
|
||||
@@ -171,8 +201,6 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
|
||||
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
|
||||
public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
|
||||
public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/";
|
||||
public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings";
|
||||
}
|
||||
|
||||
public static class RunnerEvent
|
||||
@@ -243,17 +271,17 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string RequireJobContainer = "ACTIONS_RUNNER_REQUIRE_JOB_CONTAINER";
|
||||
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
|
||||
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
|
||||
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
|
||||
}
|
||||
|
||||
public static class Agent
|
||||
{
|
||||
public static readonly string ToolsDirectory = "agent.ToolsDirectory";
|
||||
|
||||
// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
|
||||
// Set this env var to "nodeXY" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
|
||||
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
|
||||
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
|
||||
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
|
||||
public static readonly string ActionArchiveCacheDirectory = "ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE";
|
||||
}
|
||||
|
||||
public static class System
|
||||
|
||||
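The NodeMigration constants above name the node versions, environment variables, and feature flags that drive the node20-to-node24 rollout. The routine below is an illustrative selector built only on those names; it is not the runner's actual logic and it ignores the ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION escape hatch.

```csharp
using System;

static class NodeVersionSketch
{
    // Illustrative only: pick the node version for JavaScript actions from the rollout
    // flags and the FORCE_JAVASCRIPT_ACTIONS_TO_NODE24 env var named above.
    public static string PickNodeVersion(bool useNode24ByDefault, bool requireNode24)
    {
        if (requireNode24)
        {
            return "node24"; // final phase: node24 is mandatory
        }

        var forceNode24 = string.Equals(
            Environment.GetEnvironmentVariable("FORCE_JAVASCRIPT_ACTIONS_TO_NODE24"),
            "true",
            StringComparison.OrdinalIgnoreCase);

        return (useNode24ByDefault || forceNode24) ? "node24" : "node20";
    }
}
```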
@@ -15,6 +15,7 @@ using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.Logging;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.WebApi.Jwt;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
@@ -36,6 +37,12 @@ namespace GitHub.Runner.Common
|
||||
event EventHandler Unloading;
|
||||
void ShutdownRunner(ShutdownReason reason);
|
||||
void WritePerfCounter(string counter);
|
||||
void LoadDefaultUserAgents();
|
||||
|
||||
bool AllowAuthMigration { get; }
|
||||
void EnableAuthMigration(string trace);
|
||||
void DeferAuthMigration(TimeSpan deferred, string trace);
|
||||
event EventHandler<AuthMigrationEventArgs> AuthMigrationChanged;
|
||||
}
|
||||
|
||||
public enum StartupType
|
||||
@@ -67,17 +74,28 @@ namespace GitHub.Runner.Common
|
||||
private StartupType _startupType;
|
||||
private string _perfFile;
|
||||
private RunnerWebProxy _webProxy = new();
|
||||
private string _hostType = string.Empty;
|
||||
|
||||
// disable auth migration by default
|
||||
private readonly ManualResetEventSlim _allowAuthMigration = new ManualResetEventSlim(false);
|
||||
private DateTime _deferredAuthMigrationTime = DateTime.MaxValue;
|
||||
private readonly object _authMigrationLock = new object();
|
||||
private CancellationTokenSource _authMigrationAutoReenableTaskCancellationTokenSource = new();
|
||||
private Task _authMigrationAutoReenableTask;
|
||||
|
||||
public event EventHandler Unloading;
|
||||
public event EventHandler<AuthMigrationEventArgs> AuthMigrationChanged;
|
||||
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
|
||||
public ShutdownReason RunnerShutdownReason { get; private set; }
|
||||
public ISecretMasker SecretMasker => _secretMasker;
|
||||
public List<ProductInfoHeaderValue> UserAgents => _userAgents;
|
||||
public RunnerWebProxy WebProxy => _webProxy;
|
||||
public bool AllowAuthMigration => _allowAuthMigration.IsSet;
|
||||
public HostContext(string hostType, string logFile = null)
|
||||
{
|
||||
// Validate args.
|
||||
ArgUtil.NotNullOrEmpty(hostType, nameof(hostType));
|
||||
_hostType = hostType;
|
||||
|
||||
_loadContext = AssemblyLoadContext.GetLoadContext(typeof(HostContext).GetTypeInfo().Assembly);
|
||||
_loadContext.Unloading += LoadContext_Unloading;
|
||||
@@ -196,14 +214,88 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||
{
|
||||
_trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
|
||||
}
|
||||
|
||||
LoadDefaultUserAgents();
|
||||
}
|
||||
|
||||
// marked as internal for testing
|
||||
internal async Task AuthMigrationAuthReenableAsync(TimeSpan refreshInterval, CancellationToken token)
|
||||
{
|
||||
try
|
||||
{
|
||||
while (!token.IsCancellationRequested)
|
||||
{
|
||||
_trace.Verbose($"Auth migration defer timer is set to expire at {_deferredAuthMigrationTime.ToString("O")}. AllowAuthMigration: {_allowAuthMigration.IsSet}.");
|
||||
await Task.Delay(refreshInterval, token);
|
||||
if (!_allowAuthMigration.IsSet && DateTime.UtcNow > _deferredAuthMigrationTime)
|
||||
{
|
||||
_trace.Info($"Auth migration defer timer expired. Allowing auth migration.");
|
||||
EnableAuthMigration("Auth migration defer timer expired.");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (TaskCanceledException)
|
||||
{
|
||||
// Task was cancelled, exit the loop.
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_trace.Info("Error in auth migration reenable task.");
|
||||
_trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
public void EnableAuthMigration(string trace)
|
||||
{
|
||||
_allowAuthMigration.Set();
|
||||
|
||||
lock (_authMigrationLock)
|
||||
{
|
||||
if (_authMigrationAutoReenableTask == null)
|
||||
{
|
||||
var refreshIntervalInMS = 60 * 1000;
|
||||
#if DEBUG
|
||||
// For L0, we will refresh faster
|
||||
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL")))
|
||||
{
|
||||
refreshIntervalInMS = int.Parse(Environment.GetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL"));
|
||||
}
|
||||
#endif
|
||||
_authMigrationAutoReenableTask = AuthMigrationAuthReenableAsync(TimeSpan.FromMilliseconds(refreshIntervalInMS), _authMigrationAutoReenableTaskCancellationTokenSource.Token);
|
||||
}
|
||||
}
|
||||
|
||||
_trace.Info($"Enable auth migration at {DateTime.UtcNow.ToString("O")}.");
|
||||
AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace));
|
||||
}
|
||||
|
||||
public void DeferAuthMigration(TimeSpan deferred, string trace)
|
||||
{
|
||||
_allowAuthMigration.Reset();
|
||||
|
||||
// defer migration for a while
|
||||
lock (_authMigrationLock)
|
||||
{
|
||||
_deferredAuthMigrationTime = DateTime.UtcNow.Add(deferred);
|
||||
}
|
||||
|
||||
_trace.Info($"Disabled auth migration until {_deferredAuthMigrationTime.ToString("O")}.");
|
||||
AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace));
|
||||
}
|
||||
|
||||
public void LoadDefaultUserAgents()
|
||||
{
|
||||
if (string.IsNullOrEmpty(WebProxy.HttpProxyAddress) && string.IsNullOrEmpty(WebProxy.HttpsProxyAddress))
|
||||
{
|
||||
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
|
||||
}
|
||||
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||
else
|
||||
{
|
||||
_trace.Warning($"Runner is running under insecure mode: HTTPS server certifcate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
|
||||
_userAgents.Add(new ProductInfoHeaderValue("HttpProxyConfigured", bool.TrueString));
|
||||
}
|
||||
|
||||
var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
|
||||
@@ -215,6 +307,36 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
_userAgents.Add(new ProductInfoHeaderValue("ClientId", clientId));
|
||||
}
|
||||
|
||||
// for Hosted runner, we can pull orchestrationId from JWT claims of the runner listening token.
|
||||
if (credData != null &&
|
||||
credData.Scheme == Constants.Configuration.OAuthAccessToken &&
|
||||
credData.Data.TryGetValue(Constants.Runner.CommandLine.Args.Token, out var accessToken) &&
|
||||
!string.IsNullOrEmpty(accessToken))
|
||||
{
|
||||
try
|
||||
{
|
||||
var jwt = JsonWebToken.Create(accessToken);
|
||||
var claims = jwt.ExtractClaims();
|
||||
var orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orch_id", StringComparison.OrdinalIgnoreCase))?.Value;
|
||||
if (string.IsNullOrEmpty(orchestrationId))
|
||||
{
|
||||
// fallback to orchid for C# actions-service
|
||||
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(orchestrationId))
|
||||
{
|
||||
_trace.Info($"Pull OrchestrationId {orchestrationId} from runner JWT claims");
|
||||
_userAgents.Insert(0, new ProductInfoHeaderValue("OrchestrationId", orchestrationId));
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_trace.Error("Fail to extract OrchestrationId from runner JWT claims");
|
||||
_trace.Error(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
|
||||
@@ -240,6 +362,11 @@ namespace GitHub.Runner.Common
|
||||
_trace.Info($"Adding extra user agent '{extraUserAgentHeader}' to all HTTP requests.");
|
||||
_userAgents.Add(extraUserAgentHeader);
|
||||
}
|
||||
|
||||
var currentProcess = Process.GetCurrentProcess();
|
||||
_userAgents.Add(new ProductInfoHeaderValue("Pid", currentProcess.Id.ToString()));
|
||||
_userAgents.Add(new ProductInfoHeaderValue("CreationTime", Uri.EscapeDataString(DateTime.UtcNow.ToString("O"))));
|
||||
_userAgents.Add(new ProductInfoHeaderValue($"({_hostType})"));
|
||||
}
|
||||
|
||||
public string GetDirectory(WellKnownDirectory directory)
|
||||
@@ -326,6 +453,12 @@ namespace GitHub.Runner.Common
|
||||
".runner");
|
||||
break;
|
||||
|
||||
case WellKnownConfigFile.MigratedRunner:
|
||||
path = Path.Combine(
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
".runner_migrated");
|
||||
break;
|
||||
|
||||
case WellKnownConfigFile.Credentials:
|
||||
path = Path.Combine(
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
@@ -526,6 +659,18 @@ namespace GitHub.Runner.Common
|
||||
_loadContext.Unloading -= LoadContext_Unloading;
|
||||
_loadContext = null;
|
||||
}
|
||||
|
||||
if (_authMigrationAutoReenableTask != null)
|
||||
{
|
||||
_authMigrationAutoReenableTaskCancellationTokenSource?.Cancel();
|
||||
}
|
||||
|
||||
if (_authMigrationAutoReenableTaskCancellationTokenSource != null)
|
||||
{
|
||||
_authMigrationAutoReenableTaskCancellationTokenSource?.Dispose();
|
||||
_authMigrationAutoReenableTaskCancellationTokenSource = null;
|
||||
}
|
||||
|
||||
_httpTraceSubscription?.Dispose();
|
||||
_diagListenerSubscription?.Dispose();
|
||||
_traceManager?.Dispose();
|
||||
@@ -612,7 +757,7 @@ namespace GitHub.Runner.Common
|
||||
payload[0] = Enum.Parse(typeof(GitHub.Services.Common.VssCredentialsType), ((int)payload[0]).ToString());
|
||||
}
|
||||
|
||||
if (payload.Length > 0)
|
||||
if (payload.Length > 0 && !string.IsNullOrEmpty(eventData.Message))
|
||||
{
|
||||
message = String.Format(eventData.Message.Replace("%n", Environment.NewLine), payload);
|
||||
}
|
||||
|
||||
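The HostContext changes gate auth migration behind a ManualResetEventSlim that starts unset, let DeferAuthMigration push a re-enable deadline into the future, and spin up a background loop that flips the gate back on once the deadline passes. A stripped-down sketch of that defer/auto-reenable pattern, with illustrative names:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

sealed class DeferrableSwitch
{
    private readonly ManualResetEventSlim _enabled = new(false); // disabled by default
    private readonly object _lock = new();
    private DateTime _reenableAtUtc = DateTime.MaxValue;

    public bool IsEnabled => _enabled.IsSet;

    public void Enable() => _enabled.Set();

    public void Defer(TimeSpan period)
    {
        _enabled.Reset();
        lock (_lock)
        {
            _reenableAtUtc = DateTime.UtcNow.Add(period);
        }
    }

    // Background loop that turns the switch back on once the defer window expires.
    public async Task AutoReenableAsync(TimeSpan pollInterval, CancellationToken token)
    {
        try
        {
            while (!token.IsCancellationRequested)
            {
                await Task.Delay(pollInterval, token);
                if (!_enabled.IsSet && DateTime.UtcNow > _reenableAtUtc)
                {
                    _enabled.Set();
                }
            }
        }
        catch (TaskCanceledException)
        {
            // Shutting down; nothing left to do.
        }
    }
}
```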
@@ -4,6 +4,7 @@ using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Net.Security;
|
||||
using System.Net.WebSockets;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
@@ -179,6 +180,10 @@ namespace GitHub.Runner.Common
|
||||
userAgentValues.AddRange(UserAgentUtility.GetDefaultRestUserAgent());
|
||||
userAgentValues.AddRange(HostContext.UserAgents);
|
||||
this._websocketClient.Options.SetRequestHeader("User-Agent", string.Join(" ", userAgentValues.Select(x => x.ToString())));
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||
{
|
||||
this._websocketClient.Options.RemoteCertificateValidationCallback = (_, _, _, _) => true;
|
||||
}
|
||||
|
||||
this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
|
||||
}
|
||||
|
||||
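The hunk above makes the live-console websocket honor GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY by accepting any server certificate. For comparison, the same opt-out applied to a plain HttpClientHandler would look like the sketch below; it disables certificate validation entirely, so it is only appropriate for debugging against self-signed endpoints, and the simple "true" comparison stands in for the runner's more permissive boolean parsing.

```csharp
using System;
using System.Net.Http;

static class InsecureHandlerSketch
{
    public static HttpClientHandler Create()
    {
        var handler = new HttpClientHandler();

        if (string.Equals(
                Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY"),
                "true",
                StringComparison.OrdinalIgnoreCase))
        {
            // WARNING: accepts any certificate, mirroring the websocket opt-out above.
            handler.ServerCertificateCustomValidationCallback = (_, _, _, _) => true;
        }

        return handler;
    }
}
```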
@@ -1,6 +1,7 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
@@ -14,10 +15,11 @@ namespace GitHub.Runner.Common
|
||||
[ServiceLocator(Default = typeof(JobServerQueue))]
|
||||
public interface IJobServerQueue : IRunnerService, IThrottlingReporter
|
||||
{
|
||||
IList<JobTelemetry> JobTelemetries { get; }
|
||||
TaskCompletionSource<int> JobRecordUpdated { get; }
|
||||
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
||||
Task ShutdownAsync();
|
||||
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false);
|
||||
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false);
|
||||
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
|
||||
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
|
||||
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
|
||||
@@ -69,13 +71,19 @@ namespace GitHub.Runner.Common
|
||||
private Task[] _allDequeueTasks;
|
||||
private readonly TaskCompletionSource<int> _jobCompletionSource = new();
|
||||
private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
|
||||
private readonly List<JobTelemetry> _jobTelemetries = new();
|
||||
private bool _queueInProcess = false;
|
||||
private bool _resultsServiceOnly = false;
|
||||
private int _resultsServiceExceptionsCount = 0;
|
||||
private Stopwatch _resultsUploadTimer = new();
|
||||
private Stopwatch _actionsUploadTimer = new();
|
||||
|
||||
public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;
|
||||
|
||||
public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
||||
|
||||
public IList<JobTelemetry> JobTelemetries => _jobTelemetries;
|
||||
|
||||
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
|
||||
// Then the dequeue will happen every 500ms.
|
||||
// In this way, customer still can get instance live console output on job start,
|
||||
@@ -87,6 +95,7 @@ namespace GitHub.Runner.Common
|
||||
private bool _firstConsoleOutputs = true;
|
||||
|
||||
private bool _resultsClientInitiated = false;
|
||||
private bool _enableTelemetry = false;
|
||||
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
@@ -96,14 +105,14 @@ namespace GitHub.Runner.Common
|
||||
_resultsServer = hostContext.GetService<IResultsServer>();
|
||||
}
|
||||
|
||||
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false)
|
||||
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false)
|
||||
{
|
||||
Trace.Entering();
|
||||
_resultsServiceOnly = resultServiceOnly;
|
||||
_resultsServiceOnly = resultsServiceOnly;
|
||||
|
||||
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
if (!resultServiceOnly)
|
||||
if (!resultsServiceOnly)
|
||||
{
|
||||
_jobServer.InitializeWebsocketClient(serviceEndPoint);
|
||||
}
|
||||
@@ -119,17 +128,23 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
string liveConsoleFeedUrl = null;
|
||||
Trace.Info("Initializing results client");
|
||||
if (resultServiceOnly
|
||||
if (resultsServiceOnly
|
||||
&& serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
|
||||
&& !string.IsNullOrEmpty(feedStreamUrl))
|
||||
{
|
||||
liveConsoleFeedUrl = feedStreamUrl;
|
||||
}
|
||||
|
||||
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
|
||||
jobRequest.Variables.TryGetValue("system.github.results_upload_with_sdk", out VariableValue resultsUseSdkVariable);
|
||||
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken, StringUtil.ConvertToBoolean(resultsUseSdkVariable?.Value));
|
||||
_resultsClientInitiated = true;
|
||||
}
|
||||
|
||||
// Enable telemetry if we have both results service and actions service
|
||||
if (_resultsClientInitiated && !_resultsServiceOnly)
|
||||
{
|
||||
_enableTelemetry = true;
|
||||
}
|
||||
|
||||
if (_queueInProcess)
|
||||
{
|
||||
Trace.Info("No-opt, all queue process tasks are running.");
|
||||
@@ -211,6 +226,12 @@ namespace GitHub.Runner.Common
|
||||
await _resultsServer.DisposeAsync();
|
||||
|
||||
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
var uploadTimeComparison = $"Actions upload time: {_actionsUploadTimer.ElapsedMilliseconds} ms, Result upload time: {_resultsUploadTimer.ElapsedMilliseconds} ms";
|
||||
Trace.Info(uploadTimeComparison);
|
||||
_jobTelemetries.Add(new JobTelemetry() { Type = JobTelemetryType.General, Message = uploadTimeComparison });
|
||||
}
|
||||
}
|
||||
|
||||
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
|
||||
@@ -456,6 +477,10 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
try
|
||||
{
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
_actionsUploadTimer.Start();
|
||||
}
|
||||
await UploadFile(file);
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -471,6 +496,13 @@ namespace GitHub.Runner.Common
|
||||
// _fileUploadQueue.Enqueue(file);
|
||||
//}
|
||||
}
|
||||
finally
|
||||
{
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
_actionsUploadTimer.Stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
|
||||
@@ -517,10 +549,18 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
try
|
||||
{
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
_resultsUploadTimer.Start();
|
||||
}
|
||||
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
await UploadSummaryFile(file);
|
||||
}
|
||||
if (string.Equals(file.Type, CoreAttachmentType.ResultsDiagnosticLog, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
await UploadResultsDiagnosticLogsFile(file);
|
||||
}
|
||||
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (file.RecordId != _jobTimelineRecordId)
|
||||
@@ -540,11 +580,20 @@ namespace GitHub.Runner.Common
|
||||
Trace.Info("Catch exception during file upload to results, keep going since the process is best effort.");
|
||||
Trace.Error(ex);
|
||||
errorCount++;
|
||||
|
||||
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
|
||||
_resultsClientInitiated = false;
|
||||
|
||||
SendResultsTelemetry(ex);
|
||||
_resultsServiceExceptionsCount++;
|
||||
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs
|
||||
if (!_resultsServiceOnly && _resultsServiceExceptionsCount > 3)
|
||||
{
|
||||
_resultsClientInitiated = false;
|
||||
SendResultsTelemetry(ex);
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
_resultsUploadTimer.Stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -564,7 +613,7 @@ namespace GitHub.Runner.Common
|
||||
|
||||
private void SendResultsTelemetry(Exception ex)
|
||||
{
|
||||
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {ex.Message}" };
|
||||
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {HostContext.SecretMasker.MaskSecrets(ex.Message)}" };
|
||||
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
|
||||
|
||||
var telemetryRecord = new TimelineRecord()
|
||||
@@ -660,9 +709,13 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Trace.Info("Catch exception during update steps, skip update Results.");
|
||||
Trace.Error(e);
|
||||
_resultsClientInitiated = false;
|
||||
|
||||
SendResultsTelemetry(e);
|
||||
_resultsServiceExceptionsCount++;
|
||||
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs
|
||||
if (!_resultsServiceOnly && _resultsServiceExceptionsCount > 3)
|
||||
{
|
||||
_resultsClientInitiated = false;
|
||||
SendResultsTelemetry(e);
|
||||
}
|
||||
}
|
||||
|
||||
if (_bufferedRetryRecords.Remove(update.TimelineId))
|
||||
@@ -881,6 +934,17 @@ namespace GitHub.Runner.Common
|
||||
await UploadResultsFile(file, summaryHandler);
|
||||
}
|
||||
|
||||
private async Task UploadResultsDiagnosticLogsFile(ResultsUploadFileInfo file)
|
||||
{
|
||||
Trace.Info($"Starting to upload diagnostic logs file to results service {file.Name}, {file.Path}");
|
||||
ResultsFileUploadHandler diagnosticLogsHandler = async (file) =>
|
||||
{
|
||||
await _resultsServer.CreateResultsDiagnosticLogsAsync(file.PlanId, file.JobId, file.Path, CancellationToken.None);
|
||||
};
|
||||
|
||||
await UploadResultsFile(file, diagnosticLogsHandler);
|
||||
}
|
||||
|
||||
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
|
||||
{
|
||||
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
|
||||
|
||||
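The JobServerQueue change stops disabling Results uploads on the first error; it now counts exceptions and only gives up after more than three, and never when Results is the only sink serving logs. A condensed sketch of that guard, with illustrative names:

```csharp
using System;

sealed class ResultsUploadGuard
{
    private readonly bool _resultsIsOnlySink;
    private int _failureCount;

    public ResultsUploadGuard(bool resultsIsOnlySink) => _resultsIsOnlySink = resultsIsOnlySink;

    public bool UploadsEnabled { get; private set; } = true;

    // Mirrors the new behavior: tolerate a few Results failures, but keep uploading
    // when Results is the only destination for logs.
    public void OnFailure(Exception ex, Action<Exception> reportTelemetry)
    {
        _failureCount++;

        if (!_resultsIsOnlySink && _failureCount > 3)
        {
            UploadsEnabled = false;
            reportTelemetry(ex);
        }
    }
}
```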
@@ -1,11 +1,12 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.Launch.Client;
|
||||
using GitHub.Services.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
@@ -14,7 +15,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
void InitializeLaunchClient(Uri uri, string token);
|
||||
|
||||
Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken);
|
||||
Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken, bool displayHelpfulActionsDownloadErrors);
|
||||
}
|
||||
|
||||
public sealed class LaunchServer : RunnerService, ILaunchServer
|
||||
@@ -23,17 +24,34 @@ namespace GitHub.Runner.Common
|
||||
|
||||
public void InitializeLaunchClient(Uri uri, string token)
|
||||
{
|
||||
var httpMessageHandler = HostContext.CreateHttpClientHandler();
|
||||
this._launchClient = new LaunchHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
|
||||
// Using default 100 timeout
|
||||
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
|
||||
|
||||
// Create retry handler
|
||||
IEnumerable<DelegatingHandler> delegatingHandlers = new List<DelegatingHandler>();
|
||||
if (settings.MaxRetryRequest > 0)
|
||||
{
|
||||
delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) };
|
||||
}
|
||||
|
||||
// Setup RawHttpMessageHandler without credentials
|
||||
var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings);
|
||||
var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
|
||||
|
||||
this._launchClient = new LaunchHttpClient(uri, pipeline, token, disposeHandler: true);
|
||||
}
|
||||
|
||||
public Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList,
|
||||
CancellationToken cancellationToken)
|
||||
CancellationToken cancellationToken, bool displayHelpfulActionsDownloadErrors)
|
||||
{
|
||||
if (_launchClient != null)
|
||||
{
|
||||
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
|
||||
cancellationToken: cancellationToken);
|
||||
if (!displayHelpfulActionsDownloadErrors)
|
||||
{
|
||||
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
|
||||
cancellationToken: cancellationToken);
|
||||
}
|
||||
return _launchClient.GetResolveActionsDownloadInfoAsyncV2(planId, jobId, actionReferenceList, cancellationToken);
|
||||
}
|
||||
|
||||
throw new InvalidOperationException("Launch client is not initialized.");
src/Runner.Common/RedirectMessageHandler.cs (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
using System;
|
||||
using System.ComponentModel;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
/// <summary>
|
||||
/// Handles redirects for Http requests
|
||||
/// </summary>
|
||||
[EditorBrowsable(EditorBrowsableState.Never)]
|
||||
public class RedirectMessageHandler : DelegatingHandler
|
||||
{
|
||||
public RedirectMessageHandler(ITraceWriter trace)
|
||||
{
|
||||
Trace = trace;
|
||||
}
|
||||
|
||||
protected override async Task<HttpResponseMessage> SendAsync(
|
||||
HttpRequestMessage request,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
HttpResponseMessage response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (response != null &&
|
||||
IsRedirect(response.StatusCode) &&
|
||||
response.Headers.Location != null)
|
||||
{
|
||||
Trace.Info($"Redirecting to '{response.Headers.Location}'.");
|
||||
|
||||
request = await CloneAsync(request, response.Headers.Location).ConfigureAwait(false);
|
||||
|
||||
response.Dispose();
|
||||
|
||||
response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
private static bool IsRedirect(HttpStatusCode statusCode)
|
||||
{
|
||||
return (int)statusCode >= 300 && (int)statusCode < 400;
|
||||
}
|
||||
|
||||
private static async Task<HttpRequestMessage> CloneAsync(HttpRequestMessage request, Uri requestUri)
|
||||
{
|
||||
var clone = new HttpRequestMessage(request.Method, requestUri)
|
||||
{
|
||||
Version = request.Version
|
||||
};
|
||||
|
||||
request.Headers.ForEach(header => clone.Headers.TryAddWithoutValidation(header.Key, header.Value));
|
||||
|
||||
request.Options.ForEach(option => clone.Options.Set(new HttpRequestOptionsKey<object>(option.Key), option.Value));
|
||||
|
||||
if (request.Content != null)
|
||||
{
|
||||
clone.Content = new ByteArrayContent(await request.Content.ReadAsByteArrayAsync().ConfigureAwait(false));
|
||||
|
||||
request.Content.Headers.ForEach(header => clone.Content.Headers.TryAddWithoutValidation(header.Key, header.Value));
|
||||
}
|
||||
|
||||
return clone;
|
||||
}
|
||||
|
||||
private readonly ITraceWriter Trace;
|
||||
}
|
||||
}
|
||||
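RedirectMessageHandler is an ordinary DelegatingHandler, so one reasonable way to use it is to give it an inner HttpClientHandler and hand the chain to HttpClient. In the sketch below, turning off AllowAutoRedirect on the inner handler is an assumption made so the 3xx response actually reaches RedirectMessageHandler; the ITraceWriter argument is whatever trace writer the host already has, and the namespaces are taken from the usings in the new file above.

```csharp
using System;
using System.Net.Http;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;

static class RedirectHandlerUsageSketch
{
    public static async Task<string> GetWithRedirectsAsync(Uri uri, ITraceWriter trace)
    {
        // Let RedirectMessageHandler observe the 3xx itself instead of the inner handler following it.
        var handler = new RedirectMessageHandler(trace)
        {
            InnerHandler = new HttpClientHandler { AllowAutoRedirect = false }
        };

        using var client = new HttpClient(handler, disposeHandler: true);
        return await client.GetStringAsync(uri);
    }
}
```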
@@ -1,6 +1,7 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Net.WebSockets;
|
||||
using System.Security;
|
||||
@@ -18,7 +19,7 @@ namespace GitHub.Runner.Common
|
||||
[ServiceLocator(Default = typeof(ResultServer))]
|
||||
public interface IResultsServer : IRunnerService, IAsyncDisposable
|
||||
{
|
||||
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);
|
||||
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk);
|
||||
|
||||
Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
|
||||
|
||||
@@ -34,6 +35,8 @@ namespace GitHub.Runner.Common
|
||||
|
||||
Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
|
||||
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
|
||||
|
||||
Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
public sealed class ResultServer : RunnerService, IResultsServer
|
||||
@@ -50,10 +53,10 @@ namespace GitHub.Runner.Common
|
||||
private String _liveConsoleFeedUrl;
|
||||
private string _token;
|
||||
|
||||
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
|
||||
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk)
|
||||
{
|
||||
var httpMessageHandler = HostContext.CreateHttpClientHandler();
|
||||
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
|
||||
this._resultsClient = CreateHttpClient(uri, token, useSdk);
|
||||
|
||||
_token = token;
|
||||
if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
|
||||
{
|
||||
@@ -62,6 +65,26 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public ResultsHttpClient CreateHttpClient(Uri uri, string token, bool useSdk)
|
||||
{
|
||||
// Using default 100 timeout
|
||||
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
|
||||
|
||||
// Create retry handler
|
||||
IEnumerable<DelegatingHandler> delegatingHandlers = new List<DelegatingHandler>();
|
||||
if (settings.MaxRetryRequest > 0)
|
||||
{
|
||||
delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) };
|
||||
}
|
||||
|
||||
// Setup RawHttpMessageHandler without credentials
|
||||
var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings);
|
||||
|
||||
var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
|
||||
|
||||
return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true, useSdk: useSdk);
|
||||
}
|
||||
|
||||
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
@@ -120,6 +143,18 @@ namespace GitHub.Runner.Common
|
||||
throw new InvalidOperationException("Results client is not initialized.");
|
||||
}
|
||||
|
||||
public Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (_resultsClient != null)
|
||||
{
|
||||
return _resultsClient.UploadResultsDiagnosticLogsAsync(planId, jobId, file,
|
||||
cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
throw new InvalidOperationException("Results client is not initialized.");
|
||||
}
|
||||
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
|
||||
|
||||
@@ -5,6 +5,7 @@ using System.Threading.Tasks;
|
||||
using GitHub.Actions.RunService.WebApi;
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using Sdk.RSWebApi.Contracts;
|
||||
@@ -17,7 +18,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
|
||||
|
||||
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
|
||||
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, string billingOwnerId, CancellationToken token);
|
||||
|
||||
Task CompleteJobAsync(
|
||||
Guid planId,
|
||||
@@ -27,6 +28,9 @@ namespace GitHub.Runner.Common
|
||||
IList<StepResult> stepResults,
|
||||
IList<Annotation> jobAnnotations,
|
||||
string environmentUrl,
|
||||
IList<Telemetry> telemetry,
|
||||
string billingOwnerId,
|
||||
string infrastructureFailureCategory,
|
||||
CancellationToken token);
|
||||
|
||||
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
|
||||
@@ -56,12 +60,15 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken cancellationToken)
|
||||
public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, string billingOwnerId, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return RetryRequest<AgentJobRequestMessage>(
|
||||
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken,
|
||||
shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException);
|
||||
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, VarUtil.OS, billingOwnerId, cancellationToken), cancellationToken,
|
||||
shouldRetry: ex =>
|
||||
ex is not TaskOrchestrationJobNotFoundException && // HTTP status 404
|
||||
ex is not TaskOrchestrationJobAlreadyAcquiredException && // HTTP status 409
|
||||
ex is not TaskOrchestrationJobUnprocessableException); // HTTP status 422
|
||||
}
|
||||
|
||||
public Task CompleteJobAsync(
|
||||
@@ -72,18 +79,26 @@ namespace GitHub.Runner.Common
|
||||
IList<StepResult> stepResults,
|
||||
IList<Annotation> jobAnnotations,
|
||||
string environmentUrl,
|
||||
IList<Telemetry> telemetry,
|
||||
string billingOwnerId,
|
||||
string infrastructureFailureCategory,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return RetryRequest(
|
||||
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, cancellationToken), cancellationToken);
|
||||
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, telemetry, billingOwnerId, infrastructureFailureCategory, cancellationToken), cancellationToken,
|
||||
shouldRetry: ex =>
|
||||
ex is not VssUnauthorizedException && // HTTP status 401
|
||||
ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404
|
||||
}
|
||||
|
||||
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return RetryRequest<RenewJobResponse>(
|
||||
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
|
||||
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken,
|
||||
shouldRetry: ex =>
|
||||
ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603</NoWarn>
<NoWarn>NU1701;NU1603;SYSLIB0050;SYSLIB0051</NoWarn>
<Version>$(Version)</Version>
</PropertyGroup>

@@ -15,11 +15,11 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.4.0" />
<PackageReference Include="System.Threading.Channels" Version="4.4.0" />
<PackageReference Include="Microsoft.Win32.Registry" Version="5.0.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageReference Include="System.Threading.Channels" Version="8.0.0" />
</ItemGroup>

<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">

@@ -15,12 +15,12 @@ namespace GitHub.Runner.Common
|
||||
[ServiceLocator(Default = typeof(RunnerDotcomServer))]
|
||||
public interface IRunnerDotcomServer : IRunnerService
|
||||
{
|
||||
Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
|
||||
Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName);
|
||||
|
||||
Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
|
||||
Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
|
||||
Task DeleteRunnerAsync(string githubUrl, string githubToken, ulong runnerId);
|
||||
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
|
||||
|
||||
string GetGitHubRequestId(HttpResponseHeaders headers);
|
||||
}
|
||||
|
||||
public enum RequestType
|
||||
@@ -42,101 +42,31 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
|
||||
|
||||
public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
|
||||
public async Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName)
|
||||
{
|
||||
var githubApiUrl = "";
|
||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
||||
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
|
||||
if (path.Length == 1)
|
||||
{
|
||||
// org runner
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
|
||||
}
|
||||
}
|
||||
else if (path.Length == 2)
|
||||
{
|
||||
// repo or enterprise runner.
|
||||
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
|
||||
}
|
||||
|
||||
var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runners?name={Uri.EscapeDataString(agentName)}";
|
||||
var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
|
||||
var agents = runnersList.ToTaskAgents();
|
||||
|
||||
if (string.IsNullOrEmpty(agentName))
|
||||
{
|
||||
return agents;
|
||||
}
|
||||
|
||||
return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
|
||||
return runnersList.ToTaskAgents();
|
||||
}
|
||||
|
||||
public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
|
||||
{
|
||||
var githubApiUrl = "";
|
||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
||||
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
|
||||
if (path.Length == 1)
|
||||
{
|
||||
// org runner
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups";
|
||||
}
|
||||
}
|
||||
else if (path.Length == 2)
|
||||
{
|
||||
// repo or enterprise runner.
|
||||
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
|
||||
}
|
||||
|
||||
var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runner-groups";
|
||||
var agentPools = await RetryRequest<RunnerGroupList>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
|
||||
|
||||
return agentPools?.ToAgentPoolList();
|
||||
}
|
||||
|
||||
        public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
        {
            return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, false);
        }

        public async Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
        {
            return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, true);
        }

        private async Task<DistributedTask.WebApi.Runner> AddOrReplaceRunner(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey, bool replace)
        {
            var gitHubUrlBuilder = new UriBuilder(githubUrl);
            var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
@@ -159,14 +89,26 @@ namespace GitHub.Runner.Common
                {"updates_disabled", agent.DisableUpdate},
                {"ephemeral", agent.Ephemeral},
                {"labels", agent.Labels},
                {"public_key", publicKey}
                {"public_key", publicKey},
            };

            if (replace)
            {
                bodyObject.Add("runner_id", agent.Id);
                bodyObject.Add("replace", replace);
            }

            var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");

            return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
        }

        public async Task DeleteRunnerAsync(string githubUrl, string githubToken, ulong runnerId)
        {
            var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runners/{runnerId}";
            await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Delete, 3, "Failed to delete agent");
        }

        private async Task<T> RetryRequest<T>(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null)
        {
            int retry = 0;
@@ -183,19 +125,28 @@ namespace GitHub.Runner.Common
                try
                {
                    HttpResponseMessage response = null;
                    if (requestType == RequestType.Get)
                    switch (requestType)
                    {
                        response = await httpClient.GetAsync(githubApiUrl);
                    }
                    else
                    {
                        response = await httpClient.PostAsync(githubApiUrl, body);
                        case RequestType.Get:
                            response = await httpClient.GetAsync(githubApiUrl);
                            break;
                        case RequestType.Post:
                            response = await httpClient.PostAsync(githubApiUrl, body);
                            break;
                        case RequestType.Patch:
                            response = await httpClient.PatchAsync(githubApiUrl, body);
                            break;
                        case RequestType.Delete:
                            response = await httpClient.DeleteAsync(githubApiUrl);
                            break;
                        default:
                            throw new ArgumentOutOfRangeException(nameof(requestType), requestType, null);
                    }

                    if (response != null)
                    {
                        responseStatus = response.StatusCode;
                        var githubRequestId = GetGitHubRequestId(response.Headers);
                        var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

                        if (response.IsSuccessStatusCode)
                        {
@@ -215,7 +166,7 @@ namespace GitHub.Runner.Common
                }
                catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
                {
                    Trace.Error($"{errorMessage} -- Atempt: {retry}");
                    Trace.Error($"{errorMessage} -- Attempt: {retry}");
                    Trace.Error(ex);
                }
            }
@@ -225,13 +176,60 @@ namespace GitHub.Runner.Common
            }
        }

        public string GetGitHubRequestId(HttpResponseHeaders headers)
        private string GetEntityUrl(string githubUrl)
        {
            if (headers.TryGetValues("x-github-request-id", out var headerValues))
            var githubApiUrl = "";
            var gitHubUrlBuilder = new UriBuilder(githubUrl);
            var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
            var isOrgRunner = path.Length == 1;
            var isRepoOrEnterpriseRunner = path.Length == 2;
            var isRepoRunner = isRepoOrEnterpriseRunner && !string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase);

            if (isOrgRunner)
            {
                return headerValues.FirstOrDefault();
                // org runner
                if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
                {
                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions";
                }
                else
                {
                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions";
                }
            }
            return string.Empty;
            else if (isRepoOrEnterpriseRunner)
            {
                // Repository Runner
                if (isRepoRunner)
                {
                    if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
                    {
                        githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/repos/{path[0]}/{path[1]}/actions";
                    }
                    else
                    {
                        githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/repos/{path[0]}/{path[1]}/actions";
                    }
                }
                else
                {
                    // Enterprise Runner
                    if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
                    {
                        githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions";
                    }
                    else
                    {
                        githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions";
                    }
                }
            }
            else
            {
                throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
            }

            return githubApiUrl;
        }
    }
}
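
For orientation, here is a minimal sketch (not part of the diff) of the endpoint shapes the methods above compose from GetEntityUrl; the org name, runner name, and runner id are made up for illustration:

    using System;

    class EntityUrlSketch
    {
        static void Main()
        {
            // What GetEntityUrl returns for an org URL like "https://github.com/my-org" on github.com.
            var entityUrl = "https://api.github.com/orgs/my-org/actions";

            Console.WriteLine($"{entityUrl}/runners?name={Uri.EscapeDataString("my runner")}"); // list runners filtered by name
            Console.WriteLine($"{entityUrl}/runner-groups");                                    // list runner groups
            Console.WriteLine($"{entityUrl}/runners/42");                                       // delete a runner by id
        }
    }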
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using System;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
@@ -38,7 +38,7 @@ namespace GitHub.Runner.Common
|
||||
Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken);
|
||||
Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken);
|
||||
Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken);
|
||||
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken);
|
||||
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken);
|
||||
|
||||
// job request
|
||||
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
|
||||
@@ -50,7 +50,10 @@ namespace GitHub.Runner.Common
|
||||
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);
|
||||
|
||||
// agent update
|
||||
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace);
|
||||
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace, CancellationToken cancellationToken = default);
|
||||
|
||||
// runner config refresh
|
||||
Task<string> RefreshRunnerConfigAsync(int agentId, string configType, string encodedRunnerConfig, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
public sealed class RunnerServer : RunnerService, IRunnerServer
|
||||
@@ -272,10 +275,10 @@ namespace GitHub.Runner.Common
|
||||
return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken)
|
||||
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.MessageQueue);
|
||||
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, cancellationToken: cancellationToken);
|
||||
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, os, architecture, disableUpdate, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------
|
||||
@@ -315,10 +318,17 @@ namespace GitHub.Runner.Common
|
||||
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace)
|
||||
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace, CancellationToken cancellationToken = default)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.Generic);
|
||||
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace);
|
||||
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
// runner config refresh
|
||||
public Task<string> RefreshRunnerConfigAsync(int agentId, string configType, string encodedRunnerConfig, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.Generic);
|
||||
return _genericTaskAgentClient.RefreshRunnerConfigAsync(agentId, configType, encodedRunnerConfig, cancellationToken: cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -70,7 +70,8 @@ namespace GitHub.Runner.Common
|
||||
|
||||
protected async Task RetryRequest(Func<Task> func,
|
||||
CancellationToken cancellationToken,
|
||||
int maxRetryAttemptsCount = 5
|
||||
int maxAttempts = 5,
|
||||
Func<Exception, bool> shouldRetry = null
|
||||
)
|
||||
{
|
||||
async Task<Unit> wrappedFunc()
|
||||
@@ -78,31 +79,31 @@ namespace GitHub.Runner.Common
|
||||
await func();
|
||||
return Unit.Value;
|
||||
}
|
||||
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount);
|
||||
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxAttempts, shouldRetry);
|
||||
}
|
||||
|
||||
protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
|
||||
CancellationToken cancellationToken,
|
||||
int maxRetryAttemptsCount = 5,
|
||||
int maxAttempts = 5,
|
||||
Func<Exception, bool> shouldRetry = null
|
||||
)
|
||||
{
|
||||
var retryCount = 0;
|
||||
var attempt = 0;
|
||||
while (true)
|
||||
{
|
||||
retryCount++;
|
||||
attempt++;
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
try
|
||||
{
|
||||
return await func();
|
||||
}
|
||||
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
|
||||
catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex)))
|
||||
catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex)))
|
||||
{
|
||||
Trace.Error("Catch exception during request");
|
||||
Trace.Error(ex);
|
||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
|
||||
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left.");
|
||||
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxAttempts - attempt} attempt left.");
|
||||
await Task.Delay(backOff, cancellationToken);
|
||||
}
|
||||
}
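
A hedged sketch (assumed caller code, not from the diff) of how the generalized helper above is meant to be used, retrying only transient failures via the shouldRetry predicate; the stand-in method below only mirrors the shape of the protected helper:

    using System;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;

    class RetryUsageSketch
    {
        static readonly HttpClient Http = new HttpClient();

        // Retry a GET up to 3 times, but only when the failure looks transient.
        static Task<string> GetWithRetryAsync(string url, CancellationToken token)
        {
            return RetryRequest(
                () => Http.GetStringAsync(url),
                token,
                maxAttempts: 3,
                shouldRetry: ex => ex is HttpRequestException);
        }

        // Local stand-in with the same shape as the protected helper in the diff above.
        static async Task<T> RetryRequest<T>(Func<Task<T>> func, CancellationToken cancellationToken, int maxAttempts = 5, Func<Exception, bool> shouldRetry = null)
        {
            for (var attempt = 1; ; attempt++)
            {
                cancellationToken.ThrowIfCancellationRequested();
                try
                {
                    return await func();
                }
                catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex)))
                {
                    // Back off before the next attempt (the real helper uses a random 5-15 second backoff).
                    await Task.Delay(TimeSpan.FromSeconds(5), cancellationToken);
                }
            }
        }
    }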
|
||||
|
||||
@@ -1,12 +1,35 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using GitHub.Runner.Sdk;

namespace GitHub.Runner.Common.Util
{
    public static class NodeUtil
    {
        private const string _defaultNodeVersion = "node16";
        public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16", "node20" });
        /// <summary>
        /// Represents details about an environment variable, including its value and source
        /// </summary>
        private class EnvironmentVariableInfo
        {
            /// <summary>
            /// Gets or sets whether the value evaluates to true
            /// </summary>
            public bool IsTrue { get; set; }

            /// <summary>
            /// Gets or sets whether the value came from the workflow environment
            /// </summary>
            public bool FromWorkflow { get; set; }

            /// <summary>
            /// Gets or sets whether the value came from the system environment
            /// </summary>
            public bool FromSystem { get; set; }
        }

        private const string _defaultNodeVersion = "node20";
        public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node20" });
        public static string GetInternalNodeVersion()
        {
            var forcedInternalNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
@@ -18,5 +41,122 @@ namespace GitHub.Runner.Common.Util
            }
            return _defaultNodeVersion;
        }
|
||||
/// <summary>
|
||||
/// Determines the appropriate Node version for Actions to use
|
||||
/// </summary>
|
||||
/// <param name="workflowEnvironment">Optional dictionary containing workflow-level environment variables</param>
|
||||
/// <param name="useNode24ByDefault">Feature flag indicating if Node 24 should be the default</param>
|
||||
/// <param name="requireNode24">Feature flag indicating if Node 24 is required</param>
|
||||
/// <returns>The Node version to use (node20 or node24) and warning message if both env vars are set</returns>
|
||||
public static (string nodeVersion, string warningMessage) DetermineActionsNodeVersion(
|
||||
IDictionary<string, string> workflowEnvironment = null,
|
||||
bool useNode24ByDefault = false,
|
||||
bool requireNode24 = false)
|
||||
{
|
||||
// Phase 3: Always use Node 24 regardless of environment variables
|
||||
if (requireNode24)
|
||||
{
|
||||
return (Constants.Runner.NodeMigration.Node24, null);
|
||||
}
|
||||
|
||||
// Get environment variable details with source information
|
||||
var forceNode24Details = GetEnvironmentVariableDetails(
|
||||
Constants.Runner.NodeMigration.ForceNode24Variable, workflowEnvironment);
|
||||
|
||||
var allowUnsecureNodeDetails = GetEnvironmentVariableDetails(
|
||||
Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable, workflowEnvironment);
|
||||
|
||||
bool forceNode24 = forceNode24Details.IsTrue;
|
||||
bool allowUnsecureNode = allowUnsecureNodeDetails.IsTrue;
|
||||
string warningMessage = null;
|
||||
|
||||
// Check if both flags are set from the same source
|
||||
bool bothFromWorkflow = forceNode24Details.IsTrue && allowUnsecureNodeDetails.IsTrue &&
|
||||
forceNode24Details.FromWorkflow && allowUnsecureNodeDetails.FromWorkflow;
|
||||
|
||||
bool bothFromSystem = forceNode24Details.IsTrue && allowUnsecureNodeDetails.IsTrue &&
|
||||
forceNode24Details.FromSystem && allowUnsecureNodeDetails.FromSystem;
|
||||
|
||||
// Handle the case when both are set in the same source
|
||||
if (bothFromWorkflow || bothFromSystem)
|
||||
{
|
||||
string source = bothFromWorkflow ? "workflow" : "system";
|
||||
string defaultVersion = useNode24ByDefault ? Constants.Runner.NodeMigration.Node24 : Constants.Runner.NodeMigration.Node20;
|
||||
warningMessage = $"Both {Constants.Runner.NodeMigration.ForceNode24Variable} and {Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable} environment variables are set to true in the {source} environment. This is likely a configuration error. Using the default Node version: {defaultVersion}.";
|
||||
return (defaultVersion, warningMessage);
|
||||
}
|
||||
|
||||
// Phase 2: Node 24 is the default
|
||||
if (useNode24ByDefault)
|
||||
{
|
||||
if (allowUnsecureNode)
|
||||
{
|
||||
return (Constants.Runner.NodeMigration.Node20, null);
|
||||
}
|
||||
|
||||
return (Constants.Runner.NodeMigration.Node24, null);
|
||||
}
|
||||
|
||||
// Phase 1: Node 20 is the default
|
||||
if (forceNode24)
|
||||
{
|
||||
return (Constants.Runner.NodeMigration.Node24, null);
|
||||
}
|
||||
|
||||
return (Constants.Runner.NodeMigration.Node20, null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Checks if Node24 is requested but running on ARM32 Linux, and determines if fallback is needed.
|
||||
/// </summary>
|
||||
/// <param name="preferredVersion">The preferred Node version</param>
|
||||
/// <returns>A tuple containing the adjusted node version and an optional warning message</returns>
|
||||
public static (string nodeVersion, string warningMessage) CheckNodeVersionForLinuxArm32(string preferredVersion)
|
||||
{
|
||||
if (string.Equals(preferredVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase) &&
|
||||
Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.Arm) &&
|
||||
Constants.Runner.Platform.Equals(Constants.OSPlatform.Linux))
|
||||
{
|
||||
return (Constants.Runner.NodeMigration.Node20, "Node 24 is not supported on Linux ARM32 platforms. Falling back to Node 20.");
|
||||
}
|
||||
|
||||
return (preferredVersion, null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets detailed information about an environment variable from both workflow and system environments
|
||||
/// </summary>
|
||||
/// <param name="variableName">The name of the environment variable</param>
|
||||
/// <param name="workflowEnvironment">Optional dictionary containing workflow-level environment variables</param>
|
||||
/// <returns>An EnvironmentVariableInfo object containing details about the variable from both sources</returns>
|
||||
private static EnvironmentVariableInfo GetEnvironmentVariableDetails(string variableName, IDictionary<string, string> workflowEnvironment)
|
||||
{
|
||||
var info = new EnvironmentVariableInfo();
|
||||
|
||||
// Check workflow environment
|
||||
bool foundInWorkflow = false;
|
||||
string workflowValue = null;
|
||||
|
||||
if (workflowEnvironment != null && workflowEnvironment.TryGetValue(variableName, out workflowValue))
|
||||
{
|
||||
foundInWorkflow = true;
|
||||
info.FromWorkflow = true;
|
||||
info.IsTrue = StringUtil.ConvertToBoolean(workflowValue); // Workflow value takes precedence for the boolean value
|
||||
}
|
||||
|
||||
// Also check system environment
|
||||
string systemValue = Environment.GetEnvironmentVariable(variableName);
|
||||
bool foundInSystem = !string.IsNullOrEmpty(systemValue);
|
||||
|
||||
info.FromSystem = foundInSystem;
|
||||
|
||||
// If not found in workflow, use system values
|
||||
if (!foundInWorkflow)
|
||||
{
|
||||
info.IsTrue = StringUtil.ConvertToBoolean(systemValue);
|
||||
}
|
||||
|
||||
return info;
|
||||
}
|
||||
}
|
||||
}
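
A hedged usage sketch of the two helpers above; only the method signatures come from the diff, while the feature-flag values and the workflow environment variable name are assumptions for illustration:

    using System;
    using System.Collections.Generic;
    using GitHub.Runner.Common.Util;

    class NodeVersionSketch
    {
        static void Main()
        {
            // Hypothetical workflow-level environment; the variable name is assumed, not confirmed by the diff.
            var workflowEnv = new Dictionary<string, string>
            {
                ["FORCE_JAVASCRIPT_ACTIONS_TO_NODE24"] = "true",
            };

            // Phase 1: Node 20 is the default unless the force flag opts into Node 24.
            var (preferred, configWarning) = NodeUtil.DetermineActionsNodeVersion(
                workflowEnvironment: workflowEnv,
                useNode24ByDefault: false,
                requireNode24: false);

            // Apply the platform fallback: Node 24 is not supported on Linux ARM32.
            var (finalVersion, platformWarning) = NodeUtil.CheckNodeVersionForLinuxArm32(preferred);

            Console.WriteLine(finalVersion);
            if (configWarning != null) Console.WriteLine(configWarning);
            if (platformWarning != null) Console.WriteLine(platformWarning);
        }
    }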
|
||||
|
||||
@@ -9,11 +9,12 @@ using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Services.OAuth;
|
||||
using GitHub.Services.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -22,33 +23,242 @@ namespace GitHub.Runner.Listener
|
||||
private RunnerSettings _settings;
|
||||
private ITerminal _term;
|
||||
private TimeSpan _getNextMessageRetryInterval;
|
||||
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
|
||||
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
|
||||
private CancellationTokenSource _getMessagesTokenSource;
|
||||
private VssCredentials _creds;
|
||||
private VssCredentials _credsV2;
|
||||
private TaskAgentSession _session;
|
||||
private IRunnerServer _runnerServer;
|
||||
private IBrokerServer _brokerServer;
|
||||
private ICredentialManager _credMgr;
|
||||
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
|
||||
private bool _accessTokenRevoked = false;
|
||||
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
|
||||
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
||||
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
||||
private bool _needRefreshCredsV2 = false;
|
||||
private bool _handlerInitialized = false;
|
||||
private bool _isMigratedSettings = false;
|
||||
private const int _maxMigratedSettingsRetries = 3;
|
||||
private int _migratedSettingsRetryCount = 0;
|
||||
|
||||
public BrokerMessageListener()
|
||||
{
|
||||
}
|
||||
|
||||
public BrokerMessageListener(RunnerSettings settings, bool isMigratedSettings = false)
|
||||
{
|
||||
_settings = settings;
|
||||
_isMigratedSettings = isMigratedSettings;
|
||||
}
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
|
||||
_term = HostContext.GetService<ITerminal>();
|
||||
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
_brokerServer = HostContext.GetService<IBrokerServer>();
|
||||
_credMgr = HostContext.GetService<ICredentialManager>();
|
||||
}
|
||||
|
||||
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
|
||||
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
|
||||
{
|
||||
await RefreshBrokerConnection();
|
||||
return await Task.FromResult(true);
|
||||
Trace.Entering();
|
||||
|
||||
// Load settings if not provided through constructor
|
||||
if (_settings == null)
|
||||
{
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
_settings = configManager.LoadSettings();
|
||||
Trace.Info("Settings loaded from config manager");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info("Using provided settings");
|
||||
if (_isMigratedSettings)
|
||||
{
|
||||
Trace.Info("Using migrated settings from .runner_migrated");
|
||||
}
|
||||
}
|
||||
|
||||
var serverUrlV2 = _settings.ServerUrlV2;
|
||||
var serverUrl = _settings.ServerUrl;
|
||||
Trace.Info(_settings);
|
||||
|
||||
if (string.IsNullOrEmpty(_settings.ServerUrlV2))
|
||||
{
|
||||
throw new InvalidOperationException("ServerUrlV2 is not set");
|
||||
}
|
||||
|
||||
// Create connection.
|
||||
Trace.Info("Loading Credentials");
|
||||
_creds = _credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||
|
||||
var agent = new TaskAgentReference
|
||||
{
|
||||
Id = _settings.AgentId,
|
||||
Name = _settings.AgentName,
|
||||
Version = BuildConstants.RunnerPackage.Version,
|
||||
OSDescription = RuntimeInformation.OSDescription,
|
||||
};
|
||||
var currentProcess = Process.GetCurrentProcess();
|
||||
string sessionName = $"{Environment.MachineName ?? "RUNNER"} (PID: {currentProcess.Id})";
|
||||
var taskAgentSession = new TaskAgentSession(sessionName, agent);
|
||||
|
||||
string errorMessage = string.Empty;
|
||||
bool encounteringError = false;
|
||||
|
||||
while (true)
|
||||
{
|
||||
token.ThrowIfCancellationRequested();
|
||||
Trace.Info($"Attempt to create session.");
|
||||
try
|
||||
{
|
||||
Trace.Info("Connecting to the Broker Server...");
|
||||
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||
await _brokerServer.ConnectAsync(new Uri(serverUrlV2), _credsV2);
|
||||
Trace.Info("VssConnection created");
|
||||
|
||||
if (!string.IsNullOrEmpty(serverUrl) &&
|
||||
!string.Equals(serverUrl, serverUrlV2, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Info("Connecting to the Runner server...");
|
||||
await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds);
|
||||
Trace.Info("VssConnection created");
|
||||
}
|
||||
|
||||
_term.WriteLine();
|
||||
_term.WriteSuccessMessage("Connected to GitHub");
|
||||
_term.WriteLine();
|
||||
|
||||
_session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);
|
||||
|
||||
Trace.Info($"Session created.");
|
||||
if (encounteringError)
|
||||
{
|
||||
_term.WriteLine($"{DateTime.UtcNow:u}: Runner reconnected.");
|
||||
_sessionCreationExceptionTracker.Clear();
|
||||
encounteringError = false;
|
||||
}
|
||||
|
||||
if (!_handlerInitialized)
|
||||
{
|
||||
// Register event handler for auth migration state change
|
||||
HostContext.AuthMigrationChanged += HandleAuthMigrationChanged;
|
||||
_handlerInitialized = true;
|
||||
}
|
||||
|
||||
return CreateSessionResult.Success;
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info("Session creation has been cancelled.");
|
||||
throw;
|
||||
}
|
||||
catch (TaskAgentAccessTokenExpiredException)
|
||||
{
|
||||
Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
|
||||
_accessTokenRevoked = true;
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Catch exception during create session.");
|
||||
Trace.Error(ex);
|
||||
|
||||
// If using migrated settings, limit the number of retries before returning failure
|
||||
if (_isMigratedSettings)
|
||||
{
|
||||
_migratedSettingsRetryCount++;
|
||||
Trace.Warning($"Migrated settings retry {_migratedSettingsRetryCount} of {_maxMigratedSettingsRetries}");
|
||||
|
||||
if (_migratedSettingsRetryCount >= _maxMigratedSettingsRetries)
|
||||
{
|
||||
Trace.Warning("Reached maximum retry attempts for migrated settings. Returning failure to try default settings.");
|
||||
return CreateSessionResult.Failure;
|
||||
}
|
||||
}
|
||||
|
||||
if (!HostContext.AllowAuthMigration &&
|
||||
ex is VssOAuthTokenRequestException vssOAuthEx &&
|
||||
_credsV2.Federated is VssOAuthCredential vssOAuthCred)
|
||||
{
|
||||
// "invalid_client" means the runner registration has been deleted from the server.
|
||||
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
|
||||
return CreateSessionResult.Failure;
|
||||
}
|
||||
|
||||
// Check whether we get 401 because the runner registration already removed by the service.
|
||||
// If the runner registration get deleted, we can't exchange oauth token.
|
||||
Trace.Error("Test oauth app registration.");
|
||||
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrlV2));
|
||||
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
|
||||
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
|
||||
return CreateSessionResult.Failure;
|
||||
}
|
||||
}
|
||||
|
||||
if (!HostContext.AllowAuthMigration &&
|
||||
!IsSessionCreationExceptionRetriable(ex))
|
||||
{
|
||||
_term.WriteError($"Failed to create session. {ex.Message}");
|
||||
if (ex is TaskAgentSessionConflictException)
|
||||
{
|
||||
return CreateSessionResult.SessionConflict;
|
||||
}
|
||||
return CreateSessionResult.Failure;
|
||||
}
|
||||
|
||||
if (HostContext.AllowAuthMigration)
|
||||
{
|
||||
Trace.Info("Disable migration mode for 60 minutes.");
|
||||
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Session creation failed with exception: {ex}");
|
||||
}
|
||||
|
||||
if (!encounteringError) //print the message only on the first error
|
||||
{
|
||||
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
|
||||
encounteringError = true;
|
||||
}
|
||||
|
||||
Trace.Info("Sleeping for {0} seconds before retrying.", _sessionCreationRetryInterval.TotalSeconds);
|
||||
await HostContext.Delay(_sessionCreationRetryInterval, token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async Task DeleteSessionAsync()
|
||||
{
|
||||
await Task.CompletedTask;
|
||||
if (_session != null && _session.SessionId != Guid.Empty)
|
||||
{
|
||||
if (_handlerInitialized)
|
||||
{
|
||||
HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged;
|
||||
}
|
||||
|
||||
if (!_accessTokenRevoked)
|
||||
{
|
||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
await _brokerServer.DeleteSessionAsync(ts.Token);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void OnJobStatus(object sender, JobStatusEventArgs e)
|
||||
{
|
||||
Trace.Info("Received job status event. JobState: {0}", e.Status);
|
||||
runnerStatus = e.Status;
|
||||
_runnerStatus = e.Status;
|
||||
try
|
||||
{
|
||||
_getMessagesTokenSource?.Cancel();
|
||||
@@ -73,7 +283,20 @@ namespace GitHub.Runner.Listener
|
||||
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
||||
try
|
||||
{
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version);
|
||||
if (_needRefreshCredsV2)
|
||||
{
|
||||
Trace.Info("Refreshing broker connection.");
|
||||
await RefreshBrokerConnectionAsync();
|
||||
_needRefreshCredsV2 = false;
|
||||
}
|
||||
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
|
||||
_runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
_settings.DisableUpdate,
|
||||
_getMessagesTokenSource.Token);
|
||||
|
||||
if (message == null)
|
||||
{
|
||||
@@ -97,7 +320,16 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
|
||||
throw;
|
||||
}
|
||||
catch (AccessDeniedException e) when (e.ErrorCode == 1)
|
||||
catch (HostedRunnerDeprovisionedException)
|
||||
{
|
||||
Trace.Info("Hosted runner has been deprovisioned.");
|
||||
throw;
|
||||
}
|
||||
catch (AccessDeniedException e) when (e.ErrorCode == 1 && !HostContext.AllowAuthMigration)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
catch (RunnerNotFoundException) when (!HostContext.AllowAuthMigration)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
@@ -106,9 +338,10 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Error("Catch exception during get next message.");
|
||||
Trace.Error(ex);
|
||||
|
||||
if (!IsGetNextMessageExceptionRetriable(ex))
|
||||
if (!HostContext.AllowAuthMigration &&
|
||||
!IsGetNextMessageExceptionRetriable(ex))
|
||||
{
|
||||
throw;
|
||||
throw new NonRetryableException("Get next message failed with non-retryable error.", ex);
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -137,8 +370,14 @@ namespace GitHub.Runner.Listener
|
||||
encounteringError = true;
|
||||
}
|
||||
|
||||
if (HostContext.AllowAuthMigration)
|
||||
{
|
||||
Trace.Info("Disable migration mode for 60 minutes.");
|
||||
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Get next message failed with exception: {ex}");
|
||||
}
|
||||
|
||||
// re-create VssConnection before next retry
|
||||
await RefreshBrokerConnection();
|
||||
await RefreshBrokerConnectionAsync();
|
||||
|
||||
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
|
||||
await HostContext.Delay(_getNextMessageRetryInterval, token);
|
||||
@@ -168,17 +407,38 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
public async Task RefreshListenerTokenAsync()
|
||||
{
|
||||
await RefreshBrokerConnectionAsync();
|
||||
}
|
||||
|
||||
public async Task DeleteMessageAsync(TaskAgentMessage message)
|
||||
{
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
|
||||
public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
|
||||
{
|
||||
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
|
||||
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
|
||||
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
|
||||
await _brokerServer.AcknowledgeRunnerRequestAsync(
|
||||
runnerRequestId,
|
||||
_session.SessionId,
|
||||
_runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
linkedCts.Token);
|
||||
}
|
||||
|
||||
private bool IsGetNextMessageExceptionRetriable(Exception ex)
|
||||
{
|
||||
if (ex is TaskAgentNotFoundException ||
|
||||
ex is TaskAgentPoolNotFoundException ||
|
||||
ex is TaskAgentSessionExpiredException ||
|
||||
ex is AccessDeniedException ||
|
||||
ex is RunnerNotFoundException ||
|
||||
ex is VssUnauthorizedException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
@@ -191,19 +451,90 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
private async Task RefreshBrokerConnection()
|
||||
private bool IsSessionCreationExceptionRetriable(Exception ex)
|
||||
{
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
_settings = configManager.LoadSettings();
|
||||
|
||||
if (_settings.ServerUrlV2 == null)
|
||||
if (ex is TaskAgentNotFoundException)
|
||||
{
|
||||
throw new InvalidOperationException("ServerUrlV2 is not set");
|
||||
Trace.Info("The runner no longer exists on the server. Stopping the runner.");
|
||||
_term.WriteError("The runner no longer exists on the server. Please reconfigure the runner.");
|
||||
return false;
|
||||
}
|
||||
else if (ex is TaskAgentSessionConflictException)
|
||||
{
|
||||
Trace.Info("The session for this runner already exists.");
|
||||
_term.WriteError("A session for this runner already exists.");
|
||||
if (_sessionCreationExceptionTracker.ContainsKey(nameof(TaskAgentSessionConflictException)))
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)]++;
|
||||
if (_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] * _sessionCreationRetryInterval.TotalSeconds >= _sessionConflictRetryLimit.TotalSeconds)
|
||||
{
|
||||
Trace.Info("The session conflict exception have reached retry limit.");
|
||||
_term.WriteError($"Stop retry on SessionConflictException after retried for {_sessionConflictRetryLimit.TotalSeconds} seconds.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] = 1;
|
||||
}
|
||||
|
||||
Trace.Info("The session conflict exception haven't reached retry limit.");
|
||||
return true;
|
||||
}
|
||||
else if (ex is VssOAuthTokenRequestException && ex.Message.Contains("Current server time is"))
|
||||
{
|
||||
Trace.Info("Local clock might be skewed.");
|
||||
_term.WriteError("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
|
||||
if (_sessionCreationExceptionTracker.ContainsKey(nameof(VssOAuthTokenRequestException)))
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)]++;
|
||||
if (_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] * _sessionCreationRetryInterval.TotalSeconds >= _clockSkewRetryLimit.TotalSeconds)
|
||||
{
|
||||
Trace.Info("The OAuth token request exception have reached retry limit.");
|
||||
_term.WriteError($"Stopped retrying OAuth token request exception after {_clockSkewRetryLimit.TotalSeconds} seconds.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] = 1;
|
||||
}
|
||||
|
||||
Trace.Info("The OAuth token request exception haven't reached retry limit.");
|
||||
return true;
|
||||
}
|
||||
else if (ex is TaskAgentPoolNotFoundException ||
|
||||
ex is AccessDeniedException ||
|
||||
ex is VssUnauthorizedException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
return false;
|
||||
}
|
||||
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
VssCredentials creds = credMgr.LoadCredentials();
|
||||
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), creds);
|
||||
else if (ex is InvalidOperationException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Retriable exception: {ex.Message}");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task RefreshBrokerConnectionAsync()
|
||||
{
|
||||
Trace.Info("Reload credentials.");
|
||||
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), _credsV2);
|
||||
Trace.Info("Connection to Broker Server recreated.");
|
||||
}
|
||||
|
||||
private void HandleAuthMigrationChanged(object sender, EventArgs e)
|
||||
{
|
||||
Trace.Info($"Auth migration changed. Current allow auth migration state: {HostContext.AllowAuthMigration}");
|
||||
_needRefreshCredsV2 = true;
|
||||
}
|
||||
}
|
||||
}
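
For context, a hedged sketch of how a caller could branch on the CreateSessionResult values that replace the old boolean return above; only the enum values come from the diff, and the listener interface shape is assumed:

    using System.Threading;
    using System.Threading.Tasks;

    static class SessionResultSketch
    {
        // "listener" stands in for the BrokerMessageListener above; the interface name is assumed.
        public static async Task<bool> TryStartAsync(IMessageListener listener, CancellationToken token)
        {
            var result = await listener.CreateSessionAsync(token);
            switch (result)
            {
                case CreateSessionResult.Success:
                    return true;   // proceed to the message loop
                case CreateSessionResult.SessionConflict:
                    return false;  // another runner process already holds the session
                case CreateSessionResult.Failure:
                default:
                    return false;  // non-retryable failure (e.g. deleted registration, exhausted migrated-settings retries)
            }
        }
    }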
|
||||
|
||||
@@ -39,6 +39,7 @@ namespace GitHub.Runner.Listener.Check
|
||||
string githubApiUrl = null;
|
||||
string actionsTokenServiceUrl = null;
|
||||
string actionsPipelinesServiceUrl = null;
|
||||
string resultsReceiverServiceUrl = null;
|
||||
var urlBuilder = new UriBuilder(url);
|
||||
if (UrlUtil.IsHostedServer(urlBuilder))
|
||||
{
|
||||
@@ -47,6 +48,7 @@ namespace GitHub.Runner.Listener.Check
|
||||
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
|
||||
actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
|
||||
resultsReceiverServiceUrl = "https://results-receiver.actions.githubusercontent.com/health";
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -56,13 +58,31 @@ namespace GitHub.Runner.Listener.Check
|
||||
actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
urlBuilder.Path = "_services/pipelines/_apis/health";
|
||||
actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
resultsReceiverServiceUrl = string.Empty; // we don't have Results service in GHES yet.
|
||||
}
|
||||
|
||||
var codeLoadUrlBuilder = new UriBuilder(url);
|
||||
codeLoadUrlBuilder.Host = $"codeload.{codeLoadUrlBuilder.Host}";
|
||||
codeLoadUrlBuilder.Path = "_ping";
|
||||
|
||||
// check github api
|
||||
checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
// check github codeload
|
||||
checkTasks.Add(CheckUtil.CheckDns(codeLoadUrlBuilder.Uri.AbsoluteUri));
|
||||
checkTasks.Add(CheckUtil.CheckPing(codeLoadUrlBuilder.Uri.AbsoluteUri));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(codeLoadUrlBuilder.Uri.AbsoluteUri, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
// check results-receiver service
|
||||
if (!string.IsNullOrEmpty(resultsReceiverServiceUrl))
|
||||
{
|
||||
checkTasks.Add(CheckUtil.CheckDns(resultsReceiverServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(resultsReceiverServiceUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(resultsReceiverServiceUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
}
|
||||
|
||||
// check actions token service
|
||||
checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));
|
||||
|
||||
@@ -25,6 +25,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Task UnconfigureAsync(CommandSettings command);
|
||||
void DeleteLocalRunnerConfig();
|
||||
RunnerSettings LoadSettings();
|
||||
RunnerSettings LoadMigratedSettings();
|
||||
}
|
||||
|
||||
public sealed class ConfigurationManager : RunnerService, IConfigurationManager
|
||||
@@ -66,6 +67,22 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
return settings;
|
||||
}
|
||||
|
||||
public RunnerSettings LoadMigratedSettings()
|
||||
{
|
||||
Trace.Info(nameof(LoadMigratedSettings));
|
||||
|
||||
// Check if migrated settings file exists
|
||||
if (!_store.IsMigratedConfigured())
|
||||
{
|
||||
throw new NonRetryableException("No migrated configuration found.");
|
||||
}
|
||||
|
||||
RunnerSettings settings = _store.GetMigratedSettings();
|
||||
Trace.Info("Migrated Settings Loaded");
|
||||
|
||||
return settings;
|
||||
}
|
||||
|
||||
public async Task ConfigureAsync(CommandSettings command)
|
||||
{
|
||||
_term.WriteLine();
|
||||
@@ -127,7 +144,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
runnerSettings.ServerUrl = inputUrl;
|
||||
// Get the credentials
|
||||
credProvider = GetCredentialProvider(command, runnerSettings.ServerUrl);
|
||||
creds = credProvider.GetVssCredentials(HostContext);
|
||||
creds = credProvider.GetVssCredentials(HostContext, allowAuthUrlV2: false);
|
||||
Trace.Info("legacy vss cred retrieved");
|
||||
}
|
||||
else
|
||||
@@ -136,8 +153,8 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
|
||||
runnerSettings.ServerUrl = authResult.TenantUrl;
|
||||
runnerSettings.UseV2Flow = authResult.UseV2Flow;
|
||||
Trace.Info($"Using V2 flow: {runnerSettings.UseV2Flow}");
|
||||
runnerSettings.UseRunnerAdminFlow = authResult.UseRunnerAdminFlow;
|
||||
Trace.Info($"Using runner-admin flow: {runnerSettings.UseRunnerAdminFlow}");
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
}
|
||||
@@ -194,7 +211,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
string poolName = null;
|
||||
TaskAgentPool agentPool = null;
|
||||
List<TaskAgentPool> agentPools;
|
||||
if (runnerSettings.UseV2Flow)
|
||||
if (runnerSettings.UseRunnerAdminFlow)
|
||||
{
|
||||
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
|
||||
}
|
||||
@@ -242,13 +259,13 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
var userLabels = command.GetLabels();
|
||||
_term.WriteLine();
|
||||
List<TaskAgent> agents;
|
||||
if (runnerSettings.UseV2Flow)
|
||||
if (runnerSettings.UseRunnerAdminFlow)
|
||||
{
|
||||
agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
|
||||
agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
|
||||
}
|
||||
else
|
||||
{
|
||||
agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
|
||||
agents = await _runnerServer.GetAgentsAsync(runnerSettings.AgentName);
|
||||
}
|
||||
|
||||
Trace.Verbose("Returns {0} agents", agents.Count);
|
||||
@@ -263,7 +280,31 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
try
|
||||
{
|
||||
agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
|
||||
if (runnerSettings.UseRunnerAdminFlow)
|
||||
{
|
||||
var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
|
||||
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
|
||||
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker
|
||||
|
||||
agent.Id = runner.Id;
|
||||
agent.Authorization = new TaskAgentAuthorization()
|
||||
{
|
||||
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
|
||||
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
|
||||
};
|
||||
|
||||
if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
|
||||
{
|
||||
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
|
||||
agent.Properties["EnableAuthMigrationByDefault"] = true;
|
||||
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
|
||||
}
|
||||
|
||||
if (command.DisableUpdate &&
|
||||
command.DisableUpdate != agent.DisableUpdate)
|
||||
{
|
||||
@@ -297,10 +338,11 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
try
|
||||
{
|
||||
if (runnerSettings.UseV2Flow)
|
||||
if (runnerSettings.UseRunnerAdminFlow)
|
||||
{
|
||||
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
|
||||
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
|
||||
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker
|
||||
|
||||
agent.Id = runner.Id;
|
||||
agent.Authorization = new TaskAgentAuthorization()
|
||||
@@ -308,6 +350,13 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
|
||||
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
|
||||
};
|
||||
|
||||
if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
|
||||
{
|
||||
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
|
||||
agent.Properties["EnableAuthMigrationByDefault"] = true;
|
||||
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -350,25 +399,46 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
{ "clientId", agent.Authorization.ClientId.ToString("D") },
|
||||
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
|
||||
{ "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", false).ToString() }
|
||||
{ "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", true).ToString() }
|
||||
},
|
||||
};
|
||||
|
||||
if (agent.Properties.GetValue("EnableAuthMigrationByDefault", false) &&
|
||||
agent.Properties.TryGetValue<string>("AuthorizationUrlV2", out var authUrlV2) &&
|
||||
!string.IsNullOrEmpty(authUrlV2))
|
||||
{
|
||||
credentialData.Data["enableAuthMigrationByDefault"] = "true";
|
||||
credentialData.Data["authorizationUrlV2"] = authUrlV2;
|
||||
}
|
||||
|
||||
// Save the negotiated OAuth credential data
|
||||
_store.SaveCredential(credentialData);
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
throw new NotSupportedException("Message queue listen OAuth token.");
|
||||
}
|
||||
|
||||
// allow the server to override the serverUrlV2 and useV2Flow
|
||||
if (agent.Properties.TryGetValue("ServerUrlV2", out string serverUrlV2) &&
|
||||
!string.IsNullOrEmpty(serverUrlV2))
|
||||
{
|
||||
Trace.Info($"Service enforced serverUrlV2: {serverUrlV2}");
|
||||
runnerSettings.ServerUrlV2 = serverUrlV2;
|
||||
}
|
||||
|
||||
if (agent.Properties.TryGetValue("UseV2Flow", out bool useV2Flow) && useV2Flow)
|
||||
{
|
||||
Trace.Info($"Service enforced useV2Flow: {useV2Flow}");
|
||||
runnerSettings.UseV2Flow = useV2Flow;
|
||||
}
|
||||
|
||||
// Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.
|
||||
|
||||
if (!runnerSettings.UseV2Flow)
|
||||
if (!runnerSettings.UseV2Flow && !runnerSettings.UseRunnerAdminFlow)
|
||||
{
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
VssCredentials credential = credMgr.LoadCredentials();
|
||||
VssCredentials credential = credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||
try
|
||||
{
|
||||
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
|
||||
@@ -482,41 +552,50 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
if (isConfigured && hasCredentials)
|
||||
{
|
||||
RunnerSettings settings = _store.GetSettings();
|
||||
var credentialManager = HostContext.GetService<ICredentialManager>();
|
||||
|
||||
// Get the credentials
|
||||
VssCredentials creds = null;
|
||||
if (string.IsNullOrEmpty(settings.GitHubUrl))
|
||||
{
|
||||
var credProvider = GetCredentialProvider(command, settings.ServerUrl);
|
||||
creds = credProvider.GetVssCredentials(HostContext);
|
||||
Trace.Info("legacy vss cred retrieved");
|
||||
}
|
||||
else
|
||||
if (settings.UseRunnerAdminFlow)
|
||||
{
|
||||
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
}
|
||||
|
||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
||||
|
||||
var agents = await _runnerServer.GetAgentsAsync(settings.AgentName);
|
||||
Trace.Verbose("Returns {0} agents", agents.Count);
|
||||
TaskAgent agent = agents.FirstOrDefault();
|
||||
if (agent == null)
|
||||
{
|
||||
_term.WriteLine("Does not exist. Skipping " + currentAction);
|
||||
await _dotcomServer.DeleteRunnerAsync(settings.GitHubUrl, deletionToken, settings.AgentId);
|
||||
}
|
||||
else
|
||||
{
|
||||
await _runnerServer.DeleteAgentAsync(settings.AgentId);
|
||||
var credentialManager = HostContext.GetService<ICredentialManager>();
|
||||
|
||||
_term.WriteLine();
|
||||
_term.WriteSuccessMessage("Runner removed successfully");
|
||||
// Get the credentials
|
||||
VssCredentials creds = null;
|
||||
if (string.IsNullOrEmpty(settings.GitHubUrl))
|
||||
{
|
||||
var credProvider = GetCredentialProvider(command, settings.ServerUrl);
|
||||
creds = credProvider.GetVssCredentials(HostContext, allowAuthUrlV2: false);
|
||||
Trace.Info("legacy vss cred retrieved");
|
||||
}
|
||||
else
|
||||
{
|
||||
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
}
|
||||
|
||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
||||
|
||||
var agents = await _runnerServer.GetAgentsAsync(settings.AgentName);
|
||||
Trace.Verbose("Returns {0} agents", agents.Count);
|
||||
TaskAgent agent = agents.FirstOrDefault();
|
||||
if (agent == null)
|
||||
{
|
||||
_term.WriteLine("Does not exist. Skipping " + currentAction);
|
||||
}
|
||||
else
|
||||
{
|
||||
await _runnerServer.DeleteAgentAsync(settings.AgentId);
|
||||
}
|
||||
}
|
||||
|
||||
_term.WriteLine();
|
||||
_term.WriteSuccessMessage("Runner removed successfully");
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -709,7 +788,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
|
||||
responseStatus = response.StatusCode;
|
||||
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
|
||||
var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
@@ -728,7 +807,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
|
||||
Trace.Error($"Failed to get JIT runner token -- Attempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
@@ -772,7 +851,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
|
||||
responseStatus = response.StatusCode;
|
||||
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
|
||||
var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
@@ -791,7 +870,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
|
||||
Trace.Error($"Failed to get tenant credentials -- Attempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
public interface ICredentialManager : IRunnerService
|
||||
{
|
||||
ICredentialProvider GetCredentialProvider(string credType);
|
||||
VssCredentials LoadCredentials();
|
||||
VssCredentials LoadCredentials(bool allowAuthUrlV2);
|
||||
}
|
||||
|
||||
public class CredentialManager : RunnerService, ICredentialManager
|
||||
@@ -40,32 +40,27 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
return creds;
|
||||
}
|
||||
|
||||
public VssCredentials LoadCredentials()
|
||||
public VssCredentials LoadCredentials(bool allowAuthUrlV2)
|
||||
{
|
||||
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
|
||||
|
||||
if (!store.HasCredentials())
|
||||
{
|
||||
throw new InvalidOperationException("Credentials not stored. Must reconfigure.");
|
||||
throw new InvalidOperationException("Credentials not stored. Must reconfigure.");
|
||||
}
|
||||
|
||||
CredentialData credData = store.GetCredentials();
|
||||
var migratedCred = store.GetMigratedCredentials();
|
||||
if (migratedCred != null)
|
||||
if (migratedCred != null &&
|
||||
migratedCred.Scheme == Constants.Configuration.OAuth)
|
||||
{
|
||||
credData = migratedCred;
|
||||
|
||||
// Re-write .credentials with Token URL
|
||||
store.SaveCredential(credData);
|
||||
|
||||
// Delete .credentials_migrated
|
||||
store.DeleteMigratedCredential();
|
||||
}
|
||||
|
||||
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
|
||||
credProv.CredentialData = credData;
|
||||
|
||||
VssCredentials creds = credProv.GetVssCredentials(HostContext);
|
||||
VssCredentials creds = credProv.GetVssCredentials(HostContext, allowAuthUrlV2);
|
||||
|
||||
return creds;
|
||||
}
|
||||
@@ -94,7 +89,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
public string Token { get; set; }
|
||||
|
||||
[DataMember(Name = "use_v2_flow")]
|
||||
public bool UseV2Flow { get; set; }
|
||||
public bool UseRunnerAdminFlow { get; set; }
|
||||
|
||||
public VssCredentials ToVssCredentials()
|
||||
{
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using System;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
|
||||
namespace GitHub.Runner.Listener.Configuration
|
||||
@@ -10,7 +10,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
Boolean RequireInteractive { get; }
|
||||
CredentialData CredentialData { get; set; }
|
||||
VssCredentials GetVssCredentials(IHostContext context);
|
||||
VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2);
|
||||
void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl);
|
||||
}
|
||||
|
||||
@@ -25,7 +25,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
public virtual Boolean RequireInteractive => false;
|
||||
public CredentialData CredentialData { get; set; }
|
||||
|
||||
public abstract VssCredentials GetVssCredentials(IHostContext context);
|
||||
public abstract VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2);
|
||||
public abstract void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl);
|
||||
}
|
||||
|
||||
@@ -33,7 +33,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
public OAuthAccessTokenCredential() : base(Constants.Configuration.OAuthAccessToken) { }
|
||||
|
||||
public override VssCredentials GetVssCredentials(IHostContext context)
|
||||
public override VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2)
|
||||
{
|
||||
ArgUtil.NotNull(context, nameof(context));
|
||||
Tracing trace = context.GetTrace(nameof(OAuthAccessTokenCredential));
|
||||
|
||||
@@ -514,9 +514,25 @@ namespace GitHub.Runner.Listener.Configuration
failureActions.Add(new FailureAction(RecoverAction.Restart, 60000));

// Lock the Service Database
svcLock = LockServiceDatabase(scmHndl);
if (svcLock.ToInt64() <= 0)
int svcLockRetries = 10;
int svcLockRetryTimeout = 5000;
while (true)
{
svcLock = LockServiceDatabase(scmHndl);
if (svcLock.ToInt64() > 0)
{
break;
}

_term.WriteLine("Retrying Lock Service Database...");

svcLockRetries--;
if (svcLockRetries > 0)
{
Thread.Sleep(svcLockRetryTimeout);
continue;
}

throw new Exception("Failed to Lock Service Database for Write");
}
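The hunk above turns the single LockServiceDatabase attempt into a bounded retry: up to 10 tries, 5 seconds apart, then a hard failure. A rough sketch of the same pattern factored into a helper; the class name, method name and signature below are hypothetical and not part of the runner:

using System;
using System.Threading;

static class ServiceDatabaseLockHelper
{
    // Hypothetical helper mirroring the retry-with-fixed-delay pattern in the hunk above.
    public static IntPtr AcquireWithRetry(Func<IntPtr> tryAcquire, int retries = 10, int delayMs = 5000)
    {
        while (true)
        {
            IntPtr handle = tryAcquire();
            if (handle.ToInt64() > 0)
            {
                return handle; // lock acquired
            }

            if (--retries <= 0)
            {
                throw new Exception("Failed to Lock Service Database for Write");
            }

            Thread.Sleep(delayMs); // fixed delay between attempts
        }
    }
}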
@@ -22,10 +22,18 @@ namespace GitHub.Runner.Listener.Configuration
// Nothing to verify here
}

public override VssCredentials GetVssCredentials(IHostContext context)
public override VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2)
{
var clientId = this.CredentialData.Data.GetValueOrDefault("clientId", null);
var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);
var authorizationUrlV2 = this.CredentialData.Data.GetValueOrDefault("authorizationUrlV2", null);

if (allowAuthUrlV2 &&
!string.IsNullOrEmpty(authorizationUrlV2) &&
context.AllowAuthMigration)
{
authorizationUrl = authorizationUrlV2;
}

// For back compat with .credential file that doesn't has 'oauthEndpointUrl' section
var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);

@@ -1,4 +1,5 @@
#if OS_WINDOWS
#pragma warning disable CA1416
using System.IO;
using System.Security.Cryptography;
using System.Text;
@@ -84,4 +85,5 @@ namespace GitHub.Runner.Listener.Configuration
}
}
}
#pragma warning restore CA1416
#endif
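Restating the selection rule from the OAuth hunk above in isolation: the v2 authorization URL is used only when the caller passes allowAuthUrlV2, the stored credential data actually contains authorizationUrlV2, and the host currently allows auth migration; otherwise the original URL is used. A stand-alone sketch of that rule, using a plain dictionary and a made-up method name rather than runner types:

using System.Collections.Generic;

static class AuthUrlSelectionSketch
{
    // Hypothetical restatement of the URL selection in the hunk above.
    public static string ResolveAuthorizationUrl(IReadOnlyDictionary<string, string> data, bool allowAuthUrlV2, bool allowAuthMigration)
    {
        data.TryGetValue("authorizationUrl", out string authorizationUrl);
        data.TryGetValue("authorizationUrlV2", out string authorizationUrlV2);

        if (allowAuthUrlV2 && !string.IsNullOrEmpty(authorizationUrlV2) && allowAuthMigration)
        {
            return authorizationUrlV2;
        }

        return authorizationUrl;
    }
}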
src/Runner.Listener/ErrorThrottler.cs (new file, 44 lines)
@@ -0,0 +1,44 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Services.Common;

namespace GitHub.Runner.Listener
{
    [ServiceLocator(Default = typeof(ErrorThrottler))]
    public interface IErrorThrottler : IRunnerService
    {
        void Reset();
        Task IncrementAndWaitAsync(CancellationToken token);
    }

    public sealed class ErrorThrottler : RunnerService, IErrorThrottler
    {
        internal static readonly TimeSpan MinBackoff = TimeSpan.FromSeconds(1);
        internal static readonly TimeSpan MaxBackoff = TimeSpan.FromMinutes(1);
        internal static readonly TimeSpan BackoffCoefficient = TimeSpan.FromSeconds(1);
        private int _count = 0;

        public void Reset()
        {
            _count = 0;
        }

        public async Task IncrementAndWaitAsync(CancellationToken token)
        {
            if (++_count <= 1)
            {
                return;
            }

            TimeSpan backoff = BackoffTimerHelper.GetExponentialBackoff(
                attempt: _count - 2, // 0-based attempt
                minBackoff: MinBackoff,
                maxBackoff: MaxBackoff,
                deltaBackoff: BackoffCoefficient);
            Trace.Warning($"Back off {backoff.TotalSeconds} seconds before next attempt. Current consecutive error count: {_count}");
            await HostContext.Delay(backoff, token);
        }
    }
}
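A note on intended use, inferred from the interface above rather than stated in this diff: a consumer resets the throttler after a successful iteration and awaits it after a failure, so consecutive errors back off from roughly one second up to the one-minute cap. A minimal sketch of such a loop; the work delegate is a hypothetical placeholder, not a runner API:

// Illustrative polling loop built only on the IErrorThrottler surface declared above.
public static async Task RunWithThrottlingAsync(
    IErrorThrottler throttler,
    Func<CancellationToken, Task> fetchAndProcessOnceAsync, // hypothetical "one unit of work"
    CancellationToken token)
{
    while (!token.IsCancellationRequested)
    {
        try
        {
            await fetchAndProcessOnceAsync(token);
            throttler.Reset(); // a success clears the consecutive-error count
        }
        catch (OperationCanceledException) when (token.IsCancellationRequested)
        {
            return; // shutting down
        }
        catch (Exception)
        {
            // first failure returns immediately; later ones wait ~1s, ~2s, ... capped at 1 minute
            await throttler.IncrementAndWaitAsync(token);
        }
    }
}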
@@ -35,7 +35,7 @@ namespace GitHub.Runner.Listener
// This implementation of IJobDispatcher is not thread safe.
// It is based on the fact that the current design of the runner is a dequeue
// and processes one message from the message queue at a time.
// In addition, it only executes one job every time,
// In addition, it only executes one job every time,
// and the server will not send another job while this one is still running.
public sealed class JobDispatcher : RunnerService, IJobDispatcher
{
@@ -98,7 +98,7 @@ namespace GitHub.Runner.Listener
Guid dispatchedJobId = _jobDispatchedQueue.Dequeue();
if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
{
Trace.Verbose($"Retrive previous WorkerDispather for job {currentDispatch.JobId}.");
Trace.Verbose($"Retrive previous WorkerDispatcher for job {currentDispatch.JobId}.");
}
}

@@ -110,7 +110,12 @@ namespace GitHub.Runner.Listener
{
var jwt = JsonWebToken.Create(accessToken);
var claims = jwt.ExtractClaims();
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orch_id", StringComparison.OrdinalIgnoreCase))?.Value;
if (string.IsNullOrEmpty(orchestrationId))
{
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
}

if (!string.IsNullOrEmpty(orchestrationId))
{
Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims");
@@ -162,12 +167,12 @@ namespace GitHub.Runner.Listener
dispatchedJobId = _jobDispatchedQueue.Dequeue();
if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
{
Trace.Verbose($"Retrive previous WorkerDispather for job {currentDispatch.JobId}.");
Trace.Verbose($"Retrive previous WorkerDispatcher for job {currentDispatch.JobId}.");
}
}
else
{
Trace.Verbose($"There is no running WorkerDispather needs to await.");
Trace.Verbose($"There is no running WorkerDispatcher needs to await.");
}

if (currentDispatch != null)
@@ -176,7 +181,7 @@ namespace GitHub.Runner.Listener
{
try
{
Trace.Info($"Waiting WorkerDispather for job {currentDispatch.JobId} run to finish.");
Trace.Info($"Waiting WorkerDispatcher for job {currentDispatch.JobId} run to finish.");
await currentDispatch.WorkerDispatch;
Trace.Info($"Job request {currentDispatch.JobId} processed succeed.");
}
@@ -190,7 +195,7 @@ namespace GitHub.Runner.Listener
WorkerDispatcher workerDispatcher;
if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
{
Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
workerDispatcher.Dispose();
}
}
@@ -209,7 +214,7 @@ namespace GitHub.Runner.Listener
{
try
{
Trace.Info($"Ensure WorkerDispather for job {currentDispatch.JobId} run to finish, cancel any running job.");
Trace.Info($"Ensure WorkerDispatcher for job {currentDispatch.JobId} run to finish, cancel any running job.");
await EnsureDispatchFinished(currentDispatch, cancelRunningJob: true);
}
catch (Exception ex)
@@ -222,7 +227,7 @@ namespace GitHub.Runner.Listener
WorkerDispatcher workerDispatcher;
if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
{
Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
workerDispatcher.Dispose();
}
}
@@ -327,7 +332,7 @@ namespace GitHub.Runner.Listener
WorkerDispatcher workerDispatcher;
if (_jobInfos.TryRemove(jobDispatch.JobId, out workerDispatcher))
{
Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {jobDispatch.JobId}.");
Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {jobDispatch.JobId}.");
workerDispatcher.Dispose();
}
}
@@ -545,14 +550,36 @@ namespace GitHub.Runner.Listener
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");

var jobServer = await InitializeJobServerAsync(systemConnection);
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);

// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
try
{
Trace.Info($"Finish job with result 'Failed' due to IOException.");
await ForceFailJob(jobServer, message, detailInfo);
var jobServer = await InitializeJobServerAsync(systemConnection);
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
switch (jobServer)
{
case IJobServer js:
{
await LogWorkerProcessUnhandledException(js, message, unhandledExceptionIssue);
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
{
Trace.Info($"Finish job with result 'Failed' due to IOException.");
await ForceFailJob(js, message);
}

break;
}
case IRunServer rs:
await ForceFailJob(rs, message, unhandledExceptionIssue);
break;
default:
throw new NotSupportedException($"JobServer type '{jobServer.GetType().Name}' is not supported.");
}
}
catch (Exception ex)
{
Trace.Error($"Catch exception during log worker process unhandled exception.");
Trace.Error(ex);
}
}

@@ -629,8 +656,22 @@ namespace GitHub.Runner.Listener
Trace.Info("worker process has been killed.");
}
}
catch (Exception ex)
{
// message send failed, this might indicate worker process is already exited or stuck.
Trace.Info($"Job cancel message sending for job {message.JobId} failed, kill running worker. {ex}");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
}

// wait worker to exit
// wait worker to exit
// if worker doesn't exit within timeout, then kill worker.
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
@@ -1117,77 +1158,65 @@ namespace GitHub.Runner.Listener
}

// log an error issue to job level timeline record
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, Issue issue)
{
if (server is IJobServer jobServer)
try
{
try
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));

TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));

var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
jobRecord.ErrorCount++;
jobRecord.Issues.Add(issue);

await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
}
Trace.Info("Mark the job as failed since the worker crashed");
jobRecord.Result = TaskResult.Failed;
// mark the job as completed so service will pickup the result
jobRecord.State = TimelineRecordState.Completed;

await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
else
catch (Exception ex)
{
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", detailInfo);
return;
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
}
}

// raise job completed event to fail the job.
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
{
if (server is IJobServer jobServer)
try
{
try
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
else if (server is IRunServer runServer)
catch (Exception ex)
{
try
{
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
var jobAnnotations = new List<Annotation>();
if (unhandledAnnotation.HasValue)
{
jobAnnotations.Add(unhandledAnnotation.Value);
}
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
}

await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
}
else
private async Task ForceFailJob(IRunServer runServer, Pipelines.AgentJobRequestMessage message, Issue issue)
{
try
{
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
var annotation = issue.ToAnnotation();
var jobAnnotations = new List<Annotation>();
if (annotation.HasValue)
{
jobAnnotations.Add(annotation.Value);
}

await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, telemetry: null, billingOwnerId: message.BillingOwnerId, infrastructureFailureCategory: null, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
}
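The rewrite above replaces runtime is-checks inside one method with a dedicated overload per server type, and the call site (the earlier @@ -545 hunk) selects the overload with a pattern-matching switch. A compact, generic sketch of that shape, using placeholder interfaces purely to illustrate the design choice; none of these names are runner types:

using System;
using System.Threading.Tasks;

static class OverloadDispatchSketch
{
    // Placeholder types; the real IJobServer/IRunServer carry the actual service APIs.
    public interface IServerEndpoint { }
    public interface ITimelineServer : IServerEndpoint { }
    public interface IResultsServer : IServerEndpoint { }

    public static Task FailJobAsync(IServerEndpoint server) => server switch
    {
        ITimelineServer timeline => FailViaTimelineAsync(timeline),   // analogous to the IJobServer overload
        IResultsServer results => FailViaCompleteJobAsync(results),   // analogous to the IRunServer overload
        _ => throw new NotSupportedException($"Server type '{server.GetType().Name}' is not supported."),
    };

    static Task FailViaTimelineAsync(ITimelineServer server) => Task.CompletedTask;   // stub
    static Task FailViaCompleteJobAsync(IResultsServer server) => Task.CompletedTask; // stub
}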
@@ -9,20 +9,32 @@ using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;

namespace GitHub.Runner.Listener
{
public enum CreateSessionResult
{
Success,
Failure,
SessionConflict
}

[ServiceLocator(Default = typeof(MessageListener))]
public interface IMessageListener : IRunnerService
{
Task<Boolean> CreateSessionAsync(CancellationToken token);
Task<CreateSessionResult> CreateSessionAsync(CancellationToken token);
Task DeleteSessionAsync();
Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token);
Task DeleteMessageAsync(TaskAgentMessage message);
Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken);

Task RefreshListenerTokenAsync();
void OnJobStatus(object sender, JobStatusEventArgs e);
}
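For orientation, a sketch of how a caller might branch on the three-valued CreateSessionResult instead of the old boolean. The delay and the RetryableError return-code name below are assumptions for illustration; the actual handling lives in the runner's listener entry point, which is not part of this diff:

// Hypothetical caller-side handling of CreateSessionResult.
var result = await listener.CreateSessionAsync(token);
switch (result)
{
    case CreateSessionResult.Success:
        break; // proceed to the message loop
    case CreateSessionResult.SessionConflict:
        // another listener still owns the session; back off and let the caller retry
        await HostContext.Delay(TimeSpan.FromSeconds(30), token);
        return Constants.Runner.ReturnCode.RetryableError; // assumed return-code name
    default:
        return Constants.Runner.ReturnCode.TerminatedError;
}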
@@ -32,6 +44,8 @@ namespace GitHub.Runner.Listener
private RunnerSettings _settings;
private ITerminal _term;
private IRunnerServer _runnerServer;
private IBrokerServer _brokerServer;
private ICredentialManager _credMgr;
private TaskAgentSession _session;
private TimeSpan _getNextMessageRetryInterval;
private bool _accessTokenRevoked = false;
@@ -39,8 +53,12 @@ namespace GitHub.Runner.Listener
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private VssCredentials _creds;
private VssCredentials _credsV2;
private bool _needRefreshCredsV2 = false;
private bool _handlerInitialized = false;

public override void Initialize(IHostContext hostContext)
{
@@ -48,9 +66,11 @@ namespace GitHub.Runner.Listener

_term = HostContext.GetService<ITerminal>();
_runnerServer = HostContext.GetService<IRunnerServer>();
_brokerServer = hostContext.GetService<IBrokerServer>();
_credMgr = hostContext.GetService<ICredentialManager>();
}

public async Task<Boolean> CreateSessionAsync(CancellationToken token)
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
{
Trace.Entering();

@@ -62,8 +82,7 @@ namespace GitHub.Runner.Listener

// Create connection.
Trace.Info("Loading Credentials");
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
_creds = _credMgr.LoadCredentials(allowAuthUrlV2: false);

var agent = new TaskAgentReference
{
@@ -72,7 +91,8 @@ namespace GitHub.Runner.Listener
Version = BuildConstants.RunnerPackage.Version,
OSDescription = RuntimeInformation.OSDescription,
};
string sessionName = $"{Environment.MachineName ?? "RUNNER"}";
var currentProcess = Process.GetCurrentProcess();
string sessionName = $"{Environment.MachineName ?? "RUNNER"} (PID: {currentProcess.Id})";
var taskAgentSession = new TaskAgentSession(sessionName, agent);

string errorMessage = string.Empty;
@@ -85,7 +105,7 @@ namespace GitHub.Runner.Listener
try
{
Trace.Info("Connecting to the Runner Server...");
await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds);
Trace.Info("VssConnection created");

_term.WriteLine();
@@ -96,7 +116,6 @@ namespace GitHub.Runner.Listener
_settings.PoolId,
taskAgentSession,
token);

Trace.Info($"Session created.");
if (encounteringError)
{
@@ -105,7 +124,14 @@ namespace GitHub.Runner.Listener
encounteringError = false;
}

return true;
if (!_handlerInitialized)
{
Trace.Info("Registering AuthMigrationChanged event handler.");
HostContext.AuthMigrationChanged += HandleAuthMigrationChanged;
_handlerInitialized = true;
}

return CreateSessionResult.Success;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
@@ -123,13 +149,13 @@ namespace GitHub.Runner.Listener
Trace.Error("Catch exception during create session.");
Trace.Error(ex);

if (ex is VssOAuthTokenRequestException vssOAuthEx && creds.Federated is VssOAuthCredential vssOAuthCred)
if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
{
// "invalid_client" means the runner registration has been deleted from the server.
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
return false;
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
}

// Check whether we get 401 because the runner registration already removed by the service.
@@ -139,15 +165,19 @@ namespace GitHub.Runner.Listener
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
return false;
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
}
}

if (!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
if (ex is TaskAgentSessionConflictException)
{
return CreateSessionResult.SessionConflict;
}
return CreateSessionResult.Failure;
}

if (!encounteringError) //print the message only on the first error
@@ -166,6 +196,11 @@ namespace GitHub.Runner.Listener
{
if (_session != null && _session.SessionId != Guid.Empty)
{
if (_handlerInitialized)
{
HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged;
}

if (!_accessTokenRevoked)
{
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
@@ -182,19 +217,17 @@ namespace GitHub.Runner.Listener

public void OnJobStatus(object sender, JobStatusEventArgs e)
{
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW")))
Trace.Info("Received job status event. JobState: {0}", e.Status);
_runnerStatus = e.Status;
try
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
}
_getMessagesTokenSource?.Cancel();
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
}

}

public async Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token)
@@ -204,6 +237,7 @@ namespace GitHub.Runner.Listener
ArgUtil.NotNull(_settings, nameof(_settings));
bool encounteringError = false;
int continuousError = 0;
int continuousEmptyMessage = 0;
string errorMessage = string.Empty;
Stopwatch heartbeat = new();
heartbeat.Restart();
@@ -217,13 +251,38 @@ namespace GitHub.Runner.Listener
message = await _runnerServer.GetAgentMessageAsync(_settings.PoolId,
_session.SessionId,
_lastMessageId,
runnerStatus,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
_settings.DisableUpdate,
_getMessagesTokenSource.Token);

// Decrypt the message body if the session is using encryption
message = DecryptMessage(message);

if (message != null && message.MessageType == BrokerMigrationMessage.MessageType)
{
var migrationMessage = JsonUtility.FromString<BrokerMigrationMessage>(message.Body);

_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
await _brokerServer.UpdateConnectionIfNeeded(migrationMessage.BrokerBaseUrl, _credsV2);
if (_needRefreshCredsV2)
{
Trace.Info("Refreshing credentials for V2.");
await _brokerServer.ForceRefreshConnection(_credsV2);
_needRefreshCredsV2 = false;
}

message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
_settings.DisableUpdate,
token);
}

if (message != null)
{
_lastMessageId = message.MessageId;
@@ -252,7 +311,16 @@ namespace GitHub.Runner.Listener
_accessTokenRevoked = true;
throw;
}
catch (AccessDeniedException e) when (e.ErrorCode == 1)
catch (HostedRunnerDeprovisionedException)
{
Trace.Info("Hosted runner has been deprovisioned.");
throw;
}
catch (AccessDeniedException e) when (e.ErrorCode == 1 && !HostContext.AllowAuthMigration)
{
throw;
}
catch (RunnerNotFoundException) when (!HostContext.AllowAuthMigration)
{
throw;
}
@@ -261,12 +329,19 @@ namespace GitHub.Runner.Listener
Trace.Error("Catch exception during get next message.");
Trace.Error(ex);

// clear out potential message for broker migration,
// in case the exception is thrown from get message from broker-listener.
message = null;

// don't retry if SkipSessionRecover = true, DT service will delete agent session to stop agent from taking more jobs.
if (ex is TaskAgentSessionExpiredException && !_settings.SkipSessionRecover && await CreateSessionAsync(token))
if (!HostContext.AllowAuthMigration &&
ex is TaskAgentSessionExpiredException &&
!_settings.SkipSessionRecover && (await CreateSessionAsync(token) == CreateSessionResult.Success))
{
Trace.Info($"{nameof(TaskAgentSessionExpiredException)} received, recovered by recreate session.");
}
else if (!IsGetNextMessageExceptionRetriable(ex))
else if (!HostContext.AllowAuthMigration &&
!IsGetNextMessageExceptionRetriable(ex))
{
throw;
}
@@ -293,6 +368,12 @@ namespace GitHub.Runner.Listener
encounteringError = true;
}

if (HostContext.AllowAuthMigration)
{
Trace.Info("Disable migration mode for 60 minutes.");
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Get next message failed with exception: {ex}");
}

// re-create VssConnection before next retry
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));

@@ -307,16 +388,27 @@ namespace GitHub.Runner.Listener

if (message == null)
{
continuousEmptyMessage++;
if (heartbeat.Elapsed > TimeSpan.FromMinutes(30))
{
Trace.Info($"No message retrieved from session '{_session.SessionId}' within last 30 minutes.");
heartbeat.Restart();
continuousEmptyMessage = 0;
}
else
{
Trace.Verbose($"No message retrieved from session '{_session.SessionId}'.");
}

if (continuousEmptyMessage > 50)
{
// retried more than 50 times in less than 30mins and still getting empty message
// something is not right on the service side, backoff for 15-30s before retry
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30), _getNextMessageRetryInterval);
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
await HostContext.Delay(_getNextMessageRetryInterval, token);
}

continue;
}
@@ -339,6 +431,28 @@ namespace GitHub.Runner.Listener
}
}

public async Task RefreshListenerTokenAsync()
{
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
await _brokerServer.ForceRefreshConnection(_credsV2);
}

public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
{
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
await _brokerServer.AcknowledgeRunnerRequestAsync(
runnerRequestId,
_session.SessionId,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
linkedCts.Token);
}

private TaskAgentMessage DecryptMessage(TaskAgentMessage message)
{
if (_session.EncryptionKey == null ||
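The five-second acknowledge timeout above uses the standard linked-cancellation pattern: the call is cancelled when either the caller's token fires or the local timeout expires. A self-contained sketch of just that pattern; the work delegate is a placeholder, not a runner API:

using System;
using System.Threading;
using System.Threading.Tasks;

static class TimeoutSketch
{
    // Bound a call with both a caller token and a short local timeout.
    public static async Task CallWithTimeoutAsync(Func<CancellationToken, Task> doWorkAsync, CancellationToken callerToken)
    {
        using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
        using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(callerToken, timeoutCts.Token);
        // The work observes a single token that trips on caller cancellation or timeout.
        await doWorkAsync(linkedCts.Token);
    }
}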
@@ -388,6 +502,7 @@ namespace GitHub.Runner.Listener
ex is TaskAgentPoolNotFoundException ||
ex is TaskAgentSessionExpiredException ||
ex is AccessDeniedException ||
ex is RunnerNotFoundException ||
ex is VssUnauthorizedException)
{
Trace.Info($"Non-retriable exception: {ex.Message}");
@@ -454,7 +569,8 @@ namespace GitHub.Runner.Listener
}
else if (ex is TaskAgentPoolNotFoundException ||
ex is AccessDeniedException ||
ex is VssUnauthorizedException)
ex is VssUnauthorizedException ||
(ex is VssOAuthTokenRequestException oauthEx && oauthEx.Error != "server_error"))
{
Trace.Info($"Non-retriable exception: {ex.Message}");
return false;
@@ -465,5 +581,11 @@ namespace GitHub.Runner.Listener
return true;
}
}

private void HandleAuthMigrationChanged(object sender, EventArgs e)
{
Trace.Info($"Auth migration changed. Current allow auth migration state: {HostContext.AllowAuthMigration}");
_needRefreshCredsV2 = true;
}
}
}
@@ -7,6 +7,7 @@ using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.WebApi;

namespace GitHub.Runner.Listener
{
@@ -144,6 +145,12 @@ namespace GitHub.Runner.Listener
trace.Error(e);
return Constants.Runner.ReturnCode.TerminatedError;
}
catch (RunnerNotFoundException e)
{
terminal.WriteError($"An error occurred: {e.Message}");
trace.Error(e);
return Constants.Runner.ReturnCode.TerminatedError;
}
catch (Exception e)
{
terminal.WriteError($"An error occurred: {e.Message}");
@@ -1,11 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64</RuntimeIdentifiers>
<SelfContained>true</SelfContained>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603</NoWarn>
<NoWarn>NU1701;NU1603;SYSLIB0050;SYSLIB0051</NoWarn>
<Version>$(Version)</Version>
<PredefinedCulturesOnly>false</PredefinedCulturesOnly>
<PublishReadyToRunComposite>true</PublishReadyToRunComposite>
@@ -18,17 +19,11 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="4.4.0" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
<PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
</ItemGroup>

<ItemGroup>
<EmbeddedResource Include="..\Misc\runnercoreassets">
<LogicalName>GitHub.Runner.Listener.runnercoreassets</LogicalName>
</EmbeddedResource>
<PackageReference Include="Microsoft.Win32.Registry" Version="5.0.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="5.0.0" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
<PackageReference Include="System.ServiceProcess.ServiceController" Version="8.0.1" />
</ItemGroup>

<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
Some files were not shown because too many files have changed in this diff.