Mirror of https://github.com/actions/runner.git (synced 2025-12-10 12:36:23 +00:00)
Compare commits
145 Commits
v2.313.0
Commit SHA1s:
1d47bfa6c7 651ea42e00 bcc665a7a1 cd812f0395 fa874cf314 bf0e76631b 1d82031a2c d1a619ff09 11680fc78f 3e5433ec86
b647b890c5 894c50073a 5268d74ade 7414e08fbd dcb790f780 b7ab810945 7310ba0a08 e842959e3e 9f19310b5b 84220a21d1
8e0cd36cd8 f1f18f67e1 ac39c4bd0a 3f3d9b0d99 af485fb660 9e3e57ff90 ac89b31d2f 65201ff6be 661b261959 8a25302ba3
c7d65c42d6 a9bae6f37a 3136ce3a71 a4c57f2747 ce4e62c849 121f080023 cbcb4c568a 5d4b391f06 85fdc9b6b4 7f58504d35
611a7a85ed fb4bdbe440 940f4f4f40 4647f3ed5f 544f19042b c851794f04 22d4310b69 65361e0fb5 36e37a0885 a5cd1ba4b6
acdc6edf7c b4a7bb0969 f47384b46e f672567acc e25c754744 f57ecd8e3c 463ec00cb4 c3a7188eca 2a6f271afa 462337a4a4
8f1c070506 bf445e2750 67d70803a9 8c917b4ad3 440238adc4 8250726be1 5b2bc388ca 6a2381f525 1f0c91e23e 020a1ed790
c1a5dc71a5 c68e28788d a823a7f669 21ca5e6f04 f4197fb5a5 3a8cb43022 80a17a2f0c 16834edc67 2908d82845 3f5b813499
7b703d667d d2f0a46865 143639ddac 474d0fb354 15c0fe6c1d 2b66cbe699 0e9e9f1e8d be65955a9d e419ae3c7e bb40cd2788
e0acb14bfc 1ff8ad7860 8dd2cec3af 7b53c38294 e22452c2d6 9bbfed0740 cf5afc63da a00db53b0d 73ef82ff85 7892066256
8b9a81c952 460d9ae5a8 e94e744bed 94080812f7 1183100ab8 4f40f29cff d88823c634 a8783c023f 2606425cc5 8fb038b0e0
8b30f9381b 8206cf4e73 6680a3b142 b882f6696a e76de55cda 9eb4b96713 719348e0bf 9fe5aa2a9a 765a5c3efc e752edf7b5
e350f35217 8fa970a1e6 8eefd849c1 f6e9809844 5b2e4049bc 7cb61925b0 a61d3f37dc e30b9d6d12 496904c0b7 b91ad56f92
f25c9dfba3 7d432fb24c e8ee6f7b1b d4bbbb8419 4ffd081aea c05e6748c3 a2b7856c9c 5f1c6f4708 8415f13bab 471e3ae2d9
1096b975e4 282ba4cfc8 b737a5ac5c 20721bc950 fde86b0666
.devcontainer/devcontainer.json
@@ -1,27 +1,27 @@
 // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
 {
     "name": "Actions Runner Devcontainer",
     "image": "mcr.microsoft.com/devcontainers/base:focal",
     "features": {
         "ghcr.io/devcontainers/features/docker-in-docker:1": {},
         "ghcr.io/devcontainers/features/dotnet": {
-            "version": "6.0.405"
+            "version": "6.0.418"
         },
         "ghcr.io/devcontainers/features/node:1": {
             "version": "16"
-        }
+        },
+        "ghcr.io/devcontainers/features/sshd:1": {
+            "version": "latest"
+        }
     },
     "customizations": {
         "vscode": {
             "extensions": [
                 "ms-azuretools.vscode-docker",
                 "ms-dotnettools.csharp",
                 "eamodio.gitlens"
             ]
         }
     },
-    // dotnet restore to install dependencies so OmniSharp works out of the box
-    // src/Test restores all other projects it references, src/Runner.PluginHost is not one of them
     "postCreateCommand": "dotnet restore src/Test && dotnet restore src/Runner.PluginHost",
     "remoteUser": "vscode"
 }
.github/dependabot.yml (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
version: 2
updates:
  - package-ecosystem: "docker"
    directory: "/images"
    schedule:
      interval: "daily"
    target-branch: "main"
  - package-ecosystem: "nuget"
    directory: "/src"
    schedule:
      interval: "daily"
    target-branch: "main"
  - package-ecosystem: "npm"
    directory: "/src/Misc/expressionFunc/hashFiles"
    schedule:
      interval: "daily"
    target-branch: "main"
    allow:
      - dependency-type: direct
      - dependency-type: production # check only dependencies, which are going to the compiled app, not supporting tools like @vue-cli
.github/workflows/build.yml (vendored, 26 changed lines)
@@ -58,29 +58,6 @@ jobs:
           ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
         working-directory: src
-
-      # Check runtime/externals hash
-      - name: Compute/Compare runtime and externals Hash
-        shell: bash
-        run: |
-          echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
-          echo "Current Externals hash result: $EXTERNALS_HASH"
-
-          NeedUpdate=0
-          if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
-            echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
-            NeedUpdate=1
-          fi
-
-          if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
-            echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
-            NeedUpdate=1
-          fi
-
-          exit $NeedUpdate
-        env:
-          DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
-          EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}

       # Run tests
       - name: L0
         run: |
@@ -103,6 +80,3 @@ jobs:
           name: runner-package-${{ matrix.runtime }}
           path: |
             _package
-            _package_trims/trim_externals
-            _package_trims/trim_runtime
-            _package_trims/trim_runtime_externals
.github/workflows/close-bugs-bot.yml (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
name: Close Bugs Bot
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *' # every day at midnight
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
          exempt-issue-labels: "keep"
          stale-issue-label: "actions-bug"
          only-labels: "actions-bug"
          days-before-stale: 0
          days-before-close: 1
.github/workflows/close-features-bot.yml (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
name: Close Features Bot
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *' # every day at midnight
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions-and-packages) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
          exempt-issue-labels: "keep"
          stale-issue-label: "actions-feature"
          only-labels: "actions-feature"
          days-before-stale: 0
          days-before-close: 1
.github/workflows/dotnet-upgrade.yml (vendored, new file, 105 lines)
@@ -0,0 +1,105 @@
name: "DotNet SDK Upgrade"

on:
  schedule:
    - cron: '0 0 * * 1'
  workflow_dispatch:

jobs:
  dotnet-update:
    runs-on: ubuntu-latest
    outputs:
      SHOULD_UPDATE: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE }}
      BRANCH_EXISTS: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS }}
      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      - name: Get current major minor version
        id: fetch_current_version
        shell: bash
        run: |
          current_major_minor_patch_version=$(jq .sdk.version ./src/global.json | xargs)
          current_major_minor_version=$(cut -d '.' -f 1,2 <<< "$current_major_minor_patch_version")

          echo "DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION=${current_major_minor_patch_version}" >> $GITHUB_OUTPUT
          echo "DOTNET_CURRENT_MAJOR_MINOR_VERSION=${current_major_minor_version}" >> $GITHUB_OUTPUT
      - name: Check patch version
        id: fetch_latest_version
        shell: bash
        run: |
          latest_patch_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version")
          current_patch_version=${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}

          should_update=0
          [ "$current_patch_version" != "$latest_patch_version" ] && should_update=1

          # check if git branch already exists for the upgrade
          branch_already_exists=0

          if git ls-remote --heads --exit-code origin refs/heads/feature/dotnetsdk-upgrade/${latest_patch_version};
          then
            branch_already_exists=1
            should_update=0
          fi
          echo "DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION=${latest_patch_version}" >> $GITHUB_OUTPUT
          echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
          echo "BRANCH_EXISTS=${branch_already_exists}" >> $GITHUB_OUTPUT
      - name: Create an error annotation if branch exists
        if: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS == 1 }}
        run: echo "::error links::feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} https://github.com/actions/runner/tree/feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}::Branch feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} already exists. Please take a look and delete that branch if you wish to recreate"
      - name: Create a warning annotation if no need to update
        if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 0 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
        run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_latest_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update"
      - name: Update patch version
        if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
        shell: bash
        run: |
          patch_version="${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
          current_version="${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}"

          # Update globals
          echo Updating globals
          globals_temp=$(mktemp)
          jq --unbuffered --arg patch_version "$patch_version" '.sdk.version = $patch_version' ./src/global.json > "$globals_temp" && mv "$globals_temp" ./src/global.json

          # Update devcontainer
          echo Updating devcontainer
          devcontainer_temp=$(mktemp)
          jq --unbuffered --arg patch_version "$patch_version" '.features."ghcr.io/devcontainers/features/dotnet".version = $patch_version' ./.devcontainer/devcontainer.json > "$devcontainer_temp" && mv "$devcontainer_temp" ./.devcontainer/devcontainer.json

          # Update dev.sh
          echo Updating start script
          sed -i "s/DOTNETSDK_VERSION=\"$current_version\"/DOTNETSDK_VERSION=\"$patch_version\"/g" ./src/dev.sh
      - name: GIT commit and push all changed files
        if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
        id: create_branch
        run: |
          branch_name="feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
          git config --global user.name "github-actions[bot]"
          git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"

          git checkout -b $branch_name
          git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
          git push --set-upstream origin $branch_name

  create-pr:
    needs: [dotnet-update]
    if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      - name: Create Pull Request
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
          https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version

          ---

          Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
.github/workflows/publish-image.yml (vendored, 3 changed lines)
@@ -53,6 +53,9 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           context: ./images
+          platforms: |
+            linux/amd64
+            linux/arm64
           tags: |
             ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
             ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
.github/workflows/release.yml (vendored, 422 changed lines)
@@ -53,27 +53,6 @@ jobs:
       win-arm64-sha: ${{ steps.sha.outputs.win-arm64-sha256 }}
       osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
       osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }}
-      linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
-      linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
-      linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
-      win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
-      win-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-arm64-sha256 }}
-      osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
-      osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }}
-      linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
-      linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
-      linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
-      win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
-      win-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-arm64-sha256 }}
-      osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
-      osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }}
-      linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
-      linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
-      linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
-      win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
-      win-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-arm64-sha256 }}
-      osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
-      osx-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }}
     strategy:
       matrix:
         runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64, win-arm64 ]
@@ -136,76 +115,6 @@ jobs:
         id: sha
         name: Compute SHA256
         working-directory: _package
-      - run: |
-          file=$(ls)
-          sha=$(sha256sum $file | awk '{ print $1 }')
-          echo "Computed sha256: $sha for $file"
-          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
-          echo "sha256=$sha" >> $GITHUB_OUTPUT
-        shell: bash
-        id: sha_noexternals
-        name: Compute SHA256
-        working-directory: _package_trims/trim_externals
-      - run: |
-          file=$(ls)
-          sha=$(sha256sum $file | awk '{ print $1 }')
-          echo "Computed sha256: $sha for $file"
-          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
-          echo "sha256=$sha" >> $GITHUB_OUTPUT
-        shell: bash
-        id: sha_noruntime
-        name: Compute SHA256
-        working-directory: _package_trims/trim_runtime
-      - run: |
-          file=$(ls)
-          sha=$(sha256sum $file | awk '{ print $1 }')
-          echo "Computed sha256: $sha for $file"
-          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
-          echo "sha256=$sha" >> $GITHUB_OUTPUT
-        shell: bash
-        id: sha_noruntime_noexternals
-        name: Compute SHA256
-        working-directory: _package_trims/trim_runtime_externals
-
-      - name: Create trimmedpackages.json for ${{ matrix.runtime }}
-        if: matrix.runtime == 'win-x64' || matrix.runtime == 'win-arm64'
-        uses: actions/github-script@0.3.0
-        with:
-          github-token: ${{secrets.GITHUB_TOKEN}}
-          script: |
-            const core = require('@actions/core')
-            const fs = require('fs');
-            const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
-            var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
-            trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
-            trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
-
-            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
-            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
-            trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
-
-            console.log(trimmedPackages)
-            fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
-
-      - name: Create trimmedpackages.json for ${{ matrix.runtime }}
-        if: matrix.runtime != 'win-x64' && matrix.runtime != 'win-arm64'
-        uses: actions/github-script@0.3.0
-        with:
-          github-token: ${{secrets.GITHUB_TOKEN}}
-          script: |
-            const core = require('@actions/core')
-            const fs = require('fs');
-            const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
-            var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
-            trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
-            trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
-
-            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
-            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
-            trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
-
-            console.log(trimmedPackages)
-            fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)

       # Upload runner package tar.gz/zip as artifact.
       # Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
@@ -216,10 +125,6 @@ jobs:
           name: runner-packages
           path: |
             _package
-            _package_trims/trim_externals
-            _package_trims/trim_runtime
-            _package_trims/trim_runtime_externals
-            ${{ matrix.runtime }}-trimmedpackages.json

   release:
     needs: build
@@ -253,33 +158,11 @@ jobs:
             releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
             releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
             releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
-            releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
-            releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noexternals}}')
-            releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
-            releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}')
-            releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
-            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
-            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
-            releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
-            releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-arm64-sha-noruntime}}')
-            releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
-            releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}')
-            releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
-            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
-            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
-            releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
-            releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noruntime-noexternals}}')
-            releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
-            releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}')
-            releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
-            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
-            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
             console.log(releaseNote)
             core.setOutput('version', runnerVersion);
             core.setOutput('note', releaseNote);

       - name: Validate Packages HASH
-        working-directory: _package
         run: |
           ls -l
           echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
@@ -309,7 +192,7 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
+          asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
           asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
           asset_content_type: application/octet-stream

@@ -319,7 +202,7 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
+          asset_path: ${{ github.workspace }}/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
           asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
           asset_content_type: application/octet-stream

@@ -329,7 +212,7 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
+          asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_content_type: application/octet-stream

@@ -339,7 +222,7 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
+          asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_content_type: application/octet-stream

@@ -349,7 +232,7 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
+          asset_path: ${{ github.workspace }}/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_content_type: application/octet-stream

@@ -359,7 +242,7 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
+          asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_content_type: application/octet-stream
@@ -369,298 +252,10 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
+          asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
           asset_content_type: application/octet-stream
-
-      # Upload release assets (trim externals)
-      - name: Upload Release Asset (win-x64-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
-          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
-          asset_content_type: application/octet-stream
-
-      # Upload release assets (trim externals)
-      - name: Upload Release Asset (win-arm64-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
-          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-x64-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (osx-x64-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (osx-arm64-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-arm-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-arm64-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      # Upload release assets (trim runtime)
-      - name: Upload Release Asset (win-x64-noruntime)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
-          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
-          asset_content_type: application/octet-stream
-
-      # Upload release assets (trim runtime)
-      - name: Upload Release Asset (win-arm64-noruntime)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
-          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-x64-noruntime)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (osx-x64-noruntime)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (osx-arm64-noruntime)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-arm-noruntime)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-arm64-noruntime)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
-          asset_content_type: application/octet-stream
-
-      # Upload release assets (trim runtime and externals)
-      - name: Upload Release Asset (win-x64-noruntime-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
-          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
-          asset_content_type: application/octet-stream
-
-      # Upload release assets (trim runtime and externals)
-      - name: Upload Release Asset (win-arm64-noruntime-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
-          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-x64-noruntime-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (osx-x64-noruntime-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (osx-arm64-noruntime-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-arm-noruntime-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-arm64-noruntime-noexternals)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
-          asset_content_type: application/octet-stream
-
-      # Upload release assets (trimmedpackages.json)
-      - name: Upload Release Asset (win-x64-trimmedpackages.json)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
-          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
-          asset_content_type: application/octet-stream
-
-      # Upload release assets (trimmedpackages.json)
-      - name: Upload Release Asset (win-arm64-trimmedpackages.json)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/win-arm64-trimmedpackages.json
-          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-x64-trimmedpackages.json)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
-          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (osx-x64-trimmedpackages.json)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
-          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (osx-arm64-trimmedpackages.json)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json
-          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-arm-trimmedpackages.json)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
-          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
-          asset_content_type: application/octet-stream
-
-      - name: Upload Release Asset (linux-arm64-trimmedpackages.json)
-        uses: actions/upload-release-asset@v1.0.1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.createRelease.outputs.upload_url }}
-          asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
-          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
-          asset_content_type: application/octet-stream

   publish-image:
     needs: release
     runs-on: ubuntu-latest
@@ -699,6 +294,9 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           context: ./images
+          platforms: |
+            linux/amd64
+            linux/arm64
           tags: |
             ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
             ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
.github/workflows/stale-bot.yml (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
name: Stale Bot
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * 1' # every monday at midnight
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
          close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."
          exempt-issue-labels: "keep"
          days-before-stale: 365
          days-before-close: 15
.husky/pre-commit (executable, new file, 6 lines)
@@ -0,0 +1,6 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

cd src/Misc/expressionFunc/hashFiles

npx lint-staged
@@ -7,19 +7,26 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

 - For GitHub.com
   - The runner needs to access `https://api.github.com` for downloading actions.
+  - The runner needs to access `https://codeload.github.com` for downloading actions tar.gz/zip.
   - The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
   - The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
+  - The runner needs to access `https://results-receiver.actions.githubusercontent.com/.../` for reporting progress and uploading logs during a workflow job execution.
+---
+**NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).

 These can be tested by running the following `curl` commands from your self-hosted runner machine:

 ```
-curl -v https://api.github.com/api/v3/zen
+curl -v https://api.github.com/zen
+curl -v https://codeload.github.com/_ping
 curl -v https://vstoken.actions.githubusercontent.com/_apis/health
 curl -v https://pipelines.actions.githubusercontent.com/_apis/health
+curl -v https://results-receiver.actions.githubusercontent.com/health
 ```

 - For GitHub Enterprise Server
   - The runner needs to access `https://[hostname]/api/v3` for downloading actions.
+  - The runner needs to access `https://codeload.[hostname]/_ping` for downloading actions tar.gz/zip.
   - The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
   - The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.

@@ -27,6 +34,7 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

 ```
 curl -v https://[hostname]/api/v3/zen
+curl -v https://codeload.[hostname]/_ping
 curl -v https://[hostname]/_services/vstoken/_apis/health
 curl -v https://[hostname]/_services/pipelines/_apis/health
 ```
@@ -42,6 +50,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 - Ping api.github.com or myGHES.com using dotnet
 - Make HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, check response headers contains `X-GitHub-Request-Id`
 ---
+- DNS lookup for codeload.github.com or codeload.myGHES.com using dotnet
+- Ping codeload.github.com or codeload.myGHES.com using dotnet
+- Make HTTP GET to https://codeload.github.com/_ping or https://codeload.myGHES.com/_ping using dotnet, check response headers contains `X-GitHub-Request-Id`
+---
 - DNS lookup for vstoken.actions.githubusercontent.com using dotnet
 - Ping vstoken.actions.githubusercontent.com using dotnet
 - Make HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
@@ -50,6 +62,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 - Ping pipelines.actions.githubusercontent.com using dotnet
 - Make HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
 - Make HTTP POST to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
+---
+- DNS lookup for results-receiver.actions.githubusercontent.com using dotnet
+- Ping results-receiver.actions.githubusercontent.com using dotnet
+- Make HTTP GET to https://results-receiver.actions.githubusercontent.com/health using dotnet, check response headers contains `X-GitHub-Request-Id`

 ## How to fix the issue?

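The dotnet-based checks above are what the runner's `--check` flow performs; a quick equivalent probe can also be run from a shell on the same machine. A small sketch that requests each GitHub.com endpoint named in this document and looks for the expected correlation header:

```
# Sketch: probe the GitHub.com endpoints listed above and report whether the
# expected response header comes back. Endpoints and header names are the ones
# this document already lists; nothing else is assumed.
check() {
  # $1 = URL to probe, $2 = response header expected from the service
  if curl -sS -o /dev/null -D - "$1" | grep -qi "^$2:"; then
    echo "OK   $1"
  else
    echo "FAIL $1 (no $2 header in response)"
  fi
}
check https://api.github.com/zen                                     X-GitHub-Request-Id
check https://codeload.github.com/_ping                              X-GitHub-Request-Id
check https://vstoken.actions.githubusercontent.com/_apis/health     x-vss-e2eid
check https://pipelines.actions.githubusercontent.com/_apis/health   x-vss-e2eid
check https://results-receiver.actions.githubusercontent.com/health  X-GitHub-Request-Id
```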
@@ -14,7 +14,7 @@

 - A Proxy may try to modify the HTTPS request (like add or change some http headers) and causes the request become incompatible with the Actions Service (ASP.NetCore), Ex: [Nginx](https://github.com/dotnet/aspnetcore/issues/17081)

-- Firewall rules that block action runner from accessing certain hosts, ex: `*.github.com`, `*.actions.githubusercontent.com`, etc
+- Firewall rules that block action runner from accessing [certain hosts](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github), ex: `*.github.com`, `*.actions.githubusercontent.com`, etc

 ### Identify and solve these problems
@@ -42,6 +42,7 @@ If you are having trouble connecting, try these steps:
 - https://api.github.com/
 - https://vstoken.actions.githubusercontent.com/_apis/health
 - https://pipelines.actions.githubusercontent.com/_apis/health
+- https://results-receiver.actions.githubusercontent.com/health
 - For GHES/GHAE
 - https://myGHES.com/_services/vstoken/_apis/health
 - https://myGHES.com/_services/pipelines/_apis/health
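If a proxy sits between the runner and these hosts, the usual way to route the runner's traffic through it is the standard proxy environment variables; the variable names below are the conventional ones and are an assumption here, not something this diff defines:

```
# Sketch: start the runner with an outbound proxy configured via the
# conventional environment variables (adjust to what your environment honors).
export https_proxy=http://proxy.example.com:8080
export no_proxy=localhost,127.0.0.1
./run.sh   # launch the runner with the proxy settings in its environment
```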
@@ -5,9 +5,9 @@
 ## Supported Distributions and Versions

 x64
-- Red Hat Enterprise Linux 7
-- CentOS 7
-- Oracle Linux 7
+- Red Hat Enterprise Linux 7+
+- CentOS 7+
+- Oracle Linux 7+
 - Fedora 29+
 - Debian 9+
 - Ubuntu 16.04+
@@ -1,14 +1,19 @@
-FROM mcr.microsoft.com/dotnet/runtime-deps:6.0 as build
+# Source: https://github.com/dotnet/dotnet-docker
+FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build

+ARG TARGETOS
+ARG TARGETARCH
 ARG RUNNER_VERSION
-ARG RUNNER_ARCH="x64"
-ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.2
-ARG DOCKER_VERSION=20.10.23
+ARG RUNNER_CONTAINER_HOOKS_VERSION=0.5.1
+ARG DOCKER_VERSION=25.0.2
+ARG BUILDX_VERSION=0.12.1

 RUN apt update -y && apt install curl unzip -y

 WORKDIR /actions-runner
-RUN curl -f -L -o runner.tar.gz https://github.com/actions/runner/releases/download/v${RUNNER_VERSION}/actions-runner-linux-${RUNNER_ARCH}-${RUNNER_VERSION}.tar.gz \
+RUN export RUNNER_ARCH=${TARGETARCH} \
+    && if [ "$RUNNER_ARCH" = "amd64" ]; then export RUNNER_ARCH=x64 ; fi \
+    && curl -f -L -o runner.tar.gz https://github.com/actions/runner/releases/download/v${RUNNER_VERSION}/actions-runner-${TARGETOS}-${RUNNER_ARCH}-${RUNNER_VERSION}.tar.gz \
     && tar xzf ./runner.tar.gz \
     && rm runner.tar.gz

@@ -16,21 +21,28 @@ RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-c
     && unzip ./runner-container-hooks.zip -d ./k8s \
     && rm runner-container-hooks.zip

-RUN export DOCKER_ARCH=x86_64 \
+RUN export RUNNER_ARCH=${TARGETARCH} \
+    && if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
     && if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
-    && curl -fLo docker.tgz https://download.docker.com/linux/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
+    && curl -fLo docker.tgz https://download.docker.com/${TARGETOS}/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
     && tar zxvf docker.tgz \
-    && rm -rf docker.tgz
+    && rm -rf docker.tgz \
+    && mkdir -p /usr/local/lib/docker/cli-plugins \
+    && curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
+        "https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
+    && chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx

-FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
+FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy

 ENV DEBIAN_FRONTEND=noninteractive
 ENV RUNNER_MANUALLY_TRAP_SIG=1
 ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
+ENV ImageOS=ubuntu22

 RUN apt-get update -y \
     && apt-get install -y --no-install-recommends \
     sudo \
+    lsb-release \
     && rm -rf /var/lib/apt/lists/*

 RUN adduser --disabled-password --gecos "" --uid 1001 runner \
@@ -43,6 +55,7 @@ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
 WORKDIR /home/runner

 COPY --chown=runner:docker --from=build /actions-runner .
+COPY --from=build /usr/local/lib/docker/cli-plugins/docker-buildx /usr/local/lib/docker/cli-plugins/docker-buildx

 RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker

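The new `TARGETOS`/`TARGETARCH` args and the bundled buildx plugin make this image buildable for several platforms from the one Dockerfile; buildx fills those args in automatically. A hedged sketch of how such a build might be invoked (the tag and version below are placeholders, not values taken from this diff):

```
# Sketch: multi-platform build of the runner image. TARGETOS/TARGETARCH are
# injected by buildx itself; RUNNER_VERSION and the image tag are placeholders.
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  --build-arg RUNNER_VERSION=2.313.0 \
  -t my-registry.example.com/actions-runner:2.313.0 \
  --push .
```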
@@ -1,18 +1,26 @@
-## Features
-- Runner changes for communication with Results service (#2510, #2531, #2535, #2516)
-- Add `*.ghe.localhost` domains to hosted server check (#2536)
-- Add `OrchestrationId` to user-agent for better telemetry correlation. (#2568)
+## What's Changed
+* Fix `buildx` installation by @ajschmidt8 in https://github.com/actions/runner/pull/2952
+* Create close-features and close-bugs bot for runner issues by @ruvceskistefan in https://github.com/actions/runner/pull/2909
+* Send disableUpdate as query parameter by @luketomlinson in https://github.com/actions/runner/pull/2970
+* Handle SelfUpdate Flow when Package is provided in Message by @luketomlinson in https://github.com/actions/runner/pull/2926
+* Bump container hook version to 0.5.0 in runner image by @nikola-jokic in https://github.com/actions/runner/pull/3003
+* Set `ImageOS` environment variable in runner images by @int128 in https://github.com/actions/runner/pull/2878
+* Mark job as failed on worker crash. by @TingluoHuang in https://github.com/actions/runner/pull/3006
+* Include whether http proxy configured as part of UserAgent. by @TingluoHuang in https://github.com/actions/runner/pull/3009
+* Add codeload to the list of service we check during '--check'. by @TingluoHuang in https://github.com/actions/runner/pull/3011
+* close reason update by @ruvceskistefan in https://github.com/actions/runner/pull/3027
+* Update envlinux.md by @adjn in https://github.com/actions/runner/pull/3040
+* Extend `--check` to check Results-Receiver service. by @TingluoHuang in https://github.com/actions/runner/pull/3078
+* Use Azure SDK to upload files to Azure Blob by @yacaovsnc in https://github.com/actions/runner/pull/3033
+* Remove code in runner for handling trimmed packages. by @TingluoHuang in https://github.com/actions/runner/pull/3074
+* Update dotnet sdk to latest version @6.0.418 by @github-actions in https://github.com/actions/runner/pull/3085
+* Patch Curl to no longer use -k by @thboop in https://github.com/actions/runner/pull/3091

-## Bugs
-- Fix JIT configurations on Windows (#2497)
-- Guard against NullReference while creating HostContext (#2343)
-- Handles broken symlink in `Which` (#2150, #2196)
-- Adding curl retry for external tool downloads (#2552, #2557)
-- Limit the time we wait for waiting websocket to connect. (#2554)
+## New Contributors
+* @int128 made their first contribution in https://github.com/actions/runner/pull/2878
+* @adjn made their first contribution in https://github.com/actions/runner/pull/3040

-## Misc
-- Bump container hooks version to 0.3.1 in runner image (#2496)
-- Runner changes to communicate with vNext services (#2487, #2500, #2505, #2541, #2547)
+**Full Changelog**: https://github.com/actions/runner/compare/v2.311.0...v2.312.0

 _Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
 To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
@@ -117,27 +125,3 @@ The SHA-256 checksums for the packages included in this build are shown below:
 - actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
 - actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
 - actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
-
-- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
-- actions-runner-win-arm64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-arm64_noexternals --><WIN_ARM64_SHA_NOEXTERNALS><!-- END SHA win-arm64_noexternals -->
-- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
-- actions-runner-osx-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noexternals --><OSX_ARM64_SHA_NOEXTERNALS><!-- END SHA osx-arm64_noexternals -->
-- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
-- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
-- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->
-
-- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
-- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-arm64_noruntime --><WIN_ARM64_SHA_NORUNTIME><!-- END SHA win-arm64_noruntime -->
-- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
-- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-arm64_noruntime --><OSX_ARM64_SHA_NORUNTIME><!-- END SHA osx-arm64_noruntime -->
-- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
-- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
-- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->
-
-- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
-- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-arm64_noruntime_noexternals --><WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-arm64_noruntime_noexternals -->
-- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
-- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noruntime_noexternals --><OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-arm64_noruntime_noexternals -->
-- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
-- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
-- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->
@@ -8,7 +8,7 @@ set -e
 # Configures it as a service more secure
 # Should be used on VMs and not containers
 # Works on OSX and Linux
-# Assumes x64 arch
+# Assumes x64 arch (support arm64)
 # See EXAMPLES below

 flags_found=false
@@ -87,6 +87,9 @@ sudo echo
 runner_plat=linux
 [ ! -z "$(which sw_vers)" ] && runner_plat=osx;

+runner_arch=x64
+[ ! -z "$(arch | grep arm64)" ] && runner_arch=arm64
+
 function fatal()
 {
    echo "error: $1" >&2
@@ -139,7 +142,7 @@ echo "Downloading latest runner ..."
 # For the GHES Alpha, download the runner from github.com
 latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
 latest_version=$(echo ${latest_version_label:1})
-runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
+runner_file="actions-runner-${runner_plat}-${runner_arch}-${latest_version}.tar.gz"

 if [ -f "${runner_file}" ]; then
     echo "${runner_file} exists. skipping download."
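With the new `runner_arch` detection, the downloaded file name is assembled from platform and architecture. As a worked example, on an arm64 macOS machine the variables above resolve roughly like this (the version value is illustrative):

```
# Illustrative values only: what the script's variables resolve to on an
# arm64 macOS machine when the latest release happens to be v2.313.0.
runner_plat=osx
runner_arch=arm64
latest_version=2.313.0
runner_file="actions-runner-${runner_plat}-${runner_arch}-${latest_version}.tar.gz"
echo "$runner_file"   # actions-runner-osx-arm64-2.313.0.tar.gz
```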
@@ -1 +1 @@
-39f2a931565d6a10e695ac8ed14bb9dcbb568151410349b32dbf9c27bae29602
+54d95a44d118dba852395991224a6b9c1abe916858c87138656f80c619e85331
@@ -1 +1 @@
-29ffb303537d8ba674fbebc7729292c21c4ebd17b3198f91ed593ef4cbbb67b5
+68015af17f06a824fa478e62ae7393766ce627fd5599ab916432a14656a19a52
@@ -1 +1 @@
-de6868a836fa3cb9e5ddddbc079da1c25e819aa2d2fc193cc9931c353687c57c
+a2628119ca419cb54e279103ffae7986cdbd0814d57c73ff0dc74c38be08b9ae
@@ -1 +1 @@
-339d3e1a5fd28450c0fe6cb820cc7aae291f0f9e2d153ac34e1f7b080e35d30e
+de71ca09ead807e1a2ce9df0a5b23eb7690cb71fff51169a77e4c3992be53dda
@@ -1 +1 @@
-dcb7f606c1d7d290381e5020ee73e7f16dcbd2f20ac9b431362ccbb5120d449c
+d009e05e6b26d614d65be736a15d1bd151932121c16a9ff1b986deadecc982b9
@@ -1 +1 @@
-1bbcb0e9a2cf4be4b1fce77458de139b70ac58efcbb415a6db028b9373ae1673
+f730db39c2305800b4653795360ba9c10c68f384a46b85d808f1f9f0ed3c42e4
@@ -1 +1 @@
-44cd25f3c104d0abb44d262397a80e0b2c4f206465c5d899a22eec043dac0fb3
+a35b5722375490e9473cdcccb5e18b41eba3dbf4344fe31abc9821e21f18ea5a
src/Misc/contentHash/externals/linux-arm (vendored, 2 lines changed)
@@ -1 +1 @@
-3807dcbf947e840c33535fb466b096d76bf09e5c0254af8fc8cbbb24c6388222
+4bf3e1af0d482af1b2eaf9f08250248a8c1aea8ec20a3c5be116d58cdd930009
src/Misc/contentHash/externals/linux-arm64 (vendored, 2 lines changed)
@@ -1 +1 @@
-ee01eee80cd8a460a4b9780ee13fdd20f25c59e754b4ccd99df55fbba2a85634
+ec1719a8cb4d8687328aa64f4aa7c4e3498a715d8939117874782e3e6e63a14b
src/Misc/contentHash/externals/linux-x64 (vendored, 2 lines changed)
@@ -1 +1 @@
-a9fb9c14e24e79aec97d4da197dd7bfc6364297d6fce573afb2df48cc9a931f8
+50538de29f173bb73f708c4ed2c8328a62b8795829b97b2a6cb57197e2305287
src/Misc/contentHash/externals/osx-arm64 (vendored, 2 lines changed)
@@ -1 +1 @@
-a4e0e8fc62eba0967a39c7d693dcd0aeb8b2bed0765f9c38df80d42884f65341
+a0a96cbb7593643b69e669bf14d7b29b7f27800b3a00bb3305aebe041456c701
src/Misc/contentHash/externals/osx-x64 (vendored, 2 lines changed)
@@ -1 +1 @@
-17ac17fbe785b3d6fa2868d8d17185ebfe0c90b4b0ddf6b67eac70e42bcd989b
+6255b22692779467047ecebd60ad46984866d75cdfe10421d593a7b51d620b09
src/Misc/contentHash/externals/win-arm64 (vendored, 2 lines changed)
@@ -1 +1 @@
-89f24657a550f1e818b0e9975e5b80edcf4dd22b7d4bccbb9e48e37f45d30fb1
+6ff1abd055dc35bfbf06f75c2f08908f660346f66ad1d8f81c910068e9ba029d
src/Misc/contentHash/externals/win-x64 (vendored, 2 lines changed)
@@ -1 +1 @@
-24fd131b5dce33ef16038b771407bc0507da8682a72fb3b7780607235f76db0b
+433a6d748742d12abd20dc2a79b62ac3d9718ae47ef26f8e84dc8c180eea3659
@@ -1,11 +1,19 @@
 {
   "printWidth": 80,
   "tabWidth": 2,
   "useTabs": false,
   "semi": false,
   "singleQuote": true,
   "trailingComma": "none",
   "bracketSpacing": false,
   "arrowParens": "avoid",
-  "parser": "typescript"
-}
+  "overrides": [
+    {
+      "files": "*.{js,ts,json}",
+      "options": {
+        "tabWidth": 2
+      }
+    }
+  ]
+}
@@ -1,4 +1,3 @@
-To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run all`.
+To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run prepare && npm run all`.

-> Note: this package also needs to be recompiled for dependabot PRs updating one of
-> its dependencies.
+When you commit changes to the JSON or Typescript file, the javascript binary will be automatically re-compiled and added to the latest commit.
src/Misc/expressionFunc/hashFiles/package-lock.json (generated, 4626 lines changed)
File diff suppressed because it is too large
@@ -9,7 +9,9 @@
     "format-check": "prettier --check **/*.ts",
     "lint": "eslint src/**/*.ts",
     "pack": "ncc build -o ../../layoutbin/hashFiles",
-    "all": "npm run build && npm run format && npm run lint && npm run pack"
+    "all": "npm run format && npm run lint && npm run build && npm run pack",
+    "prepare": "cd ../../../../ && husky install"
   },
   "repository": {
     "type": "git",
@@ -18,18 +20,32 @@
   "keywords": [
     "actions"
   ],
+  "lint-staged": {
+    "*.md": [
+      "prettier --write",
+      "git add ."
+    ],
+    "*.{ts,json}": [
+      "sh -c 'npm run all'",
+      "git add ."
+    ]
+  },
   "author": "GitHub Actions",
   "license": "MIT",
   "dependencies": {
-    "@actions/glob": "^0.1.0"
+    "@actions/glob": "^0.4.0"
  },
   "devDependencies": {
-    "@types/node": "^12.7.12",
-    "@typescript-eslint/parser": "^5.15.0",
-    "@vercel/ncc": "^0.36.0",
-    "eslint": "^8.11.0",
-    "eslint-plugin-github": "^4.3.5",
-    "prettier": "^1.19.1",
-    "typescript": "^3.6.4"
+    "@types/node": "^20.6.2",
+    "@typescript-eslint/eslint-plugin": "^6.7.2",
+    "@typescript-eslint/parser": "^6.7.2",
+    "@vercel/ncc": "^0.38.0",
+    "eslint": "^8.47.0",
+    "eslint-plugin-github": "^4.10.0",
+    "eslint-plugin-prettier": "^5.0.0",
+    "prettier": "^3.0.3",
+    "typescript": "^5.2.2",
+    "husky": "^8.0.3",
+    "lint-staged": "^14.0.0"
   }
 }
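Together with the README and `.husky/pre-commit` changes above, the new `prepare` script wires the hook up once per clone, and lint-staged then recompiles the bundle on every commit. A sketch of the one-time setup, using only commands that appear in this diff:

```
# One-time setup after cloning (paths and scripts are the ones in this diff).
cd src/Misc/expressionFunc/hashFiles
npm install        # installs devDependencies, including husky and lint-staged
npm run prepare    # cd back to the repo root and run `husky install`
# From then on, `git commit` triggers .husky/pre-commit, which runs
# `npx lint-staged`: staged *.ts/*.json files get `npm run all`, staged *.md
# files get `prettier --write`, and the regenerated output is re-staged.
```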
@@ -52,12 +52,13 @@ async function run(): Promise<void> {
   }
 }

-run()
-  .then(out => {
+;(async () => {
+  try {
+    const out = await run()
     console.log(out)
     process.exit(0)
-  })
-  .catch(err => {
+  } catch (err) {
     console.error(err)
     process.exit(1)
-  })
+  }
+})()
@@ -4,8 +4,11 @@ PRECACHE=$2

 NODE_URL=https://nodejs.org/dist
 UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release
-NODE12_VERSION="12.22.7"
-NODE16_VERSION="16.16.0"
+NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
+NODE16_VERSION="16.20.2"
+NODE20_VERSION="20.8.1"
+# used only for win-arm64, remove node16 unofficial version when official version is available
+NODE16_UNOFFICIAL_VERSION="16.20.0"

 get_abs_path() {
   # exploits the fact that pwd will print abs path when no args
@@ -60,17 +63,16 @@ function acquireExternalTool() {
     echo "Curl version: $CURL_VERSION"

     # curl -f Fail silently (no output at all) on HTTP errors (H)
-    # -k Allow connections to SSL sites without certs (H)
     # -S Show error. With -s, make curl show errors when they occur
     # -L Follow redirects (H)
     # -o FILE Write to FILE instead of stdout
     # --retry 3 Retries transient errors 3 times (timeouts, 5xx)
     if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
        # Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag
-       curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
+       curl -fSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
     else
        # Curl version is greater than 7.71.0, running curl with --retry-all-errors flag
-       curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
+       curl -fSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
     fi

     # Move the partial file to the download target.
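The `sort -V` comparison above is a compact shell idiom for deciding whether the installed curl is new enough for `--retry-all-errors` (added in 7.71.0). The same technique in isolation, with illustrative values:

```
# Sketch of the sort -V version-comparison idiom used above, standalone.
is_at_least() {
  # succeeds if version $2 is >= version $1
  [ "$(printf '%s\n' "$1" "$2" | sort -V | head -n1)" = "$1" ]
}
curl_version=$(curl --version | head -n1 | awk '{print $2}')
if is_at_least "7.71.0" "$curl_version"; then
  echo "curl $curl_version supports --retry-all-errors"
else
  echo "curl $curl_version is older: fall back to plain --retry"
fi
```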
@@ -137,10 +139,10 @@ function acquireExternalTool() {

 # Download the external tools only for Windows.
 if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.exe" node12/bin
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.lib" node12/bin
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
     if [[ "$PRECACHE" != "" ]]; then
         acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
     fi
@@ -149,8 +151,10 @@ fi
 # Download the external tools only for Windows.
 if [[ "$PACKAGERUNTIME" == "win-arm64" ]]; then
     # todo: replace these with official release when available
-    acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
-    acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
+    acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
+    acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
     if [[ "$PRECACHE" != "" ]]; then
         acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
     fi
@@ -158,29 +162,30 @@ fi

 # Download the external tools only for OSX.
 if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-darwin-x64.tar.gz" node12 fix_nested_dir
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-x64.tar.gz" node20 fix_nested_dir
 fi

 if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then
     # node.js v12 doesn't support macOS on arm64.
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-arm64.tar.gz" node16 fix_nested_dir
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-arm64.tar.gz" node20 fix_nested_dir
 fi

 # Download the external tools for Linux PACKAGERUNTIMEs.
 if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
-    acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
-    acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
+    acquireExternalTool "$NODE_ALPINE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
+    acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
 fi

 if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-arm64.tar.gz" node12 fix_nested_dir
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-arm64.tar.gz" node20 fix_nested_dir
 fi

 if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-armv7l.tar.gz" node12 fix_nested_dir
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-armv7l.tar.gz" node20 fix_nested_dir
 fi
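For a concrete sense of what `acquireExternalTool` now fetches, substituting the new version variables into the URL templates above gives, for example:

```
# Example URLs produced by the templates above for linux-x64, with
# NODE16_VERSION=16.20.2 and NODE20_VERSION=20.8.1 substituted in.
echo "https://nodejs.org/dist/v20.8.1/node-v20.8.1-linux-x64.tar.gz"
echo "https://github.com/actions/alpine_nodejs/releases/download/v16.20.2/node-v16.20.2-alpine-x64.tar.gz"
```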
@@ -6,6 +6,29 @@

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
@@ -22,13 +45,6 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
     function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
     function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
 };
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-    result["default"] = mod;
-    return result;
-};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 const crypto = __importStar(__nccwpck_require__(6113));
 const fs = __importStar(__nccwpck_require__(7147));
@@ -37,7 +53,7 @@ const path = __importStar(__nccwpck_require__(1017));
 const stream = __importStar(__nccwpck_require__(2781));
 const util = __importStar(__nccwpck_require__(3837));
 function run() {
-    var e_1, _a;
+    var _a, e_1, _b, _c;
     return __awaiter(this, void 0, void 0, function* () {
         // arg0 -> node
         // arg1 -> hashFiles.js
@@ -56,8 +72,10 @@ function run() {
         let count = 0;
         const globber = yield glob.create(matchPatterns, { followSymbolicLinks });
         try {
-            for (var _b = __asyncValues(globber.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
-                const file = _c.value;
+            for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
+                _c = _f.value;
+                _d = false;
+                const file = _c;
                 console.log(file);
                 if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
                     console.log(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
@@ -80,7 +98,7 @@ function run() {
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+                if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
             }
             finally { if (e_1) throw e_1.error; }
         }
@@ -94,15 +112,18 @@ function run() {
         }
     });
 }
-run()
-    .then(out => {
-    console.log(out);
-    process.exit(0);
-})
-    .catch(err => {
-    console.error(err);
-    process.exit(1);
-});
+;
+(() => __awaiter(void 0, void 0, void 0, function* () {
+    try {
+        const out = yield run();
+        console.log(out);
+        process.exit(0);
+    }
+    catch (err) {
+        console.error(err);
+        process.exit(1);
+    }
+}))();


 /***/ }),
@@ -246,7 +267,6 @@ const file_command_1 = __nccwpck_require__(717);
 const utils_1 = __nccwpck_require__(5278);
 const os = __importStar(__nccwpck_require__(2037));
 const path = __importStar(__nccwpck_require__(1017));
-const uuid_1 = __nccwpck_require__(5840);
 const oidc_utils_1 = __nccwpck_require__(8041);
 /**
  * The code to exit an action
@@ -276,20 +296,9 @@ function exportVariable(name, val) {
     process.env[name] = convertedVal;
     const filePath = process.env['GITHUB_ENV'] || '';
     if (filePath) {
-        const delimiter = `ghadelimiter_${uuid_1.v4()}`;
-        // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.
-        if (name.includes(delimiter)) {
-            throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
-        }
-        if (convertedVal.includes(delimiter)) {
-            throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
-        }
-        const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
-        file_command_1.issueCommand('ENV', commandValue);
-    }
-    else {
-        command_1.issueCommand('set-env', { name }, convertedVal);
+        return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
     }
+    command_1.issueCommand('set-env', { name }, convertedVal);
 }
 exports.exportVariable = exportVariable;
 /**
@@ -307,7 +316,7 @@ exports.setSecret = setSecret;
 function addPath(inputPath) {
     const filePath = process.env['GITHUB_PATH'] || '';
     if (filePath) {
-        file_command_1.issueCommand('PATH', inputPath);
+        file_command_1.issueFileCommand('PATH', inputPath);
     }
     else {
         command_1.issueCommand('add-path', {}, inputPath);
@@ -347,7 +356,10 @@ function getMultilineInput(name, options) {
     const inputs = getInput(name, options)
         .split('\n')
         .filter(x => x !== '');
-    return inputs;
+    if (options && options.trimWhitespace === false) {
+        return inputs;
+    }
+    return inputs.map(input => input.trim());
 }
 exports.getMultilineInput = getMultilineInput;
 /**
@@ -380,8 +392,12 @@ exports.getBooleanInput = getBooleanInput;
  */
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 function setOutput(name, value) {
+    const filePath = process.env['GITHUB_OUTPUT'] || '';
+    if (filePath) {
+        return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
+    }
     process.stdout.write(os.EOL);
-    command_1.issueCommand('set-output', { name }, value);
+    command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
 }
 exports.setOutput = setOutput;
 /**
@@ -510,7 +526,11 @@ exports.group = group;
  */
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 function saveState(name, value) {
-    command_1.issueCommand('save-state', { name }, value);
+    const filePath = process.env['GITHUB_STATE'] || '';
+    if (filePath) {
+        return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
+    }
+    command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
 }
 exports.saveState = saveState;
 /**
@@ -576,13 +596,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.issueCommand = void 0;
+exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
 // We use any as a valid input type
 /* eslint-disable @typescript-eslint/no-explicit-any */
 const fs = __importStar(__nccwpck_require__(7147));
 const os = __importStar(__nccwpck_require__(2037));
+const uuid_1 = __nccwpck_require__(5840);
 const utils_1 = __nccwpck_require__(5278);
-function issueCommand(command, message) {
+function issueFileCommand(command, message) {
     const filePath = process.env[`GITHUB_${command}`];
     if (!filePath) {
         throw new Error(`Unable to find environment variable for file command ${command}`);
@@ -594,7 +615,22 @@ function issueCommand(command, message) {
         encoding: 'utf8'
     });
 }
-exports.issueCommand = issueCommand;
+exports.issueFileCommand = issueFileCommand;
+function prepareKeyValueMessage(key, value) {
+    const delimiter = `ghadelimiter_${uuid_1.v4()}`;
+    const convertedValue = utils_1.toCommandValue(value);
+    // These should realistically never happen, but just in case someone finds a
+    // way to exploit uuid generation let's not allow keys or values that contain
+    // the delimiter.
+    if (key.includes(delimiter)) {
+        throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
+    }
+    if (convertedValue.includes(delimiter)) {
+        throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
+    }
+    return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
+}
+exports.prepareKeyValueMessage = prepareKeyValueMessage;
 //# sourceMappingURL=file-command.js.map


 /***/ }),
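The `${key}<<${delimiter} ... ${delimiter}` string built by `prepareKeyValueMessage` is the same multi-line format a workflow step can write by hand to the `GITHUB_ENV`/`GITHUB_OUTPUT` files. A minimal shell sketch of that format, with a fixed `EOF` marker standing in for the random `ghadelimiter_<uuid>` the library generates:

```
# Sketch: write a multi-line output value in the same key<<delimiter format
# that prepareKeyValueMessage produces. A fixed EOF marker is used here only
# for readability; the library uses a random UUID-based delimiter.
{
  echo "my_output<<EOF"
  echo "line one"
  echo "line two"
  echo "EOF"
} >> "$GITHUB_OUTPUT"
```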
@@ -1100,7 +1136,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.hashFiles = exports.create = void 0;
 const internal_globber_1 = __nccwpck_require__(8298);
+const internal_hash_files_1 = __nccwpck_require__(2448);
 /**
  * Constructs a globber
  *
@@ -1113,17 +1151,56 @@ function create(patterns, options) {
     });
 }
 exports.create = create;
+/**
+ * Computes the sha256 hash of a glob
+ *
+ * @param patterns Patterns separated by newlines
+ * @param currentWorkspace Workspace used when matching files
+ * @param options Glob options
+ * @param verbose Enables verbose logging
+ */
+function hashFiles(patterns, currentWorkspace = '', options, verbose = false) {
+    return __awaiter(this, void 0, void 0, function* () {
+        let followSymbolicLinks = true;
+        if (options && typeof options.followSymbolicLinks === 'boolean') {
+            followSymbolicLinks = options.followSymbolicLinks;
+        }
+        const globber = yield create(patterns, { followSymbolicLinks });
+        return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose);
+    });
+}
+exports.hashFiles = hashFiles;
 //# sourceMappingURL=glob.js.map


 /***/ }),


 /***/ 1026:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const core = __nccwpck_require__(2186);
+exports.getOptions = void 0;
+const core = __importStar(__nccwpck_require__(2186));
 /**
  * Returns a copy with defaults filled in.
  */
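The bundled `hashFiles` helper above globs a set of patterns and reduces the matched files to a single sha256 digest. As a rough shell analogue of the idea, not the library's exact algorithm or ordering, per-file digests can be concatenated and hashed again:

```
# Rough analogue of "hash a set of globbed files"; the @actions/glob
# implementation differs in its details (ordering, workspace filtering,
# streaming), so treat this only as an illustration of the concept.
find . -name '*.ts' -type f -print0 \
  | sort -z \
  | xargs -0 sha256sum \
  | awk '{print $1}' \
  | sha256sum \
  | awk '{print $1}'
```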
@@ -1131,6 +1208,7 @@ function getOptions(copy) {
     const result = {
         followSymbolicLinks: true,
         implicitDescendants: true,
+        matchDirectories: true,
        omitBrokenSymbolicLinks: true
     };
     if (copy) {
@@ -1142,6 +1220,10 @@ function getOptions(copy) {
             result.implicitDescendants = copy.implicitDescendants;
             core.debug(`implicitDescendants '${result.implicitDescendants}'`);
         }
+        if (typeof copy.matchDirectories === 'boolean') {
+            result.matchDirectories = copy.matchDirectories;
+            core.debug(`matchDirectories '${result.matchDirectories}'`);
+        }
         if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
             result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
             core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
@@ -1159,6 +1241,25 @@ exports.getOptions = getOptions;

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
@@ -1188,11 +1289,12 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
 function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const core = __nccwpck_require__(2186);
-const fs = __nccwpck_require__(7147);
-const globOptionsHelper = __nccwpck_require__(1026);
-const path = __nccwpck_require__(1017);
-const patternHelper = __nccwpck_require__(9005);
+exports.DefaultGlobber = void 0;
+const core = __importStar(__nccwpck_require__(2186));
+const fs = __importStar(__nccwpck_require__(7147));
+const globOptionsHelper = __importStar(__nccwpck_require__(1026));
+const path = __importStar(__nccwpck_require__(1017));
+const patternHelper = __importStar(__nccwpck_require__(9005));
 const internal_match_kind_1 = __nccwpck_require__(1063);
 const internal_pattern_1 = __nccwpck_require__(4536);
 const internal_search_state_1 = __nccwpck_require__(9117);
@@ -1238,7 +1340,7 @@ class DefaultGlobber {
 if (options.implicitDescendants &&
 (pattern.trailingSeparator ||
 pattern.segments[pattern.segments.length - 1] !== '**')) {
-patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
+patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));
 }
 }
 // Push the search paths
@@ -1281,7 +1383,7 @@ class DefaultGlobber {
 // Directory
 if (stats.isDirectory()) {
 // Matched
-if (match & internal_match_kind_1.MatchKind.Directory) {
+if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) {
 yield yield __await(item.path);
 }
 // Descend?
@@ -1376,12 +1478,117 @@ exports.DefaultGlobber = DefaultGlobber;

 /***/ }),

+/***/ 2448:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

+"use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+return new (P || (P = Promise))(function (resolve, reject) {
+function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+step((generator = generator.apply(thisArg, _arguments || [])).next());
+});
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+var m = o[Symbol.asyncIterator], i;
+return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.hashFiles = void 0;
+const crypto = __importStar(__nccwpck_require__(6113));
+const core = __importStar(__nccwpck_require__(2186));
+const fs = __importStar(__nccwpck_require__(7147));
+const stream = __importStar(__nccwpck_require__(2781));
+const util = __importStar(__nccwpck_require__(3837));
+const path = __importStar(__nccwpck_require__(1017));
+function hashFiles(globber, currentWorkspace, verbose = false) {
+var e_1, _a;
+var _b;
+return __awaiter(this, void 0, void 0, function* () {
+const writeDelegate = verbose ? core.info : core.debug;
+let hasMatch = false;
+const githubWorkspace = currentWorkspace
+? currentWorkspace
+: (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+const result = crypto.createHash('sha256');
+let count = 0;
+try {
+for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
+const file = _d.value;
+writeDelegate(file);
+if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
+writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
+continue;
+}
+if (fs.statSync(file).isDirectory()) {
+writeDelegate(`Skip directory '${file}'.`);
+continue;
+}
+const hash = crypto.createHash('sha256');
+const pipeline = util.promisify(stream.pipeline);
+yield pipeline(fs.createReadStream(file), hash);
+result.write(hash.digest());
+count++;
+if (!hasMatch) {
+hasMatch = true;
+}
+}
+}
+catch (e_1_1) { e_1 = { error: e_1_1 }; }
+finally {
+try {
+if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+}
+finally { if (e_1) throw e_1.error; }
+}
+result.end();
+if (hasMatch) {
+writeDelegate(`Found ${count} files to hash.`);
+return result.digest('hex');
+}
+else {
+writeDelegate(`No matches found for glob`);
+return '';
+}
+});
+}
+exports.hashFiles = hashFiles;
+//# sourceMappingURL=internal-hash-files.js.map

+/***/ }),

 /***/ 1063:
 /***/ ((__unused_webpack_module, exports) => {

 "use strict";

 Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.MatchKind = void 0;
 /**
 * Indicates whether a pattern matches a path
 */
@@ -1401,13 +1608,36 @@ var MatchKind;
 /***/ }),

 /***/ 1849:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const assert = __nccwpck_require__(9491);
-const path = __nccwpck_require__(1017);
+exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
+const path = __importStar(__nccwpck_require__(1017));
+const assert_1 = __importDefault(__nccwpck_require__(9491));
 const IS_WINDOWS = process.platform === 'win32';
 /**
 * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
@@ -1447,8 +1677,8 @@ exports.dirname = dirname;
 * or `C:` are expanded based on the current working directory.
 */
 function ensureAbsoluteRoot(root, itemPath) {
-assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
-assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
+assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
+assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
 // Already rooted
 if (hasAbsoluteRoot(itemPath)) {
 return itemPath;
@@ -1458,7 +1688,7 @@ function ensureAbsoluteRoot(root, itemPath) {
 // Check for itemPath like C: or C:foo
 if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
 let cwd = process.cwd();
-assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
 // Drive letter matches cwd? Expand to cwd
 if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
 // Drive only, e.g. C:
@@ -1483,11 +1713,11 @@ function ensureAbsoluteRoot(root, itemPath) {
 // Check for itemPath like \ or \foo
 else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
 const cwd = process.cwd();
-assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
 return `${cwd[0]}:\\${itemPath.substr(1)}`;
 }
 }
-assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
+assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
 // Otherwise ensure root ends with a separator
 if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
 // Intentionally empty
@@ -1504,7 +1734,7 @@ exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
 * `\\hello\share` and `C:\hello` (and using alternate separator).
 */
 function hasAbsoluteRoot(itemPath) {
-assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
+assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
 // Normalize separators
 itemPath = normalizeSeparators(itemPath);
 // Windows
@@ -1521,7 +1751,7 @@ exports.hasAbsoluteRoot = hasAbsoluteRoot;
 * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
 */
 function hasRoot(itemPath) {
-assert(itemPath, `isRooted parameter 'itemPath' must not be empty`);
+assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
 // Normalize separators
 itemPath = normalizeSeparators(itemPath);
 // Windows
@@ -1583,14 +1813,37 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
 /***/ }),

 /***/ 6836:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const assert = __nccwpck_require__(9491);
-const path = __nccwpck_require__(1017);
-const pathHelper = __nccwpck_require__(1849);
+exports.Path = void 0;
+const path = __importStar(__nccwpck_require__(1017));
+const pathHelper = __importStar(__nccwpck_require__(1849));
+const assert_1 = __importDefault(__nccwpck_require__(9491));
 const IS_WINDOWS = process.platform === 'win32';
 /**
 * Helper class for parsing paths into segments
@@ -1604,7 +1857,7 @@ class Path {
 this.segments = [];
 // String
 if (typeof itemPath === 'string') {
-assert(itemPath, `Parameter 'itemPath' must not be empty`);
+assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
 // Normalize slashes and trim unnecessary trailing slash
 itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
 // Not rooted
@@ -1631,24 +1884,24 @@ class Path {
 // Array
 else {
 // Must not be empty
-assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
+assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
 // Each segment
 for (let i = 0; i < itemPath.length; i++) {
 let segment = itemPath[i];
 // Must not be empty
-assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
+assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
 // Normalize slashes
 segment = pathHelper.normalizeSeparators(itemPath[i]);
 // Root segment
 if (i === 0 && pathHelper.hasRoot(segment)) {
 segment = pathHelper.safeTrimTrailingSeparator(segment);
-assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
+assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
 this.segments.push(segment);
 }
 // All other segments
 else {
 // Must not contain slash
-assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
+assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
 this.segments.push(segment);
 }
 }
@@ -1680,12 +1933,32 @@ exports.Path = Path;
 /***/ }),

 /***/ 9005:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const pathHelper = __nccwpck_require__(1849);
+exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
+const pathHelper = __importStar(__nccwpck_require__(1849));
 const internal_match_kind_1 = __nccwpck_require__(1063);
 const IS_WINDOWS = process.platform === 'win32';
 /**
@@ -1761,21 +2034,44 @@ exports.partialMatch = partialMatch;
 /***/ }),

 /***/ 4536:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const assert = __nccwpck_require__(9491);
-const os = __nccwpck_require__(2037);
-const path = __nccwpck_require__(1017);
-const pathHelper = __nccwpck_require__(1849);
+exports.Pattern = void 0;
+const os = __importStar(__nccwpck_require__(2037));
+const path = __importStar(__nccwpck_require__(1017));
+const pathHelper = __importStar(__nccwpck_require__(1849));
+const assert_1 = __importDefault(__nccwpck_require__(9491));
 const minimatch_1 = __nccwpck_require__(3973);
 const internal_match_kind_1 = __nccwpck_require__(1063);
 const internal_path_1 = __nccwpck_require__(6836);
 const IS_WINDOWS = process.platform === 'win32';
 class Pattern {
-constructor(patternOrNegate, segments) {
+constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
 /**
 * Indicates whether matches should be excluded from the result set
 */
@@ -1789,9 +2085,9 @@ class Pattern {
 else {
 // Convert to pattern
 segments = segments || [];
-assert(segments.length, `Parameter 'segments' must not empty`);
+assert_1.default(segments.length, `Parameter 'segments' must not empty`);
 const root = Pattern.getLiteral(segments[0]);
-assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
+assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
 pattern = new internal_path_1.Path(segments).toString().trim();
 if (patternOrNegate) {
 pattern = `!${pattern}`;
@@ -1803,7 +2099,7 @@ class Pattern {
 pattern = pattern.substr(1).trim();
 }
 // Normalize slashes and ensures absolute root
-pattern = Pattern.fixupPattern(pattern);
+pattern = Pattern.fixupPattern(pattern, homedir);
 // Segments
 this.segments = new internal_path_1.Path(pattern).segments;
 // Trailing slash indicates the pattern should only match directories, not regular files
@@ -1819,6 +2115,7 @@ class Pattern {
 this.searchPath = new internal_path_1.Path(searchSegments).toString();
 // Root RegExp (required when determining partial match)
 this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
+this.isImplicitPattern = isImplicitPattern;
 // Create minimatch
 const minimatchOptions = {
 dot: true,
|
|||||||
// Normalize slashes
|
// Normalize slashes
|
||||||
itemPath = pathHelper.normalizeSeparators(itemPath);
|
itemPath = pathHelper.normalizeSeparators(itemPath);
|
||||||
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
|
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
|
||||||
// preceeding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
|
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
|
||||||
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
|
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
|
||||||
if (!itemPath.endsWith(path.sep)) {
|
if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {
|
||||||
// Note, this is safe because the constructor ensures the pattern has an absolute root.
|
// Note, this is safe because the constructor ensures the pattern has an absolute root.
|
||||||
// For example, formats like C: and C:foo on Windows are resolved to an aboslute root.
|
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
|
||||||
itemPath = `${itemPath}${path.sep}`;
|
itemPath = `${itemPath}${path.sep}`;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1882,15 +2179,15 @@ class Pattern {
 /**
 * Normalizes slashes and ensures absolute root
 */
-static fixupPattern(pattern) {
+static fixupPattern(pattern, homedir) {
 // Empty
-assert(pattern, 'pattern cannot be empty');
+assert_1.default(pattern, 'pattern cannot be empty');
 // Must not contain `.` segment, unless first segment
 // Must not contain `..` segment
 const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
-assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
+assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
 // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
-assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
+assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
 // Normalize slashes
 pattern = pathHelper.normalizeSeparators(pattern);
 // Replace leading `.` segment
@@ -1899,9 +2196,9 @@ class Pattern {
 }
 // Replace leading `~` segment
 else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
-const homedir = os.homedir();
-assert(homedir, 'Unable to determine HOME directory');
-assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
+homedir = homedir || os.homedir();
+assert_1.default(homedir, 'Unable to determine HOME directory');
+assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
 pattern = Pattern.globEscape(homedir) + pattern.substr(1);
 }
 // Replace relative drive root, e.g. pattern is C: or C:foo
@@ -2004,6 +2301,7 @@ exports.Pattern = Pattern;
 "use strict";

 Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.SearchState = void 0;
 class SearchState {
 constructor(path, level) {
 this.path = path;
@@ -2232,6 +2530,19 @@ class HttpClientResponse {
 }));
 });
 }
+readBodyBuffer() {
+return __awaiter(this, void 0, void 0, function* () {
+return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+const chunks = [];
+this.message.on('data', (chunk) => {
+chunks.push(chunk);
+});
+this.message.on('end', () => {
+resolve(Buffer.concat(chunks));
+});
+}));
+});
+}
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
@@ -2736,7 +3047,13 @@ function getProxyUrl(reqUrl) {
 }
 })();
 if (proxyVar) {
-return new URL(proxyVar);
+try {
+return new URL(proxyVar);
+}
+catch (_a) {
+if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+return new URL(`http://${proxyVar}`);
+}
 }
 else {
 return undefined;
@@ -2747,6 +3064,10 @@ function checkBypass(reqUrl) {
 if (!reqUrl.hostname) {
 return false;
 }
+const reqHost = reqUrl.hostname;
+if (isLoopbackAddress(reqHost)) {
+return true;
+}
 const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
 if (!noProxy) {
 return false;
@@ -2772,13 +3093,24 @@ function checkBypass(reqUrl) {
 .split(',')
 .map(x => x.trim().toUpperCase())
 .filter(x => x)) {
-if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+if (upperNoProxyItem === '*' ||
+upperReqHosts.some(x => x === upperNoProxyItem ||
+x.endsWith(`.${upperNoProxyItem}`) ||
+(upperNoProxyItem.startsWith('.') &&
+x.endsWith(`${upperNoProxyItem}`)))) {
 return true;
 }
 }
 return false;
 }
 exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+const hostLower = host.toLowerCase();
+return (hostLower === 'localhost' ||
+hostLower.startsWith('127.') ||
+hostLower.startsWith('[::1]') ||
+hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
 //# sourceMappingURL=proxy.js.map

 /***/ }),
@@ -2817,6 +3149,9 @@ function range(a, b, str) {
 var i = ai;

 if (ai >= 0 && bi > 0) {
+if(a===b) {
+return [ai, bi];
+}
 begs = [];
 left = str.length;

@@ -2,7 +2,7 @@
 SET UPDATEFILE=update.finished
 "%~dp0\bin\Runner.Listener.exe" run %*

-rem using `if %ERRORLEVEL% EQU N` insterad of `if ERRORLEVEL N`
+rem using `if %ERRORLEVEL% EQU N` instead of `if ERRORLEVEL N`
 rem `if ERRORLEVEL N` means: error level is N or MORE

 if %ERRORLEVEL% EQU 0 (
@@ -1,57 +0,0 @@
-actions.runner.plist.template
-actions.runner.service.template
-checkScripts/downloadCert.js
-checkScripts/makeWebRequest.js
-darwin.svc.sh.template
-hashFiles/index.js
-installdependencies.sh
-macos-run-invoker.js
-Microsoft.IdentityModel.Logging.dll
-Microsoft.IdentityModel.Tokens.dll
-Minimatch.dll
-Newtonsoft.Json.Bson.dll
-Newtonsoft.Json.dll
-Runner.Common.deps.json
-Runner.Common.dll
-Runner.Common.pdb
-Runner.Listener
-Runner.Listener.deps.json
-Runner.Listener.dll
-Runner.Listener.exe
-Runner.Listener.pdb
-Runner.Listener.runtimeconfig.json
-Runner.PluginHost
-Runner.PluginHost.deps.json
-Runner.PluginHost.dll
-Runner.PluginHost.exe
-Runner.PluginHost.pdb
-Runner.PluginHost.runtimeconfig.json
-Runner.Plugins.deps.json
-Runner.Plugins.dll
-Runner.Plugins.pdb
-Runner.Sdk.deps.json
-Runner.Sdk.dll
-Runner.Sdk.pdb
-Runner.Worker
-Runner.Worker.deps.json
-Runner.Worker.dll
-Runner.Worker.exe
-Runner.Worker.pdb
-Runner.Worker.runtimeconfig.json
-RunnerService.exe
-RunnerService.exe.config
-RunnerService.js
-RunnerService.pdb
-runsvc.sh
-Sdk.deps.json
-Sdk.dll
-Sdk.pdb
-System.IdentityModel.Tokens.Jwt.dll
-System.Net.Http.Formatting.dll
-System.Security.Cryptography.Pkcs.dll
-System.Security.Cryptography.ProtectedData.dll
-System.ServiceProcess.ServiceController.dll
-systemd.svc.sh.template
-update.cmd.template
-update.sh.template
-YamlDotNet.dll
@@ -1,267 +0,0 @@
-api-ms-win-core-console-l1-1-0.dll
-api-ms-win-core-console-l1-2-0.dll
-api-ms-win-core-datetime-l1-1-0.dll
-api-ms-win-core-debug-l1-1-0.dll
-api-ms-win-core-errorhandling-l1-1-0.dll
-api-ms-win-core-fibers-l1-1-0.dll
-api-ms-win-core-file-l1-1-0.dll
-api-ms-win-core-file-l1-2-0.dll
-api-ms-win-core-file-l2-1-0.dll
-api-ms-win-core-handle-l1-1-0.dll
-api-ms-win-core-heap-l1-1-0.dll
-api-ms-win-core-interlocked-l1-1-0.dll
-api-ms-win-core-libraryloader-l1-1-0.dll
-api-ms-win-core-localization-l1-2-0.dll
-api-ms-win-core-memory-l1-1-0.dll
-api-ms-win-core-namedpipe-l1-1-0.dll
-api-ms-win-core-processenvironment-l1-1-0.dll
-api-ms-win-core-processthreads-l1-1-0.dll
-api-ms-win-core-processthreads-l1-1-1.dll
-api-ms-win-core-profile-l1-1-0.dll
-api-ms-win-core-rtlsupport-l1-1-0.dll
-api-ms-win-core-string-l1-1-0.dll
-api-ms-win-core-synch-l1-1-0.dll
-api-ms-win-core-synch-l1-2-0.dll
-api-ms-win-core-sysinfo-l1-1-0.dll
-api-ms-win-core-timezone-l1-1-0.dll
-api-ms-win-core-util-l1-1-0.dll
-api-ms-win-crt-conio-l1-1-0.dll
-api-ms-win-crt-convert-l1-1-0.dll
-api-ms-win-crt-environment-l1-1-0.dll
-api-ms-win-crt-filesystem-l1-1-0.dll
-api-ms-win-crt-heap-l1-1-0.dll
-api-ms-win-crt-locale-l1-1-0.dll
-api-ms-win-crt-math-l1-1-0.dll
-api-ms-win-crt-multibyte-l1-1-0.dll
-api-ms-win-crt-private-l1-1-0.dll
-api-ms-win-crt-process-l1-1-0.dll
-api-ms-win-crt-runtime-l1-1-0.dll
-api-ms-win-crt-stdio-l1-1-0.dll
-api-ms-win-crt-string-l1-1-0.dll
-api-ms-win-crt-time-l1-1-0.dll
-api-ms-win-crt-utility-l1-1-0.dll
-clrcompression.dll
-clretwrc.dll
-clrjit.dll
-coreclr.dll
-createdump
-createdump.exe
-dbgshim.dll
-hostfxr.dll
-hostpolicy.dll
-libclrjit.dylib
-libclrjit.so
-libcoreclr.dylib
-libcoreclr.so
-libcoreclrtraceptprovider.so
-libdbgshim.dylib
-libdbgshim.so
-libhostfxr.dylib
-libhostfxr.so
-libhostpolicy.dylib
-libhostpolicy.so
-libmscordaccore.dylib
-libmscordaccore.so
-libmscordbi.dylib
-libmscordbi.so
-Microsoft.CSharp.dll
-Microsoft.DiaSymReader.Native.amd64.dll
-Microsoft.DiaSymReader.Native.arm64.dll
-Microsoft.VisualBasic.Core.dll
-Microsoft.VisualBasic.dll
-Microsoft.Win32.Primitives.dll
-Microsoft.Win32.Registry.dll
-mscordaccore.dll
-mscordaccore_amd64_amd64_6.0.522.21309.dll
-mscordaccore_arm64_arm64_6.0.522.21309.dll
-mscordaccore_amd64_amd64_6.0.1322.58009.dll
-mscordbi.dll
-mscorlib.dll
-mscorrc.debug.dll
-mscorrc.dll
-msquic.dll
-netstandard.dll
-SOS_README.md
-System.AppContext.dll
-System.Buffers.dll
-System.Collections.Concurrent.dll
-System.Collections.dll
-System.Collections.Immutable.dll
-System.Collections.NonGeneric.dll
-System.Collections.Specialized.dll
-System.ComponentModel.Annotations.dll
-System.ComponentModel.DataAnnotations.dll
-System.ComponentModel.dll
-System.ComponentModel.EventBasedAsync.dll
-System.ComponentModel.Primitives.dll
-System.ComponentModel.TypeConverter.dll
-System.Configuration.dll
-System.Console.dll
-System.Core.dll
-System.Data.Common.dll
-System.Data.DataSetExtensions.dll
-System.Data.dll
-System.Diagnostics.Contracts.dll
-System.Diagnostics.Debug.dll
-System.Diagnostics.DiagnosticSource.dll
-System.Diagnostics.FileVersionInfo.dll
-System.Diagnostics.Process.dll
-System.Diagnostics.StackTrace.dll
-System.Diagnostics.TextWriterTraceListener.dll
-System.Diagnostics.Tools.dll
-System.Diagnostics.TraceSource.dll
-System.Diagnostics.Tracing.dll
-System.dll
-System.Drawing.dll
-System.Drawing.Primitives.dll
-System.Dynamic.Runtime.dll
-System.Formats.Asn1.dll
-System.Globalization.Calendars.dll
-System.Globalization.dll
-System.Globalization.Extensions.dll
-System.Globalization.Native.dylib
-System.Globalization.Native.so
-System.IO.Compression.Brotli.dll
-System.IO.Compression.dll
-System.IO.Compression.FileSystem.dll
-System.IO.Compression.Native.a
-System.IO.Compression.Native.dll
-System.IO.Compression.Native.dylib
-System.IO.Compression.Native.so
-System.IO.Compression.ZipFile.dll
-System.IO.dll
-System.IO.FileSystem.AccessControl.dll
-System.IO.FileSystem.dll
-System.IO.FileSystem.DriveInfo.dll
-System.IO.FileSystem.Primitives.dll
-System.IO.FileSystem.Watcher.dll
-System.IO.IsolatedStorage.dll
-System.IO.MemoryMappedFiles.dll
-System.IO.Pipes.AccessControl.dll
-System.IO.Pipes.dll
-System.IO.UnmanagedMemoryStream.dll
-System.Linq.dll
-System.Linq.Expressions.dll
-System.Linq.Parallel.dll
-System.Linq.Queryable.dll
-System.Memory.dll
-System.Native.a
-System.Native.dylib
-System.Native.so
-System.Net.dll
-System.Net.Http.dll
-System.Net.Http.Json.dll
-System.Net.Http.Native.a
-System.Net.Http.Native.dylib
-System.Net.Http.Native.so
-System.Net.HttpListener.dll
-System.Net.Mail.dll
-System.Net.NameResolution.dll
-System.Net.NetworkInformation.dll
-System.Net.Ping.dll
-System.Net.Primitives.dll
-System.Net.Quic.dll
-System.Net.Requests.dll
-System.Net.Security.dll
-System.Net.Security.Native.a
-System.Net.Security.Native.dylib
-System.Net.Security.Native.so
-System.Net.ServicePoint.dll
-System.Net.Sockets.dll
-System.Net.WebClient.dll
-System.Net.WebHeaderCollection.dll
-System.Net.WebProxy.dll
-System.Net.WebSockets.Client.dll
-System.Net.WebSockets.dll
-System.Numerics.dll
-System.Numerics.Vectors.dll
-System.ObjectModel.dll
-System.Private.CoreLib.dll
-System.Private.DataContractSerialization.dll
-System.Private.Uri.dll
-System.Private.Xml.dll
-System.Private.Xml.Linq.dll
-System.Reflection.DispatchProxy.dll
-System.Reflection.dll
-System.Reflection.Emit.dll
-System.Reflection.Emit.ILGeneration.dll
-System.Reflection.Emit.Lightweight.dll
-System.Reflection.Extensions.dll
-System.Reflection.Metadata.dll
-System.Reflection.Primitives.dll
-System.Reflection.TypeExtensions.dll
-System.Resources.Reader.dll
-System.Resources.ResourceManager.dll
-System.Resources.Writer.dll
-System.Runtime.CompilerServices.Unsafe.dll
-System.Runtime.CompilerServices.VisualC.dll
-System.Runtime.dll
-System.Runtime.Extensions.dll
-System.Runtime.Handles.dll
-System.Runtime.InteropServices.dll
-System.Runtime.InteropServices.RuntimeInformation.dll
-System.Runtime.InteropServices.WindowsRuntime.dll
-System.Runtime.Intrinsics.dll
-System.Runtime.Loader.dll
-System.Runtime.Numerics.dll
-System.Runtime.Serialization.dll
-System.Runtime.Serialization.Formatters.dll
-System.Runtime.Serialization.Json.dll
-System.Runtime.Serialization.Primitives.dll
-System.Runtime.Serialization.Xml.dll
-System.Runtime.WindowsRuntime.dll
-System.Runtime.WindowsRuntime.UI.Xaml.dll
-System.Security.AccessControl.dll
-System.Security.Claims.dll
-System.Security.Cryptography.Algorithms.dll
-System.Security.Cryptography.Cng.dll
-System.Security.Cryptography.Csp.dll
-System.Security.Cryptography.Encoding.dll
-System.Security.Cryptography.Native.Apple.a
-System.Security.Cryptography.Native.Apple.dylib
-System.Security.Cryptography.Native.OpenSsl.a
-System.Security.Cryptography.Native.OpenSsl.dylib
-System.Security.Cryptography.Native.OpenSsl.so
-System.Security.Cryptography.OpenSsl.dll
-System.Security.Cryptography.Primitives.dll
-System.Security.Cryptography.X509Certificates.dll
-System.Security.Cryptography.XCertificates.dll
-System.Security.dll
-System.Security.Principal.dll
-System.Security.Principal.Windows.dll
-System.Security.SecureString.dll
-System.ServiceModel.Web.dll
-System.ServiceProcess.dll
-System.Text.Encoding.CodePages.dll
-System.Text.Encoding.dll
-System.Text.Encoding.Extensions.dll
-System.Text.Encodings.Web.dll
-System.Text.Json.dll
-System.Text.RegularExpressions.dll
-System.Threading.Channels.dll
-System.Threading.dll
-System.Threading.Overlapped.dll
-System.Threading.Tasks.Dataflow.dll
-System.Threading.Tasks.dll
-System.Threading.Tasks.Extensions.dll
-System.Threading.Tasks.Parallel.dll
-System.Threading.Thread.dll
-System.Threading.ThreadPool.dll
-System.Threading.Timer.dll
-System.Transactions.dll
-System.Transactions.Local.dll
-System.ValueTuple.dll
-System.Web.dll
-System.Web.HttpUtility.dll
-System.Windows.dll
-System.Xml.dll
-System.Xml.Linq.dll
-System.Xml.ReaderWriter.dll
-System.Xml.Serialization.dll
-System.Xml.XDocument.dll
-System.Xml.XmlDocument.dll
-System.Xml.XmlSerializer.dll
-System.Xml.XPath.dll
-System.Xml.XPath.XDocument.dll
-ucrtbase.dll
-WindowsBase.dll
|
|||||||
[
|
|
||||||
{
|
|
||||||
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
|
|
||||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
|
|
||||||
"TrimmedContents": {
|
|
||||||
"dotnetRuntime": "<RUNTIME_HASH>",
|
|
||||||
"externals": "<EXTERNALS_HASH>"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"HashValue": "<NO_RUNTIME_HASH>",
|
|
||||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
|
|
||||||
"TrimmedContents": {
|
|
||||||
"dotnetRuntime": "<RUNTIME_HASH>"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"HashValue": "<NO_EXTERNALS_HASH>",
|
|
||||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
|
|
||||||
"TrimmedContents": {
|
|
||||||
"externals": "<EXTERNALS_HASH>"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
@@ -1,24 +0,0 @@
-[
-{
-"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
-"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
-"TrimmedContents": {
-"dotnetRuntime": "<RUNTIME_HASH>",
-"externals": "<EXTERNALS_HASH>"
-}
-},
-{
-"HashValue": "<NO_RUNTIME_HASH>",
-"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
-"TrimmedContents": {
-"dotnetRuntime": "<RUNTIME_HASH>"
-}
-},
-{
-"HashValue": "<NO_EXTERNALS_HASH>",
-"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
-"TrimmedContents": {
-"externals": "<EXTERNALS_HASH>"
-}
-}
-]
@@ -17,7 +17,10 @@ namespace GitHub.Runner.Common
 {
 Task ConnectAsync(Uri serverUrl, VssCredentials credentials);

-Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version);
+Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken);
+Task DeleteSessionAsync(CancellationToken cancellationToken);
+
+Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);
 }

 public sealed class BrokerServer : RunnerService, IBrokerServer
@@ -44,13 +47,27 @@ namespace GitHub.Runner.Common
 }
 }

-public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version)
+public async Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken)
 {
 CheckConnection();
-var jobMessage = RetryRequest<TaskAgentMessage>(
-async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, cancellationToken), cancellationToken);
+var jobMessage = await _brokerHttpClient.CreateSessionAsync(session, cancellationToken);

 return jobMessage;
 }

+public Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
+{
+CheckConnection();
+var brokerSession = RetryRequest<TaskAgentMessage>(
+async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken);
+
+return brokerSession;
+}
+
+public async Task DeleteSessionAsync(CancellationToken cancellationToken)
+{
+CheckConnection();
+await _brokerHttpClient.DeleteSessionAsync(cancellationToken);
+}
 }
 }
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
 private bool? _isHostedServer;

 [DataMember(EmitDefaultValue = false)]
-public int AgentId { get; set; }
+public ulong AgentId { get; set; }

 [DataMember(EmitDefaultValue = false)]
 public string AgentName { get; set; }
@@ -69,6 +69,8 @@ namespace GitHub.Runner.Common
 public static readonly OSPlatform Platform = OSPlatform.OSX;
 #elif OS_WINDOWS
 public static readonly OSPlatform Platform = OSPlatform.Windows;
+#else
+public static readonly OSPlatform Platform = OSPlatform.Linux;
 #endif

 #if X86
@@ -79,6 +81,8 @@ namespace GitHub.Runner.Common
 public static readonly Architecture PlatformArchitecture = Architecture.Arm;
 #elif ARM64
 public static readonly Architecture PlatformArchitecture = Architecture.Arm64;
+#else
+public static readonly Architecture PlatformArchitecture = Architecture.X64;
 #endif

 public static readonly TimeSpan ExitOnUnloadTimeout = TimeSpan.FromSeconds(30);
@@ -154,7 +158,8 @@ namespace GitHub.Runner.Common
 public static class Features
 {
 public static readonly string DiskSpaceWarning = "runner.diskspace.warning";
-public static readonly string Node12Warning = "DistributedTask.AddWarningToNode12Action";
+public static readonly string Node16Warning = "DistributedTask.AddWarningToNode16Action";
+public static readonly string LogTemplateErrorsAsDebugMessages = "DistributedTask.LogTemplateErrorsAsDebugMessages";
 public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate";
 public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
 }
@@ -170,7 +175,11 @@ namespace GitHub.Runner.Common
 public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
 public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
 public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
-public static readonly string Node12DetectedAfterEndOfLife = "Node.js 12 actions are deprecated. Please update the following actions to use Node.js 16: {0}. For more information see: https://github.blog/changelog/2022-09-22-github-actions-all-actions-will-begin-running-on-node16-instead-of-node12/.";
+public static readonly string DetectedNodeAfterEndOfLifeMessage = "Node.js 16 actions are deprecated. Please update the following actions to use Node.js 20: {0}. For more information see: https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/.";
+public static readonly string DeprecatedNodeDetectedAfterEndOfLifeActions = "DeprecatedNodeActionsMessageWarnings";
+public static readonly string DeprecatedNodeVersion = "node16";
+public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/";
+public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings";
 }

 public static class RunnerEvent
@@ -252,6 +261,7 @@ namespace GitHub.Runner.Common
|
|||||||
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
|
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
|
||||||
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
|
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
|
||||||
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
|
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
|
||||||
|
public static readonly string ActionArchiveCacheDirectory = "ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE";
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class System
|
public static class System
|
||||||
|
|||||||
@@ -200,10 +200,14 @@ namespace GitHub.Runner.Common
             {
                 _trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
             }
+            else
+            {
+                _userAgents.Add(new ProductInfoHeaderValue("HttpProxyConfigured", bool.TrueString));
+            }

             if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
             {
-                _trace.Warning($"Runner is running under insecure mode: HTTPS server certifcate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
+                _trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
             }

             var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
@@ -1,6 +1,7 @@
 using System;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
+using System.Diagnostics;
 using System.IO;
 using System.Linq;
 using System.Threading;
@@ -14,10 +15,11 @@ namespace GitHub.Runner.Common
     [ServiceLocator(Default = typeof(JobServerQueue))]
     public interface IJobServerQueue : IRunnerService, IThrottlingReporter
     {
+        IList<JobTelemetry> JobTelemetries { get; }
         TaskCompletionSource<int> JobRecordUpdated { get; }
         event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
         Task ShutdownAsync();
-        void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false);
+        void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false);
         void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
         void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
         void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
@@ -69,13 +71,18 @@ namespace GitHub.Runner.Common
         private Task[] _allDequeueTasks;
         private readonly TaskCompletionSource<int> _jobCompletionSource = new();
         private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
+        private readonly List<JobTelemetry> _jobTelemetries = new();
         private bool _queueInProcess = false;
         private bool _resultsServiceOnly = false;
+        private Stopwatch _resultsUploadTimer = new();
+        private Stopwatch _actionsUploadTimer = new();

         public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;

         public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;

+        public IList<JobTelemetry> JobTelemetries => _jobTelemetries;
+
         // Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
         // Then the dequeue will happen every 500ms.
         // In this way, customer still can get instance live console output on job start,
@@ -87,6 +94,7 @@ namespace GitHub.Runner.Common
         private bool _firstConsoleOutputs = true;

         private bool _resultsClientInitiated = false;
+        private bool _enableTelemetry = false;
         private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);

         public override void Initialize(IHostContext hostContext)
@@ -96,14 +104,15 @@ namespace GitHub.Runner.Common
             _resultsServer = hostContext.GetService<IResultsServer>();
         }

-        public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false)
+        public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false)
         {
             Trace.Entering();
-            _resultsServiceOnly = resultServiceOnly;
+            _resultsServiceOnly = resultsServiceOnly;
+            _enableTelemetry = enableTelemetry;

             var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));

-            if (!resultServiceOnly)
+            if (!resultsServiceOnly)
             {
                 _jobServer.InitializeWebsocketClient(serviceEndPoint);
             }
@@ -119,14 +128,14 @@ namespace GitHub.Runner.Common
             {
                 string liveConsoleFeedUrl = null;
                 Trace.Info("Initializing results client");
-                if (resultServiceOnly
+                if (resultsServiceOnly
                     && serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
                     && !string.IsNullOrEmpty(feedStreamUrl))
                 {
                     liveConsoleFeedUrl = feedStreamUrl;
                 }
-                _resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
+                jobRequest.Variables.TryGetValue("system.github.results_upload_with_sdk", out VariableValue resultsUseSdkVariable);
+                _resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken, StringUtil.ConvertToBoolean(resultsUseSdkVariable?.Value));
                 _resultsClientInitiated = true;
             }

@@ -211,6 +220,12 @@ namespace GitHub.Runner.Common
             await _resultsServer.DisposeAsync();

             Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
+            if (_enableTelemetry)
+            {
+                var uploadTimeComparison = $"Actions upload time: {_actionsUploadTimer.ElapsedMilliseconds} ms, Result upload time: {_resultsUploadTimer.ElapsedMilliseconds} ms";
+                Trace.Info(uploadTimeComparison);
+                _jobTelemetries.Add(new JobTelemetry() { Type = JobTelemetryType.General, Message = uploadTimeComparison });
+            }
         }

         public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
@@ -456,6 +471,10 @@ namespace GitHub.Runner.Common
                 {
                     try
                     {
+                        if (_enableTelemetry)
+                        {
+                            _actionsUploadTimer.Start();
+                        }
                         await UploadFile(file);
                     }
                     catch (Exception ex)
@@ -471,6 +490,13 @@ namespace GitHub.Runner.Common
                         // _fileUploadQueue.Enqueue(file);
                         //}
                     }
+                    finally
+                    {
+                        if (_enableTelemetry)
+                        {
+                            _actionsUploadTimer.Stop();
+                        }
+                    }
                 }

                 Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
@@ -517,10 +543,18 @@ namespace GitHub.Runner.Common
                 {
                     try
                     {
+                        if (_enableTelemetry)
+                        {
+                            _resultsUploadTimer.Start();
+                        }
                         if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
                         {
                             await UploadSummaryFile(file);
                         }
+                        if (string.Equals(file.Type, CoreAttachmentType.ResultsDiagnosticLog, StringComparison.OrdinalIgnoreCase))
+                        {
+                            await UploadResultsDiagnosticLogsFile(file);
+                        }
                         else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
                         {
                             if (file.RecordId != _jobTimelineRecordId)
@@ -541,10 +575,19 @@ namespace GitHub.Runner.Common
                         Trace.Error(ex);
                         errorCount++;

-                        // If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
-                        _resultsClientInitiated = false;
-                        SendResultsTelemetry(ex);
+                        // If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs
+                        if (!_resultsServiceOnly)
+                        {
+                            _resultsClientInitiated = false;
+                            SendResultsTelemetry(ex);
+                        }
+                    }
+                    finally
+                    {
+                        if (_enableTelemetry)
+                        {
+                            _resultsUploadTimer.Stop();
+                        }
                     }
                 }

@@ -660,9 +703,11 @@ namespace GitHub.Runner.Common
                 {
                     Trace.Info("Catch exception during update steps, skip update Results.");
                     Trace.Error(e);
-                    _resultsClientInitiated = false;
-                    SendResultsTelemetry(e);
+                    if (!_resultsServiceOnly)
+                    {
+                        _resultsClientInitiated = false;
+                        SendResultsTelemetry(e);
+                    }
                 }

                 if (_bufferedRetryRecords.Remove(update.TimelineId))
@@ -881,6 +926,17 @@ namespace GitHub.Runner.Common
             await UploadResultsFile(file, summaryHandler);
         }

+        private async Task UploadResultsDiagnosticLogsFile(ResultsUploadFileInfo file)
+        {
+            Trace.Info($"Starting to upload diagnostic logs file to results service {file.Name}, {file.Path}");
+            ResultsFileUploadHandler diagnosticLogsHandler = async (file) =>
+            {
+                await _resultsServer.CreateResultsDiagnosticLogsAsync(file.PlanId, file.JobId, file.Path, CancellationToken.None);
+            };
+
+            await UploadResultsFile(file, diagnosticLogsHandler);
+        }
+
         private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
         {
             Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
src/Runner.Common/RedirectMessageHandler.cs (new file, 73 lines)
@@ -0,0 +1,73 @@
+using System;
+using System.ComponentModel;
+using System.Net;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.Runner.Sdk;
+using GitHub.Services.Common;
+
+namespace GitHub.Runner.Common
+{
+    /// <summary>
+    /// Handles redirects for Http requests
+    /// </summary>
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public class RedirectMessageHandler : DelegatingHandler
+    {
+        public RedirectMessageHandler(ITraceWriter trace)
+        {
+            Trace = trace;
+        }
+
+        protected override async Task<HttpResponseMessage> SendAsync(
+            HttpRequestMessage request,
+            CancellationToken cancellationToken)
+        {
+            HttpResponseMessage response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
+
+            if (response != null &&
+                IsRedirect(response.StatusCode) &&
+                response.Headers.Location != null)
+            {
+                Trace.Info($"Redirecting to '{response.Headers.Location}'.");
+
+                request = await CloneAsync(request, response.Headers.Location).ConfigureAwait(false);
+
+                response.Dispose();
+
+                response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
+            }
+
+            return response;
+        }
+
+        private static bool IsRedirect(HttpStatusCode statusCode)
+        {
+            return (int)statusCode >= 300 && (int)statusCode < 400;
+        }
+
+        private static async Task<HttpRequestMessage> CloneAsync(HttpRequestMessage request, Uri requestUri)
+        {
+            var clone = new HttpRequestMessage(request.Method, requestUri)
+            {
+                Version = request.Version
+            };
+
+            request.Headers.ForEach(header => clone.Headers.TryAddWithoutValidation(header.Key, header.Value));
+
+            request.Options.ForEach(option => clone.Options.Set(new HttpRequestOptionsKey<object>(option.Key), option.Value));
+
+            if (request.Content != null)
+            {
+                clone.Content = new ByteArrayContent(await request.Content.ReadAsByteArrayAsync().ConfigureAwait(false));
+
+                request.Content.Headers.ForEach(header => clone.Content.Headers.TryAddWithoutValidation(header.Key, header.Value));
+            }
+
+            return clone;
+        }
+
+        private readonly ITraceWriter Trace;
+    }
+}
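Since RedirectMessageHandler is a standard DelegatingHandler, wiring it up only means putting it in front of an inner handler. The following is a minimal illustrative sketch, not code from this repository: the RedirectClientFactory name, the Create method, and the explicit AllowAutoRedirect = false on the inner handler are all assumptions made for the example.

    using System.Net.Http;
    using GitHub.Runner.Common;
    using GitHub.Runner.Sdk;

    static class RedirectClientFactory // hypothetical helper, for illustration only
    {
        public static HttpClient Create(ITraceWriter trace)
        {
            // Assumption: automatic redirects are disabled on the inner handler so the
            // 3xx response reaches RedirectMessageHandler, which clones the request and
            // re-sends it to the Location header itself.
            var redirectHandler = new RedirectMessageHandler(trace)
            {
                InnerHandler = new HttpClientHandler { AllowAutoRedirect = false },
            };
            return new HttpClient(redirectHandler, disposeHandler: true);
        }
    }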
@@ -1,6 +1,7 @@
 using System;
 using System.Collections.Generic;
 using System.Linq;
+using System.Net.Http;
 using System.Net.Http.Headers;
 using System.Net.WebSockets;
 using System.Security;
@@ -18,7 +19,7 @@ namespace GitHub.Runner.Common
     [ServiceLocator(Default = typeof(ResultServer))]
     public interface IResultsServer : IRunnerService, IAsyncDisposable
     {
-        void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);
+        void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk);

         Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);

@@ -34,6 +35,8 @@ namespace GitHub.Runner.Common

         Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
             IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
+
+        Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken);
     }

     public sealed class ResultServer : RunnerService, IResultsServer
@@ -50,10 +53,10 @@ namespace GitHub.Runner.Common
         private String _liveConsoleFeedUrl;
         private string _token;

-        public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
+        public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk)
         {
-            var httpMessageHandler = HostContext.CreateHttpClientHandler();
-            this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
+            this._resultsClient = CreateHttpClient(uri, token, useSdk);
             _token = token;
             if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
             {
@@ -62,6 +65,26 @@ namespace GitHub.Runner.Common
             }
         }

+        public ResultsHttpClient CreateHttpClient(Uri uri, string token, bool useSdk)
+        {
+            // Using default 100 timeout
+            RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
+
+            // Create retry handler
+            IEnumerable<DelegatingHandler> delegatingHandlers = new List<DelegatingHandler>();
+            if (settings.MaxRetryRequest > 0)
+            {
+                delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) };
+            }
+
+            // Setup RawHttpMessageHandler without credentials
+            var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings);
+
+            var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
+
+            return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true, useSdk: useSdk);
+        }
+
         public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
             CancellationToken cancellationToken)
         {
@@ -120,6 +143,18 @@ namespace GitHub.Runner.Common
             throw new InvalidOperationException("Results client is not initialized.");
         }

+        public Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file,
+            CancellationToken cancellationToken)
+        {
+            if (_resultsClient != null)
+            {
+                return _resultsClient.UploadResultsDiagnosticLogsAsync(planId, jobId, file,
+                    cancellationToken: cancellationToken);
+            }
+
+            throw new InvalidOperationException("Results client is not initialized.");
+        }
+
         public ValueTask DisposeAsync()
         {
             CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
@@ -26,6 +26,7 @@ namespace GitHub.Runner.Common
             Dictionary<String, VariableValue> outputs,
             IList<StepResult> stepResults,
             IList<Annotation> jobAnnotations,
+            string environmentUrl,
             CancellationToken token);

         Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
@@ -70,11 +71,12 @@ namespace GitHub.Runner.Common
             Dictionary<String, VariableValue> outputs,
             IList<StepResult> stepResults,
             IList<Annotation> jobAnnotations,
+            string environmentUrl,
             CancellationToken cancellationToken)
         {
             CheckConnection();
             return RetryRequest(
-                async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, cancellationToken), cancellationToken);
+                async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, cancellationToken), cancellationToken);
         }

         public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
@@ -1,4 +1,4 @@
 <Project Sdk="Microsoft.NET.Sdk">

   <PropertyGroup>
     <TargetFramework>net6.0</TargetFramework>
@@ -16,7 +16,7 @@

   <ItemGroup>
     <PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
-    <PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
+    <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
     <PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
     <PackageReference Include="System.Text.Encoding.CodePages" Version="4.4.0" />
     <PackageReference Include="System.Threading.Channels" Version="4.4.0" />
@@ -15,12 +15,11 @@ namespace GitHub.Runner.Common
     [ServiceLocator(Default = typeof(RunnerDotcomServer))]
     public interface IRunnerDotcomServer : IRunnerService
     {
-        Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
+        Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName);

         Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
+        Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
         Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);

-        string GetGitHubRequestId(HttpResponseHeaders headers);
     }

     public enum RequestType
@@ -42,7 +41,7 @@ namespace GitHub.Runner.Common
         }


-        public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
+        public async Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName)
         {
             var githubApiUrl = "";
             var gitHubUrlBuilder = new UriBuilder(githubUrl);
@@ -52,11 +51,11 @@ namespace GitHub.Runner.Common
                 // org runner
                 if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
                 {
-                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
+                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
                 }
                 else
                 {
-                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
+                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
                 }
             }
             else if (path.Length == 2)
@@ -69,11 +68,11 @@ namespace GitHub.Runner.Common

                 if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
                 {
-                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
+                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
                 }
                 else
                 {
-                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
+                    githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
                 }
             }
             else
@@ -82,14 +81,8 @@ namespace GitHub.Runner.Common
             }

             var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
-            var agents = runnersList.ToTaskAgents();

-            if (string.IsNullOrEmpty(agentName))
-            {
-                return agents;
-            }
-
-            return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
+            return runnersList.ToTaskAgents();
         }

         public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
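For concreteness, with a hypothetical org runner named my-runner registered under an org called octo-org (both names are illustrative, not from the diff), the rewritten lookup above builds URLs of the following shape, derived directly from the format strings in the hunk:

    https://api.github.com/orgs/octo-org/actions/runners?name=my-runner            (hosted)
    https://ghes.example.com/api/v3/orgs/octo-org/actions/runners?name=my-runner   (GitHub Enterprise Server, hypothetical host)

so the server filters by name instead of the runner fetching /actions/runner-groups/{id}/runners and filtering client-side.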
@@ -137,6 +130,16 @@ namespace GitHub.Runner.Common
         }

         public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
+        {
+            return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, false);
+        }
+
+        public async Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
+        {
+            return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, true);
+        }
+
+        private async Task<DistributedTask.WebApi.Runner> AddOrReplaceRunner(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey, bool replace)
         {
             var gitHubUrlBuilder = new UriBuilder(githubUrl);
             var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
@@ -159,9 +162,15 @@ namespace GitHub.Runner.Common
                 {"updates_disabled", agent.DisableUpdate},
                 {"ephemeral", agent.Ephemeral},
                 {"labels", agent.Labels},
-                {"public_key", publicKey}
+                {"public_key", publicKey},
             };

+            if (replace)
+            {
+                bodyObject.Add("runner_id", agent.Id);
+                bodyObject.Add("replace", replace);
+            }
+
             var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");

             return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
@@ -195,7 +204,7 @@ namespace GitHub.Runner.Common
                 if (response != null)
                 {
                     responseStatus = response.StatusCode;
-                    var githubRequestId = GetGitHubRequestId(response.Headers);
+                    var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

                     if (response.IsSuccessStatusCode)
                     {
@@ -215,7 +224,7 @@ namespace GitHub.Runner.Common
                 }
                 catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
                 {
-                    Trace.Error($"{errorMessage} -- Atempt: {retry}");
+                    Trace.Error($"{errorMessage} -- Attempt: {retry}");
                     Trace.Error(ex);
                 }
             }
@@ -224,14 +233,5 @@ namespace GitHub.Runner.Common
                 await Task.Delay(backOff);
             }
         }
-
-        public string GetGitHubRequestId(HttpResponseHeaders headers)
-        {
-            if (headers.TryGetValues("x-github-request-id", out var headerValues))
-            {
-                return headerValues.FirstOrDefault();
-            }
-            return string.Empty;
-        }
     }
 }
@@ -27,8 +27,8 @@ namespace GitHub.Runner.Common

         // Configuration
         Task<TaskAgent> AddAgentAsync(Int32 agentPoolId, TaskAgent agent);
-        Task DeleteAgentAsync(int agentPoolId, int agentId);
-        Task DeleteAgentAsync(int agentId);
+        Task DeleteAgentAsync(int agentPoolId, ulong agentId);
+        Task DeleteAgentAsync(ulong agentId);
         Task<List<TaskAgentPool>> GetAgentPoolsAsync(string agentPoolName = null, TaskAgentPoolType poolType = TaskAgentPoolType.Automation);
         Task<List<TaskAgent>> GetAgentsAsync(int agentPoolId, string agentName = null);
         Task<List<TaskAgent>> GetAgentsAsync(string agentName);
@@ -38,7 +38,7 @@ namespace GitHub.Runner.Common
         Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken);
         Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken);
         Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken);
-        Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken);
+        Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken);

         // job request
         Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
@@ -50,7 +50,7 @@ namespace GitHub.Runner.Common
         Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);

         // agent update
-        Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace);
+        Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace);
     }

     public sealed class RunnerServer : RunnerService, IRunnerServer
@@ -239,13 +239,13 @@ namespace GitHub.Runner.Common
             return _genericTaskAgentClient.ReplaceAgentAsync(agentPoolId, agent);
         }

-        public Task DeleteAgentAsync(int agentPoolId, int agentId)
+        public Task DeleteAgentAsync(int agentPoolId, ulong agentId)
         {
             CheckConnection(RunnerConnectionType.Generic);
             return _genericTaskAgentClient.DeleteAgentAsync(agentPoolId, agentId);
         }

-        public Task DeleteAgentAsync(int agentId)
+        public Task DeleteAgentAsync(ulong agentId)
         {
             return DeleteAgentAsync(0, agentId); // agentPool is ignored server side
         }
@@ -272,10 +272,10 @@ namespace GitHub.Runner.Common
             return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken);
         }

-        public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken)
+        public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
         {
             CheckConnection(RunnerConnectionType.MessageQueue);
-            return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, cancellationToken: cancellationToken);
+            return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, os, architecture, disableUpdate, cancellationToken: cancellationToken);
         }

         //-----------------------------------------------------------------
@@ -315,7 +315,7 @@ namespace GitHub.Runner.Common
             return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
         }

-        public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace)
+        public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace)
         {
             CheckConnection(RunnerConnectionType.Generic);
             return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace);
@@ -6,13 +6,7 @@ namespace GitHub.Runner.Common.Util
     public static class NodeUtil
     {
         private const string _defaultNodeVersion = "node16";
-#if (OS_OSX || OS_WINDOWS) && ARM64
-        public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16" });
-#else
-        public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node12", "node16" });
-#endif
+        public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16", "node20" });

         public static string GetInternalNodeVersion()
         {
             var forcedInternalNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
@@ -24,7 +24,15 @@ namespace GitHub.Runner.Listener
         private TimeSpan _getNextMessageRetryInterval;
         private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
         private CancellationTokenSource _getMessagesTokenSource;
+        private VssCredentials _creds;
+        private TaskAgentSession _session;
         private IBrokerServer _brokerServer;
+        private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
+        private bool _accessTokenRevoked = false;
+        private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
+        private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
+        private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
+

         public override void Initialize(IHostContext hostContext)
         {
@@ -36,13 +44,134 @@ namespace GitHub.Runner.Listener

         public async Task<Boolean> CreateSessionAsync(CancellationToken token)
         {
-            await RefreshBrokerConnection();
-            return await Task.FromResult(true);
+            Trace.Entering();
+            // Settings
+            var configManager = HostContext.GetService<IConfigurationManager>();
+            _settings = configManager.LoadSettings();
+            var serverUrl = _settings.ServerUrlV2;
+            Trace.Info(_settings);
+
+            if (string.IsNullOrEmpty(_settings.ServerUrlV2))
+            {
+                throw new InvalidOperationException("ServerUrlV2 is not set");
+            }
+
+            // Create connection.
+            Trace.Info("Loading Credentials");
+            var credMgr = HostContext.GetService<ICredentialManager>();
+            _creds = credMgr.LoadCredentials();
+
+            var agent = new TaskAgentReference
+            {
+                Id = _settings.AgentId,
+                Name = _settings.AgentName,
+                Version = BuildConstants.RunnerPackage.Version,
+                OSDescription = RuntimeInformation.OSDescription,
+            };
+            string sessionName = $"{Environment.MachineName ?? "RUNNER"}";
+            var taskAgentSession = new TaskAgentSession(sessionName, agent);
+
+            string errorMessage = string.Empty;
+            bool encounteringError = false;
+
+            while (true)
+            {
+                token.ThrowIfCancellationRequested();
+                Trace.Info($"Attempt to create session.");
+                try
+                {
+                    Trace.Info("Connecting to the Broker Server...");
+                    await _brokerServer.ConnectAsync(new Uri(serverUrl), _creds);
+                    Trace.Info("VssConnection created");
+
+                    _term.WriteLine();
+                    _term.WriteSuccessMessage("Connected to GitHub");
+                    _term.WriteLine();
+
+                    _session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);
+
+                    Trace.Info($"Session created.");
+                    if (encounteringError)
+                    {
+                        _term.WriteLine($"{DateTime.UtcNow:u}: Runner reconnected.");
+                        _sessionCreationExceptionTracker.Clear();
+                        encounteringError = false;
+                    }
+
+                    return true;
+                }
+                catch (OperationCanceledException) when (token.IsCancellationRequested)
+                {
+                    Trace.Info("Session creation has been cancelled.");
+                    throw;
+                }
+                catch (TaskAgentAccessTokenExpiredException)
+                {
+                    Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
+                    _accessTokenRevoked = true;
+                    throw;
+                }
+                catch (Exception ex)
+                {
+                    Trace.Error("Catch exception during create session.");
+                    Trace.Error(ex);
+
+                    if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
+                    {
+                        // "invalid_client" means the runner registration has been deleted from the server.
+                        if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
+                        {
+                            _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
+                            return false;
+                        }
+
+                        // Check whether we get 401 because the runner registration already removed by the service.
+                        // If the runner registration get deleted, we can't exchange oauth token.
+                        Trace.Error("Test oauth app registration.");
+                        var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
+                        var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
+                        if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
+                        {
+                            _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
+                            return false;
+                        }
+                    }
+
+                    if (!IsSessionCreationExceptionRetriable(ex))
+                    {
+                        _term.WriteError($"Failed to create session. {ex.Message}");
+                        return false;
+                    }
+
+                    if (!encounteringError) //print the message only on the first error
+                    {
+                        _term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
+                        encounteringError = true;
+                    }
+
+                    Trace.Info("Sleeping for {0} seconds before retrying.", _sessionCreationRetryInterval.TotalSeconds);
+                    await HostContext.Delay(_sessionCreationRetryInterval, token);
+                }
+            }
         }

         public async Task DeleteSessionAsync()
         {
-            await Task.CompletedTask;
+            if (_session != null && _session.SessionId != Guid.Empty)
+            {
+                if (!_accessTokenRevoked)
+                {
+                    using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
+                    {
+                        await _brokerServer.DeleteSessionAsync(ts.Token);
+                    }
+                }
+                else
+                {
+                    Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
+                }
+            }
         }

         public void OnJobStatus(object sender, JobStatusEventArgs e)
@@ -73,7 +202,13 @@ namespace GitHub.Runner.Listener
                 _getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
                 try
                 {
-                    message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version);
+                    message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
+                        runnerStatus,
+                        BuildConstants.RunnerPackage.Version,
+                        VarUtil.OS,
+                        VarUtil.OSArchitecture,
+                        _settings.DisableUpdate,
+                        _getMessagesTokenSource.Token);

                     if (message == null)
                     {
@@ -97,7 +232,7 @@ namespace GitHub.Runner.Listener
                     Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
                     throw;
                 }
-                catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
+                catch (AccessDeniedException e) when (e.ErrorCode == 1)
                 {
                     throw;
                 }
@@ -108,7 +243,7 @@ namespace GitHub.Runner.Listener

                     if (!IsGetNextMessageExceptionRetriable(ex))
                     {
-                        throw;
+                        throw new NonRetryableException("Get next message failed with non-retryable error.", ex);
                     }
                     else
                     {
@@ -138,7 +273,7 @@ namespace GitHub.Runner.Listener
                 }

                 // re-create VssConnection before next retry
-                await RefreshBrokerConnection();
+                await RefreshBrokerConnectionAsync();

                 Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
                 await HostContext.Delay(_getNextMessageRetryInterval, token);
@@ -168,6 +303,11 @@ namespace GitHub.Runner.Listener
             }
         }

+        public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken)
+        {
+            await RefreshBrokerConnectionAsync();
+        }
+
         public async Task DeleteMessageAsync(TaskAgentMessage message)
         {
             await Task.CompletedTask;
@@ -191,12 +331,84 @@ namespace GitHub.Runner.Listener
             }
         }

-        private async Task RefreshBrokerConnection()
+        private bool IsSessionCreationExceptionRetriable(Exception ex)
+        {
+            if (ex is TaskAgentNotFoundException)
+            {
+                Trace.Info("The runner no longer exists on the server. Stopping the runner.");
+                _term.WriteError("The runner no longer exists on the server. Please reconfigure the runner.");
+                return false;
+            }
+            else if (ex is TaskAgentSessionConflictException)
+            {
+                Trace.Info("The session for this runner already exists.");
+                _term.WriteError("A session for this runner already exists.");
+                if (_sessionCreationExceptionTracker.ContainsKey(nameof(TaskAgentSessionConflictException)))
+                {
+                    _sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)]++;
+                    if (_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] * _sessionCreationRetryInterval.TotalSeconds >= _sessionConflictRetryLimit.TotalSeconds)
+                    {
+                        Trace.Info("The session conflict exception have reached retry limit.");
+                        _term.WriteError($"Stop retry on SessionConflictException after retried for {_sessionConflictRetryLimit.TotalSeconds} seconds.");
+                        return false;
+                    }
+                }
+                else
+                {
+                    _sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] = 1;
+                }
+
+                Trace.Info("The session conflict exception haven't reached retry limit.");
+                return true;
+            }
+            else if (ex is VssOAuthTokenRequestException && ex.Message.Contains("Current server time is"))
+            {
+                Trace.Info("Local clock might be skewed.");
+                _term.WriteError("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
+                if (_sessionCreationExceptionTracker.ContainsKey(nameof(VssOAuthTokenRequestException)))
+                {
+                    _sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)]++;
+                    if (_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] * _sessionCreationRetryInterval.TotalSeconds >= _clockSkewRetryLimit.TotalSeconds)
+                    {
+                        Trace.Info("The OAuth token request exception have reached retry limit.");
+                        _term.WriteError($"Stopped retrying OAuth token request exception after {_clockSkewRetryLimit.TotalSeconds} seconds.");
+                        return false;
+                    }
+                }
+                else
+                {
+                    _sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] = 1;
+                }
+
+                Trace.Info("The OAuth token request exception haven't reached retry limit.");
+                return true;
+            }
+            else if (ex is TaskAgentPoolNotFoundException ||
+                     ex is AccessDeniedException ||
+                     ex is VssUnauthorizedException)
+            {
+                Trace.Info($"Non-retriable exception: {ex.Message}");
+                return false;
+            }
+
+            else if (ex is InvalidOperationException)
+            {
+                Trace.Info($"Non-retriable exception: {ex.Message}");
+                return false;
+            }
+            else
+            {
+                Trace.Info($"Retriable exception: {ex.Message}");
+                return true;
+            }
+        }
+
+        private async Task RefreshBrokerConnectionAsync()
         {
             var configManager = HostContext.GetService<IConfigurationManager>();
             _settings = configManager.LoadSettings();

-            if (_settings.ServerUrlV2 == null)
+            if (string.IsNullOrEmpty(_settings.ServerUrlV2))
             {
                 throw new InvalidOperationException("ServerUrlV2 is not set");
             }
|
|||||||
@@ -39,6 +39,7 @@ namespace GitHub.Runner.Listener.Check
 string githubApiUrl = null;
 string actionsTokenServiceUrl = null;
 string actionsPipelinesServiceUrl = null;
+string resultsReceiverServiceUrl = null;
 var urlBuilder = new UriBuilder(url);
 if (UrlUtil.IsHostedServer(urlBuilder))
 {
@@ -47,6 +48,7 @@ namespace GitHub.Runner.Listener.Check
 githubApiUrl = urlBuilder.Uri.AbsoluteUri;
 actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
 actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
+resultsReceiverServiceUrl = "https://results-receiver.actions.githubusercontent.com/health";
 }
 else
 {
@@ -56,13 +58,31 @@ namespace GitHub.Runner.Listener.Check
 actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
 urlBuilder.Path = "_services/pipelines/_apis/health";
 actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
+resultsReceiverServiceUrl = string.Empty; // we don't have Results service in GHES yet.
 }

+var codeLoadUrlBuilder = new UriBuilder(url);
+codeLoadUrlBuilder.Host = $"codeload.{codeLoadUrlBuilder.Host}";
+codeLoadUrlBuilder.Path = "_ping";
+
 // check github api
 checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
 checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
 checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));

+// check github codeload
+checkTasks.Add(CheckUtil.CheckDns(codeLoadUrlBuilder.Uri.AbsoluteUri));
+checkTasks.Add(CheckUtil.CheckPing(codeLoadUrlBuilder.Uri.AbsoluteUri));
+checkTasks.Add(HostContext.CheckHttpsGetRequests(codeLoadUrlBuilder.Uri.AbsoluteUri, pat, expectedHeader: "X-GitHub-Request-Id"));
+
+// check results-receiver service
+if (!string.IsNullOrEmpty(resultsReceiverServiceUrl))
+{
+checkTasks.Add(CheckUtil.CheckDns(resultsReceiverServiceUrl));
+checkTasks.Add(CheckUtil.CheckPing(resultsReceiverServiceUrl));
+checkTasks.Add(HostContext.CheckHttpsGetRequests(resultsReceiverServiceUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
+}

 // check actions token service
 checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
 checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));

@@ -351,21 +351,39 @@ namespace GitHub.Runner.Listener.Check
 private readonly Dictionary<string, HashSet<string>> _ignoredEvent = new()
 {
 {
-"Microsoft-System-Net-Http",
+"System.Net.Http",
 new HashSet<string>
 {
 "Info",
 "Associate",
-"Enter",
-"Exit"
 }
 },
 {
-"Microsoft-System-Net-Security",
+"System.Net.Security",
+new HashSet<string>
+{
+"Info",
+"DumpBuffer",
+"SslStreamCtor",
+"SecureChannelCtor",
+"NoDelegateNoClientCert",
+"CertsAfterFiltering",
+"UsingCachedCredential",
+"SspiSelectedCipherSuite"
+}
+},
+{
+"Private.InternalDiagnostics.System.Net.Http",
+new HashSet<string>
+{
+"Info",
+"Associate",
+}
+},
+{
+"Private.InternalDiagnostics.System.Net.Security",
 new HashSet<string>
 {
-"Enter",
-"Exit",
 "Info",
 "DumpBuffer",
 "SslStreamCtor",
@@ -391,8 +409,8 @@ namespace GitHub.Runner.Listener.Check
 {
 base.OnEventSourceCreated(eventSource);

-if (eventSource.Name == "Microsoft-System-Net-Http" ||
-eventSource.Name == "Microsoft-System-Net-Security")
+if (eventSource.Name.Contains("System.Net.Http") ||
+eventSource.Name.Contains("System.Net.Security"))
 {
 EnableEvents(eventSource, EventLevel.Verbose, EventKeywords.All);
 }

@@ -244,11 +244,11 @@ namespace GitHub.Runner.Listener.Configuration
 List<TaskAgent> agents;
 if (runnerSettings.UseV2Flow)
 {
-agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
+agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
 }
 else
 {
-agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
+agents = await _runnerServer.GetAgentsAsync(runnerSettings.AgentName);
 }

 Trace.Verbose("Returns {0} agents", agents.Count);
@@ -263,7 +263,23 @@ namespace GitHub.Runner.Listener.Configuration

 try
 {
-agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
+if (runnerSettings.UseV2Flow)
+{
+var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
+runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
+
+agent.Id = runner.Id;
+agent.Authorization = new TaskAgentAuthorization()
+{
+AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
+ClientId = new Guid(runner.RunnerAuthorization.ClientId)
+};
+}
+else
+{
+agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
+}

 if (command.DisableUpdate &&
 command.DisableUpdate != agent.DisableUpdate)
 {
@@ -709,7 +725,7 @@ namespace GitHub.Runner.Listener.Configuration
 {
 var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
 responseStatus = response.StatusCode;
-var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
+var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

 if (response.IsSuccessStatusCode)
 {
@@ -728,7 +744,7 @@ namespace GitHub.Runner.Listener.Configuration
 catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
 {
 retryCount++;
-Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
+Trace.Error($"Failed to get JIT runner token -- Attempt: {retryCount}");
 Trace.Error(ex);
 }
 }
@@ -772,7 +788,7 @@ namespace GitHub.Runner.Listener.Configuration
 {
 var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
 responseStatus = response.StatusCode;
-var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
+var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

 if (response.IsSuccessStatusCode)
 {
@@ -791,7 +807,7 @@ namespace GitHub.Runner.Listener.Configuration
 catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
 {
 retryCount++;
-Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
+Trace.Error($"Failed to get tenant credentials -- Attempt: {retryCount}");
 Trace.Error(ex);
 }
 }

@@ -46,7 +46,7 @@ namespace GitHub.Runner.Listener.Configuration

 if (!store.HasCredentials())
 {
 throw new InvalidOperationException("Credentials not stored. Must reconfigure.");
 }

 CredentialData credData = store.GetCredentials();

@@ -514,9 +514,25 @@ namespace GitHub.Runner.Listener.Configuration
 failureActions.Add(new FailureAction(RecoverAction.Restart, 60000));

 // Lock the Service Database
-svcLock = LockServiceDatabase(scmHndl);
-if (svcLock.ToInt64() <= 0)
+int svcLockRetries = 10;
+int svcLockRetryTimeout = 5000;
+while (true)
 {
+svcLock = LockServiceDatabase(scmHndl);
+if (svcLock.ToInt64() > 0)
+{
+break;
+}
+
+_term.WriteLine("Retrying Lock Service Database...");
+
+svcLockRetries--;
+if (svcLockRetries > 0)
+{
+Thread.Sleep(svcLockRetryTimeout);
+continue;
+}
+
 throw new Exception("Failed to Lock Service Database for Write");
 }

@@ -98,7 +98,7 @@ namespace GitHub.Runner.Listener
 Guid dispatchedJobId = _jobDispatchedQueue.Dequeue();
 if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
 {
-Trace.Verbose($"Retrive previous WorkerDispather for job {currentDispatch.JobId}.");
+Trace.Verbose($"Retrive previous WorkerDispatcher for job {currentDispatch.JobId}.");
 }
 }

@@ -162,12 +162,12 @@ namespace GitHub.Runner.Listener
 dispatchedJobId = _jobDispatchedQueue.Dequeue();
 if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
 {
-Trace.Verbose($"Retrive previous WorkerDispather for job {currentDispatch.JobId}.");
+Trace.Verbose($"Retrive previous WorkerDispatcher for job {currentDispatch.JobId}.");
 }
 }
 else
 {
-Trace.Verbose($"There is no running WorkerDispather needs to await.");
+Trace.Verbose($"There is no running WorkerDispatcher needs to await.");
 }

 if (currentDispatch != null)
@@ -176,7 +176,7 @@ namespace GitHub.Runner.Listener
 {
 try
 {
-Trace.Info($"Waiting WorkerDispather for job {currentDispatch.JobId} run to finish.");
+Trace.Info($"Waiting WorkerDispatcher for job {currentDispatch.JobId} run to finish.");
 await currentDispatch.WorkerDispatch;
 Trace.Info($"Job request {currentDispatch.JobId} processed succeed.");
 }
@@ -190,7 +190,7 @@ namespace GitHub.Runner.Listener
 WorkerDispatcher workerDispatcher;
 if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
 {
-Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
+Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
 workerDispatcher.Dispose();
 }
 }
@@ -209,7 +209,7 @@ namespace GitHub.Runner.Listener
 {
 try
 {
-Trace.Info($"Ensure WorkerDispather for job {currentDispatch.JobId} run to finish, cancel any running job.");
+Trace.Info($"Ensure WorkerDispatcher for job {currentDispatch.JobId} run to finish, cancel any running job.");
 await EnsureDispatchFinished(currentDispatch, cancelRunningJob: true);
 }
 catch (Exception ex)
@@ -222,7 +222,7 @@ namespace GitHub.Runner.Listener
 WorkerDispatcher workerDispatcher;
 if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
 {
-Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
+Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
 workerDispatcher.Dispose();
 }
 }
@@ -327,7 +327,7 @@ namespace GitHub.Runner.Listener
 WorkerDispatcher workerDispatcher;
 if (_jobInfos.TryRemove(jobDispatch.JobId, out workerDispatcher))
 {
-Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {jobDispatch.JobId}.");
+Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {jobDispatch.JobId}.");
 workerDispatcher.Dispose();
 }
 }
@@ -629,6 +629,20 @@ namespace GitHub.Runner.Listener
 Trace.Info("worker process has been killed.");
 }
 }
+catch (Exception ex)
+{
+// message send failed, this might indicate worker process is already exited or stuck.
+Trace.Info($"Job cancel message sending for job {message.JobId} failed, kill running worker. {ex}");
+workerProcessCancelTokenSource.Cancel();
+try
+{
+await workerProcessTask;
+}
+catch (OperationCanceledException)
+{
+Trace.Info("worker process has been killed.");
+}
+}

 // wait worker to exit
 // if worker doesn't exit within timeout, then kill worker.
@@ -1077,23 +1091,7 @@ namespace GitHub.Runner.Listener

 if (this._isRunServiceJob)
 {
-var runServer = await GetRunServerAsync(systemConnection);
-var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
-var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
-var jobAnnotations = new List<Annotation>();
-if (unhandledAnnotation.HasValue)
-{
-jobAnnotations.Add(unhandledAnnotation.Value);
-}
-try
-{
-await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, CancellationToken.None);
-}
-catch (Exception ex)
-{
-Trace.Error("Fail to raise job completion back to service.");
-Trace.Error(ex);
-}
+Trace.Verbose($"Skip CompleteJobRequestAsync call from Listener because it's RunService job");
 return;
 }

@@ -1150,6 +1148,15 @@ namespace GitHub.Runner.Listener
 jobRecord.ErrorCount++;
 jobRecord.Issues.Add(unhandledExceptionIssue);

+if (message.Variables.TryGetValue("DistributedTask.MarkJobAsFailedOnWorkerCrash", out var markJobAsFailedOnWorkerCrash) &&
+StringUtil.ConvertToBoolean(markJobAsFailedOnWorkerCrash?.Value))
+{
+Trace.Info("Mark the job as failed since the worker crashed");
+jobRecord.Result = TaskResult.Failed;
+// mark the job as completed so service will pickup the result
+jobRecord.State = TimelineRecordState.Completed;
+}
+
 await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
 }
 catch (Exception ex)
@@ -1193,7 +1200,7 @@ namespace GitHub.Runner.Listener
 jobAnnotations.Add(unhandledAnnotation.Value);
 }

-await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, CancellationToken.None);
+await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None);
 }
 catch (Exception ex)
 {

@@ -9,10 +9,12 @@ using System.Threading;
 using System.Threading.Tasks;
 using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Common;
+using GitHub.Runner.Common.Util;
 using GitHub.Runner.Listener.Configuration;
 using GitHub.Runner.Sdk;
 using GitHub.Services.Common;
 using GitHub.Services.OAuth;
+using GitHub.Services.WebApi;

 namespace GitHub.Runner.Listener
 {
@@ -23,6 +25,8 @@ namespace GitHub.Runner.Listener
 Task DeleteSessionAsync();
 Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token);
 Task DeleteMessageAsync(TaskAgentMessage message);
+
+Task RefreshListenerTokenAsync(CancellationToken token);
 void OnJobStatus(object sender, JobStatusEventArgs e);
 }

@@ -32,6 +36,7 @@ namespace GitHub.Runner.Listener
 private RunnerSettings _settings;
 private ITerminal _term;
 private IRunnerServer _runnerServer;
+private IBrokerServer _brokerServer;
 private TaskAgentSession _session;
 private TimeSpan _getNextMessageRetryInterval;
 private bool _accessTokenRevoked = false;
@@ -41,6 +46,9 @@ namespace GitHub.Runner.Listener
 private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
 private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
 private CancellationTokenSource _getMessagesTokenSource;
+private VssCredentials _creds;
+
+private bool _isBrokerSession = false;

 public override void Initialize(IHostContext hostContext)
 {
@@ -48,6 +56,7 @@ namespace GitHub.Runner.Listener

 _term = HostContext.GetService<ITerminal>();
 _runnerServer = HostContext.GetService<IRunnerServer>();
+_brokerServer = hostContext.GetService<IBrokerServer>();
 }

 public async Task<Boolean> CreateSessionAsync(CancellationToken token)
@@ -63,7 +72,7 @@ namespace GitHub.Runner.Listener
 // Create connection.
 Trace.Info("Loading Credentials");
 var credMgr = HostContext.GetService<ICredentialManager>();
-VssCredentials creds = credMgr.LoadCredentials();
+_creds = credMgr.LoadCredentials();

 var agent = new TaskAgentReference
 {
@@ -85,7 +94,7 @@ namespace GitHub.Runner.Listener
 try
 {
 Trace.Info("Connecting to the Runner Server...");
-await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
+await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds);
 Trace.Info("VssConnection created");

 _term.WriteLine();
@@ -97,6 +106,14 @@ namespace GitHub.Runner.Listener
 taskAgentSession,
 token);
+
+if (_session.BrokerMigrationMessage != null)
+{
+Trace.Info("Runner session is in migration mode: Creating Broker session with BrokerBaseUrl: {0}", _session.BrokerMigrationMessage.BrokerBaseUrl);
+await _brokerServer.ConnectAsync(_session.BrokerMigrationMessage.BrokerBaseUrl, _creds);
+_session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);
+_isBrokerSession = true;
+}
+
 Trace.Info($"Session created.");
 if (encounteringError)
 {
@@ -123,8 +140,15 @@ namespace GitHub.Runner.Listener
 Trace.Error("Catch exception during create session.");
 Trace.Error(ex);

-if (ex is VssOAuthTokenRequestException && creds.Federated is VssOAuthCredential vssOAuthCred)
+if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
 {
+// "invalid_client" means the runner registration has been deleted from the server.
+if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
+{
+_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
+return false;
+}
+
 // Check whether we get 401 because the runner registration already removed by the service.
 // If the runner registration get deleted, we can't exchange oauth token.
 Trace.Error("Test oauth app registration.");
@@ -132,7 +156,7 @@ namespace GitHub.Runner.Listener
 var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
 if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
 {
-_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
+_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
 return false;
 }
 }
@@ -163,6 +187,11 @@ namespace GitHub.Runner.Listener
 {
 using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
 {
+if (_isBrokerSession)
+{
+await _brokerServer.DeleteSessionAsync(ts.Token);
+return;
+}
 await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
 }
 }
@@ -212,11 +241,31 @@ namespace GitHub.Runner.Listener
 _lastMessageId,
 runnerStatus,
 BuildConstants.RunnerPackage.Version,
+VarUtil.OS,
+VarUtil.OSArchitecture,
+_settings.DisableUpdate,
 _getMessagesTokenSource.Token);

 // Decrypt the message body if the session is using encryption
 message = DecryptMessage(message);

+if (message != null && message.MessageType == BrokerMigrationMessage.MessageType)
+{
+Trace.Info("BrokerMigration message received. Polling Broker for messages...");
+
+var migrationMessage = JsonUtility.FromString<BrokerMigrationMessage>(message.Body);
+
+await _brokerServer.ConnectAsync(migrationMessage.BrokerBaseUrl, _creds);
+message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
+runnerStatus,
+BuildConstants.RunnerPackage.Version,
+VarUtil.OS,
+VarUtil.OSArchitecture,
+_settings.DisableUpdate,
+token);
+}

 if (message != null)
 {
 _lastMessageId = message.MessageId;
@@ -245,7 +294,7 @@ namespace GitHub.Runner.Listener
 _accessTokenRevoked = true;
 throw;
 }
-catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
+catch (AccessDeniedException e) when (e.ErrorCode == 1)
 {
 throw;
 }
@@ -332,6 +381,11 @@ namespace GitHub.Runner.Listener
 }
 }

+public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken)
+{
+await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
+}
+
 private TaskAgentMessage DecryptMessage(TaskAgentMessage message)
 {
 if (_session.EncryptionKey == null ||

@@ -138,7 +138,7 @@ namespace GitHub.Runner.Listener
 }

 }
-catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
+catch (AccessDeniedException e) when (e.ErrorCode == 1)
 {
 terminal.WriteError($"An error occured: {e.Message}");
 trace.Error(e);

@@ -1,4 +1,4 @@
 <Project Sdk="Microsoft.NET.Sdk">

 <PropertyGroup>
 <TargetFramework>net6.0</TargetFramework>
@@ -19,18 +19,12 @@

 <ItemGroup>
 <PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
-<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
+<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
 <PackageReference Include="System.IO.FileSystem.AccessControl" Version="4.4.0" />
 <PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
 <PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
 </ItemGroup>

-<ItemGroup>
-<EmbeddedResource Include="..\Misc\runnercoreassets">
-<LogicalName>GitHub.Runner.Listener.runnercoreassets</LogicalName>
-</EmbeddedResource>
-</ItemGroup>
-
 <PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
 <DebugType>portable</DebugType>
 </PropertyGroup>

@@ -457,22 +457,13 @@ namespace GitHub.Runner.Listener

 message = await getNextMessage; //get next message
 HostContext.WritePerfCounter($"MessageReceived_{message.MessageType}");
-if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase) ||
-string.Equals(message.MessageType, RunnerRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
+if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
 {
 if (autoUpdateInProgress == false)
 {
 autoUpdateInProgress = true;
-AgentRefreshMessage runnerUpdateMessage = null;
-if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
-{
-runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
-}
-else
-{
-var brokerRunnerUpdateMessage = JsonUtility.FromString<RunnerRefreshMessage>(message.Body);
-runnerUpdateMessage = new AgentRefreshMessage(brokerRunnerUpdateMessage.RunnerId, brokerRunnerUpdateMessage.TargetVersion, TimeSpan.FromSeconds(brokerRunnerUpdateMessage.TimeoutInSeconds));
-}
+AgentRefreshMessage runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
 #if DEBUG
 // Can mock the update for testing
 if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
@@ -503,6 +494,22 @@ namespace GitHub.Runner.Listener
 Trace.Info("Refresh message received, skip autoupdate since a previous autoupdate is already running.");
 }
 }
+else if (string.Equals(message.MessageType, RunnerRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
+{
+if (autoUpdateInProgress == false)
+{
+autoUpdateInProgress = true;
+RunnerRefreshMessage brokerRunnerUpdateMessage = JsonUtility.FromString<RunnerRefreshMessage>(message.Body);
+
+var selfUpdater = HostContext.GetService<ISelfUpdaterV2>();
+selfUpdateTask = selfUpdater.SelfUpdate(brokerRunnerUpdateMessage, jobDispatcher, false, HostContext.RunnerShutdownToken);
+Trace.Info("Refresh message received, kick-off selfupdate background process.");
+}
+else
+{
+Trace.Info("Refresh message received, skip autoupdate since a previous autoupdate is already running.");
+}
+}
 else if (string.Equals(message.MessageType, JobRequestMessageTypes.PipelineAgentJobRequest, StringComparison.OrdinalIgnoreCase))
 {
 if (autoUpdateInProgress || runOnceJobReceived)
@@ -589,6 +596,10 @@ namespace GitHub.Runner.Listener
 Trace.Info($"Service requests the hosted runner to shutdown. Reason: '{HostedRunnerShutdownMessage.Reason}'.");
 return Constants.Runner.ReturnCode.Success;
 }
+else if (string.Equals(message.MessageType, TaskAgentMessageTypes.ForceTokenRefresh))
+{
+await _listener.RefreshListenerTokenAsync(messageQueueLoopTokenSource.Token);
+}
 else
 {
 Trace.Error($"Received message {message.MessageId} with unsupported message type {message.MessageType}.");
@@ -627,6 +638,7 @@ namespace GitHub.Runner.Listener
 {
 try
 {
+Trace.Info("Deleting Runner Session...");
 await _listener.DeleteSessionAsync();
 }
 catch (Exception ex) when (runOnce)

@@ -6,13 +6,11 @@ using System.IO;
 using System.IO.Compression;
 using System.Linq;
 using System.Net.Http;
-using System.Reflection;
 using System.Security.Cryptography;
 using System.Threading;
 using System.Threading.Tasks;
 using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Common;
-using GitHub.Runner.Common.Util;
 using GitHub.Runner.Sdk;
 using GitHub.Services.Common;
 using GitHub.Services.WebApi;
@@ -30,20 +28,14 @@ namespace GitHub.Runner.Listener
 {
 private static string _packageType = "agent";
 private static string _platform = BuildConstants.RunnerPackage.PackageName;
-private static string _dotnetRuntime = "dotnetRuntime";
-private static string _externals = "externals";
-private readonly Dictionary<string, string> _contentHashes = new();
-
 private PackageMetadata _targetPackage;
 private ITerminal _terminal;
 private IRunnerServer _runnerServer;
 private int _poolId;
-private int _agentId;
+private ulong _agentId;
+private const int _numberOfOldVersionsToKeep = 1;
 private readonly ConcurrentQueue<string> _updateTrace = new();
-private Task _cloneAndCalculateContentHashTask;
-private string _dotnetRuntimeCloneDirectory;
-private string _externalsCloneDirectory;

 public bool Busy { get; private set; }

 public override void Initialize(IHostContext hostContext)
@@ -56,8 +48,6 @@ namespace GitHub.Runner.Listener
 var settings = configStore.GetSettings();
 _poolId = settings.PoolId;
 _agentId = settings.AgentId;
-_dotnetRuntimeCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__dotnet_runtime__");
-_externalsCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals__");
 }

 public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
@@ -67,13 +57,6 @@ namespace GitHub.Runner.Listener
 {
 var totalUpdateTime = Stopwatch.StartNew();

-// Copy dotnet runtime and externals of current runner to a temp folder
-// So we can re-use them with trimmed runner package, if possible.
-// This process is best effort, if we can't use trimmed runner package,
-// we will just go with the full package.
-var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
-_cloneAndCalculateContentHashTask = CloneAndCalculateAssetsHash(_dotnetRuntimeCloneDirectory, _externalsCloneDirectory, linkedTokenSource.Token);
-
 if (!await UpdateNeeded(updateMessage.TargetVersion, token))
 {
 Trace.Info($"Can't find available update package.");
@@ -87,24 +70,6 @@ namespace GitHub.Runner.Listener
 await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
 await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
-
-if (_targetPackage.TrimmedPackages?.Count > 0)
-{
-// wait for cloning assets task to finish only if we have trimmed packages
-await _cloneAndCalculateContentHashTask;
-}
-else
-{
-linkedTokenSource.Cancel();
-try
-{
-await _cloneAndCalculateContentHashTask;
-}
-catch (Exception ex)
-{
-Trace.Info($"Ingore errors after cancelling cloning assets task: {ex}");
-}
-}
-
 await DownloadLatestRunner(token, updateMessage.TargetVersion);
 Trace.Info($"Download latest runner and unzip into runner root.");

@@ -218,54 +183,8 @@ namespace GitHub.Runner.Listener
 string archiveFile = null;
 var packageDownloadUrl = _targetPackage.DownloadUrl;
 var packageHashValue = _targetPackage.HashValue;
-var runtimeTrimmed = false;
-var externalsTrimmed = false;
-var fallbackToFullPackage = false;
-
-// Only try trimmed package if sever sends them and we have calculated hash value of the current runtime/externals.
-if (_contentHashes.Count == 2 &&
-_contentHashes.ContainsKey(_dotnetRuntime) &&
-_contentHashes.ContainsKey(_externals) &&
-_targetPackage.TrimmedPackages?.Count > 0)
-{
-Trace.Info($"Current runner content hash: {StringUtil.ConvertToJson(_contentHashes)}");
-Trace.Info($"Trimmed packages info from service: {StringUtil.ConvertToJson(_targetPackage.TrimmedPackages)}");
-// Try to see whether we can use any size trimmed down package to speed up runner updates.
-foreach (var trimmedPackage in _targetPackage.TrimmedPackages)
-{
-if (trimmedPackage.TrimmedContents.Count == 2 &&
-trimmedPackage.TrimmedContents.TryGetValue(_dotnetRuntime, out var trimmedRuntimeHash) &&
-trimmedRuntimeHash == _contentHashes[_dotnetRuntime] &&
-trimmedPackage.TrimmedContents.TryGetValue(_externals, out var trimmedExternalsHash) &&
-trimmedExternalsHash == _contentHashes[_externals])
-{
-Trace.Info($"Use trimmed (runtime+externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
-packageDownloadUrl = trimmedPackage.DownloadUrl;
-packageHashValue = trimmedPackage.HashValue;
-runtimeTrimmed = true;
-externalsTrimmed = true;
-break;
-}
-else if (trimmedPackage.TrimmedContents.Count == 1 &&
-trimmedPackage.TrimmedContents.TryGetValue(_externals, out trimmedExternalsHash) &&
-trimmedExternalsHash == _contentHashes[_externals])
-{
-Trace.Info($"Use trimmed (externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
-packageDownloadUrl = trimmedPackage.DownloadUrl;
-packageHashValue = trimmedPackage.HashValue;
-externalsTrimmed = true;
-break;
-}
-else
-{
-Trace.Info($"Can't use trimmed package from '{trimmedPackage.DownloadUrl}' since the current runner does not carry those trimmed content (Hash mismatch).");
-}
-}
-}
-
 _updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
-_updateTrace.Enqueue($"RuntimeTrimmed: {runtimeTrimmed}");
-_updateTrace.Enqueue($"ExternalsTrimmed: {externalsTrimmed}");

 try
 {
@@ -323,12 +242,6 @@ namespace GitHub.Runner.Listener

 await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
 }
-catch (Exception ex) when (runtimeTrimmed || externalsTrimmed)
-{
-// if anything failed when we use trimmed package (download/validatehase/extract), try again with the full runner package.
-Trace.Error($"Fail to download latest runner using trimmed package: {ex}");
-fallbackToFullPackage = true;
-}
 finally
 {
 try
@@ -347,74 +260,6 @@ namespace GitHub.Runner.Listener
 }
 }

-var trimmedPackageRestoreTasks = new List<Task<bool>>();
-if (!fallbackToFullPackage)
-{
-// Skip restoring externals and runtime if we are going to fullback to the full package.
-if (externalsTrimmed)
-{
-trimmedPackageRestoreTasks.Add(RestoreTrimmedExternals(latestRunnerDirectory, token));
-}
-if (runtimeTrimmed)
-{
-trimmedPackageRestoreTasks.Add(RestoreTrimmedDotnetRuntime(latestRunnerDirectory, token));
-}
-}
-
-if (trimmedPackageRestoreTasks.Count > 0)
-{
-var restoreResults = await Task.WhenAll(trimmedPackageRestoreTasks);
-if (restoreResults.Any(x => x == false))
-{
-// if any of the restore failed, fallback to full package.
-fallbackToFullPackage = true;
-}
-}
-
-if (fallbackToFullPackage)
-{
-Trace.Error("Something wrong with the trimmed runner package, failback to use the full package for runner updates.");
-_updateTrace.Enqueue($"FallbackToFullPackage: {fallbackToFullPackage}");
-
-IOUtil.DeleteDirectory(latestRunnerDirectory, token);
-Directory.CreateDirectory(latestRunnerDirectory);
-
-packageDownloadUrl = _targetPackage.DownloadUrl;
-packageHashValue = _targetPackage.HashValue;
-_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
-
-try
-{
-archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
-
-if (string.IsNullOrEmpty(archiveFile))
-{
-throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
-}
-
-await ValidateRunnerHash(archiveFile, packageHashValue);
-
-await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
-}
-finally
-{
-try
-{
-// delete .zip file
-if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
-{
-Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile);
-IOUtil.DeleteFile(archiveFile);
-}
-}
-catch (Exception ex)
-{
-//it is not critical if we fail to delete the .zip file
-Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
-}
-}
-}
-
 await CopyLatestRunnerToRoot(latestRunnerDirectory, token);
 }

@@ -665,9 +510,9 @@ namespace GitHub.Runner.Listener

 // delete old bin.2.99.0 folder, only leave the current version and the latest download version
 var allBinDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "bin.*");
-if (allBinDirs.Length > 2)
+if (allBinDirs.Length > _numberOfOldVersionsToKeep)
 {
-// there are more than 2 bin.version folder.
+// there are more than one bin.version folder.
 // delete older bin.version folders.
 foreach (var oldBinDir in allBinDirs)
 {
@@ -694,9 +539,9 @@ namespace GitHub.Runner.Listener

 // delete old externals.2.99.0 folder, only leave the current version and the latest download version
 var allExternalsDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "externals.*");
-if (allExternalsDirs.Length > 2)
+if (allExternalsDirs.Length > _numberOfOldVersionsToKeep)
 {
-// there are more than 2 externals.version folder.
+// there are more than one externals.version folder.
 // delete older externals.version folders.
 foreach (var oldExternalDir in allExternalsDirs)
 {
@@ -795,330 +640,5 @@ namespace GitHub.Runner.Listener
 Trace.Info($"Catch exception during report update state, ignore this error and continue auto-update.");
 }
 }
-
-private async Task<bool> RestoreTrimmedExternals(string downloadDirectory, CancellationToken token)
-{
-// Copy the current runner's externals if we are using a externals trimmed package
-// Execute the node.js to make sure the copied externals is working.
-var stopWatch = Stopwatch.StartNew();
-try
-{
-Trace.Info($"Copy {_externalsCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory)}.");
-IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token);
-
-// try run node.js to see if current node.js works fine after copy over to new location.
-var nodeVersions = NodeUtil.BuiltInNodeVersions;
-foreach (var nodeVersion in nodeVersions)
-{
-var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}");
-if (File.Exists(newNodeBinary))
-{
-using (var p = HostContext.CreateService<IProcessInvoker>())
-{
-var outputs = "";
-p.ErrorDataReceived += (_, data) =>
-{
-if (!string.IsNullOrEmpty(data.Data))
-{
-Trace.Error(data.Data);
-}
-};
-p.OutputDataReceived += (_, data) =>
-{
-if (!string.IsNullOrEmpty(data.Data))
-{
-Trace.Info(data.Data);
-outputs = data.Data;
-}
-};
-var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newNodeBinary, $"-e \"console.log('{nameof(RestoreTrimmedExternals)}')\"", null, token);
-if (exitCode != 0)
-{
-Trace.Error($"{newNodeBinary} -e \"console.log()\" failed with exit code {exitCode}");
-return false;
-}
-
-if (!string.Equals(outputs, nameof(RestoreTrimmedExternals), StringComparison.OrdinalIgnoreCase))
-{
-Trace.Error($"{newNodeBinary} -e \"console.log()\" did not output expected content.");
-return false;
-}
-}
-}
-}
-
-return true;
-}
-catch (Exception ex)
-{
-Trace.Error($"Fail to restore externals for trimmed package: {ex}");
-return false;
-}
-finally
-{
-stopWatch.Stop();
-_updateTrace.Enqueue($"{nameof(RestoreTrimmedExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
-}
-}
-
-private async Task<bool> RestoreTrimmedDotnetRuntime(string downloadDirectory, CancellationToken token)
-{
-// Copy the current runner's dotnet runtime if we are using a dotnet runtime trimmed package
-// Execute the runner.listener to make sure the copied runtime is working.
-var stopWatch = Stopwatch.StartNew();
-try
-{
-Trace.Info($"Copy {_dotnetRuntimeCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.BinDirectory)}.");
-IOUtil.CopyDirectory(_dotnetRuntimeCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.BinDirectory), token);
-
-// try run the runner executable to see if current dotnet runtime + future runner binary works fine.
-var newRunnerBinary = Path.Combine(downloadDirectory, Constants.Path.BinDirectory, "Runner.Listener");
-using (var p = HostContext.CreateService<IProcessInvoker>())
-{
-p.ErrorDataReceived += (_, data) =>
-{
-if (!string.IsNullOrEmpty(data.Data))
-{
-Trace.Error(data.Data);
-}
-};
-p.OutputDataReceived += (_, data) =>
-{
-if (!string.IsNullOrEmpty(data.Data))
-{
-Trace.Info(data.Data);
-}
-};
-var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newRunnerBinary, "--version", null, token);
-if (exitCode != 0)
-{
-Trace.Error($"{newRunnerBinary} --version failed with exit code {exitCode}");
-return false;
-}
-else
-{
-return true;
-}
-}
-}
-catch (Exception ex)
-{
-Trace.Error($"Fail to restore dotnet runtime for trimmed package: {ex}");
-return false;
-}
-finally
-{
-stopWatch.Stop();
-_updateTrace.Enqueue($"{nameof(RestoreTrimmedDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
-}
-}
-
-private async Task CloneAndCalculateAssetsHash(string dotnetRuntimeCloneDirectory, string externalsCloneDirectory, CancellationToken token)
-{
-var runtimeCloneTask = CloneDotnetRuntime(dotnetRuntimeCloneDirectory, token);
-var externalsCloneTask = CloneExternals(externalsCloneDirectory, token);
-
-var waitingTasks = new Dictionary<string, Task>()
-{
-{nameof(CloneDotnetRuntime), runtimeCloneTask},
-{nameof(CloneExternals),externalsCloneTask}
-};
-
-while (waitingTasks.Count > 0)
-{
-Trace.Info($"Waiting for {waitingTasks.Count} tasks to complete.");
-var complatedTask = await Task.WhenAny(waitingTasks.Values);
-if (waitingTasks.ContainsKey(nameof(CloneExternals)) &&
-complatedTask == waitingTasks[nameof(CloneExternals)])
-{
-Trace.Info($"Externals clone finished.");
-waitingTasks.Remove(nameof(CloneExternals));
-try
-{
-if (await externalsCloneTask && !token.IsCancellationRequested)
-{
-var externalsHash = await HashFiles(externalsCloneDirectory, token);
-Trace.Info($"Externals content hash: {externalsHash}");
-_contentHashes[_externals] = externalsHash;
-_updateTrace.Enqueue($"ExternalsHash: {_contentHashes[_externals]}");
-}
-else
-{
-Trace.Error($"Skip compute hash since clone externals failed/cancelled.");
-}
-}
-catch (Exception ex)
-{
-Trace.Error($"Fail to hash externals content: {ex}");
-}
-}
-else if (waitingTasks.ContainsKey(nameof(CloneDotnetRuntime)) &&
-complatedTask == waitingTasks[nameof(CloneDotnetRuntime)])
-{
-Trace.Info($"Dotnet runtime clone finished.");
-waitingTasks.Remove(nameof(CloneDotnetRuntime));
-try
-{
-if (await runtimeCloneTask && !token.IsCancellationRequested)
-{
-var runtimeHash = await HashFiles(dotnetRuntimeCloneDirectory, token);
-Trace.Info($"Runtime content hash: {runtimeHash}");
-_contentHashes[_dotnetRuntime] = runtimeHash;
-_updateTrace.Enqueue($"DotnetRuntimeHash: {_contentHashes[_dotnetRuntime]}");
-}
-else
-{
-Trace.Error($"Skip compute hash since clone dotnet runtime failed/cancelled.");
-}
-}
-catch (Exception ex)
-{
-Trace.Error($"Fail to hash runtime content: {ex}");
-}
-}
-
-Trace.Info($"Still waiting for {waitingTasks.Count} tasks to complete.");
-}
-}
-
-private async Task<bool> CloneDotnetRuntime(string runtimeDir, CancellationToken token)
-{
-var stopWatch = Stopwatch.StartNew();
-try
-{
-Trace.Info($"Cloning dotnet runtime to {runtimeDir}");
-IOUtil.DeleteDirectory(runtimeDir, CancellationToken.None);
-Directory.CreateDirectory(runtimeDir);
-
-var assembly = Assembly.GetExecutingAssembly();
-var assetsContent = default(string);
-using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Listener.runnercoreassets"))
-using (var streamReader = new StreamReader(stream))
-{
-assetsContent = await streamReader.ReadToEndAsync();
-}
-
-if (!string.IsNullOrEmpty(assetsContent))
-{
-var runnerCoreAssets = assetsContent.Split(new[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
-if (runnerCoreAssets.Length > 0)
-{
-var binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
-IOUtil.CopyDirectory(binDir, runtimeDir, token);
-
-var clonedFile = 0;
-foreach (var file in Directory.EnumerateFiles(runtimeDir, "*", SearchOption.AllDirectories))
-{
-token.ThrowIfCancellationRequested();
-if (runnerCoreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x.Trim())))
-{
-Trace.Verbose($"{file} is part of the runner core, delete from cloned runtime directory.");
-IOUtil.DeleteFile(file);
-}
-else
-{
-clonedFile++;
-}
-}
-
-Trace.Info($"Successfully cloned dotnet runtime to {runtimeDir}. Total files: {clonedFile}");
-return true;
-}
-}
-}
-catch (Exception ex)
-{
-Trace.Error($"Fail to clone dotnet runtime to {runtimeDir}");
-Trace.Error(ex);
-}
-finally
-{
-stopWatch.Stop();
-_updateTrace.Enqueue($"{nameof(CloneDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
-}
-
-return false;
-}
-
-private Task<bool> CloneExternals(string externalsDir, CancellationToken token)
-{
-var stopWatch = Stopwatch.StartNew();
-try
-{
-Trace.Info($"Cloning externals to {externalsDir}");
-IOUtil.DeleteDirectory(externalsDir, CancellationToken.None);
-Directory.CreateDirectory(externalsDir);
-IOUtil.CopyDirectory(HostContext.GetDirectory(WellKnownDirectory.Externals), externalsDir, token);
-Trace.Info($"Successfully cloned externals to {externalsDir}.");
-return Task.FromResult(true);
-}
-catch (Exception ex)
-{
-Trace.Error($"Fail to clone externals to {externalsDir}");
-Trace.Error(ex);
-}
-finally
-{
-stopWatch.Stop();
-_updateTrace.Enqueue($"{nameof(CloneExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
-}
-
-return Task.FromResult(false);
-}
-
-private async Task<string> HashFiles(string fileFolder, CancellationToken token)
-{
-Trace.Info($"Calculating hash for {fileFolder}");
-
-var stopWatch = Stopwatch.StartNew();
-string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
-string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
-string hashFilesScript = Path.Combine(binDir, "hashFiles");
-var hashResult = string.Empty;
-
-using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
-{
-processInvoker.ErrorDataReceived += (_, data) =>
-{
-if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
-{
-hashResult = data.Data.Substring(10, data.Data.Length - 20);
-Trace.Info($"Hash result: '{hashResult}'");
-}
-else
-{
-Trace.Info(data.Data);
-}
-};
-
-processInvoker.OutputDataReceived += (_, data) =>
-{
-Trace.Verbose(data.Data);
-};
-
-var env = new Dictionary<string, string>
-{
-["patterns"] = "**"
-};
-
-int exitCode = await processInvoker.ExecuteAsync(workingDirectory: fileFolder,
-fileName: node,
-arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
-environment: env,
-requireExitCodeZero: false,
-outputEncoding: null,
-killProcessOnCancel: true,
-cancellationToken: token);
-
-if (exitCode != 0)
-{
-Trace.Error($"hashFiles returns '{exitCode}' failed. Fail to hash files under directory '{fileFolder}'");
-}
-
-stopWatch.Stop();
-_updateTrace.Enqueue($"{nameof(HashFiles)}{Path.GetFileName(fileFolder)}Time: {stopWatch.ElapsedMilliseconds}ms");
-return hashResult;
-}
-}
 }
 }
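The removed HashFiles helper above reads the hash off the child process's stderr, wrapped in "__OUTPUT__" markers, and strips them with Substring(10, length - 20). A minimal sketch of that unwrapping step in isolation, with a hypothetical helper name (not part of this diff):

static string UnwrapHashResult(string line)
{
    // "__OUTPUT__" is 10 characters, so stripping 10 from each end leaves the hash value itself.
    const string marker = "__OUTPUT__";
    if (!string.IsNullOrEmpty(line) &&
        line.StartsWith(marker) &&
        line.EndsWith(marker) &&
        line.Length >= 2 * marker.Length)
    {
        return line.Substring(marker.Length, line.Length - 2 * marker.Length);
    }
    return string.Empty;
}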
src/Runner.Listener/SelfUpdaterV2.cs (new file, 568 lines)
@@ -0,0 +1,568 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.IO.Compression;
+using System.Linq;
+using System.Net.Http;
+using System.Reflection;
+using System.Security.Cryptography;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.DistributedTask.WebApi;
+using GitHub.Runner.Common;
+using GitHub.Runner.Common.Util;
+using GitHub.Runner.Sdk;
+using GitHub.Services.Common;
+using GitHub.Services.WebApi;
+
+namespace GitHub.Runner.Listener
+{
+// This class is a fork of SelfUpdater.cs and is intended to only be used for the
+// new self-update flow where the PackageMetadata is sent in the message directly.
+// Forking the class prevents us from accidentally breaking the old flow while it's still in production
+
+[ServiceLocator(Default = typeof(SelfUpdaterV2))]
+public interface ISelfUpdaterV2 : IRunnerService
+{
+bool Busy { get; }
+Task<bool> SelfUpdate(RunnerRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token);
+}
+public class SelfUpdaterV2 : RunnerService, ISelfUpdaterV2
+{
+private static string _platform = BuildConstants.RunnerPackage.PackageName;
+private ITerminal _terminal;
+private IRunnerServer _runnerServer;
+private int _poolId;
+private ulong _agentId;
+
+private const int _numberOfOldVersionsToKeep = 1;
+
+private readonly ConcurrentQueue<string> _updateTrace = new();
+public bool Busy { get; private set; }
+
+public override void Initialize(IHostContext hostContext)
+{
+base.Initialize(hostContext);
+
+_terminal = hostContext.GetService<ITerminal>();
+_runnerServer = HostContext.GetService<IRunnerServer>();
+var configStore = HostContext.GetService<IConfigurationStore>();
+var settings = configStore.GetSettings();
+_poolId = settings.PoolId;
+_agentId = settings.AgentId;
+}
+
+public async Task<bool> SelfUpdate(RunnerRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
+{
+Busy = true;
+try
+{
+var totalUpdateTime = Stopwatch.StartNew();
+
+Trace.Info($"An update is available.");
+_updateTrace.Enqueue($"RunnerPlatform: {updateMessage.OS}");
+
+// Print console line that warn user not shutdown runner.
+_terminal.WriteLine("Runner update in progress, do not shutdown runner.");
+_terminal.WriteLine($"Downloading {updateMessage.TargetVersion} runner");
+
+await DownloadLatestRunner(token, updateMessage.TargetVersion, updateMessage.DownloadUrl, updateMessage.SHA256Checksum, updateMessage.OS);
+Trace.Info($"Download latest runner and unzip into runner root.");
+
+// wait till all running job finish
+_terminal.WriteLine("Waiting for current job finish running.");
+
+await jobDispatcher.WaitAsync(token);
+Trace.Info($"All running job has exited.");
+
+// We need to keep runner backup around for macOS until we fixed https://github.com/actions/runner/issues/743
+// delete runner backup
+var stopWatch = Stopwatch.StartNew();
+DeletePreviousVersionRunnerBackup(token, updateMessage.TargetVersion);
+Trace.Info($"Delete old version runner backup.");
+stopWatch.Stop();
+// generate update script from template
+_updateTrace.Enqueue($"DeleteRunnerBackupTime: {stopWatch.ElapsedMilliseconds}ms");
+_terminal.WriteLine("Generate and execute update script.");
+
+string updateScript = GenerateUpdateScript(restartInteractiveRunner, updateMessage.TargetVersion);
+Trace.Info($"Generate update script into: {updateScript}");
+
+
+#if DEBUG
+// For L0, we will skip execute update script.
+if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_EXECUTE_UPDATE_SCRIPT")))
+#endif
+{
+string flagFile = "update.finished";
+IOUtil.DeleteFile(flagFile);
+// kick off update script
+Process invokeScript = new();
+#if OS_WINDOWS
+invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
+invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
+#elif (OS_OSX || OS_LINUX)
+invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
+invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
+#endif
+invokeScript.Start();
+Trace.Info($"Update script start running");
+}
+
+totalUpdateTime.Stop();
+
+_updateTrace.Enqueue($"TotalUpdateTime: {totalUpdateTime.ElapsedMilliseconds}ms");
+_terminal.WriteLine("Runner will exit shortly for update, should be back online within 10 seconds.");
+
+return true;
+}
+catch (Exception ex)
+{
+_updateTrace.Enqueue(ex.ToString());
+throw;
+}
+finally
+{
+_terminal.WriteLine("Runner update process finished.");
+Busy = false;
+}
+}
+
+/// <summary>
+/// _work
+/// \_update
+/// \bin
+/// \externals
+/// \run.sh
+/// \run.cmd
+/// \package.zip //temp download .zip/.tar.gz
+/// </summary>
+/// <param name="token"></param>
+/// <returns></returns>
+private async Task DownloadLatestRunner(CancellationToken token, string targetVersion, string packageDownloadUrl, string packageHashValue, string targetPlatform)
+{
+string latestRunnerDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), Constants.Path.UpdateDirectory);
+IOUtil.DeleteDirectory(latestRunnerDirectory, token);
+Directory.CreateDirectory(latestRunnerDirectory);
+
+string archiveFile = null;
+
+_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
+
+try
+{
+#if DEBUG
+// Much of the update process (targetVersion, archive) is server-side, this is a way to control it from here for testing specific update scenarios
+// Add files like 'runner2.281.2.tar.gz' or 'runner2.283.0.zip' (depending on your platform) to your runner root folder
+// Note that runners still need to be older than the server's runner version in order to receive an 'AgentRefreshMessage' and trigger this update
+// Wrapped in #if DEBUG as this should not be in the RELEASE build
+if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
+{
+var waitForDebugger = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE_WAIT_FOR_DEBUGGER"));
+if (waitForDebugger)
+{
+int waitInSeconds = 20;
+while (!Debugger.IsAttached && waitInSeconds-- > 0)
+{
+await Task.Delay(1000);
+}
+Debugger.Break();
+}
+
+if (targetPlatform.StartsWith("win"))
+{
+archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.zip");
+}
+else
+{
+archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.tar.gz");
+}
+
+if (File.Exists(archiveFile))
+{
+_updateTrace.Enqueue($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
+_terminal.WriteLine($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
+}
+else
+{
+archiveFile = null;
+_terminal.WriteLine($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
+_updateTrace.Enqueue($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
+}
+}
+#endif
+// archiveFile is not null only if we mocked it above
+if (string.IsNullOrEmpty(archiveFile))
+{
+archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, targetPlatform, token);
+
+if (string.IsNullOrEmpty(archiveFile))
+{
+throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
+}
+await ValidateRunnerHash(archiveFile, packageHashValue);
+}
+
+await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
+}
+finally
+{
+try
+{
+// delete .zip file
+if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
+{
+Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile);
+IOUtil.DeleteFile(archiveFile);
+}
+}
+catch (Exception ex)
+{
+//it is not critical if we fail to delete the .zip file
+Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
+}
+}
+
+await CopyLatestRunnerToRoot(latestRunnerDirectory, targetVersion, token);
+}
+
+private async Task<string> DownLoadRunner(string downloadDirectory, string packageDownloadUrl, string packageHashValue, string packagePlatform, CancellationToken token)
+{
+var stopWatch = Stopwatch.StartNew();
+int runnerSuffix = 1;
+string archiveFile = null;
+bool downloadSucceeded = false;
+
+// Download the runner, using multiple attempts in order to be resilient against any networking/CDN issues
+for (int attempt = 1; attempt <= Constants.RunnerDownloadRetryMaxAttempts; attempt++)
+{
+// Generate an available package name, and do our best effort to clean up stale local zip files
+while (true)
+{
+if (packagePlatform.StartsWith("win"))
+{
+archiveFile = Path.Combine(downloadDirectory, $"runner{runnerSuffix}.zip");
+}
+else
+{
+archiveFile = Path.Combine(downloadDirectory, $"runner{runnerSuffix}.tar.gz");
+}
+
+try
+{
+// delete .zip file
+if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
+{
+Trace.Verbose("Deleting latest runner package zip '{0}'", archiveFile);
+IOUtil.DeleteFile(archiveFile);
+}
+
+break;
+}
+catch (Exception ex)
+{
+// couldn't delete the file for whatever reason, so generate another name
+Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
+runnerSuffix++;
+}
+}
+
+// Allow a 15-minute package download timeout, which is good enough to update the runner from a 1 Mbit/s ADSL connection.
+if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_DOWNLOAD_TIMEOUT") ?? string.Empty, out int timeoutSeconds))
+{
+timeoutSeconds = 15 * 60;
+}
+
+Trace.Info($"Attempt {attempt}: save latest runner into {archiveFile}.");
+
+using (var downloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
+using (var downloadCts = CancellationTokenSource.CreateLinkedTokenSource(downloadTimeout.Token, token))
+{
+try
+{
+Trace.Info($"Download runner: begin download");
+long downloadSize = 0;
+
+//open zip stream in async mode
+using (HttpClient httpClient = new(HostContext.CreateHttpClientHandler()))
+{
+Trace.Info($"Downloading {packageDownloadUrl}");
+
+using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
+using (Stream result = await httpClient.GetStreamAsync(packageDownloadUrl))
+{
+//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
+await result.CopyToAsync(fs, 81920, downloadCts.Token);
+await fs.FlushAsync(downloadCts.Token);
+downloadSize = fs.Length;
+}
+}
+
+Trace.Info($"Download runner: finished download");
+downloadSucceeded = true;
+stopWatch.Stop();
+_updateTrace.Enqueue($"PackageDownloadTime: {stopWatch.ElapsedMilliseconds}ms");
+_updateTrace.Enqueue($"Attempts: {attempt}");
+_updateTrace.Enqueue($"PackageSize: {downloadSize / 1024 / 1024}MB");
+break;
+}
+catch (OperationCanceledException) when (token.IsCancellationRequested)
+{
+Trace.Info($"Runner download has been cancelled.");
+throw;
+}
+catch (Exception ex)
+{
+if (downloadCts.Token.IsCancellationRequested)
+{
+Trace.Warning($"Runner download has timed out after {timeoutSeconds} seconds");
+}
+
+Trace.Warning($"Failed to get package '{archiveFile}' from '{packageDownloadUrl}'. Exception {ex}");
+}
+}
+}
+
+if (downloadSucceeded)
+{
+return archiveFile;
+}
+else
+{
+return null;
+}
+}
+
+private async Task ValidateRunnerHash(string archiveFile, string packageHashValue)
+{
+var stopWatch = Stopwatch.StartNew();
+// Validate Hash Matches if it is provided
+using (FileStream stream = File.OpenRead(archiveFile))
+{
+if (!string.IsNullOrEmpty(packageHashValue))
+{
+using (SHA256 sha256 = SHA256.Create())
+{
+byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
+var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
+if (hash != packageHashValue)
+{
+// Hash did not match, we can't recover from this, just throw
+throw new Exception($"Computed runner hash {hash} did not match expected Runner Hash {packageHashValue} for {archiveFile}");
+}
+
+stopWatch.Stop();
+Trace.Info($"Validated Runner Hash matches {archiveFile} : {packageHashValue}");
+_updateTrace.Enqueue($"ValidateHashTime: {stopWatch.ElapsedMilliseconds}ms");
+}
+}
+}
+}
+
+private async Task ExtractRunnerPackage(string archiveFile, string extractDirectory, CancellationToken token)
+{
+var stopWatch = Stopwatch.StartNew();
+
+if (archiveFile.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
+{
+ZipFile.ExtractToDirectory(archiveFile, extractDirectory);
+}
+else if (archiveFile.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
+{
+string tar = WhichUtil.Which("tar", trace: Trace);
+
+if (string.IsNullOrEmpty(tar))
+{
+throw new NotSupportedException($"tar -xzf");
+}
+
+// tar -xzf
+using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
+{
+processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
+{
+if (!string.IsNullOrEmpty(args.Data))
+{
+Trace.Info(args.Data);
+}
+});
+
+processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
+{
+if (!string.IsNullOrEmpty(args.Data))
+{
+Trace.Error(args.Data);
+}
+});
+
+int exitCode = await processInvoker.ExecuteAsync(extractDirectory, tar, $"-xzf \"{archiveFile}\"", null, token);
+if (exitCode != 0)
+{
+throw new NotSupportedException($"Can't use 'tar -xzf' to extract archive file: {archiveFile}. return code: {exitCode}.");
+}
+}
+}
+else
+{
+throw new NotSupportedException($"{archiveFile}");
+}
+
+stopWatch.Stop();
+Trace.Info($"Finished getting latest runner package at: {extractDirectory}.");
+_updateTrace.Enqueue($"PackageExtractTime: {stopWatch.ElapsedMilliseconds}ms");
+}
+
+private Task CopyLatestRunnerToRoot(string latestRunnerDirectory, string targetVersion, CancellationToken token)
+{
+var stopWatch = Stopwatch.StartNew();
+// copy latest runner into runner root folder
+// copy bin from _work/_update -> bin.version under root
+string binVersionDir = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"{Constants.Path.BinDirectory}.{targetVersion}");
+Directory.CreateDirectory(binVersionDir);
+Trace.Info($"Copy {Path.Combine(latestRunnerDirectory, Constants.Path.BinDirectory)} to {binVersionDir}.");
+IOUtil.CopyDirectory(Path.Combine(latestRunnerDirectory, Constants.Path.BinDirectory), binVersionDir, token);
+
+// copy externals from _work/_update -> externals.version under root
+string externalsVersionDir = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"{Constants.Path.ExternalsDirectory}.{targetVersion}");
+Directory.CreateDirectory(externalsVersionDir);
+Trace.Info($"Copy {Path.Combine(latestRunnerDirectory, Constants.Path.ExternalsDirectory)} to {externalsVersionDir}.");
+IOUtil.CopyDirectory(Path.Combine(latestRunnerDirectory, Constants.Path.ExternalsDirectory), externalsVersionDir, token);
+
+// copy and replace all .sh/.cmd files
+Trace.Info($"Copy any remaining .sh/.cmd files into runner root.");
+foreach (FileInfo file in new DirectoryInfo(latestRunnerDirectory).GetFiles() ?? new FileInfo[0])
+{
+string destination = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), file.Name);
+
+// Removing the file instead of just trying to overwrite it works around permissions issues on linux.
+// https://github.com/actions/runner/issues/981
+Trace.Info($"Copy {file.FullName} to {destination}");
+IOUtil.DeleteFile(destination);
+file.CopyTo(destination, true);
+}
+
+stopWatch.Stop();
+_updateTrace.Enqueue($"CopyRunnerToRootTime: {stopWatch.ElapsedMilliseconds}ms");
+return Task.CompletedTask;
+}
+
+private void DeletePreviousVersionRunnerBackup(CancellationToken token, string targetVersion)
+{
+// delete previous backup runner (back compat, can be remove after serval sprints)
+// bin.bak.2.99.0
+// externals.bak.2.99.0
+foreach (string existBackUp in Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "*.bak.*"))
+{
+Trace.Info($"Delete existing runner backup at {existBackUp}.");
+try
+{
+IOUtil.DeleteDirectory(existBackUp, token);
+}
+catch (Exception ex) when (!(ex is OperationCanceledException))
+{
+Trace.Error(ex);
+Trace.Info($"Catch exception during delete backup folder {existBackUp}, ignore this error try delete the backup folder on next auto-update.");
+}
+}
+
+// delete old bin.2.99.0 folder, only leave the current version and the latest download version
+var allBinDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "bin.*");
+if (allBinDirs.Length > _numberOfOldVersionsToKeep)
+{
+// there are more than {_numberOfOldVersionsToKeep} bin.version folder.
+// delete older bin.version folders.
+foreach (var oldBinDir in allBinDirs)
+{
+if (string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin"), StringComparison.OrdinalIgnoreCase) ||
+string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin.{BuildConstants.RunnerPackage.Version}"), StringComparison.OrdinalIgnoreCase) ||
+string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin.{targetVersion}"), StringComparison.OrdinalIgnoreCase))
+{
+// skip for current runner version
+continue;
+}
+
+Trace.Info($"Delete runner bin folder's backup at {oldBinDir}.");
+try
+{
+IOUtil.DeleteDirectory(oldBinDir, token);
+}
+catch (Exception ex) when (!(ex is OperationCanceledException))
+{
+Trace.Error(ex);
+Trace.Info($"Catch exception during delete backup folder {oldBinDir}, ignore this error try delete the backup folder on next auto-update.");
+}
+}
+}
+
+// delete old externals.2.99.0 folder, only leave the current version and the latest download version
+var allExternalsDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "externals.*");
+if (allExternalsDirs.Length > _numberOfOldVersionsToKeep)
+{
+// there are more than {_numberOfOldVersionsToKeep} externals.version folder.
+// delete older externals.version folders.
+foreach (var oldExternalDir in allExternalsDirs)
+{
+if (string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals"), StringComparison.OrdinalIgnoreCase) ||
+string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals.{BuildConstants.RunnerPackage.Version}"), StringComparison.OrdinalIgnoreCase) ||
+string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals.{targetVersion}"), StringComparison.OrdinalIgnoreCase))
+{
+// skip for current runner version
+continue;
+}
+
+Trace.Info($"Delete runner externals folder's backup at {oldExternalDir}.");
+try
+{
+IOUtil.DeleteDirectory(oldExternalDir, token);
+}
+catch (Exception ex) when (!(ex is OperationCanceledException))
+{
+Trace.Error(ex);
+Trace.Info($"Catch exception during delete backup folder {oldExternalDir}, ignore this error try delete the backup folder on next auto-update.");
+}
+}
+}
+}
+
+private string GenerateUpdateScript(bool restartInteractiveRunner, string targetVersion)
+{
+int processId = Process.GetCurrentProcess().Id;
+string updateLog = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), $"SelfUpdate-{DateTime.UtcNow.ToString("yyyyMMdd-HHmmss")}.log");
+string runnerRoot = HostContext.GetDirectory(WellKnownDirectory.Root);
+
+#if OS_WINDOWS
+string templateName = "update.cmd.template";
+#else
+string templateName = "update.sh.template";
+#endif
+
+string templatePath = Path.Combine(runnerRoot, $"bin.{targetVersion}", templateName);
+string template = File.ReadAllText(templatePath);
+
+template = template.Replace("_PROCESS_ID_", processId.ToString());
+template = template.Replace("_RUNNER_PROCESS_NAME_", $"Runner.Listener{IOUtil.ExeExtension}");
+template = template.Replace("_ROOT_FOLDER_", runnerRoot);
+template = template.Replace("_EXIST_RUNNER_VERSION_", BuildConstants.RunnerPackage.Version);
+template = template.Replace("_DOWNLOAD_RUNNER_VERSION_", targetVersion);
+template = template.Replace("_UPDATE_LOG_", updateLog);
+template = template.Replace("_RESTART_INTERACTIVE_RUNNER_", restartInteractiveRunner ? "1" : "0");
+
+#if OS_WINDOWS
+string scriptName = "_update.cmd";
+#else
+string scriptName = "_update.sh";
+#endif
+
+string updateScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), scriptName);
+if (File.Exists(updateScript))
+{
+IOUtil.DeleteFile(updateScript);
+}
+
+File.WriteAllText(updateScript, template);
+return updateScript;
+}
+}
+}
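ValidateRunnerHash above compares a SHA-256 of the downloaded archive against the checksum carried in the refresh message, using the runner SDK's PrimitiveExtensions.ConvertToHexString. A minimal BCL-only sketch of the same check (illustrative helper name; assumes .NET 5+ for Convert.ToHexString and SHA256.ComputeHashAsync, with the same usings as the file above):

static async Task<bool> MatchesExpectedSha256Async(string archiveFile, string expectedHash)
{
    using (FileStream stream = File.OpenRead(archiveFile))
    using (SHA256 sha256 = SHA256.Create())
    {
        // Hash the archive and compare case-insensitively against the server-provided checksum.
        byte[] hashBytes = await sha256.ComputeHashAsync(stream);
        string hash = Convert.ToHexString(hashBytes);
        return string.Equals(hash, expectedHash, StringComparison.OrdinalIgnoreCase);
    }
}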
@@ -69,6 +69,10 @@ namespace GitHub.Runner.Sdk
 return;
 }

+if (!string.IsNullOrEmpty(httpProxyAddress) && !Uri.TryCreate(httpProxyAddress, UriKind.Absolute, out var _))
+{
+httpProxyAddress = PrependHttpIfMissing(httpProxyAddress);
+}
 if (!string.IsNullOrEmpty(httpProxyAddress) && Uri.TryCreate(httpProxyAddress, UriKind.Absolute, out var proxyHttpUri))
 {
 _httpProxyAddress = proxyHttpUri.OriginalString;
@@ -99,6 +103,10 @@ namespace GitHub.Runner.Sdk
 }
 }

+if (!string.IsNullOrEmpty(httpsProxyAddress) && !Uri.TryCreate(httpsProxyAddress, UriKind.Absolute, out var _))
+{
+httpsProxyAddress = PrependHttpIfMissing(httpsProxyAddress);
+}
 if (!string.IsNullOrEmpty(httpsProxyAddress) && Uri.TryCreate(httpsProxyAddress, UriKind.Absolute, out var proxyHttpsUri))
 {
 _httpsProxyAddress = proxyHttpsUri.OriginalString;
@@ -240,5 +248,20 @@ namespace GitHub.Runner.Sdk

 return false;
 }
+
+private string PrependHttpIfMissing(string proxyAddress)
+{
+// much like in golang, see https://github.com/golang/net/blob/f5464ddb689c015d1abf4df78a806a54af977e6c/http/httpproxy/proxy.go#LL156C31-L156C31
+if (!proxyAddress.StartsWith("http://", StringComparison.Ordinal) && !proxyAddress.StartsWith("https://", StringComparison.Ordinal))
+{
+var prependedProxyAddress = "http://" + proxyAddress;
+if (Uri.TryCreate(prependedProxyAddress, UriKind.Absolute, out var _))
+{
+// if prepending http:// turns the proxyAddress into a valid Uri, then use that
+return prependedProxyAddress;
+}
+}
+return proxyAddress;
+}
 }
 }
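PrependHttpIfMissing only rewrites a proxy address when it carries no scheme and prefixing http:// yields a valid absolute URI. Illustrative inputs and results (examples are assumptions, not taken from the diff):

// "proxy.example.com"        -> "http://proxy.example.com"
// "192.168.1.1:8080"         -> "http://192.168.1.1:8080"
// "http://proxy.local:3128"  -> returned unchanged (already has a scheme)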
@@ -6,6 +6,8 @@ using System.Linq;
 using System.Security.Cryptography;
 using System.Text;
 using System.Threading;
+using System.Threading.Tasks;
+using GitHub.Services.Common;

 namespace GitHub.Runner.Sdk
 {
@@ -72,6 +74,25 @@ namespace GitHub.Runner.Sdk
 }
 }
+
+public static async Task<string> GetFileContentSha256HashAsync(string path)
+{
+if (!File.Exists(path))
+{
+return string.Empty;
+}
+
+using (FileStream stream = File.OpenRead(path))
+{
+using (SHA256 sha256 = SHA256.Create())
+{
+byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
+var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
+return hash;
+}
+
+}
+}
+
 public static void Delete(string path, CancellationToken cancellationToken)
 {
 DeleteDirectory(path, cancellationToken);
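A hypothetical call site for the new GetFileContentSha256HashAsync helper, assuming it lands on IOUtil alongside the Delete/DeleteDirectory methods shown in the context lines (the path here is illustrative; the helper returns string.Empty for a missing file):

string runnerRoot = HostContext.GetDirectory(WellKnownDirectory.Root);
string packagePath = Path.Combine(runnerRoot, "runner-package.tar.gz"); // illustrative path
string contentHash = await IOUtil.GetFileContentSha256HashAsync(packagePath);
if (string.IsNullOrEmpty(contentHash))
{
    Trace.Info($"No file at {packagePath}, nothing to hash.");
}
else
{
    Trace.Info($"SHA-256 of {packagePath}: {contentHash}");
}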
@@ -1,4 +1,6 @@
 using System;
+using System.Linq;
+using System.Net.Http.Headers;

 namespace GitHub.Runner.Sdk
 {
@@ -48,5 +50,15 @@ namespace GitHub.Runner.Sdk

 return credUri.Uri;
 }
+
+public static string GetGitHubRequestId(HttpResponseHeaders headers)
+{
+if (headers != null &&
+headers.TryGetValues("x-github-request-id", out var headerValues))
+{
+return headerValues.FirstOrDefault();
+}
+return string.Empty;
+}
 }
 }
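The new GetGitHubRequestId helper pulls the x-github-request-id header off a response for diagnostics. A hypothetical call site; the containing class name is not visible in this hunk, so it is referred to here as UrlUtil by assumption:

using (HttpClient client = new())
using (HttpResponseMessage response = await client.GetAsync("https://api.github.com/meta"))
{
    // Assumed class name; only the method body appears in the hunk above.
    string requestId = UrlUtil.GetGitHubRequestId(response.Headers);
    if (!string.IsNullOrEmpty(requestId))
    {
        Trace.Info($"GitHub request id: {requestId}");
    }
}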
@@ -23,7 +23,13 @@ namespace GitHub.Runner.Sdk

 if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
 {
-headerValues.AddRange(VssClientHttpRequestSettings.Default.UserAgent);
+foreach (var headerVal in VssClientHttpRequestSettings.Default.UserAgent)
+{
+if (!headerValues.Contains(headerVal))
+{
+headerValues.Add(headerVal);
+}
+}
 }

 VssClientHttpRequestSettings.Default.UserAgent = headerValues;
@@ -33,6 +39,23 @@ namespace GitHub.Runner.Sdk
 {
 VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
 }
+
+var rawHeaderValues = new List<ProductInfoHeaderValue>();
+rawHeaderValues.AddRange(additionalUserAgents);
+rawHeaderValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})"));
+
+if (RawClientHttpRequestSettings.Default.UserAgent != null && RawClientHttpRequestSettings.Default.UserAgent.Count > 0)
+{
+foreach (var headerVal in RawClientHttpRequestSettings.Default.UserAgent)
+{
+if (!rawHeaderValues.Contains(headerVal))
+{
+rawHeaderValues.Add(headerVal);
+}
+}
+}
+
+RawClientHttpRequestSettings.Default.UserAgent = rawHeaderValues;
 }

 public static VssConnection CreateConnection(
@@ -64,7 +87,7 @@ namespace GitHub.Runner.Sdk

 if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW")))
 {
-settings.AllowAutoRedirect = true;
+settings.AllowAutoRedirectForBroker = true;
 }

 // Remove Invariant from the list of accepted languages.
@@ -85,6 +108,35 @@ namespace GitHub.Runner.Sdk
 VssCredentials credentials,
 IEnumerable<DelegatingHandler> additionalDelegatingHandler = null,
 TimeSpan? timeout = null)
+{
+RawClientHttpRequestSettings settings = GetHttpRequestSettings(timeout);
+RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.Federated, settings), additionalDelegatingHandler);
+return connection;
+}
+
+public static VssCredentials GetVssCredential(ServiceEndpoint serviceEndpoint)
+{
+ArgUtil.NotNull(serviceEndpoint, nameof(serviceEndpoint));
+ArgUtil.NotNull(serviceEndpoint.Authorization, nameof(serviceEndpoint.Authorization));
+ArgUtil.NotNullOrEmpty(serviceEndpoint.Authorization.Scheme, nameof(serviceEndpoint.Authorization.Scheme));
+
+if (serviceEndpoint.Authorization.Parameters.Count == 0)
+{
+throw new ArgumentOutOfRangeException(nameof(serviceEndpoint));
+}
+
+VssCredentials credentials = null;
+string accessToken;
+if (serviceEndpoint.Authorization.Scheme == EndpointAuthorizationSchemes.OAuth &&
+serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out accessToken))
+{
+credentials = new VssCredentials(new VssOAuthAccessTokenCredential(accessToken), CredentialPromptType.DoNotPrompt);
+}
+
+return credentials;
+}
+
+public static RawClientHttpRequestSettings GetHttpRequestSettings(TimeSpan? timeout = null)
 {
 RawClientHttpRequestSettings settings = RawClientHttpRequestSettings.Default.Clone();

@@ -116,30 +168,7 @@ namespace GitHub.Runner.Sdk
 // settings are applied to an HttpRequestMessage.
 settings.AcceptLanguages.Remove(CultureInfo.InvariantCulture);

-RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.Federated, settings), additionalDelegatingHandler);
+return settings;
-return connection;
-}
-
-public static VssCredentials GetVssCredential(ServiceEndpoint serviceEndpoint)
-{
-ArgUtil.NotNull(serviceEndpoint, nameof(serviceEndpoint));
-ArgUtil.NotNull(serviceEndpoint.Authorization, nameof(serviceEndpoint.Authorization));
-ArgUtil.NotNullOrEmpty(serviceEndpoint.Authorization.Scheme, nameof(serviceEndpoint.Authorization.Scheme));
-
-if (serviceEndpoint.Authorization.Parameters.Count == 0)
-{
-throw new ArgumentOutOfRangeException(nameof(serviceEndpoint));
-}
-
-VssCredentials credentials = null;
-string accessToken;
-if (serviceEndpoint.Authorization.Scheme == EndpointAuthorizationSchemes.OAuth &&
-serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out accessToken))
-{
-credentials = new VssCredentials(new VssOAuthAccessTokenCredential(accessToken), CredentialPromptType.DoNotPrompt);
-}
-
-return credentials;
 }
 }
 }
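The change above replaces a blind AddRange with a containment check so duplicate user-agent entries are not appended each time the settings are re-applied. The same pattern in isolation (hypothetical helper; assumes ProductInfoHeaderValue equality by product and comment, as in System.Net.Http.Headers):

static void MergeUserAgents(IList<ProductInfoHeaderValue> target, IEnumerable<ProductInfoHeaderValue> source)
{
    foreach (var value in source)
    {
        // Only append entries that are not already present, so repeated
        // initialization does not keep growing the User-Agent header.
        if (!target.Contains(value))
        {
            target.Add(value);
        }
    }
}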
@@ -114,6 +114,128 @@ namespace GitHub.Runner.Sdk
 }
 }
+
+#if OS_WINDOWS
+trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'Path' environment variable.");
+#else
+trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'PATH' environment variable.");
+#endif
+if (require)
+{
+throw new FileNotFoundException(
+message: $"{command}: command not found",
+fileName: command);
+}
+
+return null;
+}
+
+public static string Which2(string command, bool require = false, ITraceWriter trace = null, string prependPath = null)
+{
+ArgUtil.NotNullOrEmpty(command, nameof(command));
+trace?.Info($"Which2: '{command}'");
+if (Path.IsPathFullyQualified(command) && File.Exists(command))
+{
+trace?.Info($"Fully qualified path: '{command}'");
+return command;
+}
+string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
+if (string.IsNullOrEmpty(path))
+{
+trace?.Info("PATH environment variable not defined.");
+path = path ?? string.Empty;
+}
+if (!string.IsNullOrEmpty(prependPath))
+{
+path = PathUtil.PrependPath(prependPath, path);
+}
+
+string[] pathSegments = path.Split(new Char[] { Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries);
+for (int i = 0; i < pathSegments.Length; i++)
+{
+pathSegments[i] = Environment.ExpandEnvironmentVariables(pathSegments[i]);
+}
+
+foreach (string pathSegment in pathSegments)
+{
+if (!string.IsNullOrEmpty(pathSegment) && Directory.Exists(pathSegment))
+{
+#if OS_WINDOWS
+string pathExt = Environment.GetEnvironmentVariable("PATHEXT");
+if (string.IsNullOrEmpty(pathExt))
+{
+// XP's system default value for PATHEXT system variable
+pathExt = ".com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh";
+}
+
+string[] pathExtSegments = pathExt.Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
+
+// if command already has an extension.
+if (pathExtSegments.Any(ext => command.EndsWith(ext, StringComparison.OrdinalIgnoreCase)))
+{
+try
+{
+foreach (var file in Directory.EnumerateFiles(pathSegment, command))
+{
+if (IsPathValid(file, trace))
+{
+trace?.Info($"Location: '{file}'");
+return file;
+}
+}
+}
+catch (UnauthorizedAccessException ex)
+{
+trace?.Info("Ignore UnauthorizedAccess exception during Which.");
+trace?.Verbose(ex.ToString());
+}
+}
+else
+{
+string searchPattern;
+searchPattern = StringUtil.Format($"{command}.*");
+try
+{
+foreach (var file in Directory.EnumerateFiles(pathSegment, searchPattern))
+{
+// add extension.
+for (int i = 0; i < pathExtSegments.Length; i++)
+{
+string fullPath = Path.Combine(pathSegment, $"{command}{pathExtSegments[i]}");
+if (string.Equals(file, fullPath, StringComparison.OrdinalIgnoreCase) && IsPathValid(fullPath, trace))
+{
+trace?.Info($"Location: '{fullPath}'");
+return fullPath;
+}
+}
+}
+}
+catch (UnauthorizedAccessException ex)
+{
+trace?.Info("Ignore UnauthorizedAccess exception during Which.");
+trace?.Verbose(ex.ToString());
+}
+}
+#else
+try
+{
+foreach (var file in Directory.EnumerateFiles(pathSegment, command))
+{
+if (IsPathValid(file, trace))
+{
+trace?.Info($"Location: '{file}'");
+return file;
+}
+}
+}
+catch (UnauthorizedAccessException ex)
+{
+trace?.Info("Ignore UnauthorizedAccess exception during Which.");
+trace?.Verbose(ex.ToString());
+}
+#endif
+}
+}
+
 #if OS_WINDOWS
 trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'Path' environment variable.");
 #else
@@ -134,7 +256,12 @@ namespace GitHub.Runner.Sdk
 {
 var fileInfo = new FileInfo(path);
 var linkTargetFullPath = fileInfo.Directory?.FullName + Path.DirectorySeparatorChar + fileInfo.LinkTarget;
-if (fileInfo.LinkTarget == null || File.Exists(linkTargetFullPath) || File.Exists(fileInfo.LinkTarget)) return true;
+if (fileInfo.LinkTarget == null ||
+File.Exists(linkTargetFullPath) ||
+File.Exists(fileInfo.LinkTarget))
+{
+return true;
+}
 trace?.Info($"the target '{fileInfo.LinkTarget}' of the symbolic link '{path}', does not exist");
 return false;
 }
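A hypothetical call to the new Which2 overload, assuming it lives on WhichUtil next to Which (which the rest of this diff already calls as WhichUtil.Which); the command name and prepended directory are illustrative:

// Resolve "node" from PATH, optionally searching an extra directory first.
string nodePath = WhichUtil.Which2("node", require: false, trace: Trace, prependPath: "/usr/local/bin");
if (string.IsNullOrEmpty(nodePath))
{
    Trace.Info("node was not found on PATH.");
}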
@@ -6,19 +6,20 @@ using System.Linq;
|
|||||||
using System.Net;
|
using System.Net;
|
||||||
using System.Net.Http;
|
using System.Net.Http;
|
||||||
using System.Net.Http.Headers;
|
using System.Net.Http.Headers;
|
||||||
|
using System.Security.Cryptography;
|
||||||
using System.Text;
|
using System.Text;
|
||||||
using System.Threading;
|
using System.Threading;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||||
|
using GitHub.DistributedTask.WebApi;
|
||||||
using GitHub.Runner.Common;
|
using GitHub.Runner.Common;
|
||||||
using GitHub.Runner.Common.Util;
|
using GitHub.Runner.Common.Util;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
using GitHub.Runner.Worker.Container;
|
using GitHub.Runner.Worker.Container;
|
||||||
using GitHub.Services.Common;
|
using GitHub.Services.Common;
|
||||||
using WebApi = GitHub.DistributedTask.WebApi;
|
|
||||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||||
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
|
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
|
||||||
using GitHub.DistributedTask.WebApi;
|
using WebApi = GitHub.DistributedTask.WebApi;
|
||||||
|
|
||||||
namespace GitHub.Runner.Worker
|
namespace GitHub.Runner.Worker
|
||||||
{
|
{
|
||||||
@@ -52,7 +53,6 @@ namespace GitHub.Runner.Worker
|
|||||||
|
|
||||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||||
private const int _defaultCopyBufferSize = 81920;
|
private const int _defaultCopyBufferSize = 81920;
|
||||||
private const string _dotcomApiUrl = "https://api.github.com";
|
|
||||||
|
|
||||||
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new();
|
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new();
|
||||||
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
|
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
|
||||||
@@ -115,6 +115,14 @@ namespace GitHub.Runner.Worker
|
|||||||
executionContext.Result = TaskResult.Failed;
|
executionContext.Result = TaskResult.Failed;
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
|
catch (InvalidActionArchiveException ex)
|
||||||
|
{
|
||||||
|
// Log the error and fail the PrepareActionsAsync Initialization.
|
||||||
|
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
|
||||||
|
executionContext.InfrastructureError(ex.Message);
|
||||||
|
executionContext.Result = TaskResult.Failed;
|
||||||
|
throw;
|
||||||
|
}
|
||||||
if (!FeatureManager.IsContainerHooksEnabled(executionContext.Global.Variables))
|
if (!FeatureManager.IsContainerHooksEnabled(executionContext.Global.Variables))
|
||||||
{
|
{
|
||||||
if (state.ImagesToPull.Count > 0)
|
if (state.ImagesToPull.Count > 0)
|
||||||
@@ -731,10 +739,7 @@ namespace GitHub.Runner.Worker

 ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
 ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
-var apiUrl = GetApiUrl(executionContext);
 var defaultAccessToken = executionContext.GetGitHubContext("token");
-var configurationStore = HostContext.GetService<IConfigurationStore>();
-var runnerSettings = configurationStore.GetSettings();

 foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
 {
@@ -758,6 +763,8 @@ namespace GitHub.Runner.Worker
 ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
 ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
 ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
+ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.ResolvedNameWithOwner));
+ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.ResolvedSha));

 string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
 string watermarkFile = GetWatermarkFilePath(destDirectory);
@@ -774,31 +781,6 @@ namespace GitHub.Runner.Worker
 executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' (SHA:{downloadInfo.ResolvedSha})");
 }

-await DownloadRepositoryActionAsync(executionContext, downloadInfo, destDirectory);
-}
-
-private string GetApiUrl(IExecutionContext executionContext)
-{
-string apiUrl = executionContext.GetGitHubContext("api_url");
-if (!string.IsNullOrEmpty(apiUrl))
-{
-return apiUrl;
-}
-// Once the api_url is set for hosted, we can remove this fallback (it doesn't make sense for GHES)
-return _dotcomApiUrl;
-}
-
-private static string BuildLinkToActionArchive(string apiUrl, string repository, string @ref)
-{
-#if OS_WINDOWS
-return $"{apiUrl}/repos/{repository}/zipball/{@ref}";
-#else
-return $"{apiUrl}/repos/{repository}/tarball/{@ref}";
-#endif
-}
-
-private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
-{
 //download and extract action in a temp folder and rename it on success
 string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
 Directory.CreateDirectory(tempDirectory);
@@ -811,108 +793,76 @@ namespace GitHub.Runner.Worker
 string link = downloadInfo?.TarballUrl;
 #endif

-Trace.Info($"Save archive '{link}' into {archiveFile}.");
 try
 {
-int retryCount = 0;
+var useActionArchiveCache = false;
+if (executionContext.Global.Variables.GetBoolean("DistributedTask.UseActionArchiveCache") == true)
-// Allow up to 20 * 60s for any action to be downloaded from github graph.
-int timeoutSeconds = 20 * 60;
-while (retryCount < 3)
 {
-using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
-using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken))
+var hasActionArchiveCache = false;
+var actionArchiveCacheDir = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory);
+if (!string.IsNullOrEmpty(actionArchiveCacheDir) &&
+Directory.Exists(actionArchiveCacheDir))
 {
-try
+hasActionArchiveCache = true;
+Trace.Info($"Check if action archive '{downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha}' already exists in cache directory '{actionArchiveCacheDir}'");
+#if OS_WINDOWS
+var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.zip");
+#else
+var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.tar.gz");
+#endif
+if (File.Exists(cacheArchiveFile))
 {
-//open zip stream in async mode
+try
-using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
-using (var httpClientHandler = HostContext.CreateHttpClientHandler())
-using (var httpClient = new HttpClient(httpClientHandler))
 {
-httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
+Trace.Info($"Found action archive '{cacheArchiveFile}' in cache directory '{actionArchiveCacheDir}'");
+File.Copy(cacheArchiveFile, archiveFile);
-httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
+useActionArchiveCache = true;
-using (var response = await httpClient.GetAsync(link))
+executionContext.Debug($"Copied action archive '{cacheArchiveFile}' to '{archiveFile}'");
-{
-if (response.IsSuccessStatusCode)
-{
-using (var result = await response.Content.ReadAsStreamAsync())
-{
-await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
-await fs.FlushAsync(actionDownloadCancellation.Token);
-
-// download succeed, break out the retry loop.
-break;
-}
-}
-else if (response.StatusCode == HttpStatusCode.NotFound)
-{
-// It doesn't make sense to retry in this case, so just stop
-throw new ActionNotFoundException(new Uri(link));
-}
-else
-{
-// Something else bad happened, let's go to our retry logic
-response.EnsureSuccessStatusCode();
-}
-}
-}
 }
-}
+catch (Exception ex)
-catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
-{
-Trace.Info("Action download has been cancelled.");
-throw;
-}
-catch (ActionNotFoundException)
-{
-Trace.Info($"The action at '{link}' does not exist");
-throw;
-}
-catch (Exception ex) when (retryCount < 2)
-{
-retryCount++;
-Trace.Error($"Fail to download archive '{link}' -- Attempt: {retryCount}");
-Trace.Error(ex);
-if (actionDownloadTimeout.Token.IsCancellationRequested)
 {
-// action download didn't finish within timeout
+Trace.Error($"Failed to copy action archive '{cacheArchiveFile}' to '{archiveFile}'. Error: {ex}");
-executionContext.Warning($"Action '{link}' didn't finish download within {timeoutSeconds} seconds.");
-}
-else
-{
-executionContext.Warning($"Failed to download action '{link}'. Error: {ex.Message}");
 }
 }
 }

-if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
+executionContext.Global.JobTelemetry.Add(new JobTelemetry()
 {
-var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
+Type = JobTelemetryType.General,
-executionContext.Warning($"Back off {backOff.TotalSeconds} seconds before retry.");
+Message = $"Action archive cache usage: {downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha} use cache {useActionArchiveCache} has cache {hasActionArchiveCache}"
-await Task.Delay(backOff);
+});
-}
 }

-ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
+if (!useActionArchiveCache)
-executionContext.Debug($"Download '{link}' to '{archiveFile}'");
+{
+await DownloadRepositoryArchive(executionContext, link, downloadInfo.Authentication?.Token, archiveFile);
+}

 var stagingDirectory = Path.Combine(tempDirectory, "_staging");
 Directory.CreateDirectory(stagingDirectory);

 #if OS_WINDOWS
-ZipFile.ExtractToDirectory(archiveFile, stagingDirectory);
+try
+{
+ZipFile.ExtractToDirectory(archiveFile, stagingDirectory);
+}
+catch (InvalidDataException e)
+{
+throw new InvalidActionArchiveException($"Can't un-zip archive file: {archiveFile}. action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. error: {e}.");
+}
 #else
 string tar = WhichUtil.Which("tar", require: true, trace: Trace);

 // tar -xzf
 using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
 {
+var tarOutputs = new List<string>();
 processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
 {
 if (!string.IsNullOrEmpty(args.Data))
 {
 Trace.Info(args.Data);
+tarOutputs.Add($"STDOUT: {args.Data}");
 }
 });

@@ -921,13 +871,23 @@ namespace GitHub.Runner.Worker
 if (!string.IsNullOrEmpty(args.Data))
 {
 Trace.Error(args.Data);
+tarOutputs.Add($"STDERR: {args.Data}");
 }
 });

 int exitCode = await processInvoker.ExecuteAsync(stagingDirectory, tar, $"-xzf \"{archiveFile}\"", null, executionContext.CancellationToken);
 if (exitCode != 0)
 {
-throw new NotSupportedException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. return code: {exitCode}.");
+if (executionContext.Global.Variables.GetBoolean("DistributedTask.DetailUntarFailure") == true)
+{
+var fileInfo = new FileInfo(archiveFile);
+var sha256hash = await IOUtil.GetFileContentSha256HashAsync(archiveFile);
+throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile} (SHA256 '{sha256hash}', size '{fileInfo.Length}' bytes, tar outputs '{string.Join(' ', tarOutputs)}'). Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
+}
+else
+{
+throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
+}
 }
 }
 #endif
@@ -945,7 +905,6 @@ namespace GitHub.Runner.Worker
 }

 Trace.Verbose("Create watermark file indicate action download succeed.");
-string watermarkFile = GetWatermarkFilePath(destDirectory);
 File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());

 executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'.");
@@ -970,29 +929,6 @@ namespace GitHub.Runner.Worker
 }
 }

-private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
-{
-var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
-if (string.IsNullOrEmpty(authToken))
-{
-// TODO: Deprecate the PREVIEW_ACTION_TOKEN
-authToken = executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN");
-}
-
-if (!string.IsNullOrEmpty(authToken))
-{
-HostContext.SecretMasker.AddValue(authToken);
-var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
-httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
-}
-else
-{
-var accessToken = executionContext.GetGitHubContext("token");
-var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
-httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
-}
-}
-
 private string GetWatermarkFilePath(string directory) => directory + ".completed";

 private ActionSetupInfo PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
@@ -1042,7 +978,7 @@ namespace GitHub.Runner.Worker
 if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Container)
 {
 var containerAction = actionDefinitionData.Execution as ContainerActionExecutionData;
-if (containerAction.Image.EndsWith("Dockerfile") || containerAction.Image.EndsWith("dockerfile"))
+if (DockerUtil.IsDockerfile(containerAction.Image))
 {
 var dockerFileFullPath = Path.Combine(actionEntryDirectory, containerAction.Image);
 executionContext.Debug($"Dockerfile for action: '{dockerFileFullPath}'.");
@@ -1127,8 +1063,16 @@ namespace GitHub.Runner.Worker
 }
 else
 {
-var fullPath = IOUtil.ResolvePath(actionEntryDirectory, "."); // resolve full path without access filesystem.
+var reference = repositoryReference.Name;
-throw new InvalidOperationException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
+if (!string.IsNullOrEmpty(repositoryReference.Path))
+{
+reference = $"{reference}/{repositoryReference.Path}";
+}
+if (!string.IsNullOrEmpty(repositoryReference.Ref))
+{
+reference = $"{reference}@{repositoryReference.Ref}";
+}
+throw new InvalidOperationException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' for action '{reference}'.");
 }
 }

@@ -1157,13 +1101,6 @@ namespace GitHub.Runner.Worker
 return $"{repositoryReference.Name}@{repositoryReference.Ref}";
 }

-private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
-{
-ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
-ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
-return $"{info.NameWithOwner}@{info.Ref}";
-}
-
 private AuthenticationHeaderValue CreateAuthHeader(string token)
 {
 if (string.IsNullOrEmpty(token))
@@ -1175,6 +1112,104 @@ namespace GitHub.Runner.Worker
 HostContext.SecretMasker.AddValue(base64EncodingToken);
 return new AuthenticationHeaderValue("Basic", base64EncodingToken);
 }

+private async Task DownloadRepositoryArchive(IExecutionContext executionContext, string downloadUrl, string downloadAuthToken, string archiveFile)
+{
+Trace.Info($"Save archive '{downloadUrl}' into {archiveFile}.");
+int retryCount = 0;
+
+// Allow up to 20 * 60s for any action to be downloaded from github graph.
+int timeoutSeconds = 20 * 60;
+while (retryCount < 3)
+{
+using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
+using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken))
+{
+try
+{
+//open zip stream in async mode
+using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
+using (var httpClientHandler = HostContext.CreateHttpClientHandler())
+using (var httpClient = new HttpClient(httpClientHandler))
+{
+httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadAuthToken);
+
+httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
+using (var response = await httpClient.GetAsync(downloadUrl))
+{
+var requestId = UrlUtil.GetGitHubRequestId(response.Headers);
+if (!string.IsNullOrEmpty(requestId))
+{
+Trace.Info($"Request URL: {downloadUrl} X-GitHub-Request-Id: {requestId} Http Status: {response.StatusCode}");
+}
+
+if (response.IsSuccessStatusCode)
+{
+using (var result = await response.Content.ReadAsStreamAsync())
+{
+await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
+await fs.FlushAsync(actionDownloadCancellation.Token);
+
+// download succeed, break out the retry loop.
+break;
+}
+}
+else if (response.StatusCode == HttpStatusCode.NotFound)
+{
+// It doesn't make sense to retry in this case, so just stop
+throw new ActionNotFoundException(new Uri(downloadUrl), requestId);
+}
+else
+{
+// Something else bad happened, let's go to our retry logic
+response.EnsureSuccessStatusCode();
+}
+}
+}
+}
+catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
+{
+Trace.Info("Action download has been cancelled.");
+throw;
+}
+catch (OperationCanceledException ex) when (!executionContext.CancellationToken.IsCancellationRequested && retryCount >= 2)
+{
+Trace.Info($"Action download final retry timeout after {timeoutSeconds} seconds.");
+throw new TimeoutException($"Action '{downloadUrl}' download has timed out. Error: {ex.Message}");
+}
+catch (ActionNotFoundException)
+{
+Trace.Info($"The action at '{downloadUrl}' does not exist");
+throw;
+}
+catch (Exception ex) when (retryCount < 2)
+{
+retryCount++;
+Trace.Error($"Fail to download archive '{downloadUrl}' -- Attempt: {retryCount}");
+Trace.Error(ex);
+if (actionDownloadTimeout.Token.IsCancellationRequested)
+{
+// action download didn't finish within timeout
+executionContext.Warning($"Action '{downloadUrl}' didn't finish download within {timeoutSeconds} seconds.");
+}
+else
+{
+executionContext.Warning($"Failed to download action '{downloadUrl}'. Error: {ex.Message}");
+}
+}
+}
+
+if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
+{
+var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
+executionContext.Warning($"Back off {backOff.TotalSeconds} seconds before retry.");
+await Task.Delay(backOff);
+}
+}
+
+ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
+executionContext.Debug($"Download '{downloadUrl}' to '{archiveFile}'");
+}
 }

 public sealed class Definition
@@ -144,7 +144,7 @@ namespace GitHub.Runner.Worker
 executionContext.Error(error.Message);
 }

-throw new ArgumentException($"Fail to load {fileRelativePath}");
+throw new ArgumentException($"Failed to load {fileRelativePath}");
 }

 if (actionDefinition.Execution == null)
@@ -449,7 +449,8 @@ namespace GitHub.Runner.Worker
 }
 }
 else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase) ||
-string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase))
+string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase) ||
+string.Equals(usingToken.Value, "node20", StringComparison.OrdinalIgnoreCase))
 {
 if (string.IsNullOrEmpty(mainToken?.Value))
 {
@@ -489,7 +490,7 @@ namespace GitHub.Runner.Worker
 }
 else
 {
-throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12' or 'node16' instead.");
+throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12', 'node16' or 'node20' instead.");
 }
 }
 else if (pluginToken != null)
@@ -500,7 +501,7 @@ namespace GitHub.Runner.Worker
 };
 }

-throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12' or 'node16'.");
+throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16' or 'node20'.");
 }

 private void ConvertInputs(
@@ -5,8 +5,8 @@ namespace GitHub.Runner.Worker
 {
 public class ActionNotFoundException : Exception
 {
-public ActionNotFoundException(Uri actionUri)
+public ActionNotFoundException(Uri actionUri, string requestId)
-: base(FormatMessage(actionUri))
+: base(FormatMessage(actionUri, requestId))
 {
 }

@@ -25,8 +25,13 @@ namespace GitHub.Runner.Worker
 {
 }

-private static string FormatMessage(Uri actionUri)
+private static string FormatMessage(Uri actionUri, string requestId)
 {
+if (!string.IsNullOrEmpty(requestId))
+{
+return $"An action could not be found at the URI '{actionUri}' ({requestId})";
+}
+
 return $"An action could not be found at the URI '{actionUri}'";
 }
 }
@@ -2,6 +2,7 @@
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
+using System.Text;
 using System.Text.RegularExpressions;
 using System.Threading;
 using System.Threading.Channels;
@@ -46,7 +47,9 @@ namespace GitHub.Runner.Worker.Container
 {
 base.Initialize(hostContext);
 DockerPath = WhichUtil.Which("docker", true, Trace);
-DockerInstanceLabel = IOUtil.GetSha256Hash(hostContext.GetDirectory(WellKnownDirectory.Root)).Substring(0, 6);
+string path = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), ".runner");
+string json = File.ReadAllText(path, Encoding.UTF8);
+DockerInstanceLabel = IOUtil.GetSha256Hash(json).Substring(0, 6);
 }

 public async Task<DockerVersion> DockerVersion(IExecutionContext context)
@@ -1,5 +1,6 @@
 using System;
 using System.Collections.Generic;
+using System.Linq;
 using System.Text.RegularExpressions;

 namespace GitHub.Runner.Worker.Container
@@ -65,6 +66,16 @@ namespace GitHub.Runner.Worker.Container
 return "";
 }

+public static bool IsDockerfile(string image)
+{
+if (image.StartsWith("docker://", StringComparison.OrdinalIgnoreCase))
+{
+return false;
+}
+var imageWithoutPath = image.Split('/').Last();
+return imageWithoutPath.StartsWith("Dockerfile.", StringComparison.OrdinalIgnoreCase) || imageWithoutPath.EndsWith("Dockerfile", StringComparison.OrdinalIgnoreCase);
+}
+
 public static string CreateEscapedOption(string flag, string key)
 {
 if (String.IsNullOrEmpty(key))
@@ -56,7 +56,7 @@ namespace GitHub.Runner.Worker
 // \_layout\_work\_temp\[jobname-support]\files\environment.txt
 var configurationStore = HostContext.GetService<IConfigurationStore>();
 RunnerSettings settings = configurationStore.GetSettings();
-int runnerId = settings.AgentId;
+ulong runnerId = settings.AgentId;
 string runnerName = settings.AgentName;
 int poolId = settings.PoolId;

@@ -108,6 +108,8 @@ namespace GitHub.Runner.Worker

 parentContext.QueueAttachFile(type: CoreAttachmentType.DiagnosticLog, name: diagnosticsZipFileName, filePath: diagnosticsZipFilePath);

+parentContext.QueueDiagnosticLogFile(name: diagnosticsZipFileName, filePath: diagnosticsZipFilePath);
+
 executionContext.Debug("Diagnostic file upload complete.");
 }

@@ -78,17 +78,19 @@ namespace GitHub.Runner.Worker
 List<string> StepEnvironmentOverrides { get; }

 ExecutionContext Root { get; }
+ExecutionContext Parent { get; }

 // Initialize
 void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token);
 void CancelToken();
-IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null);
+IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null, TimeSpan? timeout = null);
 IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, ActionRunStage stage, Dictionary<string, string> intraActionState = null, string siblingScopeName = null);

 // logging
 long Write(string tag, string message);
 void QueueAttachFile(string type, string name, string filePath);
 void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
+void QueueDiagnosticLogFile(string name, string filePath);

 // timeline record update methods
 void Start(string currentOperation = null);
@@ -264,6 +266,14 @@ namespace GitHub.Runner.Worker
 }
 }

+public ExecutionContext Parent
+{
+get
+{
+return _parentExecutionContext;
+}
+}
+
 public JobContext JobContext
 {
 get
@@ -348,7 +358,8 @@ namespace GitHub.Runner.Worker
 bool isEmbedded = false,
 CancellationTokenSource cancellationTokenSource = null,
 Guid embeddedId = default(Guid),
-string siblingScopeName = null)
+string siblingScopeName = null,
+TimeSpan? timeout = null)
 {
 Trace.Entering();

@@ -377,15 +388,21 @@ namespace GitHub.Runner.Worker
 child.ExpressionFunctions.Add(item);
 }
 child._cancellationTokenSource = cancellationTokenSource ?? new CancellationTokenSource();
+if (timeout != null)
+{
+// composite steps inherit the timeout from the parent, set by https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepstimeout-minutes
+child.SetTimeout(timeout);
+}
+
 child.EchoOnActionCommand = EchoOnActionCommand;

 if (recordOrder != null)
 {
-child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, recordOrder);
+child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, recordOrder, embedded: isEmbedded);
 }
 else
 {
-child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder);
+child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder, embedded: isEmbedded);
 }
 if (logger != null)
 {
@@ -406,7 +423,7 @@ namespace GitHub.Runner.Worker

 /// <summary>
 /// An embedded execution context shares the same record ID, record name, logger,
-/// and a linked cancellation token.
+/// but NOT the cancellation token (just like workflow steps contexts - they don't share a token)
 /// </summary>
 public IExecutionContext CreateEmbeddedChild(
 string scopeName,
@@ -416,7 +433,7 @@ namespace GitHub.Runner.Worker
 Dictionary<string, string> intraActionState = null,
 string siblingScopeName = null)
 {
-return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token), intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName);
+return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: null, intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName, timeout: GetRemainingTimeout(), recordOrder: _record.Order);
 }

 public void Start(string currentOperation = null)
@@ -477,28 +494,32 @@ namespace GitHub.Runner.Worker

 PublishStepTelemetry();

-var stepResult = new StepResult
+if (_record.RecordType == "Task")
 {
-ExternalID = _record.Id,
+var stepResult = new StepResult
-Conclusion = _record.Result ?? TaskResult.Succeeded,
-Status = _record.State,
-Number = _record.Order,
-Name = _record.Name,
-StartedAt = _record.StartTime,
-CompletedAt = _record.FinishTime,
-Annotations = new List<Annotation>()
-};
-
-_record.Issues?.ForEach(issue =>
-{
-var annotation = issue.ToAnnotation();
-if (annotation != null)
 {
-stepResult.Annotations.Add(annotation.Value);
+ExternalID = _record.Id,
-}
+Conclusion = _record.Result ?? TaskResult.Succeeded,
-});
+Status = _record.State,
+Number = _record.Order,
+Name = _record.Name,
+StartedAt = _record.StartTime,
+CompletedAt = _record.FinishTime,
+Annotations = new List<Annotation>()
+};
+
+_record.Issues?.ForEach(issue =>
+{
+var annotation = issue.ToAnnotation();
+if (annotation != null)
+{
+stepResult.Annotations.Add(annotation.Value);
+}
+});
+
+Global.StepsResult.Add(stepResult);
+}

-Global.StepsResult.Add(stepResult);

 if (Root != this)
 {
@@ -593,9 +614,33 @@ namespace GitHub.Runner.Worker
 if (timeout != null)
 {
 _cancellationTokenSource.CancelAfter(timeout.Value);
+m_timeoutStartedAt = DateTime.UtcNow;
+m_timeout = timeout.Value;
 }
 }

+DateTime? m_timeoutStartedAt;
+TimeSpan? m_timeout;
+public TimeSpan? GetRemainingTimeout()
+{
+if (m_timeoutStartedAt != null && m_timeout != null)
+{
+var elapsedSinceTimeoutSet = DateTime.UtcNow - m_timeoutStartedAt.Value;
+var remainingTimeout = m_timeout.Value - elapsedSinceTimeoutSet;
+if (remainingTimeout.Ticks > 0)
+{
+return remainingTimeout;
+}
+else
+{
+// there was a timeout and it has expired
+return TimeSpan.Zero;
+}
+}
+// no timeout was ever set
+return null;
+}
+
 public void Progress(int percentage, string currentOperation = null)
 {
 if (percentage > 100 || percentage < 0)
@@ -763,9 +808,9 @@ namespace GitHub.Runner.Worker

 Global.Variables = new Variables(HostContext, variables);

-if (Global.Variables.GetBoolean("DistributedTask.ForceInternalNodeVersionOnRunnerTo12") ?? false)
+if (Global.Variables.GetBoolean("DistributedTask.ForceInternalNodeVersionOnRunnerTo16") ?? false)
 {
-Environment.SetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion, "node12");
+Environment.SetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion, "node16");
 }

 // Environment variables shared across all actions
@@ -938,6 +983,18 @@ namespace GitHub.Runner.Worker
 _jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
 }

+public void QueueDiagnosticLogFile(string name, string filePath)
+{
+ArgUtil.NotNullOrEmpty(name, nameof(name));
+ArgUtil.NotNullOrEmpty(filePath, nameof(filePath));
+
+if (!File.Exists(filePath))
+{
+throw new FileNotFoundException($"Can't upload diagnostic log file: {filePath}. File does not exist.");
+}
+_jobServerQueue.QueueResultsUpload(_record.Id, name, filePath, CoreAttachmentType.ResultsDiagnosticLog, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
+}
+
 // Add OnMatcherChanged
 public void Add(OnMatcherChanged handler)
 {
@@ -1116,7 +1173,7 @@ namespace GitHub.Runner.Worker
 }
 }

-private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
+private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order, bool embedded = false)
 {
 _mainTimelineId = timelineId;
 _record.Id = timelineRecordId;
@@ -1142,7 +1199,11 @@ namespace GitHub.Runner.Worker
 var configuration = HostContext.GetService<IConfigurationStore>();
 _record.WorkerName = configuration.GetSettings().AgentName;

-_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
+// We don't want to update the timeline record for embedded steps since they are not really represented in the UI.
+if (!embedded)
+{
+_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
+}
 }

 private void JobServerQueueThrottling_EventReceived(object sender, ThrottlingEventArgs data)
@@ -1355,6 +1416,12 @@ namespace GitHub.Runner.Worker
 {
 foreach (var key in dict.Keys.ToList())
 {
+if (key == PipelineTemplateConstants.HostWorkspace)
+{
+// The HostWorkspace context var is excluded so that there is a var that always points to the host path.
+// This var can be used to translate back from container paths, e.g. in HashFilesFunction, which always runs on the host machine
+continue;
+}
 if (dict[key] is StringContextData)
 {
 var value = dict[key].ToString();
@@ -1401,7 +1468,15 @@ namespace GitHub.Runner.Worker

 public void Error(string format, params Object[] args)
 {
-_executionContext.Error(string.Format(CultureInfo.CurrentCulture, format, args));
+/* TraceWriter should be used for logging and not creating erros. */
+if (logTemplateErrorsAsDebugMessages())
+{
+_executionContext.Debug(string.Format(CultureInfo.CurrentCulture, format, args));
+}
+else
+{
+_executionContext.Error(string.Format(CultureInfo.CurrentCulture, format, args));
+}
 }

 public void Info(string format, params Object[] args)
@@ -1414,8 +1489,18 @@ namespace GitHub.Runner.Worker
 // todo: switch to verbose?
 _executionContext.Debug(string.Format(CultureInfo.CurrentCulture, $"{format}", args));
 }

+private bool logTemplateErrorsAsDebugMessages()
+{
+if (_executionContext.Global.Variables.TryGetValue(Constants.Runner.Features.LogTemplateErrorsAsDebugMessages, out var logErrorsAsDebug))
+{
+return StringUtil.ConvertToBoolean(logErrorsAsDebug, defaultValue: false);
+}
+return false;
+}
 }


 public static class WellKnownTags
 {
 public static readonly string Section = "##[section]";
@@ -26,11 +26,18 @@ namespace GitHub.Runner.Worker.Expressions
 ArgUtil.NotNull(githubContextData, nameof(githubContextData));
 var githubContext = githubContextData as DictionaryContextData;
 ArgUtil.NotNull(githubContext, nameof(githubContext));
-githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out var workspace);
+if (!githubContext.TryGetValue(PipelineTemplateConstants.HostWorkspace, out var workspace))
+{
+githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out workspace);
+}
+ArgUtil.NotNull(workspace, nameof(workspace));
+
 var workspaceData = workspace as StringContextData;
 ArgUtil.NotNull(workspaceData, nameof(workspaceData));

 string githubWorkspace = workspaceData.Value;

 bool followSymlink = false;
 List<string> patterns = new();
 var firstParameter = true;
@@ -141,6 +141,28 @@ namespace GitHub.Runner.Worker
 var pairs = new EnvFileKeyValuePairs(context, filePath);
 foreach (var pair in pairs)
 {
+var isBlocked = false;
+foreach (var blocked in _setEnvBlockList)
+{
+if (string.Equals(blocked, pair.Key, StringComparison.OrdinalIgnoreCase))
+{
+// Log Telemetry and let user know they shouldn't do this
+var issue = new Issue()
+{
+Type = IssueType.Error,
+Message = $"Can't store {blocked} output parameter using '$GITHUB_ENV' command."
+};
+issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = $"{Constants.Runner.UnsupportedCommand}_{pair.Key}";
+context.AddIssue(issue, ExecutionContextLogOptions.Default);
+
+isBlocked = true;
+break;
+}
+}
+if (isBlocked)
+{
+continue;
+}
 SetEnvironmentVariable(context, pair.Key, pair.Value);
 }
 }
@@ -154,6 +176,11 @@ namespace GitHub.Runner.Worker
 context.SetEnvContext(name, value);
 context.Debug($"{name}='{value}'");
 }

+private string[] _setEnvBlockList =
+{
+"NODE_OPTIONS"
+};
 }

 public sealed class CreateStepSummaryCommand : RunnerService, IFileCommandExtension
@@ -310,6 +310,7 @@ namespace GitHub.Runner.Worker.Handlers
 // Mark job as cancelled
 ExecutionContext.Root.Result = TaskResult.Canceled;
 ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult();
+step.ExecutionContext.SetGitHubContext("action_status", (ExecutionContext.Root.Result?.ToActionResult() ?? ActionResult.Cancelled).ToString().ToLowerInvariant());

 step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
 var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
@@ -56,7 +56,7 @@ namespace GitHub.Runner.Worker.Handlers
 {
 Data.Image = Data.Image.Substring("docker://".Length);
 }
-else if (Data.Image.EndsWith("Dockerfile") || Data.Image.EndsWith("dockerfile"))
+else if (DockerUtil.IsDockerfile(Data.Image))
 {
 // ensure docker file exist
 dockerFile = Path.Combine(ActionDirectory, Data.Image);
@@ -228,6 +228,10 @@ namespace GitHub.Runner.Worker.Handlers
 Environment["ACTIONS_ID_TOKEN_REQUEST_URL"] = generateIdTokenUrl;
 Environment["ACTIONS_ID_TOKEN_REQUEST_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken];
 }
+if (systemConnection.Data.TryGetValue("ResultsServiceUrl", out var resultsUrl) && !string.IsNullOrEmpty(resultsUrl))
+{
+Environment["ACTIONS_RESULTS_URL"] = resultsUrl;
+}
+
 foreach (var variable in this.Environment)
 {
@@ -58,18 +58,31 @@ namespace GitHub.Runner.Worker.Handlers
 var nodeData = data as NodeJSActionExecutionData;

 // With node12 EoL in 04/2022, we want to be able to uniformly upgrade all JS actions to node16 from the server
-if (string.Equals(nodeData.NodeVersion, "node12", StringComparison.InvariantCultureIgnoreCase) &&
+if (string.Equals(nodeData.NodeVersion, "node12", StringComparison.InvariantCultureIgnoreCase))
-(executionContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode16") ?? false))
 {
-// The user can opt out of this behaviour by setting this variable to true, either setting 'env' in their workflow or as an environment variable on their machine
+var repoAction = action as Pipelines.RepositoryPathReference;
-executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, out var workflowOptOut);
+if (repoAction != null)
-var isWorkflowOptOutSet = !string.IsNullOrEmpty(workflowOptOut);
-var isLocalOptOut = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion));
-bool isOptOut = isWorkflowOptOutSet ? StringUtil.ConvertToBoolean(workflowOptOut) : isLocalOptOut;
-if (!isOptOut)
 {
-nodeData.NodeVersion = "node16";
+var warningActions = new HashSet<string>();
+if (executionContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings))
+{
+warningActions = StringUtil.ConvertFromJson<HashSet<string>>(node16ForceWarnings);
+}
+
+string repoActionFullName;
+if (string.IsNullOrEmpty(repoAction.Name))
+{
+repoActionFullName = repoAction.Path; // local actions don't have a 'Name'
+}
+else
+{
+repoActionFullName = $"{repoAction.Name}/{repoAction.Path ?? string.Empty}".TrimEnd('/') + $"@{repoAction.Ref}";
+}
+
+warningActions.Add(repoActionFullName);
+executionContext.Global.Variables.Set("Node16ForceActionsWarnings", StringUtil.ConvertToJson(warningActions));
 }
+nodeData.NodeVersion = "node16";
 }
 (handler as INodeScriptActionHandler).Data = nodeData;
 }
@@ -63,6 +63,10 @@ namespace GitHub.Runner.Worker.Handlers
 Environment["ACTIONS_ID_TOKEN_REQUEST_URL"] = generateIdTokenUrl;
 Environment["ACTIONS_ID_TOKEN_REQUEST_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken];
 }
+if (systemConnection.Data.TryGetValue("ResultsServiceUrl", out var resultsUrl) && !string.IsNullOrEmpty(resultsUrl))
+{
+Environment["ACTIONS_RESULTS_URL"] = resultsUrl;
+}
+
 // Resolve the target script.
 string target = null;
@@ -98,20 +102,14 @@ namespace GitHub.Runner.Worker.Handlers
 workingDirectory = HostContext.GetDirectory(WellKnownDirectory.Work);
 }

-#if OS_OSX || OS_WINDOWS
 if (string.Equals(Data.NodeVersion, "node12", StringComparison.OrdinalIgnoreCase) &&
 Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.Arm64))
 {
-#if OS_OSX
+ExecutionContext.Output($"The node12 is not supported. Use node16 instead.");
-ExecutionContext.Output($"The node12 is not supported on macOS ARM64 platform. Use node16 instead.");
-#elif OS_WINDOWS
-ExecutionContext.Output($"The node12 is not supported on windows ARM64 platform. Use node16 instead.");
-#endif
 Data.NodeVersion = "node16";
 }
-#endif
-string forcedNodeVersion = System.Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedActionsNodeVersion);

+string forcedNodeVersion = System.Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedActionsNodeVersion);
 if (forcedNodeVersion == "node16" && Data.NodeVersion != "node16")
 {
 Data.NodeVersion = "node16";
@@ -136,27 +134,26 @@ namespace GitHub.Runner.Worker.Handlers
|
|||||||
// Remove environment variable that may cause conflicts with the node within the runner.
|
// Remove environment variable that may cause conflicts with the node within the runner.
|
||||||
Environment.Remove("NODE_ICU_DATA"); // https://github.com/actions/runner/issues/795
|
Environment.Remove("NODE_ICU_DATA"); // https://github.com/actions/runner/issues/795
|
||||||
|
|
||||||
if (Data.NodeVersion == "node12" && (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.Node12Warning) ?? false))
|
if (string.Equals(Data.NodeVersion, Constants.Runner.DeprecatedNodeVersion, StringComparison.OrdinalIgnoreCase) && (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.Node16Warning) ?? false))
|
||||||
{
|
{
|
||||||
var repoAction = Action as RepositoryPathReference;
|
var repoAction = Action as RepositoryPathReference;
|
||||||
var warningActions = new HashSet<string>();
|
var warningActions = new HashSet<string>();
|
||||||
if (ExecutionContext.Global.Variables.TryGetValue("Node12ActionsWarnings", out var node12Warnings))
|
if (ExecutionContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings))
|
||||||
{
|
{
|
||||||
warningActions = StringUtil.ConvertFromJson<HashSet<string>>(node12Warnings);
|
warningActions = StringUtil.ConvertFromJson<HashSet<string>>(deprecatedNodeWarnings);
|
||||||
}
|
}
|
||||||
|
|
||||||
var repoActionFullName = "";
|
|
||||||
if (string.IsNullOrEmpty(repoAction.Name))
|
if (string.IsNullOrEmpty(repoAction.Name))
|
||||||
{
|
{
|
||||||
repoActionFullName = repoAction.Path; // local actions don't have a 'Name'
|
// local actions don't have a 'Name'
|
||||||
|
warningActions.Add(repoAction.Path);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
repoActionFullName = $"{repoAction.Name}/{repoAction.Path ?? string.Empty}".TrimEnd('/') + $"@{repoAction.Ref}";
|
warningActions.Add($"{repoAction.Name}/{repoAction.Path ?? string.Empty}".TrimEnd('/') + $"@{repoAction.Ref}");
|
||||||
}
|
}
|
||||||
|
|
||||||
warningActions.Add(repoActionFullName);
|
ExecutionContext.Global.Variables.Set(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, StringUtil.ConvertToJson(warningActions));
|
||||||
ExecutionContext.Global.Variables.Set("Node12ActionsWarnings", StringUtil.ConvertToJson(warningActions));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager))
|
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager))
|
||||||
|
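The hunks above move the node12 warning bookkeeping onto the DeprecatedNode* constants while keeping the same accumulate-and-serialize shape: each offending action is added to a de-duplicated set that is round-tripped through JSON in a shared variable. A minimal standalone sketch of that pattern, using System.Text.Json and an in-memory dictionary as stand-ins for the runner's StringUtil helpers and global variable store (DeprecatedNodeTracker and its members are illustrative names, not runner APIs):

using System;
using System.Collections.Generic;
using System.Text.Json;

class DeprecatedNodeTracker
{
    // Stand-in for the job's global variable store.
    private readonly Dictionary<string, string> _globalVariables = new();
    private const string WarningsKey = "DeprecatedNodeDetectedAfterEndOfLifeActions";

    public void RecordDeprecatedAction(string name, string path, string reference)
    {
        // Rehydrate the set recorded by earlier steps, if any.
        var warningActions = new HashSet<string>();
        if (_globalVariables.TryGetValue(WarningsKey, out var existing))
        {
            warningActions = JsonSerializer.Deserialize<HashSet<string>>(existing);
        }

        // Local actions have no 'Name'; repository actions get name/path@ref.
        var fullName = string.IsNullOrEmpty(name)
            ? path
            : $"{name}/{path ?? string.Empty}".TrimEnd('/') + $"@{reference}";
        warningActions.Add(fullName);

        // Persist the set back as JSON so the job-level warning can join it later.
        _globalVariables[WarningsKey] = JsonSerializer.Serialize(warningActions);
    }
}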
@@ -83,19 +83,40 @@ namespace GitHub.Runner.Worker.Handlers
 shellCommand = "pwsh";
 if (validateShellOnHost)
 {
-shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
+if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
+{
+shellCommandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath);
+}
+else
+{
+shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
+}
 if (string.IsNullOrEmpty(shellCommandPath))
 {
 shellCommand = "powershell";
 Trace.Info($"Defaulting to {shellCommand}");
-shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
+if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
+{
+shellCommandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath);
+}
+else
+{
+shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
+}
 }
 }
 #else
 shellCommand = "sh";
 if (validateShellOnHost)
 {
-shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
+if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
+{
+shellCommandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath);
+}
+else
+{
+shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
+}
 }
 #endif
 argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
@@ -106,7 +127,14 @@ namespace GitHub.Runner.Worker.Handlers
 shellCommand = parsed.shellCommand;
 if (validateShellOnHost)
 {
-shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath);
+if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
+{
+shellCommandPath = WhichUtil.Which2(parsed.shellCommand, true, Trace, prependPath);
+}
+else
+{
+shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath);
+}
 }

 argFormat = $"{parsed.shellArgs}".TrimStart();
@@ -188,17 +216,38 @@ namespace GitHub.Runner.Worker.Handlers
 {
 #if OS_WINDOWS
 shellCommand = "pwsh";
-commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
+if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
+{
+commandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath);
+}
+else
+{
+commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
+}
 if (string.IsNullOrEmpty(commandPath))
 {
 shellCommand = "powershell";
 Trace.Info($"Defaulting to {shellCommand}");
-commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
+if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
+{
+commandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath);
+}
+else
+{
+commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
+}
 }
 ArgUtil.NotNullOrEmpty(commandPath, "Default Shell");
 #else
 shellCommand = "sh";
-commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
+if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
+{
+commandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath);
+}
+else
+{
+commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
+}
 #endif
 argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
 }
@@ -209,7 +258,14 @@ namespace GitHub.Runner.Worker.Handlers
 if (!IsActionStep && systemShells.Contains(shell))
 {
 shellCommand = shell;
-commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath);
+if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
+{
+commandPath = WhichUtil.Which2(shell, !isContainerStepHost, Trace, prependPath);
+}
+else
+{
+commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath);
+}
 if (shell == "bash")
 {
 argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat("sh");
@@ -224,7 +280,14 @@ namespace GitHub.Runner.Worker.Handlers
 var parsed = ScriptHandlerHelpers.ParseShellOptionString(shell);
 shellCommand = parsed.shellCommand;
 // For non-ContainerStepHost, the command must be located on the host by Which
-commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
+if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
+{
+commandPath = WhichUtil.Which2(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
+}
+else
+{
+commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
+}
 argFormat = $"{parsed.shellArgs}".TrimStart();
 if (string.IsNullOrEmpty(argFormat))
 {
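The ScriptHandler hunks above repeatedly gate shell resolution behind the "DistributedTask.UseWhich2" variable, keeping the old lookup as the default and keeping the pwsh-to-powershell fallback intact. A small sketch of that flag-switch-with-fallback shape, with plain delegates standing in for WhichUtil.Which and WhichUtil.Which2 (ShellLookup and the delegate parameters are illustrative, not runner APIs):

using System;

static class ShellLookup
{
    // Mirrors the structure of the change: pick the lookup implementation from a
    // boolean feature flag, try pwsh first, then fall back to powershell.
    public static string ResolveWindowsShell(
        bool useWhich2,                              // e.g. the "DistributedTask.UseWhich2" variable
        Func<string, bool, string> which,            // legacy lookup: (command, require) -> path or null
        Func<string, bool, string> which2)           // stricter lookup with the same signature
    {
        Func<string, bool, string> lookup = useWhich2 ? which2 : which;

        var path = lookup("pwsh", false);            // require: false -> may return null
        if (string.IsNullOrEmpty(path))
        {
            path = lookup("powershell", true);       // require: true -> caller expects a throw if missing
        }
        return path;
    }
}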
@@ -4,6 +4,7 @@ using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using System.Linq;
+using System.Net.Http;
 using System.Runtime.Serialization;
 using System.Threading;
 using System.Threading.Tasks;
@@ -15,6 +16,7 @@ using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Common;
 using GitHub.Runner.Common.Util;
 using GitHub.Runner.Sdk;
+using GitHub.Services.Common;
 using Pipelines = GitHub.DistributedTask.Pipelines;

 namespace GitHub.Runner.Worker
@@ -34,12 +36,13 @@ namespace GitHub.Runner.Worker
 public interface IJobExtension : IRunnerService
 {
 Task<List<IStep>> InitializeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message);
-void FinalizeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, DateTime jobStartTimeUtc);
+Task FinalizeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, DateTime jobStartTimeUtc);
 }

 public sealed class JobExtension : RunnerService, IJobExtension
 {
 private readonly HashSet<string> _existingProcesses = new(StringComparer.OrdinalIgnoreCase);
+private readonly List<Task<string>> _connectivityCheckTasks = new();
 private bool _processCleanup;
 private string _processLookupId = $"github_{Guid.NewGuid()}";
 private CancellationTokenSource _diskSpaceCheckToken = new();
@@ -175,7 +178,14 @@ namespace GitHub.Runner.Worker
 context.Debug("Update context data");
 string _workDirectory = HostContext.GetDirectory(WellKnownDirectory.Work);
 context.SetRunnerContext("workspace", Path.Combine(_workDirectory, trackingConfig.PipelineDirectory));
-context.SetGitHubContext("workspace", Path.Combine(_workDirectory, trackingConfig.WorkspaceDirectory));
+var githubWorkspace = Path.Combine(_workDirectory, trackingConfig.WorkspaceDirectory);
+if (jobContext.Global.Variables.GetBoolean(Constants.Runner.Features.UseContainerPathForTemplate) ?? false)
+{
+// This value is used to translate paths from the container path back to the host path.
+context.SetGitHubContext("host-workspace", githubWorkspace);
+}
+context.SetGitHubContext("workspace", githubWorkspace);

 // Temporary hack for GHES alpha
 var configurationStore = HostContext.GetService<IConfigurationStore>();
@@ -421,6 +431,22 @@ namespace GitHub.Runner.Worker
 _diskSpaceCheckTask = CheckDiskSpaceAsync(context, _diskSpaceCheckToken.Token);
 }

+// Check server connectivity in background
+ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
+if (systemConnection.Data.TryGetValue("ConnectivityChecks", out var connectivityChecksPayload) &&
+!string.IsNullOrEmpty(connectivityChecksPayload))
+{
+Trace.Info($"Start checking server connectivity.");
+var checkUrls = StringUtil.ConvertFromJson<List<string>>(connectivityChecksPayload);
+if (checkUrls?.Count > 0)
+{
+foreach (var checkUrl in checkUrls)
+{
+_connectivityCheckTasks.Add(CheckConnectivity(checkUrl));
+}
+}
+}
+
 return steps;
 }
 catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested)
@@ -465,7 +491,7 @@ namespace GitHub.Runner.Worker
 return reference;
 }

-public void FinalizeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, DateTime jobStartTimeUtc)
+public async Task FinalizeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, DateTime jobStartTimeUtc)
 {
 Trace.Entering();
 ArgUtil.NotNull(jobContext, nameof(jobContext));
@@ -642,6 +668,28 @@ namespace GitHub.Runner.Worker
 {
 _diskSpaceCheckToken.Cancel();
 }
+
+// Collect server connectivity check result
+if (_connectivityCheckTasks.Count > 0)
+{
+try
+{
+Trace.Info($"Wait for all connectivity checks to finish.");
+await Task.WhenAll(_connectivityCheckTasks);
+foreach (var check in _connectivityCheckTasks)
+{
+var result = await check;
+Trace.Info($"Connectivity check result: {result}");
+context.Global.JobTelemetry.Add(new JobTelemetry() { Type = JobTelemetryType.ConnectivityCheck, Message = result });
+}
+}
+catch (Exception ex)
+{
+Trace.Error($"Fail to check server connectivity.");
+Trace.Error(ex);
+context.Global.JobTelemetry.Add(new JobTelemetry() { Type = JobTelemetryType.ConnectivityCheck, Message = $"Fail to check server connectivity. {ex.Message}" });
+}
+}
 }
 catch (Exception ex)
 {
@@ -657,6 +705,37 @@ namespace GitHub.Runner.Worker
 }
 }

+private async Task<string> CheckConnectivity(string endpointUrl)
+{
+Trace.Info($"Check server connectivity for {endpointUrl}.");
+string result = string.Empty;
+using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(5)))
+{
+try
+{
+using (var httpClientHandler = HostContext.CreateHttpClientHandler())
+using (var httpClient = new HttpClient(httpClientHandler))
+{
+httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
+var response = await httpClient.GetAsync(endpointUrl, timeoutTokenSource.Token);
+result = $"{endpointUrl}: {response.StatusCode}";
+}
+}
+catch (Exception ex) when (ex is OperationCanceledException && timeoutTokenSource.IsCancellationRequested)
+{
+Trace.Error($"Request timeout during connectivity check: {ex}");
+result = $"{endpointUrl}: timeout";
+}
+catch (Exception ex)
+{
+Trace.Error($"Catch exception during connectivity check: {ex}");
+result = $"{endpointUrl}: {ex.Message}";
+}
+}
+
+return result;
+}
+
 private async Task CheckDiskSpaceAsync(IExecutionContext context, CancellationToken token)
 {
 while (!token.IsCancellationRequested)
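The JobExtension hunks above kick off connectivity probes in the background during job setup and only collect them in FinalizeJob, so the checks never block step execution. A standalone sketch of that probe pattern with a five-second timeout per URL; it uses a bare HttpClient, whereas the runner code above goes through HostContext.CreateHttpClientHandler and its user agents (ConnectivityProbe is an illustrative name):

using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

static class ConnectivityProbe
{
    // Start one bounded GET per URL and report one summary string per endpoint.
    public static async Task<IReadOnlyList<string>> CheckAsync(IEnumerable<string> urls)
    {
        var checks = new List<Task<string>>();
        foreach (var url in urls)
        {
            checks.Add(ProbeAsync(url));
        }
        return await Task.WhenAll(checks);
    }

    private static async Task<string> ProbeAsync(string url)
    {
        using var timeout = new CancellationTokenSource(TimeSpan.FromSeconds(5));
        using var client = new HttpClient();
        try
        {
            var response = await client.GetAsync(url, timeout.Token);
            return $"{url}: {response.StatusCode}";
        }
        catch (OperationCanceledException) when (timeout.IsCancellationRequested)
        {
            return $"{url}: timeout";
        }
        catch (Exception ex)
        {
            return $"{url}: {ex.Message}";
        }
    }
}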
@@ -7,6 +7,7 @@ using System.Net.Http.Headers;
 using System.Text;
 using System.Threading;
 using System.Threading.Tasks;
+using GitHub.DistributedTask.ObjectTemplating.Tokens;
 using GitHub.DistributedTask.Pipelines;
 using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Common;
@@ -48,6 +49,16 @@ namespace GitHub.Runner.Worker
 !string.IsNullOrEmpty(orchestrationId.Value))
 {
 HostContext.UserAgents.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value));
+
+// make sure orchestration id is in the user-agent header.
+VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
+}
+
+var jobServerQueueTelemetry = false;
+if (message.Variables.TryGetValue("DistributedTask.EnableJobServerQueueTelemetry", out VariableValue enableJobServerQueueTelemetry) &&
+!string.IsNullOrEmpty(enableJobServerQueueTelemetry?.Value))
+{
+jobServerQueueTelemetry = StringUtil.ConvertToBoolean(enableJobServerQueueTelemetry.Value);
 }

 ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
@@ -71,7 +82,7 @@ namespace GitHub.Runner.Worker
 launchServer.InitializeLaunchClient(new Uri(launchReceiverEndpoint), accessToken);
 }
 _jobServerQueue = HostContext.GetService<IJobServerQueue>();
-_jobServerQueue.Start(message, resultServiceOnly: true);
+_jobServerQueue.Start(message, resultsServiceOnly: true, enableTelemetry: jobServerQueueTelemetry);
 }
 else
 {
@@ -83,10 +94,17 @@ namespace GitHub.Runner.Worker
 Trace.Info($"Creating job server with URL: {jobServerUrl}");
 // jobServerQueue is the throttling reporter.
 _jobServerQueue = HostContext.GetService<IJobServerQueue>();
-VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, new DelegatingHandler[] { new ThrottlingReportHandler(_jobServerQueue) });
+var delegatingHandlers = new List<DelegatingHandler>() { new ThrottlingReportHandler(_jobServerQueue) };
+message.Variables.TryGetValue("Actions.EnableHttpRedirects", out VariableValue enableHttpRedirects);
+if (StringUtil.ConvertToBoolean(enableHttpRedirects?.Value) &&
+!StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_NO_HTTP_REDIRECTS")))
+{
+delegatingHandlers.Add(new RedirectMessageHandler(Trace));
+}
+VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, delegatingHandlers);
 await jobServer.ConnectAsync(jobConnection);

-_jobServerQueue.Start(message);
+_jobServerQueue.Start(message, enableTelemetry: jobServerQueueTelemetry);
 server = jobServer;
 }

@@ -228,7 +246,7 @@ namespace GitHub.Runner.Worker
 finally
 {
 Trace.Info("Finalize job.");
-jobExtension.FinalizeJob(jobContext, message, jobStartTimeUtc);
+await jobExtension.FinalizeJob(jobContext, message, jobStartTimeUtc);
 }

 Trace.Info($"Job result after all job steps finish: {jobContext.Result ?? TaskResult.Succeeded}");
@@ -268,12 +286,20 @@ namespace GitHub.Runner.Worker
 {
 jobContext.Debug($"Finishing: {message.JobDisplayName}");
 TaskResult result = jobContext.Complete(taskResult);
-if (jobContext.Global.Variables.TryGetValue("Node12ActionsWarnings", out var node12Warnings))
+if (jobContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings))
 {
-var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node12Warnings));
-jobContext.Warning(string.Format(Constants.Runner.Node12DetectedAfterEndOfLife, actions));
+var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(deprecatedNodeWarnings));
+jobContext.Warning(string.Format(Constants.Runner.DetectedNodeAfterEndOfLifeMessage, actions));
 }

+if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings))
+{
+var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node16ForceWarnings));
+jobContext.Warning(string.Format(Constants.Runner.EnforcedNode12DetectedAfterEndOfLife, actions));
+}
+
+await ShutdownQueue(throwOnFailure: false);
+
 // Make sure to clean temp after file upload since they may be pending fileupload still use the TEMP dir.
 _tempDirectoryManager?.CleanupTempDirectory();

@@ -283,6 +309,13 @@ namespace GitHub.Runner.Worker
 // Make sure we don't submit secrets as telemetry
 MaskTelemetrySecrets(jobContext.Global.JobTelemetry);

+// Get environment url
+string environmentUrl = null;
+if (jobContext.ActionsEnvironment?.Url is StringToken urlStringToken)
+{
+environmentUrl = urlStringToken.Value;
+}
+
 Trace.Info($"Raising job completed against run service");
 var completeJobRetryLimit = 5;
 var exceptions = new List<Exception>();
@@ -290,7 +323,7 @@ namespace GitHub.Runner.Worker
 {
 try
 {
-await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, jobContext.Global.JobAnnotations, default);
+await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, jobContext.Global.JobAnnotations, environmentUrl, default);
 return result;
 }
 catch (Exception ex)
@@ -360,15 +393,26 @@ namespace GitHub.Runner.Worker
 }
 }

-if (jobContext.Global.Variables.TryGetValue("Node12ActionsWarnings", out var node12Warnings))
+if (jobContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings))
 {
-var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node12Warnings));
-jobContext.Warning(string.Format(Constants.Runner.Node12DetectedAfterEndOfLife, actions));
+var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(deprecatedNodeWarnings));
+jobContext.Warning(string.Format(Constants.Runner.DetectedNodeAfterEndOfLifeMessage, actions));
+}
+
+if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings))
+{
+var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node16ForceWarnings));
+jobContext.Warning(string.Format(Constants.Runner.EnforcedNode12DetectedAfterEndOfLife, actions));
 }

 try
 {
-await ShutdownQueue(throwOnFailure: true);
+var jobQueueTelemetry = await ShutdownQueue(throwOnFailure: true);
+// include any job telemetry from the background upload process.
+if (jobQueueTelemetry.Count > 0)
+{
+jobContext.Global.JobTelemetry.AddRange(jobQueueTelemetry);
+}
 }
 catch (Exception ex)
 {
@@ -470,7 +514,7 @@ namespace GitHub.Runner.Worker
 }
 }

-private async Task ShutdownQueue(bool throwOnFailure)
+private async Task<IList<JobTelemetry>> ShutdownQueue(bool throwOnFailure)
 {
 if (_jobServerQueue != null)
 {
@@ -478,6 +522,7 @@ namespace GitHub.Runner.Worker
 {
 Trace.Info("Shutting down the job server queue.");
 await _jobServerQueue.ShutdownAsync();
+return _jobServerQueue.JobTelemetries;
 }
 catch (Exception ex) when (!throwOnFailure)
 {
@@ -489,6 +534,8 @@ namespace GitHub.Runner.Worker
 _jobServerQueue = null; // Prevent multiple attempts.
 }
 }
+
+return Array.Empty<JobTelemetry>();
 }
 }
 }
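Among the JobRunner changes above, the VssConnection is now built from a mutable handler list so an optional redirect handler can be appended behind the "Actions.EnableHttpRedirects" variable and the GITHUB_ACTIONS_RUNNER_NO_HTTP_REDIRECTS kill switch. A sketch of that wiring under simplified assumptions: a factory delegate stands in for RedirectMessageHandler, and a plain string comparison stands in for StringUtil.ConvertToBoolean:

using System;
using System.Collections.Generic;
using System.Net.Http;

static class HandlerWiring
{
    // Build the handler pipeline: throttling reporter always, redirect handler only when opted in.
    public static List<DelegatingHandler> Build(
        DelegatingHandler throttlingReporter,
        bool enableHttpRedirects,
        Func<DelegatingHandler> redirectHandlerFactory)
    {
        var handlers = new List<DelegatingHandler> { throttlingReporter };

        // Environment-variable kill switch (simplified truthiness check).
        var disabled = string.Equals(
            Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_NO_HTTP_REDIRECTS"),
            "true",
            StringComparison.OrdinalIgnoreCase);

        if (enableHttpRedirects && !disabled)
        {
            handlers.Add(redirectHandlerFactory());
        }
        return handlers;
    }
}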
src/Sdk/Common/Common/Authentication/NoOpCredentials.cs (new file, 22 lines)
@@ -0,0 +1,22 @@
+using System;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace GitHub.Services.Common
+{
+// Set of classes used to bypass token operations
+// Results Service and External services follow a different auth model but
+// we are required to pass in a credentials object to create a RawHttpMessageHandler
+public class NoOpCredentials : FederatedCredential
+{
+public NoOpCredentials(IssuedToken initialToken) : base(initialToken)
+{
+}
+
+public override VssCredentialsType CredentialType { get; }
+protected override IssuedTokenProvider OnCreateTokenProvider(Uri serverUrl, IHttpResponse response)
+{
+return null;
+}
+}
+}
@@ -1,12 +1,11 @@
 using System;
 using System.ComponentModel;
-using System.Diagnostics.CodeAnalysis;

 namespace GitHub.Services.Common.Internal
 {
 [EditorBrowsable(EditorBrowsableState.Never)]
 public static class RawHttpHeaders
 {
-public const String SessionHeader = "X-Runner-Session";
+public const String SessionHeader = "X-Actions-Session";
 }
 }
@@ -109,7 +109,7 @@ namespace GitHub.Services.Common
 lock (m_thisLock)
 {
 // Ensure that we attempt to use the most appropriate authentication mechanism by default.
-if (m_tokenProvider == null)
+if (m_tokenProvider == null && !(this.Credentials is NoOpCredentials))
 {
 m_tokenProvider = this.Credentials.CreateTokenProvider(request.RequestUri, null, null);
 }
@@ -121,7 +121,8 @@ namespace GitHub.Services.Common
 HttpResponseMessageWrapper responseWrapper;

 Boolean lastResponseDemandedProxyAuth = false;
-Int32 retries = m_maxAuthRetries;
+// do not retry if we cannot recreate tokens
+Int32 retries = this.Credentials is NoOpCredentials ? 0 : m_maxAuthRetries;
 try
 {
 tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
@@ -137,9 +138,15 @@ namespace GitHub.Services.Common
 response.Dispose();
 }

+this.Settings.ApplyTo(request);
+
 // Let's start with sending a token
-IssuedToken token = await m_tokenProvider.GetTokenAsync(null, tokenSource.Token).ConfigureAwait(false);
-ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
+IssuedToken token = null;
+if (m_tokenProvider != null)
+{
+token = await m_tokenProvider.GetTokenAsync(null, tokenSource.Token).ConfigureAwait(false);
+ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
+}

 // The WinHttpHandler will chunk any content that does not have a computed length which is
 // not what we want. By loading into a buffer up-front we bypass this behavior and there is
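The message-handler hunks above skip token-provider creation and disable auth retries whenever the configured credentials are NoOpCredentials, since no token can be issued or reissued for that credential type. A simplified sketch of that guard, using plain placeholder types rather than the SDK's credential and token-provider classes:

using System;

class NoOpCredentials { }

class MessageHandlerCore
{
    private object _tokenProvider;
    private readonly object _credentials;
    private const int MaxAuthRetries = 3;

    public MessageHandlerCore(object credentials) => _credentials = credentials;

    // Returns the retry budget for the request about to be sent.
    public int PrepareRequest()
    {
        // Only create a token provider when the credentials can actually mint tokens.
        if (_tokenProvider == null && !(_credentials is NoOpCredentials))
        {
            _tokenProvider = CreateTokenProvider();
        }

        // Do not retry if we cannot recreate tokens.
        return _credentials is NoOpCredentials ? 0 : MaxAuthRetries;
    }

    private object CreateTokenProvider() => new object();
}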
@@ -213,7 +213,26 @@ namespace GitHub.Services.Common

 // ConfigureAwait(false) enables the continuation to be run outside any captured
 // SyncronizationContext (such as ASP.NET's) which keeps things from deadlocking...
-response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false);
+var tmpResponse = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false);
+if (Settings.AllowAutoRedirectForBroker && tmpResponse.StatusCode == HttpStatusCode.Redirect)
+{
+//Dispose of the previous response
+tmpResponse?.Dispose();
+
+var location = tmpResponse.Headers.Location;
+request = new HttpRequestMessage(HttpMethod.Get, location);
+
+// Reapply the token to new redirected request
+ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
+
+// Resend the request
+response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false);
+}
+else
+{
+response = tmpResponse;
+}
+
 traceInfo?.TraceRequestSendTime();

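The hunk above follows a 302 manually when AllowAutoRedirectForBroker is set, so the credential can be reapplied to the redirected request; automatic redirect handling deliberately does not carry the token forward. A standalone sketch of that flow under simplified assumptions: applyToken is a placeholder for the handler's ApplyToken step, and the sketch reads the Location header before disposing the first response:

using System;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

static class RedirectingSend
{
    public static async Task<HttpResponseMessage> SendAsync(
        HttpMessageInvoker invoker,
        HttpRequestMessage request,
        Action<HttpRequestMessage> applyToken,
        bool allowAutoRedirectForBroker,
        CancellationToken cancellationToken)
    {
        var response = await invoker.SendAsync(request, cancellationToken).ConfigureAwait(false);

        if (allowAutoRedirectForBroker && response.StatusCode == HttpStatusCode.Redirect)
        {
            // Capture the target, release the first response, and resend with the token reapplied.
            var location = response.Headers.Location;
            response.Dispose();

            var redirected = new HttpRequestMessage(HttpMethod.Get, location);
            applyToken(redirected);
            response = await invoker.SendAsync(redirected, cancellationToken).ConfigureAwait(false);
        }

        return response;
    }
}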
@@ -110,6 +110,16 @@ namespace GitHub.Services.Common
 set;
 }

+/// <summary>
+/// Gets or sets a value indicating whether or not HttpClientHandler should follow redirect on outgoing broker requests
+/// This is special since this also sends token in the request, where as default AllowAutoRedirect does not
+/// </summary>
+public Boolean AllowAutoRedirectForBroker
+{
+get;
+set;
+}
+
 /// <summary>
 /// Gets or sets a value indicating whether or not compression should be used on outgoing requests.
 /// The default value is true.
@@ -97,7 +97,7 @@ namespace GitHub.DistributedTask.WebApi
 /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
 public virtual async Task DeleteAgentAsync(
 int poolId,
-int agentId,
+ulong agentId,
 object userState = null,
 CancellationToken cancellationToken = default)
 {
@@ -243,7 +243,7 @@ namespace GitHub.DistributedTask.WebApi
 /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
 public virtual Task<TaskAgent> ReplaceAgentAsync(
 int poolId,
-int agentId,
+ulong agentId,
 TaskAgent agent,
 object userState = null,
 CancellationToken cancellationToken = default)
@@ -461,6 +461,9 @@ namespace GitHub.DistributedTask.WebApi
 long? lastMessageId = null,
 TaskAgentStatus? status = null,
 string runnerVersion = null,
+string os = null,
+string architecture = null,
+bool? disableUpdate = null,
 object userState = null,
 CancellationToken cancellationToken = default)
 {
@@ -483,6 +486,21 @@ namespace GitHub.DistributedTask.WebApi
 queryParams.Add("runnerVersion", runnerVersion);
 }

+if (os != null)
+{
+queryParams.Add("os", os);
+}
+
+if (architecture != null)
+{
+queryParams.Add("architecture", architecture);
+}
+
+if (disableUpdate != null)
+{
+queryParams.Add("disableUpdate", disableUpdate.Value.ToString().ToLower());
+}
+
 return SendAsync<TaskAgentMessage>(
 httpMethod,
 locationId,
@@ -786,7 +804,7 @@ namespace GitHub.DistributedTask.WebApi
 [EditorBrowsable(EditorBrowsableState.Never)]
 public virtual Task<TaskAgent> UpdateAgentUpdateStateAsync(
 int poolId,
-int agentId,
+ulong agentId,
 string currentState,
 string updateTrace,
 object userState = null,
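The GetMessage hunks above add optional os, architecture and disableUpdate query parameters, appended only when the caller supplies them so existing request URLs stay unchanged. A sketch of the resulting query construction, with a plain key/value list standing in for the client's query-parameter helper:

using System.Collections.Generic;

static class MessageQuery
{
    public static List<KeyValuePair<string, string>> Build(
        string runnerVersion, string os, string architecture, bool? disableUpdate)
    {
        var queryParams = new List<KeyValuePair<string, string>>();
        if (runnerVersion != null)
        {
            queryParams.Add(new KeyValuePair<string, string>("runnerVersion", runnerVersion));
        }
        if (os != null)
        {
            queryParams.Add(new KeyValuePair<string, string>("os", os));
        }
        if (architecture != null)
        {
            queryParams.Add(new KeyValuePair<string, string>("architecture", architecture));
        }
        if (disableUpdate != null)
        {
            // Lower-cased to match the boolean formatting used in the change above.
            queryParams.Add(new KeyValuePair<string, string>("disableUpdate", disableUpdate.Value.ToString().ToLower()));
        }
        return queryParams;
    }
}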
@@ -123,8 +123,11 @@ namespace GitHub.DistributedTask.Logging
|
|||||||
var secretSection = string.Empty;
|
var secretSection = string.Empty;
|
||||||
if (value.Contains("&+"))
|
if (value.Contains("&+"))
|
||||||
{
|
{
|
||||||
// +1 to skip the letter that got colored
|
if (value.Length > value.IndexOf("&+") + "&+".Length + 1)
|
||||||
secretSection = value.Substring(value.IndexOf("&+") + "&+".Length + 1);
|
{
|
||||||
|
// +1 to skip the letter that got colored
|
||||||
|
secretSection = value.Substring(value.IndexOf("&+") + "&+".Length + 1);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
|
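The logging hunk above guards the substring that follows the "&+" marker so short values no longer throw ArgumentOutOfRangeException. A standalone sketch of just that bounds check, following the same logic as the change:

using System;

static class SecretSectionParser
{
    public static string Extract(string value)
    {
        var secretSection = string.Empty;
        if (value.Contains("&+"))
        {
            // +1 to skip the letter that got colored; only take the tail if it exists.
            if (value.Length > value.IndexOf("&+") + "&+".Length + 1)
            {
                secretSection = value.Substring(value.IndexOf("&+") + "&+".Length + 1);
            }
        }
        return secretSection;
    }
}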
@@ -82,5 +82,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
 public const String WorkflowRoot = "workflow-root";
 public const String WorkingDirectory = "working-directory";
 public const String Workspace = "workspace";
+public const String HostWorkspace = "host-workspace";
 }
 }
@@ -16,7 +16,7 @@ namespace GitHub.DistributedTask.WebApi
 }

 public AgentRefreshMessage(
-Int32 agentId,
+ulong agentId,
 String targetVersion,
 TimeSpan? timeout = null)
 {
@@ -26,7 +26,7 @@ namespace GitHub.DistributedTask.WebApi
 }

 [DataMember]
-public Int32 AgentId
+public ulong AgentId
 {
 get;
 private set;
src/Sdk/DTWebApi/WebApi/BrokerMigrationMessage.cs (new file, 38 lines)
@@ -0,0 +1,38 @@
+using System;
+using System.Runtime.Serialization;
+
+namespace GitHub.DistributedTask.WebApi
+{
+/// <summary>
+/// Message that tells the runner to redirect itself to BrokerListener for messages.
+/// (Note that we use a special Message instead of a simple 302. This is because
+/// the runner will need to apply the runner's token to the request, and it is
+/// a security best practice to *not* blindly add sensitive data to redirects
+/// 302s.)
+/// </summary>
+[DataContract]
+public class BrokerMigrationMessage
+{
+public static readonly string MessageType = "BrokerMigration";
+
+public BrokerMigrationMessage()
+{
+}
+
+public BrokerMigrationMessage(
+Uri brokerUrl)
+{
+this.BrokerBaseUrl = brokerUrl;
+}
+
+/// <summary>
+/// The base url for the broker listener
+/// </summary>
+[DataMember]
+public Uri BrokerBaseUrl
+{
+get;
+internal set;
+}
+}
+}
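BrokerMigrationMessage above is a plain data contract; how a listener consumes it is not shown in this diff. A hypothetical sketch of reacting to a message of type "BrokerMigration" by extracting BrokerBaseUrl, using System.Text.Json purely for illustration (the real message is a DataContract type, and the surrounding message loop is not part of this change):

using System;
using System.Text.Json;

static class MigrationHandling
{
    // Returns the broker base URL when the payload is a migration message, otherwise null.
    public static Uri GetBrokerBaseUrl(string messageType, string body)
    {
        if (string.Equals(messageType, "BrokerMigration", StringComparison.OrdinalIgnoreCase))
        {
            var migration = JsonSerializer.Deserialize<MigrationPayload>(body);
            return migration?.BrokerBaseUrl;
        }
        return null;
    }

    private sealed class MigrationPayload
    {
        public Uri BrokerBaseUrl { get; set; }
    }
}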
@@ -5,7 +5,7 @@ namespace GitHub.DistributedTask.WebApi
 [DataContract]
 public sealed class DiagnosticLogMetadata
 {
-public DiagnosticLogMetadata(string agentName, int agentId, int poolId, string phaseName, string fileName, string phaseResult)
+public DiagnosticLogMetadata(string agentName, ulong agentId, int poolId, string phaseName, string fileName, string phaseResult)
 {
 AgentName = agentName;
 AgentId = agentId;
@@ -19,7 +19,7 @@ namespace GitHub.DistributedTask.WebApi
 public string AgentName { get; set; }

 [DataMember]
-public int AgentId { get; set; }
+public ulong AgentId { get; set; }

 [DataMember]
 public int PoolId { get; set; }
@@ -2516,4 +2516,23 @@ namespace GitHub.DistributedTask.WebApi
 {
 }
 }
+
+[Serializable]
+public sealed class InvalidActionArchiveException : DistributedTaskException
+{
+public InvalidActionArchiveException(String message)
+: base(message)
+{
+}
+
+public InvalidActionArchiveException(String message, Exception innerException)
+: base(message, innerException)
+{
+}
+
+private InvalidActionArchiveException(SerializationInfo info, StreamingContext context)
+: base(info, context)
+{
+}
+}
 }
Some files were not shown because too many files have changed in this diff.