Mirror of https://github.com/actions/runner.git (synced 2025-12-10 20:36:49 +00:00)

Compare commits (120 commits)
| SHA1 |
|---|
| ea931c7deb |
| 8c3e8d0627 |
| 3424e3aa32 |
| 6b9ba79e26 |
| 65361e0fb5 |
| 36e37a0885 |
| a5cd1ba4b6 |
| acdc6edf7c |
| b4a7bb0969 |
| f47384b46e |
| f672567acc |
| e25c754744 |
| f57ecd8e3c |
| 463ec00cb4 |
| c3a7188eca |
| 2a6f271afa |
| 462337a4a4 |
| 8f1c070506 |
| bf445e2750 |
| 67d70803a9 |
| 8c917b4ad3 |
| 440238adc4 |
| 8250726be1 |
| 5b2bc388ca |
| 6a2381f525 |
| 1f0c91e23e |
| 020a1ed790 |
| c1a5dc71a5 |
| c68e28788d |
| a823a7f669 |
| 21ca5e6f04 |
| f4197fb5a5 |
| 3a8cb43022 |
| 80a17a2f0c |
| 16834edc67 |
| 2908d82845 |
| 3f5b813499 |
| 7b703d667d |
| d2f0a46865 |
| 143639ddac |
| 474d0fb354 |
| 15c0fe6c1d |
| 2b66cbe699 |
| 0e9e9f1e8d |
| be65955a9d |
| e419ae3c7e |
| bb40cd2788 |
| e0acb14bfc |
| 1ff8ad7860 |
| 8dd2cec3af |
| 7b53c38294 |
| e22452c2d6 |
| 9bbfed0740 |
| cf5afc63da |
| a00db53b0d |
| 73ef82ff85 |
| 7892066256 |
| 8b9a81c952 |
| 460d9ae5a8 |
| e94e744bed |
| 94080812f7 |
| 1183100ab8 |
| 4f40f29cff |
| d88823c634 |
| a8783c023f |
| 2606425cc5 |
| 8fb038b0e0 |
| 8b30f9381b |
| 8206cf4e73 |
| 6680a3b142 |
| b882f6696a |
| e76de55cda |
| 9eb4b96713 |
| 719348e0bf |
| 9fe5aa2a9a |
| 765a5c3efc |
| e752edf7b5 |
| e350f35217 |
| 8fa970a1e6 |
| 8eefd849c1 |
| f6e9809844 |
| 5b2e4049bc |
| 7cb61925b0 |
| a61d3f37dc |
| e30b9d6d12 |
| 496904c0b7 |
| b91ad56f92 |
| f25c9dfba3 |
| 7d432fb24c |
| e8ee6f7b1b |
| d4bbbb8419 |
| 4ffd081aea |
| c05e6748c3 |
| a2b7856c9c |
| 5f1c6f4708 |
| 8415f13bab |
| 471e3ae2d9 |
| 1096b975e4 |
| 282ba4cfc8 |
| b737a5ac5c |
| 20721bc950 |
| fde86b0666 |
| efffbaeabc |
| 3a1376f90e |
| 50b3edff3c |
| 58f7a379a1 |
| e13627df81 |
| 48cbee08f9 |
| 21b49c542c |
| 8db8bbe13a |
| 49b04976f4 |
| eeb0cf6f1e |
| f8a28c3c4e |
| 1bc14f0607 |
| 22d1938ac4 |
| 229b9b8ecc |
| 896152d78e |
| 8d74a9ead6 |
| 77b8586a03 |
| c8c47d4f27 |
@@ -1,27 +1,24 @@
-// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
 {
     "name": "Actions Runner Devcontainer",
     "image": "mcr.microsoft.com/devcontainers/base:focal",
     "features": {
         "ghcr.io/devcontainers/features/docker-in-docker:1": {},
         "ghcr.io/devcontainers/features/dotnet": {
-            "version": "6.0.405"
+            "version": "6.0.414"
         },
         "ghcr.io/devcontainers/features/node:1": {
             "version": "16"
         }
     },
     "customizations": {
         "vscode": {
             "extensions": [
                 "ms-azuretools.vscode-docker",
                 "ms-dotnettools.csharp",
                 "eamodio.gitlens"
             ]
         }
     },
-    // dotnet restore to install dependencies so OmniSharp works out of the box
-    // src/Test restores all other projects it references, src/Runner.PluginHost is not one of them
     "postCreateCommand": "dotnet restore src/Test && dotnet restore src/Runner.PluginHost",
     "remoteUser": "vscode"
 }
@@ -1,9 +0,0 @@
-# https://editorconfig.org/
-
-[*]
-charset = utf-8 # Set default charset to utf-8
-insert_final_newline = true # ensure all files end with a single newline
-trim_trailing_whitespace = true # attempt to remove trailing whitespace on save
-
-[*.md]
-trim_trailing_whitespace = false # in markdown, "two trailing spaces" is unfortunately meaningful; it means `<br>`
20 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,20 @@
+version: 2
+updates:
+  - package-ecosystem: "docker"
+    directory: "/images"
+    schedule:
+      interval: "daily"
+    target-branch: "main"
+  - package-ecosystem: "nuget"
+    directory: "/src"
+    schedule:
+      interval: "daily"
+    target-branch: "main"
+  - package-ecosystem: "npm"
+    directory: "/src/Misc/expressionFunc/hashFiles"
+    schedule:
+      interval: "daily"
+    target-branch: "main"
+    allow:
+      - dependency-type: direct
+      - dependency-type: production # check only dependencies, which are going to the compiled app, not supporting tools like @vue-cli
306 .github/workflows/dotnet-upgrade.yml vendored Normal file
@@ -0,0 +1,306 @@
+name: "DotNet SDK Upgrade"
+
+on:
+  schedule:
+    - cron: '0 0 * * 1'
+  workflow_dispatch:
+
+jobs:
+  dotnet-update:
+    runs-on: ubuntu-latest
+    outputs:
+      SHOULD_UPDATE: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE }}
+      BRANCH_EXISTS: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS }}
+      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+      - name: Get current major minor version
+        id: fetch_current_version
+        shell: bash
+        run: |
+          current_major_minor_patch_version=$(jq .sdk.version ./src/global.json | xargs)
+          current_major_minor_version=$(cut -d '.' -f 1,2 <<< "$current_major_minor_patch_version")
+
+          echo "DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION=${current_major_minor_patch_version}" >> $GITHUB_OUTPUT
+          echo "DOTNET_CURRENT_MAJOR_MINOR_VERSION=${current_major_minor_version}" >> $GITHUB_OUTPUT
+      - name: Check patch version
+        id: fetch_latest_version
+        shell: bash
+        run: |
+          latest_patch_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version")
+          current_patch_version=${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}
+
+          should_update=0
+          [ "$current_patch_version" != "$latest_patch_version" ] && should_update=1
+
+          # check if git branch already exists for the upgrade
+          branch_already_exists=0
+
+          if git ls-remote --heads --exit-code origin refs/heads/feature/dotnetsdk-upgrade/${latest_patch_version};
+          then
+            branch_already_exists=1
+            should_update=0
+          fi
+          echo "DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION=${latest_patch_version}" >> $GITHUB_OUTPUT
+          echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
+          echo "BRANCH_EXISTS=${branch_already_exists}" >> $GITHUB_OUTPUT
+      - name: Create an error annotation if branch exists
+        if: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS == 1 }}
+        run: echo "::error links::feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} https://github.com/actions/runner/tree/feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}::Branch feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} already exists. Please take a look and delete that branch if you wish to recreate"
+      - name: Create a warning annotation if no need to update
+        if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 0 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
+        run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_latest_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update"
+      - name: Update patch version
+        if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
+        shell: bash
+        run: |
+          patch_version="${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
+          current_version="${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}"
+
+          # Update globals
+          echo Updating globals
+          globals_temp=$(mktemp)
+          jq --unbuffered --arg patch_version "$patch_version" '.sdk.version = $patch_version' ./src/global.json > "$globals_temp" && mv "$globals_temp" ./src/global.json
+
+          # Update devcontainer
+          echo Updating devcontainer
+          devcontainer_temp=$(mktemp)
+          jq --unbuffered --arg patch_version "$patch_version" '.features."ghcr.io/devcontainers/features/dotnet".version = $patch_version' ./.devcontainer/devcontainer.json > "$devcontainer_temp" && mv "$devcontainer_temp" ./.devcontainer/devcontainer.json
+
+          # Update dev.sh
+          echo Updating start script
+          sed -i "s/DOTNETSDK_VERSION=\"$current_version\"/DOTNETSDK_VERSION=\"$patch_version\"/g" ./src/dev.sh
+      - name: GIT commit and push all changed files
+        if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
+        id: create_branch
+        run: |
+          branch_name="feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
+          git config --global user.name "github-actions[bot]"
+          git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
+
+          git checkout -b $branch_name
+          git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
+          git push --set-upstream origin $branch_name
+
+  build-hashes:
+    if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
+    needs: [dotnet-update]
+    outputs:
+      # pass outputs from this job to create-pr for use
+      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
+      NEEDS_HASH_UPDATE: ${{ steps.compute-hash.outputs.NEED_UPDATE }}
+    strategy:
+      fail-fast: false
+      matrix:
+        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
+        include:
+          - runtime: linux-x64
+            os: ubuntu-latest
+            devScript: ./dev.sh
+
+          - runtime: linux-arm64
+            os: ubuntu-latest
+            devScript: ./dev.sh
+
+          - runtime: linux-arm
+            os: ubuntu-latest
+            devScript: ./dev.sh
+
+          - runtime: osx-x64
+            os: macOS-latest
+            devScript: ./dev.sh
+
+          - runtime: osx-arm64
+            os: macOS-latest
+            devScript: ./dev.sh
+
+          - runtime: win-x64
+            os: windows-2019
+            devScript: ./dev
+
+          - runtime: win-arm64
+            os: windows-latest
+            devScript: ./dev
+
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+
+      # Build runner layout
+      - name: Build & Layout Release
+        run: |
+          ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
+        working-directory: src
+
+      # Check runtime/externals hash
+      - name: Compute/Compare runtime and externals Hash
+        id: compute-hash
+        continue-on-error: true
+        shell: bash
+        run: |
+          echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
+          echo "Current Externals hash result: $EXTERNALS_HASH"
+
+          NeedUpdate=0
+          if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
+            echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
+
+            echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT
+            NeedUpdate=1
+          fi
+
+          if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
+            echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
+
+            echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT
+            NeedUpdate=1
+          fi
+
+          echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT
+        env:
+          DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
+          EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
+      - name: update hash
+        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
+        shell: bash
+        run: |
+          ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
+          DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}
+
+          if [ -n "$ExternalHash" ]; then
+            echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }}
+          fi
+
+          if [ -n "$DotNetRuntimeHash" ]; then
+            echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
+          fi
+      - name: cache updated hashes
+        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
+        uses: actions/cache/save@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/${{ matrix.runtime }}
+            ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
+          key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+
+
+  hash-update:
+    needs: [build-hashes]
+    if: ${{ needs.build-hashes.outputs.NEEDS_HASH_UPDATE == 1 }}
+    outputs:
+      # pass outputs from this job to create-pr for use
+      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.build-hashes.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      - name: Restore cached hashes - linux-x64
+        id: cache-restore-linux-x64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/linux-x64
+            ./src/Misc/contentHash/dotnetRuntime/linux-x64
+          key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - linux-arm64
+        id: cache-restore-linux-arm64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/linux-arm64
+            ./src/Misc/contentHash/dotnetRuntime/linux-arm64
+          key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - linux-arm
+        id: cache-restore-linux-arm
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/linux-arm
+            ./src/Misc/contentHash/dotnetRuntime/linux-arm
+          key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - osx-x64
+        id: cache-restore-osx-x64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/osx-x64
+            ./src/Misc/contentHash/dotnetRuntime/osx-x64
+          key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - osx-arm64
+        id: cache-restore-osx-arm64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/osx-arm64
+            ./src/Misc/contentHash/dotnetRuntime/osx-arm64
+          key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - win-x64
+        id: cache-restore-win-x64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/win-x64
+            ./src/Misc/contentHash/dotnetRuntime/win-x64
+          key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - win-arm64
+        id: cache-restore-win-arm64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/win-arm64
+            ./src/Misc/contentHash/dotnetRuntime/win-arm64
+          key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Fetch cached computed hashes
+        if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' ||
+          steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' ||
+          steps.cache-restore-linux-arm.outputs.cache-hit == 'true' ||
+          steps.cache-restore-win-x64.outputs.cache-hit == 'true' ||
+          steps.cache-restore-win-arm64.outputs.cache-hit == 'true' ||
+          steps.cache-restore-osx-x64.outputs.cache-hit == 'true' ||
+          steps.cache-restore-osx-arm64.outputs.cache-hit == 'true'
+        shell: bash
+        run: |
+          Environments=( "linux-x64" "linux-arm64" "linux-arm" "win-x64" "win-arm64" "osx-x64" "osx-arm64" )
+
+          git config --global user.name "github-actions[bot]"
+          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
+          git commit -a -m "Update computed hashes"
+          git push --set-upstream origin feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+
+  create-pr:
+    needs: [hash-update]
+    outputs:
+      # pass outputs from this job to run-tests for use
+      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      - name: Create Pull Request
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
+          https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
+
+
+          ---
+
+          Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
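The `dotnet-update` job above boils down to a version comparison: read `sdk.version` from `src/global.json`, fetch the published `latest.version` for the same major.minor line, and only continue when the two differ. A minimal sketch of that same check as a standalone script (hypothetical helper, not part of the repository; assumes Node 18+ so the global `fetch` is available):

```
// check-dotnet-sdk.ts - hypothetical local helper mirroring the workflow's
// fetch_current_version / fetch_latest_version steps.
import { readFileSync } from "fs";

async function main(): Promise<void> {
  const globalJson = JSON.parse(readFileSync("./src/global.json", "utf8"));
  const current: string = globalJson.sdk.version;              // e.g. "6.0.414"
  const majorMinor = current.split(".").slice(0, 2).join(".");  // e.g. "6.0"

  // Same feed the workflow queries with curl.
  const url = `https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${majorMinor}/latest.version`;
  const latest = (await (await fetch(url)).text()).trim();

  if (latest !== current) {
    console.log(`SDK update available: ${current} -> ${latest}`);
  } else {
    console.log(`Already on the latest ${majorMinor} patch (${current}).`);
  }
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});
```

Run from the repository root, this reproduces what the workflow's first two steps compute before any branch or pull request is created.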
3 .github/workflows/publish-image.yml vendored
@@ -53,6 +53,9 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           context: ./images
+          platforms: |
+            linux/amd64
+            linux/arm64
           tags: |
             ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
             ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
3 .github/workflows/release.yml vendored
@@ -699,6 +699,9 @@ jobs:
         uses: docker/build-push-action@v3
         with:
           context: ./images
+          platforms: |
+            linux/amd64
+            linux/arm64
           tags: |
             ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
             ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
16 .github/workflows/stale-bot.yml vendored Normal file
@@ -0,0 +1,16 @@
+name: Stale Bot
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 0 * * 1' # every monday at midnight
+jobs:
+  stale:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/stale@v8
+        with:
+          stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
+          close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."
+          exempt-issue-labels: "keep"
+          days-before-stale: 365
+          days-before-close: 15
6 .husky/pre-commit Executable file
@@ -0,0 +1,6 @@
+#!/usr/bin/env sh
+. "$(dirname -- "$0")/_/husky.sh"
+
+cd src/Misc/expressionFunc/hashFiles
+
+npx lint-staged
@@ -9,11 +9,13 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 - The runner needs to access `https://api.github.com` for downloading actions.
 - The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
 - The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
+---
+**NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).
 
 These can by tested by running the following `curl` commands from your self-hosted runner machine:
 
 ```
-curl -v https://api.github.com/api/v3/zen
+curl -v https://api.github.com/zen
 curl -v https://vstoken.actions.githubusercontent.com/_apis/health
 curl -v https://pipelines.actions.githubusercontent.com/_apis/health
 ```
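The same reachability test can be scripted when `curl` is not installed on the machine; a rough equivalent of the three probes above (illustrative only, assumes Node 18+ for the global `fetch`):

```
// connectivity-check.ts - hypothetical probe of the endpoints listed above.
const endpoints = [
  "https://api.github.com/zen",
  "https://vstoken.actions.githubusercontent.com/_apis/health",
  "https://pipelines.actions.githubusercontent.com/_apis/health",
];

async function main(): Promise<void> {
  for (const url of endpoints) {
    try {
      const res = await fetch(url);
      // Any HTTP response at all proves the host is reachable from this machine.
      console.log(`${url} -> HTTP ${res.status}`);
    } catch (err) {
      console.error(`${url} -> unreachable:`, err);
    }
  }
}

main();
```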
@@ -14,7 +14,7 @@
 
 - A Proxy may try to modify the HTTPS request (like add or change some http headers) and causes the request become incompatible with the Actions Service (ASP.NetCore), Ex: [Nginx](https://github.com/dotnet/aspnetcore/issues/17081)
 
-- Firewall rules that block action runner from accessing certain hosts, ex: `*.github.com`, `*.actions.githubusercontent.com`, etc
+- Firewall rules that block action runner from accessing [certain hosts](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github), ex: `*.github.com`, `*.actions.githubusercontent.com`, etc
 
 
 ### Identify and solve these problems
@@ -157,7 +157,7 @@ cat (Runner/Worker)_TIMESTAMP.log # view your log file
 ## Styling
 
 We use the .NET Foundation and CoreCLR style guidelines [located here](
-https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)
+https://github.com/dotnet/runtime/blob/main/docs/coding-guidelines/coding-style.md)
 
 ### Format C# Code
 
@@ -165,4 +165,4 @@ To format both staged and unstaged .cs files
 ```
 cd ./src
 ./dev.(cmd|sh) format
 ```
@@ -1,14 +1,18 @@
-FROM mcr.microsoft.com/dotnet/runtime-deps:6.0 as build
+# Source: https://github.com/dotnet/dotnet-docker
+FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build
 
+ARG TARGETOS
+ARG TARGETARCH
 ARG RUNNER_VERSION
-ARG RUNNER_ARCH="x64"
-ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.1
-ARG DOCKER_VERSION=20.10.23
+ARG RUNNER_CONTAINER_HOOKS_VERSION=0.4.0
+ARG DOCKER_VERSION=24.0.6
 
 RUN apt update -y && apt install curl unzip -y
 
 WORKDIR /actions-runner
-RUN curl -f -L -o runner.tar.gz https://github.com/actions/runner/releases/download/v${RUNNER_VERSION}/actions-runner-linux-${RUNNER_ARCH}-${RUNNER_VERSION}.tar.gz \
+RUN export RUNNER_ARCH=${TARGETARCH} \
+    && if [ "$RUNNER_ARCH" = "amd64" ]; then export RUNNER_ARCH=x64 ; fi \
+    && curl -f -L -o runner.tar.gz https://github.com/actions/runner/releases/download/v${RUNNER_VERSION}/actions-runner-${TARGETOS}-${RUNNER_ARCH}-${RUNNER_VERSION}.tar.gz \
     && tar xzf ./runner.tar.gz \
     && rm runner.tar.gz
 
@@ -16,13 +20,14 @@ RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-c
     && unzip ./runner-container-hooks.zip -d ./k8s \
     && rm runner-container-hooks.zip
 
-RUN export DOCKER_ARCH=x86_64 \
+RUN export RUNNER_ARCH=${TARGETARCH} \
+    && if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
    && if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
-    && curl -fLo docker.tgz https://download.docker.com/linux/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
+    && curl -fLo docker.tgz https://download.docker.com/${TARGETOS}/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
     && tar zxvf docker.tgz \
     && rm -rf docker.tgz
 
-FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
+FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy
 
 ENV DEBIAN_FRONTEND=noninteractive
 ENV RUNNER_MANUALLY_TRAP_SIG=1
@@ -31,6 +36,7 @@ ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
 RUN apt-get update -y \
     && apt-get install -y --no-install-recommends \
     sudo \
+    lsb-release \
     && rm -rf /var/lib/apt/lists/*
 
 RUN adduser --disabled-password --gecos "" --uid 1001 runner \
@@ -1,18 +1,45 @@
-## Features
-- Runner changes for communication with Results service (#2510, #2531, #2535, #2516)
-- Add `*.ghe.localhost` domains to hosted server check (#2536)
-- Add `OrchestrationId` to user-agent for better telemetry correlation. (#2568)
+## What's Changed
+* Prepare runner release 2.309.0 by @johnsudol in https://github.com/actions/runner/pull/2833
+* remove debug-only flag from stale bot action by @ruvceskistefan in https://github.com/actions/runner/pull/2834
+* Calculate docker instance label based on the hash of the config by @nikola-jokic in https://github.com/actions/runner/pull/2683
+* Correcting `zen` address by @Pantelis-Santorinios in https://github.com/actions/runner/pull/2855
+* Update dotnet sdk to latest version @6.0.414 by @github-actions in https://github.com/actions/runner/pull/2852
+* Bump @typescript-eslint/parser from 6.4.1 to 6.7.0 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2845
+* Bump @types/node from 20.5.6 to 20.6.2 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2854
+* Bump eslint-plugin-github from 4.9.2 to 4.10.0 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2808
+* Bump @typescript-eslint/parser from 6.7.0 to 6.7.2 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2858
+* Bump prettier from 3.0.2 to 3.0.3 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2860
+* Bump @vercel/ncc from 0.36.1 to 0.38.0 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2859
+* Bump @typescript-eslint/eslint-plugin from 6.4.1 to 6.7.2 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2861
+* Remove unused code in AgentManager. by @TingluoHuang in https://github.com/actions/runner/pull/2866
+* GetAgents from all runner groups durning config. by @TingluoHuang in https://github.com/actions/runner/pull/2865
+* Change alpine from vst blobs to OSS gha alpine build by @vanZeben in https://github.com/actions/runner/pull/2871
+* Bump node 16 to v16.20.2 by @vanZeben in https://github.com/actions/runner/pull/2872
+* Bump directly dotnet vulnerable packages by @nikola-jokic in https://github.com/actions/runner/pull/2870
+* Fix ArgumentOutOfRangeException in PowerShellPostAmpersandEscape. by @TingluoHuang in https://github.com/actions/runner/pull/2875
+* bump container hook version in runner image by @nikola-jokic in https://github.com/actions/runner/pull/2881
+* Use `Directory.EnumerateFiles` instead of `Directory.GetFiles` in WhichUtil. by @TingluoHuang in https://github.com/actions/runner/pull/2882
+* Add warning about node16 deprecation by @takost in https://github.com/actions/runner/pull/2887
+* Throw TimeoutException instead of OperationCanceledException on the final retry in DownloadRepositoryAction by @TingluoHuang in https://github.com/actions/runner/pull/2895
+* Update message when runners are deleted by @thboop in https://github.com/actions/runner/pull/2896
+* Do not give up if Results is powering logs by @yacaovsnc in https://github.com/actions/runner/pull/2893
+* Allow use action archive cache to speed up workflow jobs. by @TingluoHuang in https://github.com/actions/runner/pull/2857
+* Upgrade docker engine to 24.0.6 in the runner container image by @Link- in https://github.com/actions/runner/pull/2886
+* Collect telemetry to measure upload speed for different backend. by @TingluoHuang in https://github.com/actions/runner/pull/2912
+* Use RawHttpMessageHandler and VssHttpRetryMessageHandler in ResultsHttpClient by @yacaovsnc in https://github.com/actions/runner/pull/2908
+* Retries to lock Services database on Windows by @sugymt in https://github.com/actions/runner/pull/2880
+* Update default version to node20 by @takost in https://github.com/actions/runner/pull/2844
+* Revert "Update default version to node20" by @takost in https://github.com/actions/runner/pull/2918
+* Fixed Attempt typo by @corycalahan in https://github.com/actions/runner/pull/2849
+* Fix typo by @rajbos in https://github.com/actions/runner/pull/2670
 
-## Bugs
-- Fix JIT configurations on Windows (#2497)
-- Guard against NullReference while creating HostContext (#2343)
-- Handles broken symlink in `Which` (#2150, #2196)
-- Adding curl retry for external tool downloads (#2552, #2557)
-- Limit the time we wait for waiting websocket to connect. (#2554)
+## New Contributors
+* @Pantelis-Santorinios made their first contribution in https://github.com/actions/runner/pull/2855
+* @github-actions made their first contribution in https://github.com/actions/runner/pull/2852
+* @sugymt made their first contribution in https://github.com/actions/runner/pull/2880
+* @corycalahan made their first contribution in https://github.com/actions/runner/pull/2849
 
-## Misc
-- Bump container hooks version to 0.3.1 in runner image (#2496)
-- Runner changes to communicate with vNext services (#2487, #2500, #2505, #2541, #2547)
+**Full Changelog**: https://github.com/actions/runner/compare/v2.309.0...v2.310.0
 
 _Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
 To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
@@ -1 +1 @@
-<Update to ./src/runnerversion when creating release>
+2.310.1
@@ -1,5 +1,5 @@
 [*.cs]
-charset = utf-8
+charset = utf-8-bom
 insert_final_newline = true
 
 csharp_new_line_before_else = true
@@ -1 +1 @@
-39f2a931565d6a10e695ac8ed14bb9dcbb568151410349b32dbf9c27bae29602
+7539d33c35b0bc94ee67e3c0de1a6bac5ef89ce8e8efaa110131fa0520a54fb4
@@ -1 +1 @@
-29ffb303537d8ba674fbebc7729292c21c4ebd17b3198f91ed593ef4cbbb67b5
+d71a31f9a17e1a41d6e1edea596edfa68a0db5948ed160e86f2154a547f4dd10
@@ -1 +1 @@
-de6868a836fa3cb9e5ddddbc079da1c25e819aa2d2fc193cc9931c353687c57c
+3c2f700d8a995efe7895614ee07d9c7880f872d214b45983ad6163e1931870ab
@@ -1 +1 @@
-339d3e1a5fd28450c0fe6cb820cc7aae291f0f9e2d153ac34e1f7b080e35d30e
+b2d85c95ecad13d352f4c7d31c64dbb0d9c6381b48fa5874c4c72a43a025a8a1
@@ -1 +1 @@
-dcb7f606c1d7d290381e5020ee73e7f16dcbd2f20ac9b431362ccbb5120d449c
+417d835c1a108619886b4bb5d25988cb6c138eb7b4c00320b1d9455c5630bff9
@@ -1 +1 @@
-1bbcb0e9a2cf4be4b1fce77458de139b70ac58efcbb415a6db028b9373ae1673
+8f35aaecfb53426ea10816442e23065142bab9dd0fb712a29e0fc471d13c44ac
@@ -1 +1 @@
-44cd25f3c104d0abb44d262397a80e0b2c4f206465c5d899a22eec043dac0fb3
+811c7debdfc54d074385b063b83c997e5360c8a9160cd20fe777713968370063
2 src/Misc/contentHash/externals/linux-arm vendored
@@ -1 +1 @@
-3807dcbf947e840c33535fb466b096d76bf09e5c0254af8fc8cbbb24c6388222
+97cbac637d592d3a5d20f6cd91a3afaf5257995c7f6fdc73ab1b5a3a464e4382
2 src/Misc/contentHash/externals/linux-arm64 vendored
@@ -1 +1 @@
-ee01eee80cd8a460a4b9780ee13fdd20f25c59e754b4ccd99df55fbba2a85634
+25eaf1d30e72a521414384c24b7474037698325c233503671eceaacf6a56c6bd
2 src/Misc/contentHash/externals/linux-x64 vendored
@@ -1 +1 @@
-a9fb9c14e24e79aec97d4da197dd7bfc6364297d6fce573afb2df48cc9a931f8
+93865f08e52e0fb0fe0119dca2363f221fbe10af5bd932a0fc3df999143a7f81
2 src/Misc/contentHash/externals/osx-arm64 vendored
@@ -1 +1 @@
-a4e0e8fc62eba0967a39c7d693dcd0aeb8b2bed0765f9c38df80d42884f65341
+2574465a73ef1de75cd01da9232a96d4b6e9a0090e368978ff48d0629137610b
2 src/Misc/contentHash/externals/osx-x64 vendored
@@ -1 +1 @@
-17ac17fbe785b3d6fa2868d8d17185ebfe0c90b4b0ddf6b67eac70e42bcd989b
+ac60e452c01d99e23e696cc984f8e08b2602b649a370fc3ef1451f3958f2df0f
2 src/Misc/contentHash/externals/win-arm64 vendored
@@ -1 +1 @@
-89f24657a550f1e818b0e9975e5b80edcf4dd22b7d4bccbb9e48e37f45d30fb1
+763d18de11c11fd299c0e75e98fefc8a0e6605ae0ad6aba3bbc110db2262ab41
2 src/Misc/contentHash/externals/win-x64 vendored
@@ -1 +1 @@
-24fd131b5dce33ef16038b771407bc0507da8682a72fb3b7780607235f76db0b
+c7e94c3c73ccebf214497c5ae2b6aac6eb6677c0d2080929b0a87c576c6f3858
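The files above pin sha256 digests for the trimmed dotnet runtime and externals layouts; the `build-hashes` and `hash-update` jobs in the dotnet-upgrade workflow recompute them with the `hashFiles()` expression and commit the new values. Conceptually the check is a deterministic digest over a directory tree; a sketch of that idea (not the runner's actual hashing code, and the example path is only an assumption taken from the workflow's glob pattern):

```
// dir-digest.ts - conceptual sketch of a stable content hash over a directory.
import { createHash } from "crypto";
import { readdirSync, readFileSync, statSync } from "fs";
import { join } from "path";

function listFiles(dir: string): string[] {
  const out: string[] = [];
  for (const entry of readdirSync(dir)) {
    const full = join(dir, entry);
    if (statSync(full).isDirectory()) out.push(...listFiles(full));
    else out.push(full);
  }
  return out.sort(); // stable order so the digest is reproducible
}

export function dirDigest(dir: string): string {
  const outer = createHash("sha256");
  for (const file of listFiles(dir)) {
    const fileHash = createHash("sha256").update(readFileSync(file)).digest("hex");
    outer.update(fileHash); // fold each per-file digest into the overall hash
  }
  return outer.digest("hex");
}

// Example (hypothetical path): console.log(dirDigest("./_layout_trims/externals"));
```

Sorting the file list keeps the digest stable across platforms, which is why the per-runtime files only change when the layout content actually changes.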
@@ -1,11 +1,19 @@
+
 {
   "printWidth": 80,
   "tabWidth": 2,
  "useTabs": false,
  "semi": false,
  "singleQuote": true,
  "trailingComma": "none",
  "bracketSpacing": false,
  "arrowParens": "avoid",
-  "parser": "typescript"
-}
+  "overrides": [
+    {
+      "files": "*.{js,ts,json}",
+      "options": {
+        "tabWidth": 2
+      }
+    }
+  ]
+}
@@ -1,4 +1,3 @@
-To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run all`.
+To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run prepare && npm run all`.
 
-> Note: this package also needs to be recompiled for dependabot PRs updating one of
-> its dependencies.
+When you commit changes to the JSON or Typescript file, the javascript binary will be automatically re-compiled and added to the latest commit.
4626 src/Misc/expressionFunc/hashFiles/package-lock.json generated
File diff suppressed because it is too large
@@ -9,7 +9,9 @@
     "format-check": "prettier --check **/*.ts",
     "lint": "eslint src/**/*.ts",
     "pack": "ncc build -o ../../layoutbin/hashFiles",
-    "all": "npm run build && npm run format && npm run lint && npm run pack"
+    "all": "npm run format && npm run lint && npm run build && npm run pack",
+    "prepare": "cd ../../../../ && husky install"
   },
   "repository": {
     "type": "git",
@@ -18,18 +20,32 @@
   "keywords": [
     "actions"
   ],
+  "lint-staged": {
+    "*.md": [
+      "prettier --write",
+      "git add ."
+    ],
+    "*.{ts,json}": [
+      "sh -c 'npm run all'",
+      "git add ."
+    ]
+  },
   "author": "GitHub Actions",
   "license": "MIT",
   "dependencies": {
-    "@actions/glob": "^0.1.0"
+    "@actions/glob": "^0.4.0"
   },
   "devDependencies": {
-    "@types/node": "^12.7.12",
-    "@typescript-eslint/parser": "^5.15.0",
-    "@vercel/ncc": "^0.36.0",
-    "eslint": "^8.11.0",
-    "eslint-plugin-github": "^4.3.5",
-    "prettier": "^1.19.1",
-    "typescript": "^3.6.4"
+    "@types/node": "^20.6.2",
+    "@typescript-eslint/eslint-plugin": "^6.7.2",
+    "@typescript-eslint/parser": "^6.7.2",
+    "@vercel/ncc": "^0.38.0",
+    "eslint": "^8.47.0",
+    "eslint-plugin-github": "^4.10.0",
+    "eslint-plugin-prettier": "^5.0.0",
+    "prettier": "^3.0.3",
+    "typescript": "^5.2.2",
+    "husky": "^8.0.3",
+    "lint-staged": "^14.0.0"
   }
 }
@@ -52,12 +52,13 @@ async function run(): Promise<void> {
   }
 }
 
-run()
-  .then(out => {
+;(async () => {
+  try {
+    const out = await run()
     console.log(out)
     process.exit(0)
-  })
-  .catch(err => {
+  } catch (err) {
     console.error(err)
     process.exit(1)
-  })
+  }
+})()
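The hunk above replaces the `run().then(...).catch(...)` tail with an immediately invoked async function, so the success and failure exits share one `try/catch`. The pattern in isolation (standalone illustration; `run` here is a stand-in for the real function):

```
// Entry-point pattern: wrap the async call in an async IIFE so both exit
// paths live in a single try/catch block.
async function run(): Promise<string> {
  return "ok";
}

;(async () => {
  try {
    const out = await run();
    console.log(out);
    process.exit(0);
  } catch (err) {
    console.error(err);
    process.exit(1);
  }
})();
```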
@@ -4,8 +4,11 @@ PRECACHE=$2
 
 NODE_URL=https://nodejs.org/dist
 UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release
-NODE12_VERSION="12.22.7"
-NODE16_VERSION="16.16.0"
+NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
+NODE16_VERSION="16.20.2"
+NODE20_VERSION="20.5.0"
+# used only for win-arm64, remove node16 unofficial version when official version is available
+NODE16_UNOFFICIAL_VERSION="16.20.0"
 
 get_abs_path() {
   # exploits the fact that pwd will print abs path when no args
@@ -137,10 +140,10 @@ function acquireExternalTool() {
 
 # Download the external tools only for Windows.
 if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.exe" node12/bin
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.lib" node12/bin
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
     if [[ "$PRECACHE" != "" ]]; then
         acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
     fi
@@ -149,8 +152,10 @@ fi
 # Download the external tools only for Windows.
 if [[ "$PACKAGERUNTIME" == "win-arm64" ]]; then
     # todo: replace these with official release when available
-    acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
-    acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
+    acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
+    acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
     if [[ "$PRECACHE" != "" ]]; then
         acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
     fi
@@ -158,29 +163,30 @@ fi
 
 # Download the external tools only for OSX.
 if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-darwin-x64.tar.gz" node12 fix_nested_dir
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-x64.tar.gz" node20 fix_nested_dir
 fi
 
 if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then
     # node.js v12 doesn't support macOS on arm64.
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-arm64.tar.gz" node16 fix_nested_dir
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-arm64.tar.gz" node20 fix_nested_dir
 fi
 
 # Download the external tools for Linux PACKAGERUNTIMEs.
 if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
-    acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
-    acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
+    acquireExternalTool "$NODE_ALPINE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
+    acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
 fi
 
 if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-arm64.tar.gz" node12 fix_nested_dir
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-arm64.tar.gz" node20 fix_nested_dir
 fi
 
 if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
-    acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-armv7l.tar.gz" node12 fix_nested_dir
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir
+    acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-armv7l.tar.gz" node20 fix_nested_dir
 fi
@@ -6,6 +6,29 @@
|
|||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||||
|
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||||
|
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||||
|
}
|
||||||
|
Object.defineProperty(o, k2, desc);
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
 function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
 return new (P || (P = Promise))(function (resolve, reject) {
@@ -22,13 +45,6 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
 function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
 function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
 };
-var __importStar = (this && this.__importStar) || function (mod) {
-if (mod && mod.__esModule) return mod;
-var result = {};
-if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-result["default"] = mod;
-return result;
-};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 const crypto = __importStar(__nccwpck_require__(6113));
 const fs = __importStar(__nccwpck_require__(7147));
@@ -37,7 +53,7 @@ const path = __importStar(__nccwpck_require__(1017));
 const stream = __importStar(__nccwpck_require__(2781));
 const util = __importStar(__nccwpck_require__(3837));
 function run() {
-var e_1, _a;
+var _a, e_1, _b, _c;
 return __awaiter(this, void 0, void 0, function* () {
 // arg0 -> node
 // arg1 -> hashFiles.js
@@ -56,8 +72,10 @@ function run() {
 let count = 0;
 const globber = yield glob.create(matchPatterns, { followSymbolicLinks });
 try {
-for (var _b = __asyncValues(globber.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
-const file = _c.value;
+for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
+_c = _f.value;
+_d = false;
+const file = _c;
 console.log(file);
 if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
 console.log(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
@@ -80,7 +98,7 @@ function run() {
 catch (e_1_1) { e_1 = { error: e_1_1 }; }
 finally {
 try {
-if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
 }
 finally { if (e_1) throw e_1.error; }
 }
@@ -94,15 +112,18 @@ function run() {
 }
 });
 }
-run()
-.then(out => {
-console.log(out);
-process.exit(0);
-})
-.catch(err => {
-console.error(err);
-process.exit(1);
-});
+;
+(() => __awaiter(void 0, void 0, void 0, function* () {
+try {
+const out = yield run();
+console.log(out);
+process.exit(0);
+}
+catch (err) {
+console.error(err);
+process.exit(1);
+}
+}))();


 /***/ }),
@@ -246,7 +267,6 @@ const file_command_1 = __nccwpck_require__(717);
 const utils_1 = __nccwpck_require__(5278);
 const os = __importStar(__nccwpck_require__(2037));
 const path = __importStar(__nccwpck_require__(1017));
-const uuid_1 = __nccwpck_require__(5840);
 const oidc_utils_1 = __nccwpck_require__(8041);
 /**
 * The code to exit an action
@@ -276,20 +296,9 @@ function exportVariable(name, val) {
 process.env[name] = convertedVal;
 const filePath = process.env['GITHUB_ENV'] || '';
 if (filePath) {
-const delimiter = `ghadelimiter_${uuid_1.v4()}`;
-// These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.
-if (name.includes(delimiter)) {
-throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
-}
-if (convertedVal.includes(delimiter)) {
-throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
-}
-const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
-file_command_1.issueCommand('ENV', commandValue);
-}
-else {
-command_1.issueCommand('set-env', { name }, convertedVal);
+return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
 }
+command_1.issueCommand('set-env', { name }, convertedVal);
 }
 exports.exportVariable = exportVariable;
 /**
@@ -307,7 +316,7 @@ exports.setSecret = setSecret;
 function addPath(inputPath) {
 const filePath = process.env['GITHUB_PATH'] || '';
 if (filePath) {
-file_command_1.issueCommand('PATH', inputPath);
+file_command_1.issueFileCommand('PATH', inputPath);
 }
 else {
 command_1.issueCommand('add-path', {}, inputPath);
@@ -347,7 +356,10 @@ function getMultilineInput(name, options) {
 const inputs = getInput(name, options)
 .split('\n')
 .filter(x => x !== '');
-return inputs;
+if (options && options.trimWhitespace === false) {
+return inputs;
+}
+return inputs.map(input => input.trim());
 }
 exports.getMultilineInput = getMultilineInput;
 /**
@@ -380,8 +392,12 @@ exports.getBooleanInput = getBooleanInput;
 */
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 function setOutput(name, value) {
+const filePath = process.env['GITHUB_OUTPUT'] || '';
+if (filePath) {
+return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
+}
 process.stdout.write(os.EOL);
-command_1.issueCommand('set-output', { name }, value);
+command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
 }
 exports.setOutput = setOutput;
 /**
@@ -510,7 +526,11 @@ exports.group = group;
 */
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 function saveState(name, value) {
-command_1.issueCommand('save-state', { name }, value);
+const filePath = process.env['GITHUB_STATE'] || '';
+if (filePath) {
+return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
+}
+command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
 }
 exports.saveState = saveState;
 /**
@@ -576,13 +596,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.issueCommand = void 0;
+exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
 // We use any as a valid input type
 /* eslint-disable @typescript-eslint/no-explicit-any */
 const fs = __importStar(__nccwpck_require__(7147));
 const os = __importStar(__nccwpck_require__(2037));
+const uuid_1 = __nccwpck_require__(5840);
 const utils_1 = __nccwpck_require__(5278);
-function issueCommand(command, message) {
+function issueFileCommand(command, message) {
 const filePath = process.env[`GITHUB_${command}`];
 if (!filePath) {
 throw new Error(`Unable to find environment variable for file command ${command}`);
@@ -594,7 +615,22 @@ function issueCommand(command, message) {
 encoding: 'utf8'
 });
 }
-exports.issueCommand = issueCommand;
+exports.issueFileCommand = issueFileCommand;
+function prepareKeyValueMessage(key, value) {
+const delimiter = `ghadelimiter_${uuid_1.v4()}`;
+const convertedValue = utils_1.toCommandValue(value);
+// These should realistically never happen, but just in case someone finds a
+// way to exploit uuid generation let's not allow keys or values that contain
+// the delimiter.
+if (key.includes(delimiter)) {
+throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
+}
+if (convertedValue.includes(delimiter)) {
+throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
+}
+return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
+}
+exports.prepareKeyValueMessage = prepareKeyValueMessage;
 //# sourceMappingURL=file-command.js.map

 /***/ }),
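The hunks above swap the deprecated `set-env`/`set-output`/`save-state` workflow commands for file commands: each key/value pair is serialized with a random heredoc-style delimiter and appended to the file named by `GITHUB_ENV`, `GITHUB_OUTPUT`, or `GITHUB_STATE`. A minimal standalone sketch of that format, assuming Node.js 16 or later; the helper mirrors `prepareKeyValueMessage` above, while the sample key/value and the `crypto.randomUUID()` stand-in for `uuid.v4()` are illustrative, not part of the bundled code:

// Sketch: write a step output using the heredoc-style file command format shown above.
const fs = require('fs');
const os = require('os');
const crypto = require('crypto');

function prepareKeyValueMessage(key, value) {
  // crypto.randomUUID() stands in for the bundled code's uuid.v4().
  const delimiter = `ghadelimiter_${crypto.randomUUID()}`;
  const converted = typeof value === 'string' ? value : JSON.stringify(value);
  if (key.includes(delimiter) || converted.includes(delimiter)) {
    throw new Error(`Unexpected input: must not contain the delimiter "${delimiter}"`);
  }
  // KEY<<delimiter \n value \n delimiter -- the format produced by the diff above.
  return `${key}<<${delimiter}${os.EOL}${converted}${os.EOL}${delimiter}`;
}

const outputFile = process.env.GITHUB_OUTPUT; // set by the runner for each step
if (outputFile) {
  // Append, never overwrite: several outputs may be written by the same step.
  fs.appendFileSync(outputFile, `${prepareKeyValueMessage('digest', 'sha256:abc123')}${os.EOL}`, { encoding: 'utf8' });
}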
@@ -1100,7 +1136,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.hashFiles = exports.create = void 0;
 const internal_globber_1 = __nccwpck_require__(8298);
+const internal_hash_files_1 = __nccwpck_require__(2448);
 /**
 * Constructs a globber
 *
@@ -1113,17 +1151,56 @@ function create(patterns, options) {
 });
 }
 exports.create = create;
+/**
+* Computes the sha256 hash of a glob
+*
+* @param patterns Patterns separated by newlines
+* @param currentWorkspace Workspace used when matching files
+* @param options Glob options
+* @param verbose Enables verbose logging
+*/
+function hashFiles(patterns, currentWorkspace = '', options, verbose = false) {
+return __awaiter(this, void 0, void 0, function* () {
+let followSymbolicLinks = true;
+if (options && typeof options.followSymbolicLinks === 'boolean') {
+followSymbolicLinks = options.followSymbolicLinks;
+}
+const globber = yield create(patterns, { followSymbolicLinks });
+return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose);
+});
+}
+exports.hashFiles = hashFiles;
 //# sourceMappingURL=glob.js.map

 /***/ }),

 /***/ 1026:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const core = __nccwpck_require__(2186);
+exports.getOptions = void 0;
+const core = __importStar(__nccwpck_require__(2186));
 /**
 * Returns a copy with defaults filled in.
 */
@@ -1131,6 +1208,7 @@ function getOptions(copy) {
 const result = {
 followSymbolicLinks: true,
 implicitDescendants: true,
+matchDirectories: true,
 omitBrokenSymbolicLinks: true
 };
 if (copy) {
@@ -1142,6 +1220,10 @@ function getOptions(copy) {
 result.implicitDescendants = copy.implicitDescendants;
 core.debug(`implicitDescendants '${result.implicitDescendants}'`);
 }
+if (typeof copy.matchDirectories === 'boolean') {
+result.matchDirectories = copy.matchDirectories;
+core.debug(`matchDirectories '${result.matchDirectories}'`);
+}
 if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
 result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
 core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
@@ -1159,6 +1241,25 @@ exports.getOptions = getOptions;

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
 function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
 return new (P || (P = Promise))(function (resolve, reject) {
@@ -1188,11 +1289,12 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
 function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const core = __nccwpck_require__(2186);
-const fs = __nccwpck_require__(7147);
-const globOptionsHelper = __nccwpck_require__(1026);
-const path = __nccwpck_require__(1017);
-const patternHelper = __nccwpck_require__(9005);
+exports.DefaultGlobber = void 0;
+const core = __importStar(__nccwpck_require__(2186));
+const fs = __importStar(__nccwpck_require__(7147));
+const globOptionsHelper = __importStar(__nccwpck_require__(1026));
+const path = __importStar(__nccwpck_require__(1017));
+const patternHelper = __importStar(__nccwpck_require__(9005));
 const internal_match_kind_1 = __nccwpck_require__(1063);
 const internal_pattern_1 = __nccwpck_require__(4536);
 const internal_search_state_1 = __nccwpck_require__(9117);
@@ -1238,7 +1340,7 @@ class DefaultGlobber {
 if (options.implicitDescendants &&
 (pattern.trailingSeparator ||
 pattern.segments[pattern.segments.length - 1] !== '**')) {
-patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
+patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));
 }
 }
 // Push the search paths
@@ -1281,7 +1383,7 @@ class DefaultGlobber {
 // Directory
 if (stats.isDirectory()) {
 // Matched
-if (match & internal_match_kind_1.MatchKind.Directory) {
+if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) {
 yield yield __await(item.path);
 }
 // Descend?
@@ -1376,12 +1478,117 @@ exports.DefaultGlobber = DefaultGlobber;

 /***/ }),

+/***/ 2448:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

+"use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+return new (P || (P = Promise))(function (resolve, reject) {
+function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+step((generator = generator.apply(thisArg, _arguments || [])).next());
+});
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+var m = o[Symbol.asyncIterator], i;
+return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.hashFiles = void 0;
+const crypto = __importStar(__nccwpck_require__(6113));
+const core = __importStar(__nccwpck_require__(2186));
+const fs = __importStar(__nccwpck_require__(7147));
+const stream = __importStar(__nccwpck_require__(2781));
+const util = __importStar(__nccwpck_require__(3837));
+const path = __importStar(__nccwpck_require__(1017));
+function hashFiles(globber, currentWorkspace, verbose = false) {
+var e_1, _a;
+var _b;
+return __awaiter(this, void 0, void 0, function* () {
+const writeDelegate = verbose ? core.info : core.debug;
+let hasMatch = false;
+const githubWorkspace = currentWorkspace
+? currentWorkspace
+: (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+const result = crypto.createHash('sha256');
+let count = 0;
+try {
+for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
+const file = _d.value;
+writeDelegate(file);
+if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
+writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
+continue;
+}
+if (fs.statSync(file).isDirectory()) {
+writeDelegate(`Skip directory '${file}'.`);
+continue;
+}
+const hash = crypto.createHash('sha256');
+const pipeline = util.promisify(stream.pipeline);
+yield pipeline(fs.createReadStream(file), hash);
+result.write(hash.digest());
+count++;
+if (!hasMatch) {
+hasMatch = true;
+}
+}
+}
+catch (e_1_1) { e_1 = { error: e_1_1 }; }
+finally {
+try {
+if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+}
+finally { if (e_1) throw e_1.error; }
+}
+result.end();
+if (hasMatch) {
+writeDelegate(`Found ${count} files to hash.`);
+return result.digest('hex');
+}
+else {
+writeDelegate(`No matches found for glob`);
+return '';
+}
+});
+}
+exports.hashFiles = hashFiles;
+//# sourceMappingURL=internal-hash-files.js.map

+/***/ }),

 /***/ 1063:
 /***/ ((__unused_webpack_module, exports) => {

 "use strict";

 Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.MatchKind = void 0;
 /**
 * Indicates whether a pattern matches a path
 */
@@ -1401,13 +1608,36 @@ var MatchKind;
 /***/ }),

 /***/ 1849:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const assert = __nccwpck_require__(9491);
-const path = __nccwpck_require__(1017);
+exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
+const path = __importStar(__nccwpck_require__(1017));
+const assert_1 = __importDefault(__nccwpck_require__(9491));
 const IS_WINDOWS = process.platform === 'win32';
 /**
 * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
@@ -1447,8 +1677,8 @@ exports.dirname = dirname;
 * or `C:` are expanded based on the current working directory.
 */
 function ensureAbsoluteRoot(root, itemPath) {
-assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
-assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
+assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
+assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
 // Already rooted
 if (hasAbsoluteRoot(itemPath)) {
 return itemPath;
@@ -1458,7 +1688,7 @@ function ensureAbsoluteRoot(root, itemPath) {
 // Check for itemPath like C: or C:foo
 if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
 let cwd = process.cwd();
-assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
 // Drive letter matches cwd? Expand to cwd
 if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
 // Drive only, e.g. C:
@@ -1483,11 +1713,11 @@ function ensureAbsoluteRoot(root, itemPath) {
 // Check for itemPath like \ or \foo
 else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
 const cwd = process.cwd();
-assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
 return `${cwd[0]}:\\${itemPath.substr(1)}`;
 }
 }
-assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
+assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
 // Otherwise ensure root ends with a separator
 if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
 // Intentionally empty
@@ -1504,7 +1734,7 @@ exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
 * `\\hello\share` and `C:\hello` (and using alternate separator).
 */
 function hasAbsoluteRoot(itemPath) {
-assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
+assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
 // Normalize separators
 itemPath = normalizeSeparators(itemPath);
 // Windows
@@ -1521,7 +1751,7 @@ exports.hasAbsoluteRoot = hasAbsoluteRoot;
 * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
 */
 function hasRoot(itemPath) {
-assert(itemPath, `isRooted parameter 'itemPath' must not be empty`);
+assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
 // Normalize separators
 itemPath = normalizeSeparators(itemPath);
 // Windows
@@ -1583,14 +1813,37 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
 /***/ }),

 /***/ 6836:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const assert = __nccwpck_require__(9491);
-const path = __nccwpck_require__(1017);
-const pathHelper = __nccwpck_require__(1849);
+exports.Path = void 0;
+const path = __importStar(__nccwpck_require__(1017));
+const pathHelper = __importStar(__nccwpck_require__(1849));
+const assert_1 = __importDefault(__nccwpck_require__(9491));
 const IS_WINDOWS = process.platform === 'win32';
 /**
 * Helper class for parsing paths into segments
@@ -1604,7 +1857,7 @@ class Path {
 this.segments = [];
 // String
 if (typeof itemPath === 'string') {
-assert(itemPath, `Parameter 'itemPath' must not be empty`);
+assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
 // Normalize slashes and trim unnecessary trailing slash
 itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
 // Not rooted
@@ -1631,24 +1884,24 @@ class Path {
 // Array
 else {
 // Must not be empty
-assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
+assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
 // Each segment
 for (let i = 0; i < itemPath.length; i++) {
 let segment = itemPath[i];
 // Must not be empty
-assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
+assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
 // Normalize slashes
 segment = pathHelper.normalizeSeparators(itemPath[i]);
 // Root segment
 if (i === 0 && pathHelper.hasRoot(segment)) {
 segment = pathHelper.safeTrimTrailingSeparator(segment);
-assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
+assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
 this.segments.push(segment);
 }
 // All other segments
 else {
 // Must not contain slash
-assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
+assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
 this.segments.push(segment);
 }
 }
@@ -1680,12 +1933,32 @@ exports.Path = Path;
 /***/ }),

 /***/ 9005:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const pathHelper = __nccwpck_require__(1849);
+exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
+const pathHelper = __importStar(__nccwpck_require__(1849));
 const internal_match_kind_1 = __nccwpck_require__(1063);
 const IS_WINDOWS = process.platform === 'win32';
 /**
@@ -1761,21 +2034,44 @@ exports.partialMatch = partialMatch;
 /***/ }),

 /***/ 4536:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+if (k2 === undefined) k2 = k;
+o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+if (mod && mod.__esModule) return mod;
+var result = {};
+if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+__setModuleDefault(result, mod);
+return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const assert = __nccwpck_require__(9491);
-const os = __nccwpck_require__(2037);
-const path = __nccwpck_require__(1017);
-const pathHelper = __nccwpck_require__(1849);
+exports.Pattern = void 0;
+const os = __importStar(__nccwpck_require__(2037));
+const path = __importStar(__nccwpck_require__(1017));
+const pathHelper = __importStar(__nccwpck_require__(1849));
+const assert_1 = __importDefault(__nccwpck_require__(9491));
 const minimatch_1 = __nccwpck_require__(3973);
 const internal_match_kind_1 = __nccwpck_require__(1063);
 const internal_path_1 = __nccwpck_require__(6836);
 const IS_WINDOWS = process.platform === 'win32';
 class Pattern {
-constructor(patternOrNegate, segments) {
+constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
 /**
 * Indicates whether matches should be excluded from the result set
 */
@@ -1789,9 +2085,9 @@ class Pattern {
 else {
 // Convert to pattern
 segments = segments || [];
-assert(segments.length, `Parameter 'segments' must not empty`);
+assert_1.default(segments.length, `Parameter 'segments' must not empty`);
 const root = Pattern.getLiteral(segments[0]);
-assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
+assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
 pattern = new internal_path_1.Path(segments).toString().trim();
 if (patternOrNegate) {
 pattern = `!${pattern}`;
@@ -1803,7 +2099,7 @@ class Pattern {
 pattern = pattern.substr(1).trim();
 }
 // Normalize slashes and ensures absolute root
-pattern = Pattern.fixupPattern(pattern);
+pattern = Pattern.fixupPattern(pattern, homedir);
 // Segments
 this.segments = new internal_path_1.Path(pattern).segments;
 // Trailing slash indicates the pattern should only match directories, not regular files
@@ -1819,6 +2115,7 @@ class Pattern {
 this.searchPath = new internal_path_1.Path(searchSegments).toString();
 // Root RegExp (required when determining partial match)
 this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
+this.isImplicitPattern = isImplicitPattern;
 // Create minimatch
 const minimatchOptions = {
 dot: true,
@@ -1840,11 +2137,11 @@ class Pattern {
 // Normalize slashes
 itemPath = pathHelper.normalizeSeparators(itemPath);
 // Append a trailing slash. Otherwise Minimatch will not match the directory immediately
-// preceeding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
+// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
 // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
-if (!itemPath.endsWith(path.sep)) {
+if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {
 // Note, this is safe because the constructor ensures the pattern has an absolute root.
-// For example, formats like C: and C:foo on Windows are resolved to an aboslute root.
+// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
 itemPath = `${itemPath}${path.sep}`;
 }
 }
 }
@@ -1882,15 +2179,15 @@ class Pattern {
 /**
 * Normalizes slashes and ensures absolute root
 */
-static fixupPattern(pattern) {
+static fixupPattern(pattern, homedir) {
 // Empty
-assert(pattern, 'pattern cannot be empty');
+assert_1.default(pattern, 'pattern cannot be empty');
 // Must not contain `.` segment, unless first segment
 // Must not contain `..` segment
 const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
-assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
+assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
 // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
-assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
+assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
 // Normalize slashes
 pattern = pathHelper.normalizeSeparators(pattern);
 // Replace leading `.` segment
@@ -1899,9 +2196,9 @@ class Pattern {
 }
 // Replace leading `~` segment
 else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
-const homedir = os.homedir();
-assert(homedir, 'Unable to determine HOME directory');
-assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
+homedir = homedir || os.homedir();
+assert_1.default(homedir, 'Unable to determine HOME directory');
+assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
 pattern = Pattern.globEscape(homedir) + pattern.substr(1);
 }
 // Replace relative drive root, e.g. pattern is C: or C:foo
@@ -2004,6 +2301,7 @@ exports.Pattern = Pattern;
 "use strict";

 Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.SearchState = void 0;
 class SearchState {
 constructor(path, level) {
 this.path = path;
@@ -2232,6 +2530,19 @@ class HttpClientResponse {
 }));
 });
 }
+readBodyBuffer() {
+return __awaiter(this, void 0, void 0, function* () {
+return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+const chunks = [];
+this.message.on('data', (chunk) => {
+chunks.push(chunk);
+});
+this.message.on('end', () => {
+resolve(Buffer.concat(chunks));
+});
+}));
+});
+}
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
@@ -2736,7 +3047,13 @@ function getProxyUrl(reqUrl) {
 }
 })();
 if (proxyVar) {
-return new URL(proxyVar);
+try {
+return new URL(proxyVar);
+}
+catch (_a) {
+if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+return new URL(`http://${proxyVar}`);
+}
 }
 else {
 return undefined;
@@ -2747,6 +3064,10 @@ function checkBypass(reqUrl) {
 if (!reqUrl.hostname) {
 return false;
 }
+const reqHost = reqUrl.hostname;
+if (isLoopbackAddress(reqHost)) {
+return true;
+}
 const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
 if (!noProxy) {
 return false;
@@ -2772,13 +3093,24 @@ function checkBypass(reqUrl) {
 .split(',')
 .map(x => x.trim().toUpperCase())
 .filter(x => x)) {
-if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+if (upperNoProxyItem === '*' ||
+upperReqHosts.some(x => x === upperNoProxyItem ||
+x.endsWith(`.${upperNoProxyItem}`) ||
+(upperNoProxyItem.startsWith('.') &&
+x.endsWith(`${upperNoProxyItem}`)))) {
 return true;
 }
 }
 return false;
 }
 exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+const hostLower = host.toLowerCase();
+return (hostLower === 'localhost' ||
+hostLower.startsWith('127.') ||
+hostLower.startsWith('[::1]') ||
+hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
 //# sourceMappingURL=proxy.js.map

 /***/ }),
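The proxy.js hunks above loosen the NO_PROXY match: a bare `*` now bypasses the proxy for every host, a listed domain also covers its subdomains, and loopback hosts are always bypassed via `isLoopbackAddress`. A minimal standalone sketch of that matching rule, assuming only a hostname is checked (the `shouldBypassProxy` name and the sample hosts are illustrative, not the library's API; the bundled code also compares against host:port):

// Sketch: NO_PROXY bypass rules as introduced by the hunks above (illustrative only).
function shouldBypassProxy(hostname, noProxy) {
  const host = hostname.toUpperCase();
  return noProxy
    .split(',')
    .map(x => x.trim().toUpperCase())
    .filter(x => x)
    .some(item =>
      item === '*' ||                                  // wildcard bypasses everything
      host === item ||                                 // exact match
      host.endsWith(`.${item}`) ||                     // subdomain of a listed domain
      (item.startsWith('.') && host.endsWith(item)));  // leading-dot form, e.g. ".example.com"
}

// Illustrative checks:
console.log(shouldBypassProxy('api.example.com', 'example.com')); // true
console.log(shouldBypassProxy('example.org', 'example.com'));     // false
console.log(shouldBypassProxy('anything.test', '*'));             // true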
@@ -2817,6 +3149,9 @@ function range(a, b, str) {
 var i = ai;

 if (ai >= 0 && bi > 0) {
+if(a===b) {
+return [ai, bi];
+}
 begs = [];
 left = str.length;

@@ -2,7 +2,7 @@
 SET UPDATEFILE=update.finished
 "%~dp0\bin\Runner.Listener.exe" run %*

-rem using `if %ERRORLEVEL% EQU N` insterad of `if ERRORLEVEL N`
+rem using `if %ERRORLEVEL% EQU N` instead of `if ERRORLEVEL N`
 rem `if ERRORLEVEL N` means: error level is N or MORE

 if %ERRORLEVEL% EQU 0 (

@@ -75,6 +75,8 @@ mscordaccore.dll
 mscordaccore_amd64_amd64_6.0.522.21309.dll
 mscordaccore_arm64_arm64_6.0.522.21309.dll
 mscordaccore_amd64_amd64_6.0.1322.58009.dll
+mscordaccore_amd64_amd64_6.0.2023.32017.dll
+mscordaccore_amd64_amd64_6.0.2223.42425.dll
 mscordbi.dll
 mscorlib.dll
 mscorrc.debug.dll

@@ -1,4 +1,4 @@
 using GitHub.Runner.Sdk;
 using System;
 using System.Collections.Generic;

@@ -1,4 +1,4 @@
 namespace GitHub.Runner.Common
 {
 public enum ActionResult
 {
@@ -10,4 +10,4 @@ namespace GitHub.Runner.Common

 Skipped = 3
 }
 }

@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.Threading;
 using System.Threading.Tasks;

@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using GitHub.DistributedTask.Logging;
 using GitHub.Runner.Sdk;

@@ -1,4 +1,4 @@
 using GitHub.Runner.Sdk;
 using System;
 using System.IO;
 using System.Linq;
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
 private bool? _isHostedServer;

 [DataMember(EmitDefaultValue = false)]
-public int AgentId { get; set; }
+public ulong AgentId { get; set; }

 [DataMember(EmitDefaultValue = false)]
 public string AgentName { get; set; }

@@ -1,4 +1,4 @@
 using System;

 namespace GitHub.Runner.Common
 {
@@ -69,6 +69,8 @@ namespace GitHub.Runner.Common
 public static readonly OSPlatform Platform = OSPlatform.OSX;
 #elif OS_WINDOWS
 public static readonly OSPlatform Platform = OSPlatform.Windows;
+#else
+public static readonly OSPlatform Platform = OSPlatform.Linux;
 #endif

 #if X86
@@ -79,6 +81,8 @@ namespace GitHub.Runner.Common
 public static readonly Architecture PlatformArchitecture = Architecture.Arm;
 #elif ARM64
 public static readonly Architecture PlatformArchitecture = Architecture.Arm64;
+#else
+public static readonly Architecture PlatformArchitecture = Architecture.X64;
 #endif

 public static readonly TimeSpan ExitOnUnloadTimeout = TimeSpan.FromSeconds(30);
@@ -132,6 +136,7 @@ namespace GitHub.Runner.Common
 public static readonly string GenerateServiceConfig = "generateServiceConfig";
 public static readonly string Help = "help";
 public static readonly string Local = "local";
+public static readonly string NoDefaultLabels = "no-default-labels";
 public static readonly string Replace = "replace";
 public static readonly string DisableUpdate = "disableupdate";
 public static readonly string Once = "once"; // Keep this around since customers still relies on it
@@ -153,7 +158,8 @@ namespace GitHub.Runner.Common
 public static class Features
 {
 public static readonly string DiskSpaceWarning = "runner.diskspace.warning";
-public static readonly string Node12Warning = "DistributedTask.AddWarningToNode12Action";
+public static readonly string Node16Warning = "DistributedTask.AddWarningToNode16Action";
+public static readonly string LogTemplateErrorsAsDebugMessages = "DistributedTask.LogTemplateErrorsAsDebugMessages";
 public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate";
 public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
 }
@@ -169,7 +175,11 @@ namespace GitHub.Runner.Common
 public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
 public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
 public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
-public static readonly string Node12DetectedAfterEndOfLife = "Node.js 12 actions are deprecated. Please update the following actions to use Node.js 16: {0}. For more information see: https://github.blog/changelog/2022-09-22-github-actions-all-actions-will-begin-running-on-node16-instead-of-node12/.";
+public static readonly string DetectedNodeAfterEndOfLifeMessage = "Node.js 16 actions are deprecated. Please update the following actions to use Node.js 20: {0}. For more information see: https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/.";
+public static readonly string DeprecatedNodeDetectedAfterEndOfLifeActions = "DeprecatedNodeActionsMessageWarnings";
+public static readonly string DeprecatedNodeVersion = "node16";
+public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/";
+public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings";
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class RunnerEvent
|
public static class RunnerEvent
|
||||||
@@ -251,6 +261,7 @@ namespace GitHub.Runner.Common
|
|||||||
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
|
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
|
||||||
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
|
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
|
||||||
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
|
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
|
||||||
|
public static readonly string ActionArchiveCacheDirectory = "ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE";
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class System
|
public static class System
|
||||||
@@ -261,6 +272,7 @@ namespace GitHub.Runner.Common
|
|||||||
public static readonly string AccessToken = "system.accessToken";
|
public static readonly string AccessToken = "system.accessToken";
|
||||||
public static readonly string Culture = "system.culture";
|
public static readonly string Culture = "system.culture";
|
||||||
public static readonly string PhaseDisplayName = "system.phaseDisplayName";
|
public static readonly string PhaseDisplayName = "system.phaseDisplayName";
|
||||||
|
public static readonly string JobRequestType = "system.jobRequestType";
|
||||||
public static readonly string OrchestrationId = "system.orchestrationId";
|
public static readonly string OrchestrationId = "system.orchestrationId";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
using System;
|
using System;
|
||||||
using System.Collections.Concurrent;
|
using System.Collections.Concurrent;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Concurrent;
|
using System.Collections.Concurrent;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Diagnostics;
|
using System.Diagnostics;
|
||||||
@@ -203,7 +203,7 @@ namespace GitHub.Runner.Common
|
|||||||
|
|
||||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||||
{
|
{
|
||||||
_trace.Warning($"Runner is running under insecure mode: HTTPS server certifcate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
|
_trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
|
||||||
}
|
}
|
||||||
|
|
||||||
var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
|
var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
using System;
|
using System;
|
||||||
using System.Diagnostics;
|
using System.Diagnostics;
|
||||||
using System.Globalization;
|
using System.Globalization;
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Net.Http;
|
using System.Net.Http;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
|
|
||||||
@@ -24,4 +24,4 @@ namespace GitHub.Runner.Common
|
|||||||
return client;
|
return client;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Net;
|
using System.Net;
|
||||||
using System.Net.Sockets;
|
using System.Net.Sockets;
|
||||||
using System.Text;
|
using System.Text;
|
||||||
@@ -37,10 +37,10 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
ConnectMonitor(monitorSocketAddress);
|
ConnectMonitor(monitorSocketAddress);
|
||||||
}
|
}
|
||||||
|
|
||||||
private void StartMonitor(Guid jobId, string accessToken, Uri serverUri)
|
private void StartMonitor(Guid jobId, string accessToken, Uri serverUri)
|
||||||
{
|
{
|
||||||
if(String.IsNullOrEmpty(accessToken))
|
if (String.IsNullOrEmpty(accessToken))
|
||||||
{
|
{
|
||||||
Trace.Info("No access token could be retrieved to start the monitor.");
|
Trace.Info("No access token could be retrieved to start the monitor.");
|
||||||
return;
|
return;
|
||||||
@@ -82,7 +82,7 @@ namespace GitHub.Runner.Common
|
|||||||
_monitorSocket.Send(Encoding.UTF8.GetBytes(message));
|
_monitorSocket.Send(Encoding.UTF8.GetBytes(message));
|
||||||
Trace.Info("Finished EndMonitor writing to socket");
|
Trace.Info("Finished EndMonitor writing to socket");
|
||||||
|
|
||||||
await Task.Delay(TimeSpan.FromSeconds(2));
|
await Task.Delay(TimeSpan.FromSeconds(2));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
catch (SocketException e)
|
catch (SocketException e)
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
@@ -11,10 +11,10 @@ using System.Threading.Tasks;
|
|||||||
using GitHub.DistributedTask.WebApi;
|
using GitHub.DistributedTask.WebApi;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
using GitHub.Services.Common;
|
using GitHub.Services.Common;
|
||||||
|
using GitHub.Services.OAuth;
|
||||||
|
using GitHub.Services.Results.Client;
|
||||||
using GitHub.Services.WebApi;
|
using GitHub.Services.WebApi;
|
||||||
using GitHub.Services.WebApi.Utilities.Internal;
|
using GitHub.Services.WebApi.Utilities.Internal;
|
||||||
using GitHub.Services.Results.Client;
|
|
||||||
using GitHub.Services.OAuth;
|
|
||||||
|
|
||||||
namespace GitHub.Runner.Common
|
namespace GitHub.Runner.Common
|
||||||
{
|
{
|
||||||
@@ -254,7 +254,7 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
failedAttemptsToPostBatchedLinesByWebsocket++;
|
failedAttemptsToPostBatchedLinesByWebsocket++;
|
||||||
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
|
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
|
||||||
Trace.Error(ex);
|
Trace.Verbose(ex.ToString());
|
||||||
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
|
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
|
||||||
{
|
{
|
||||||
// let's consider failure percentage
|
// let's consider failure percentage
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Concurrent;
|
using System.Collections.Concurrent;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
|
using System.Diagnostics;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Threading;
|
using System.Threading;
|
||||||
@@ -14,10 +15,11 @@ namespace GitHub.Runner.Common
|
|||||||
[ServiceLocator(Default = typeof(JobServerQueue))]
|
[ServiceLocator(Default = typeof(JobServerQueue))]
|
||||||
public interface IJobServerQueue : IRunnerService, IThrottlingReporter
|
public interface IJobServerQueue : IRunnerService, IThrottlingReporter
|
||||||
{
|
{
|
||||||
|
IList<JobTelemetry> JobTelemetries { get; }
|
||||||
TaskCompletionSource<int> JobRecordUpdated { get; }
|
TaskCompletionSource<int> JobRecordUpdated { get; }
|
||||||
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
||||||
Task ShutdownAsync();
|
Task ShutdownAsync();
|
||||||
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false);
|
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false);
|
||||||
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
|
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
|
||||||
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
|
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
|
||||||
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
|
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
|
||||||
@@ -69,13 +71,18 @@ namespace GitHub.Runner.Common
|
|||||||
private Task[] _allDequeueTasks;
|
private Task[] _allDequeueTasks;
|
||||||
private readonly TaskCompletionSource<int> _jobCompletionSource = new();
|
private readonly TaskCompletionSource<int> _jobCompletionSource = new();
|
||||||
private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
|
private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
|
||||||
|
private readonly List<JobTelemetry> _jobTelemetries = new();
|
||||||
private bool _queueInProcess = false;
|
private bool _queueInProcess = false;
|
||||||
private bool _resultsServiceOnly = false;
|
private bool _resultsServiceOnly = false;
|
||||||
|
private Stopwatch _resultsUploadTimer = new();
|
||||||
|
private Stopwatch _actionsUploadTimer = new();
|
||||||
|
|
||||||
public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;
|
public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;
|
||||||
|
|
||||||
public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
||||||
|
|
||||||
|
public IList<JobTelemetry> JobTelemetries => _jobTelemetries;
|
||||||
|
|
||||||
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
|
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
|
||||||
// Then the dequeue will happen every 500ms.
|
// Then the dequeue will happen every 500ms.
|
||||||
// In this way, customer still can get instance live console output on job start,
|
// In this way, customer still can get instance live console output on job start,
|
||||||
@@ -87,6 +94,7 @@ namespace GitHub.Runner.Common
|
|||||||
private bool _firstConsoleOutputs = true;
|
private bool _firstConsoleOutputs = true;
|
||||||
|
|
||||||
private bool _resultsClientInitiated = false;
|
private bool _resultsClientInitiated = false;
|
||||||
|
private bool _enableTelemetry = false;
|
||||||
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
|
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
|
||||||
|
|
||||||
public override void Initialize(IHostContext hostContext)
|
public override void Initialize(IHostContext hostContext)
|
||||||
@@ -96,14 +104,15 @@ namespace GitHub.Runner.Common
|
|||||||
_resultsServer = hostContext.GetService<IResultsServer>();
|
_resultsServer = hostContext.GetService<IResultsServer>();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false)
|
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false)
|
||||||
{
|
{
|
||||||
Trace.Entering();
|
Trace.Entering();
|
||||||
_resultsServiceOnly = resultServiceOnly;
|
_resultsServiceOnly = resultsServiceOnly;
|
||||||
|
_enableTelemetry = enableTelemetry;
|
||||||
|
|
||||||
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||||
|
|
||||||
if (!resultServiceOnly)
|
if (!resultsServiceOnly)
|
||||||
{
|
{
|
||||||
_jobServer.InitializeWebsocketClient(serviceEndPoint);
|
_jobServer.InitializeWebsocketClient(serviceEndPoint);
|
||||||
}
|
}
|
||||||
@@ -119,7 +128,7 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
string liveConsoleFeedUrl = null;
|
string liveConsoleFeedUrl = null;
|
||||||
Trace.Info("Initializing results client");
|
Trace.Info("Initializing results client");
|
||||||
if (resultServiceOnly
|
if (resultsServiceOnly
|
||||||
&& serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
|
&& serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
|
||||||
&& !string.IsNullOrEmpty(feedStreamUrl))
|
&& !string.IsNullOrEmpty(feedStreamUrl))
|
||||||
{
|
{
|
||||||
@@ -211,6 +220,12 @@ namespace GitHub.Runner.Common
|
|||||||
await _resultsServer.DisposeAsync();
|
await _resultsServer.DisposeAsync();
|
||||||
|
|
||||||
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
|
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
|
||||||
|
if (_enableTelemetry)
|
||||||
|
{
|
||||||
|
var uploadTimeComparison = $"Actions upload time: {_actionsUploadTimer.ElapsedMilliseconds} ms, Result upload time: {_resultsUploadTimer.ElapsedMilliseconds} ms";
|
||||||
|
Trace.Info(uploadTimeComparison);
|
||||||
|
_jobTelemetries.Add(new JobTelemetry() { Type = JobTelemetryType.General, Message = uploadTimeComparison });
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
|
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
|
||||||
@@ -456,6 +471,10 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
|
if (_enableTelemetry)
|
||||||
|
{
|
||||||
|
_actionsUploadTimer.Start();
|
||||||
|
}
|
||||||
await UploadFile(file);
|
await UploadFile(file);
|
||||||
}
|
}
|
||||||
catch (Exception ex)
|
catch (Exception ex)
|
||||||
@@ -471,6 +490,13 @@ namespace GitHub.Runner.Common
|
|||||||
// _fileUploadQueue.Enqueue(file);
|
// _fileUploadQueue.Enqueue(file);
|
||||||
//}
|
//}
|
||||||
}
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
if (_enableTelemetry)
|
||||||
|
{
|
||||||
|
_actionsUploadTimer.Stop();
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
|
Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
|
||||||
@@ -517,6 +543,10 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
|
if (_enableTelemetry)
|
||||||
|
{
|
||||||
|
_resultsUploadTimer.Start();
|
||||||
|
}
|
||||||
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
|
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
|
||||||
{
|
{
|
||||||
await UploadSummaryFile(file);
|
await UploadSummaryFile(file);
|
||||||
@@ -541,10 +571,19 @@ namespace GitHub.Runner.Common
|
|||||||
Trace.Error(ex);
|
Trace.Error(ex);
|
||||||
errorCount++;
|
errorCount++;
|
||||||
|
|
||||||
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
|
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs
|
||||||
_resultsClientInitiated = false;
|
if (!_resultsServiceOnly)
|
||||||
|
{
|
||||||
SendResultsTelemetry(ex);
|
_resultsClientInitiated = false;
|
||||||
|
SendResultsTelemetry(ex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
if (_enableTelemetry)
|
||||||
|
{
|
||||||
|
_resultsUploadTimer.Stop();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -660,9 +699,11 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
Trace.Info("Catch exception during update steps, skip update Results.");
|
Trace.Info("Catch exception during update steps, skip update Results.");
|
||||||
Trace.Error(e);
|
Trace.Error(e);
|
||||||
_resultsClientInitiated = false;
|
if (!_resultsServiceOnly)
|
||||||
|
{
|
||||||
SendResultsTelemetry(e);
|
_resultsClientInitiated = false;
|
||||||
|
SendResultsTelemetry(e);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (_bufferedRetryRecords.Remove(update.TimelineId))
|
if (_bufferedRetryRecords.Remove(update.TimelineId))
|
||||||
@@ -756,17 +797,17 @@ namespace GitHub.Runner.Common
|
|||||||
timelineRecord.State = rec.State ?? timelineRecord.State;
|
timelineRecord.State = rec.State ?? timelineRecord.State;
|
||||||
timelineRecord.WorkerName = rec.WorkerName ?? timelineRecord.WorkerName;
|
timelineRecord.WorkerName = rec.WorkerName ?? timelineRecord.WorkerName;
|
||||||
|
|
||||||
if (rec.ErrorCount != null && rec.ErrorCount > 0)
|
if (rec.ErrorCount > 0)
|
||||||
{
|
{
|
||||||
timelineRecord.ErrorCount = rec.ErrorCount;
|
timelineRecord.ErrorCount = rec.ErrorCount;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (rec.WarningCount != null && rec.WarningCount > 0)
|
if (rec.WarningCount > 0)
|
||||||
{
|
{
|
||||||
timelineRecord.WarningCount = rec.WarningCount;
|
timelineRecord.WarningCount = rec.WarningCount;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (rec.NoticeCount != null && rec.NoticeCount > 0)
|
if (rec.NoticeCount > 0)
|
||||||
{
|
{
|
||||||
timelineRecord.NoticeCount = rec.NoticeCount;
|
timelineRecord.NoticeCount = rec.NoticeCount;
|
||||||
}
|
}
|
||||||
@@ -797,7 +838,7 @@ namespace GitHub.Runner.Common
|
|||||||
foreach (var record in mergedRecords)
|
foreach (var record in mergedRecords)
|
||||||
{
|
{
|
||||||
Trace.Verbose($" Record: t={record.RecordType}, n={record.Name}, s={record.State}, st={record.StartTime}, {record.PercentComplete}%, ft={record.FinishTime}, r={record.Result}: {record.CurrentOperation}");
|
Trace.Verbose($" Record: t={record.RecordType}, n={record.Name}, s={record.State}, st={record.StartTime}, {record.PercentComplete}%, ft={record.FinishTime}, r={record.Result}: {record.CurrentOperation}");
|
||||||
if (record.Issues != null && record.Issues.Count > 0)
|
if (record.Issues != null)
|
||||||
{
|
{
|
||||||
foreach (var issue in record.Issues)
|
foreach (var issue in record.Issues)
|
||||||
{
|
{
|
||||||
@@ -807,7 +848,7 @@ namespace GitHub.Runner.Common
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (record.Variables != null && record.Variables.Count > 0)
|
if (record.Variables != null)
|
||||||
{
|
{
|
||||||
foreach (var variable in record.Variables)
|
foreach (var variable in record.Variables)
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using GitHub.DistributedTask.WebApi;
|
using GitHub.DistributedTask.WebApi;
|
||||||
|
|
||||||
namespace GitHub.Runner.Common
|
namespace GitHub.Runner.Common
|
||||||
|
|||||||
42
src/Runner.Common/LaunchServer.cs
Normal file
42
src/Runner.Common/LaunchServer.cs
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using System.Threading;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using GitHub.DistributedTask.WebApi;
|
||||||
|
using GitHub.Services.Launch.Client;
|
||||||
|
using GitHub.Services.WebApi;
|
||||||
|
|
||||||
|
namespace GitHub.Runner.Common
|
||||||
|
{
|
||||||
|
[ServiceLocator(Default = typeof(LaunchServer))]
|
||||||
|
public interface ILaunchServer : IRunnerService
|
||||||
|
{
|
||||||
|
void InitializeLaunchClient(Uri uri, string token);
|
||||||
|
|
||||||
|
Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken);
|
||||||
|
}
|
||||||
|
|
||||||
|
public sealed class LaunchServer : RunnerService, ILaunchServer
|
||||||
|
{
|
||||||
|
private LaunchHttpClient _launchClient;
|
||||||
|
|
||||||
|
public void InitializeLaunchClient(Uri uri, string token)
|
||||||
|
{
|
||||||
|
var httpMessageHandler = HostContext.CreateHttpClientHandler();
|
||||||
|
this._launchClient = new LaunchHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList,
|
||||||
|
CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
if (_launchClient != null)
|
||||||
|
{
|
||||||
|
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
|
||||||
|
cancellationToken: cancellationToken);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new InvalidOperationException("Launch client is not initialized.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
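A minimal usage sketch for the new ILaunchServer service added above (not part of the diff): it assumes the caller is a runner component with access to an IHostContext, and the URL, token, and identifiers below are placeholders rather than values taken from this change.

using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;

public static class LaunchServerUsageSketch
{
    // Resolves the new service the same way other runner services are resolved
    // (hostContext.GetService<T>()) and asks it for action download info.
    public static async Task<ActionDownloadInfoCollection> ResolveAsync(
        IHostContext hostContext,
        Uri launchServiceUrl,   // placeholder: in practice this comes from the job message
        string launchToken,     // placeholder
        Guid planId,
        Guid jobId,
        ActionReferenceList actions,
        CancellationToken cancellationToken)
    {
        var launchServer = hostContext.GetService<ILaunchServer>();
        launchServer.InitializeLaunchClient(launchServiceUrl, launchToken);
        return await launchServer.ResolveActionsDownloadInfoAsync(planId, jobId, actions, cancellationToken);
    }
}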
@@ -1,4 +1,4 @@
 using System;
 using System.IO;

 namespace GitHub.Runner.Common
@@ -1,4 +1,4 @@
 using GitHub.Runner.Common.Util;
 using GitHub.Runner.Sdk;
 using System;
 using System.Collections.Generic;
src/Runner.Common/RedirectMessageHandler.cs (new file, 73 lines)
@@ -0,0 +1,73 @@
+using System;
+using System.ComponentModel;
+using System.Net;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.Runner.Sdk;
+using GitHub.Services.Common;
+
+namespace GitHub.Runner.Common
+{
+    /// <summary>
+    /// Handles redirects for Http requests
+    /// </summary>
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public class RedirectMessageHandler : DelegatingHandler
+    {
+        public RedirectMessageHandler(ITraceWriter trace)
+        {
+            Trace = trace;
+        }
+
+        protected override async Task<HttpResponseMessage> SendAsync(
+            HttpRequestMessage request,
+            CancellationToken cancellationToken)
+        {
+            HttpResponseMessage response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
+
+            if (response != null &&
+                IsRedirect(response.StatusCode) &&
+                response.Headers.Location != null)
+            {
+                Trace.Info($"Redirecting to '{response.Headers.Location}'.");
+
+                request = await CloneAsync(request, response.Headers.Location).ConfigureAwait(false);
+
+                response.Dispose();
+
+                response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
+            }
+
+            return response;
+        }
+
+        private static bool IsRedirect(HttpStatusCode statusCode)
+        {
+            return (int)statusCode >= 300 && (int)statusCode < 400;
+        }
+
+        private static async Task<HttpRequestMessage> CloneAsync(HttpRequestMessage request, Uri requestUri)
+        {
+            var clone = new HttpRequestMessage(request.Method, requestUri)
+            {
+                Version = request.Version
+            };
+
+            request.Headers.ForEach(header => clone.Headers.TryAddWithoutValidation(header.Key, header.Value));
+
+            request.Options.ForEach(option => clone.Options.Set(new HttpRequestOptionsKey<object>(option.Key), option.Value));
+
+            if (request.Content != null)
+            {
+                clone.Content = new ByteArrayContent(await request.Content.ReadAsByteArrayAsync().ConfigureAwait(false));
+
+                request.Content.Headers.ForEach(header => clone.Content.Headers.TryAddWithoutValidation(header.Key, header.Value));
+            }
+
+            return clone;
+        }
+
+        private readonly ITraceWriter Trace;
+    }
+}
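A minimal sketch of how the new RedirectMessageHandler could be wired into an HttpClient (not part of the diff): DelegatingHandler chaining is standard .NET, and the inner handler and trace writer below are placeholders; inside the runner the HTTP pipeline is normally assembled by the host context.

using System.Net.Http;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;

public static class RedirectHandlerSketch
{
    public static HttpClient CreateClient(ITraceWriter trace)
    {
        // In this sketch the inner handler's automatic redirects are turned off so the
        // RedirectMessageHandler in front of it sees the 3xx response and re-issues the request.
        var inner = new HttpClientHandler { AllowAutoRedirect = false };
        var redirectHandler = new RedirectMessageHandler(trace) { InnerHandler = inner };
        return new HttpClient(redirectHandler);
    }
}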
@@ -1,6 +1,7 @@
 using System;
 using System.Collections.Generic;
 using System.Linq;
+using System.Net.Http;
 using System.Net.Http.Headers;
 using System.Net.WebSockets;
 using System.Security;
@@ -52,8 +53,8 @@ namespace GitHub.Runner.Common

 public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
 {
-var httpMessageHandler = HostContext.CreateHttpClientHandler();
-this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
+this._resultsClient = CreateHttpClient(uri, token);
 _token = token;
 if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
 {
@@ -62,6 +63,26 @@ namespace GitHub.Runner.Common
 }
 }

+public ResultsHttpClient CreateHttpClient(Uri uri, string token)
+{
+// Using default 100 timeout
+RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
+
+// Create retry handler
+IEnumerable<DelegatingHandler> delegatingHandlers = new List<DelegatingHandler>();
+if (settings.MaxRetryRequest > 0)
+{
+delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) };
+}
+
+// Setup RawHttpMessageHandler without credentials
+var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings);
+
+var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
+
+return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true);
+}
+
 public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
 CancellationToken cancellationToken)
 {
@@ -131,13 +152,13 @@ namespace GitHub.Runner.Common

 private void InitializeWebsocketClient(string liveConsoleFeedUrl, string accessToken, TimeSpan delay, bool retryConnection = false)
 {
-if (!string.IsNullOrEmpty(accessToken))
+if (string.IsNullOrEmpty(accessToken))
 {
 Trace.Info($"No access token from server");
 return;
 }

-if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
+if (string.IsNullOrEmpty(liveConsoleFeedUrl))
 {
 Trace.Info($"No live console feed url from server");
 return;
@@ -222,7 +243,7 @@ namespace GitHub.Runner.Common
 {
 var delay = BackoffTimerHelper.GetRandomBackoff(MinDelayForWebsocketReconnect, MaxDelayForWebsocketReconnect);
 Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms.");
-Trace.Error(ex);
+Trace.Verbose(ex.ToString());
 retries++;
 InitializeWebsocketClient(_liveConsoleFeedUrl, _token, delay);
 }
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.Threading;
 using System.Threading.Tasks;
@@ -19,7 +19,15 @@ namespace GitHub.Runner.Common

 Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);

-Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken token);
+Task CompleteJobAsync(
+Guid planId,
+Guid jobId,
+TaskResult result,
+Dictionary<String, VariableValue> outputs,
+IList<StepResult> stepResults,
+IList<Annotation> jobAnnotations,
+string environmentUrl,
+CancellationToken token);

 Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
 }
@@ -52,14 +60,23 @@ namespace GitHub.Runner.Common
 {
 CheckConnection();
 return RetryRequest<AgentJobRequestMessage>(
-async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken);
+async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken,
+shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException);
 }

-public Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken cancellationToken)
+public Task CompleteJobAsync(
+Guid planId,
+Guid jobId,
+TaskResult result,
+Dictionary<String, VariableValue> outputs,
+IList<StepResult> stepResults,
+IList<Annotation> jobAnnotations,
+string environmentUrl,
+CancellationToken cancellationToken)
 {
 CheckConnection();
 return RetryRequest(
-async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, cancellationToken), cancellationToken);
+async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, cancellationToken), cancellationToken);
 }

 public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
@@ -1,4 +1,4 @@
 <Project Sdk="Microsoft.NET.Sdk">

 <PropertyGroup>
 <TargetFramework>net6.0</TargetFramework>
@@ -16,7 +16,7 @@

 <ItemGroup>
 <PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
-<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
+<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
 <PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
 <PackageReference Include="System.Text.Encoding.CodePages" Version="4.4.0" />
 <PackageReference Include="System.Threading.Channels" Version="4.4.0" />
@@ -1,4 +1,4 @@
 using GitHub.DistributedTask.WebApi;
 using System;
 using System.Collections.Generic;
 using System.Threading;
@@ -15,12 +15,11 @@ namespace GitHub.Runner.Common
 [ServiceLocator(Default = typeof(RunnerDotcomServer))]
 public interface IRunnerDotcomServer : IRunnerService
 {
-Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
+Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName);

 Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
+Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
 Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);

-string GetGitHubRequestId(HttpResponseHeaders headers);
 }

 public enum RequestType
@@ -42,7 +41,7 @@ namespace GitHub.Runner.Common
 }


-public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
+public async Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName)
 {
 var githubApiUrl = "";
 var gitHubUrlBuilder = new UriBuilder(githubUrl);
@@ -52,11 +51,11 @@ namespace GitHub.Runner.Common
 // org runner
 if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
 {
-githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
+githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
 }
 else
 {
-githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
+githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
 }
 }
 else if (path.Length == 2)
@@ -69,11 +68,11 @@ namespace GitHub.Runner.Common

 if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
 {
-githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
+githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
 }
 else
 {
-githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
+githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
 }
 }
 else
@@ -82,14 +81,8 @@ namespace GitHub.Runner.Common
 }

 var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
-var agents = runnersList.ToTaskAgents();
-
-if (string.IsNullOrEmpty(agentName))
-{
-return agents;
-}
-
-return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
+return runnersList.ToTaskAgents();
 }

 public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
@@ -137,6 +130,16 @@ namespace GitHub.Runner.Common
 }

 public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
+{
+return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, false);
+}
+
+public async Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
+{
+return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, true);
+}
+
+private async Task<DistributedTask.WebApi.Runner> AddOrReplaceRunner(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey, bool replace)
 {
 var gitHubUrlBuilder = new UriBuilder(githubUrl);
 var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
@@ -159,9 +162,15 @@ namespace GitHub.Runner.Common
 {"updates_disabled", agent.DisableUpdate},
 {"ephemeral", agent.Ephemeral},
 {"labels", agent.Labels},
-{"public_key", publicKey}
+{"public_key", publicKey},
 };
+
+if (replace)
+{
+bodyObject.Add("runner_id", agent.Id);
+bodyObject.Add("replace", replace);
+}
+
 var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");

 return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
@@ -195,7 +204,7 @@ namespace GitHub.Runner.Common
 if (response != null)
 {
 responseStatus = response.StatusCode;
-var githubRequestId = GetGitHubRequestId(response.Headers);
+var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

 if (response.IsSuccessStatusCode)
 {
@@ -215,7 +224,7 @@ namespace GitHub.Runner.Common
 }
 catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
 {
-Trace.Error($"{errorMessage} -- Atempt: {retry}");
+Trace.Error($"{errorMessage} -- Attempt: {retry}");
 Trace.Error(ex);
 }
 }
@@ -224,14 +233,5 @@ namespace GitHub.Runner.Common
 await Task.Delay(backOff);
 }
 }
-
-public string GetGitHubRequestId(HttpResponseHeaders headers)
-{
-if (headers.TryGetValues("x-github-request-id", out var headerValues))
-{
-return headerValues.FirstOrDefault();
-}
-return string.Empty;
-}
 }
 }
@@ -1,4 +1,4 @@
 using GitHub.DistributedTask.WebApi;
 using System;
 using System.Collections.Generic;
 using System.Threading;
@@ -27,8 +27,8 @@ namespace GitHub.Runner.Common

 // Configuration
 Task<TaskAgent> AddAgentAsync(Int32 agentPoolId, TaskAgent agent);
-Task DeleteAgentAsync(int agentPoolId, int agentId);
-Task DeleteAgentAsync(int agentId);
+Task DeleteAgentAsync(int agentPoolId, ulong agentId);
+Task DeleteAgentAsync(ulong agentId);
 Task<List<TaskAgentPool>> GetAgentPoolsAsync(string agentPoolName = null, TaskAgentPoolType poolType = TaskAgentPoolType.Automation);
 Task<List<TaskAgent>> GetAgentsAsync(int agentPoolId, string agentName = null);
 Task<List<TaskAgent>> GetAgentsAsync(string agentName);
@@ -50,7 +50,7 @@ namespace GitHub.Runner.Common
 Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);

 // agent update
-Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace);
+Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace);
 }

 public sealed class RunnerServer : RunnerService, IRunnerServer
@@ -239,13 +239,13 @@ namespace GitHub.Runner.Common
 return _genericTaskAgentClient.ReplaceAgentAsync(agentPoolId, agent);
 }

-public Task DeleteAgentAsync(int agentPoolId, int agentId)
+public Task DeleteAgentAsync(int agentPoolId, ulong agentId)
 {
 CheckConnection(RunnerConnectionType.Generic);
 return _genericTaskAgentClient.DeleteAgentAsync(agentPoolId, agentId);
 }

-public Task DeleteAgentAsync(int agentId)
+public Task DeleteAgentAsync(ulong agentId)
 {
 return DeleteAgentAsync(0, agentId); // agentPool is ignored server side
 }
@@ -315,7 +315,7 @@ namespace GitHub.Runner.Common
 return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
 }

-public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace)
+public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace)
 {
 CheckConnection(RunnerConnectionType.Generic);
 return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace);
@@ -80,10 +80,11 @@ namespace GitHub.Runner.Common
 }
 await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount);
 }

 protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
 CancellationToken cancellationToken,
-int maxRetryAttemptsCount = 5
+int maxRetryAttemptsCount = 5,
+Func<Exception, bool> shouldRetry = null
 )
 {
 var retryCount = 0;
@@ -96,7 +97,7 @@ namespace GitHub.Runner.Common
 return await func();
 }
 // TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
-catch (Exception ex) when (retryCount < maxRetryAttemptsCount)
+catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex)))
 {
 Trace.Error("Catch exception during request");
 Trace.Error(ex);
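For context, the new optional shouldRetry predicate above lets a caller opt specific failures out of the retry loop; the sketch below simply mirrors the GetJobMessageAsync change earlier in this diff and is illustrative only.

// Illustrative only, mirroring the GetJobMessageAsync call in this diff: when the predicate
// returns false for an exception, the catch filter does not match, so the exception
// propagates immediately instead of being retried.
return RetryRequest<AgentJobRequestMessage>(
    async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken),
    cancellationToken,
    shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException);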
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Diagnostics;
|
using System.Diagnostics;
|
||||||
using System.Globalization;
|
using System.Globalization;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
@@ -93,4 +93,4 @@ namespace GitHub.Runner.Common
|
|||||||
IndentLevel--;
|
IndentLevel--;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Net.Http;
|
using System.Net.Http;
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Concurrent;
|
using System.Collections.Concurrent;
|
||||||
using System.Diagnostics;
|
using System.Diagnostics;
|
||||||
using GitHub.DistributedTask.Logging;
|
using GitHub.DistributedTask.Logging;
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using Newtonsoft.Json;
|
using Newtonsoft.Json;
|
||||||
using System;
|
using System;
|
||||||
using System.Diagnostics;
|
using System.Diagnostics;
|
||||||
using System.Runtime.CompilerServices;
|
using System.Runtime.CompilerServices;
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
// Represents absence of value.
|
// Represents absence of value.
|
||||||
namespace GitHub.Runner.Common
|
namespace GitHub.Runner.Common
|
||||||
{
|
{
|
||||||
public readonly struct Unit
|
public readonly struct Unit
|
||||||
{
|
{
|
||||||
public static readonly Unit Value = default;
|
public static readonly Unit Value = default;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Threading;
|
using System.Threading;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
|
|
||||||
public static class EnumUtil
|
public static class EnumUtil
|
||||||
{
|
{
|
||||||
public static T? TryParse<T>(string value) where T: struct
|
public static T? TryParse<T>(string value) where T : struct
|
||||||
{
|
{
|
||||||
T val;
|
T val;
|
||||||
if (Enum.TryParse(value ?? string.Empty, ignoreCase: true, result: out val))
|
if (Enum.TryParse(value ?? string.Empty, ignoreCase: true, result: out val))
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
namespace GitHub.Runner.Common.Util
|
namespace GitHub.Runner.Common.Util
|
||||||
{
|
{
|
||||||
using System;
|
using System;
|
||||||
using GitHub.DistributedTask.WebApi;
|
using GitHub.DistributedTask.WebApi;
|
||||||
|
|||||||
@@ -6,13 +6,7 @@ namespace GitHub.Runner.Common.Util
|
|||||||
public static class NodeUtil
|
public static class NodeUtil
|
||||||
{
|
{
|
||||||
private const string _defaultNodeVersion = "node16";
|
private const string _defaultNodeVersion = "node16";
|
||||||
|
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16", "node20" });
|
||||||
#if (OS_OSX || OS_WINDOWS) && ARM64
|
|
||||||
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16" });
|
|
||||||
#else
|
|
||||||
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node12", "node16" });
|
|
||||||
#endif
|
|
||||||
|
|
||||||
public static string GetInternalNodeVersion()
|
public static string GetInternalNodeVersion()
|
||||||
{
|
{
|
||||||
var forcedInternalNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
|
var forcedInternalNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
|
|
||||||
namespace GitHub.Runner.Common.Util
|
namespace GitHub.Runner.Common.Util
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Diagnostics;
|
using System.Diagnostics;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
@@ -97,7 +97,7 @@ namespace GitHub.Runner.Listener
|
|||||||
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
|
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
|
catch (AccessDeniedException e) when (e.ErrorCode == 1)
|
||||||
{
|
{
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
@@ -108,7 +108,7 @@ namespace GitHub.Runner.Listener
|
|||||||
|
|
||||||
if (!IsGetNextMessageExceptionRetriable(ex))
|
if (!IsGetNextMessageExceptionRetriable(ex))
|
||||||
{
|
{
|
||||||
throw;
|
throw new NonRetryableException("Get next message failed with non-retryable error.", ex);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Threading.Tasks;
@@ -90,4 +90,4 @@ namespace GitHub.Runner.Listener.Check
 return result;
 }
 }
 }
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.Diagnostics.Tracing;
 using System.IO;
@@ -351,21 +351,39 @@ namespace GitHub.Runner.Listener.Check
 private readonly Dictionary<string, HashSet<string>> _ignoredEvent = new()
 {
 {
-"Microsoft-System-Net-Http",
+"System.Net.Http",
 new HashSet<string>
 {
 "Info",
 "Associate",
-"Enter",
-"Exit"
 }
 },
 {
-"Microsoft-System-Net-Security",
+"System.Net.Security",
+new HashSet<string>
+{
+"Info",
+"DumpBuffer",
+"SslStreamCtor",
+"SecureChannelCtor",
+"NoDelegateNoClientCert",
+"CertsAfterFiltering",
+"UsingCachedCredential",
+"SspiSelectedCipherSuite"
+}
+},
+{
+"Private.InternalDiagnostics.System.Net.Http",
+new HashSet<string>
+{
+"Info",
+"Associate",
+}
+},
+{
+"Private.InternalDiagnostics.System.Net.Security",
 new HashSet<string>
 {
-"Enter",
-"Exit",
 "Info",
 "DumpBuffer",
 "SslStreamCtor",
@@ -391,8 +409,8 @@ namespace GitHub.Runner.Listener.Check
 {
 base.OnEventSourceCreated(eventSource);

-if (eventSource.Name == "Microsoft-System-Net-Http" ||
-eventSource.Name == "Microsoft-System-Net-Security")
+if (eventSource.Name.Contains("System.Net.Http") ||
+eventSource.Name.Contains("System.Net.Security"))
 {
 EnableEvents(eventSource, EventLevel.Verbose, EventKeywords.All);
 }
@@ -415,4 +433,4 @@ namespace GitHub.Runner.Listener.Check
 }
 }
 }
 }
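These hunks switch the diagnostic listener from the old "Microsoft-System-Net-*" EventSource names to the "System.Net.*" and "Private.InternalDiagnostics.*" names and extend the per-source ignore lists. A compact EventListener sketch of the same pattern, with a deliberately trimmed-down ignore table (the real table is the dictionary shown above):

    using System;
    using System.Collections.Generic;
    using System.Diagnostics.Tracing;

    // Minimal sketch: enable verbose System.Net.* sources and drop noisy event names per source.
    public sealed class NetTraceListenerSketch : EventListener
    {
        private readonly Dictionary<string, HashSet<string>> _ignoredEvents = new()
        {
            { "System.Net.Http", new HashSet<string> { "Info", "Associate" } },
            { "System.Net.Security", new HashSet<string> { "Info", "DumpBuffer" } },
        };

        protected override void OnEventSourceCreated(EventSource eventSource)
        {
            base.OnEventSourceCreated(eventSource);
            if (eventSource.Name.Contains("System.Net.Http") ||
                eventSource.Name.Contains("System.Net.Security"))
            {
                EnableEvents(eventSource, EventLevel.Verbose, EventKeywords.All);
            }
        }

        protected override void OnEventWritten(EventWrittenEventArgs eventData)
        {
            if (_ignoredEvents.TryGetValue(eventData.EventSource.Name, out var ignored) &&
                ignored.Contains(eventData.EventName))
            {
                return; // skip noisy events
            }

            Console.WriteLine($"{eventData.EventSource.Name}/{eventData.EventName}");
        }
    }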
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
@@ -1,4 +1,4 @@
 using System.Collections.Generic;
 using System.Threading.Tasks;
 using GitHub.Runner.Common;

@@ -27,4 +27,4 @@ namespace GitHub.Runner.Listener.Check

 public List<string> Logs { get; set; }
 }
 }
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Threading.Tasks;
@@ -56,4 +56,4 @@ namespace GitHub.Runner.Listener.Check
 return result;
 }
 }
 }
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
@@ -1,4 +1,4 @@
 using GitHub.Runner.Listener.Configuration;
 using GitHub.Runner.Common.Util;
 using System;
 using System.Collections;
@@ -29,8 +29,8 @@ namespace GitHub.Runner.Listener
 private readonly Dictionary<string, string[]> validOptions = new()
 {
 // Valid configure flags and args
 [Constants.Runner.CommandLine.Commands.Configure] =
 new string[]
 {
 Constants.Runner.CommandLine.Flags.DisableUpdate,
 Constants.Runner.CommandLine.Flags.Ephemeral,
@@ -38,6 +38,7 @@ namespace GitHub.Runner.Listener
 Constants.Runner.CommandLine.Flags.Replace,
 Constants.Runner.CommandLine.Flags.RunAsService,
 Constants.Runner.CommandLine.Flags.Unattended,
+Constants.Runner.CommandLine.Flags.NoDefaultLabels,
 Constants.Runner.CommandLine.Args.Auth,
 Constants.Runner.CommandLine.Args.Labels,
 Constants.Runner.CommandLine.Args.MonitorSocketAddress,
@@ -85,6 +86,7 @@ namespace GitHub.Runner.Listener
 public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
 public bool GenerateServiceConfig => TestFlag(Constants.Runner.CommandLine.Flags.GenerateServiceConfig);
 public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
+public bool NoDefaultLabels => TestFlag(Constants.Runner.CommandLine.Flags.NoDefaultLabels);
 public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
 public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
 public bool RemoveLocalConfig => TestFlag(Constants.Runner.CommandLine.Flags.Local);
@@ -182,7 +184,7 @@ namespace GitHub.Runner.Listener
 {
 command = Constants.Runner.CommandLine.Commands.Warmup;
 }

 return command;
 }

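The CommandSettings hunks whitelist a new --no-default-labels flag for the configure command and surface it through TestFlag, like the other flags. A rough standalone sketch of that flag-lookup shape; the actual argv parsing in CommandSettings is more involved, so treat the constructor here as an assumption:

    using System;
    using System.Collections.Generic;
    using System.Linq;

    // Flags become bool properties backed by a TestFlag-style check; the real parser also
    // validates each flag against the per-command whitelist (validOptions) shown above.
    public sealed class CommandSettingsSketch
    {
        private readonly HashSet<string> _flags;

        public CommandSettingsSketch(IEnumerable<string> args)
        {
            _flags = args.Where(a => a.StartsWith("--", StringComparison.Ordinal))
                         .Select(a => a.TrimStart('-').ToLowerInvariant())
                         .ToHashSet();
        }

        private bool TestFlag(string name) => _flags.Contains(name);

        public bool NoDefaultLabels => TestFlag("no-default-labels");
        public bool Unattended => TestFlag("unattended");
    }

With new[] { "--unattended", "--no-default-labels", "--labels", "gpu" }, both NoDefaultLabels and Unattended report true in this sketch.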
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Net.Http;
@@ -137,7 +137,7 @@ namespace GitHub.Runner.Listener.Configuration
 GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
 runnerSettings.ServerUrl = authResult.TenantUrl;
 runnerSettings.UseV2Flow = authResult.UseV2Flow;
-_term.WriteLine($"Using V2 flow: {runnerSettings.UseV2Flow}");
+Trace.Info($"Using V2 flow: {runnerSettings.UseV2Flow}");
 creds = authResult.ToVssCredentials();
 Trace.Info("cred retrieved via GitHub auth");
 }
@@ -244,11 +244,11 @@ namespace GitHub.Runner.Listener.Configuration
 List<TaskAgent> agents;
 if (runnerSettings.UseV2Flow)
 {
-agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
+agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
 }
 else
 {
-agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
+agents = await _runnerServer.GetAgentsAsync(runnerSettings.AgentName);
 }

 Trace.Verbose("Returns {0} agents", agents.Count);
@@ -259,11 +259,27 @@ namespace GitHub.Runner.Listener.Configuration
 if (command.GetReplace())
 {
 // Update existing agent with new PublicKey, agent version.
-agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
+agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate, command.NoDefaultLabels);

 try
 {
-agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
+if (runnerSettings.UseV2Flow)
+{
+var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
+runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
+
+agent.Id = runner.Id;
+agent.Authorization = new TaskAgentAuthorization()
+{
+AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
+ClientId = new Guid(runner.RunnerAuthorization.ClientId)
+};
+}
+else
+{
+agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
+}

 if (command.DisableUpdate &&
 command.DisableUpdate != agent.DisableUpdate)
 {
@@ -293,7 +309,7 @@ namespace GitHub.Runner.Listener.Configuration
 else
 {
 // Create a new agent.
-agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
+agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate, command.NoDefaultLabels);

 try
 {
@@ -554,7 +570,7 @@ namespace GitHub.Runner.Listener.Configuration
 }


-private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
+private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate, bool noDefaultLabels)
 {
 ArgUtil.NotNull(agent, nameof(agent));
 agent.Authorization = new TaskAgentAuthorization
@@ -571,9 +587,16 @@ namespace GitHub.Runner.Listener.Configuration

 agent.Labels.Clear();

-agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
-agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
-agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
+if (!noDefaultLabels)
+{
+agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
+agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
+agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
+}
+else if (userLabels.Count == 0)
+{
+throw new NotSupportedException("Disabling default labels via --no-default-labels without specifying --labels is not supported");
+}

 foreach (var userLabel in userLabels)
 {
@@ -583,7 +606,7 @@ namespace GitHub.Runner.Listener.Configuration
 return agent;
 }

-private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
+private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate, bool noDefaultLabels)
 {
 TaskAgent agent = new(agentName)
 {
@@ -598,9 +621,16 @@ namespace GitHub.Runner.Listener.Configuration
 DisableUpdate = disableUpdate
 };

-agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
-agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
-agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
+if (!noDefaultLabels)
+{
+agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
+agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
+agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
+}
+else if (userLabels.Count == 0)
+{
+throw new NotSupportedException("Disabling default labels via --no-default-labels without specifying --labels is not supported");
+}

 foreach (var userLabel in userLabels)
 {
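Both UpdateExistingAgent and CreateNewAgent now take a noDefaultLabels argument: the self-hosted/OS/architecture labels are only added when it is false, and passing the flag without any custom labels is rejected. A standalone sketch of that decision, using plain strings in place of the runner's AgentLabel/LabelType types:

    using System;
    using System.Collections.Generic;

    public static class RunnerLabelSketch
    {
        // Mirrors the branch added above, with plain strings instead of AgentLabel/LabelType.
        public static List<string> BuildLabels(ISet<string> userLabels, bool noDefaultLabels,
                                               string os, string osArch)
        {
            var labels = new List<string>();
            if (!noDefaultLabels)
            {
                labels.Add("self-hosted");
                labels.Add(os);
                labels.Add(osArch);
            }
            else if (userLabels.Count == 0)
            {
                throw new NotSupportedException(
                    "Disabling default labels via --no-default-labels without specifying --labels is not supported");
            }

            labels.AddRange(userLabels);
            return labels;
        }
    }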
@@ -695,7 +725,7 @@ namespace GitHub.Runner.Listener.Configuration
 {
 var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
 responseStatus = response.StatusCode;
-var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
+var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

 if (response.IsSuccessStatusCode)
 {
@@ -714,7 +744,7 @@ namespace GitHub.Runner.Listener.Configuration
 catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
 {
 retryCount++;
-Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
+Trace.Error($"Failed to get JIT runner token -- Attempt: {retryCount}");
 Trace.Error(ex);
 }
 }
@@ -758,7 +788,7 @@ namespace GitHub.Runner.Listener.Configuration
 {
 var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
 responseStatus = response.StatusCode;
-var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
+var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

 if (response.IsSuccessStatusCode)
 {
@@ -777,7 +807,7 @@ namespace GitHub.Runner.Listener.Configuration
 catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
 {
 retryCount++;
-Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
+Trace.Error($"Failed to get tenant credentials -- Attempt: {retryCount}");
 Trace.Error(ex);
 }
 }
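Both registration calls above retry the POST with an exception filter: up to three attempts, but never when the server answered 404. A self-contained sketch of that shape against a plain HttpClient; the URL, delay, and logging are placeholders:

    using System;
    using System.Net;
    using System.Net.Http;
    using System.Threading.Tasks;

    public static class RegistrationRetrySketch
    {
        // Retry a POST up to three times, skipping the retry entirely on 404 responses.
        public static async Task<string> PostWithRetryAsync(HttpClient client, string url)
        {
            var retryCount = 0;
            while (true)
            {
                HttpStatusCode? responseStatus = null;
                try
                {
                    var response = await client.PostAsync(url, new StringContent(string.Empty));
                    responseStatus = response.StatusCode;
                    response.EnsureSuccessStatusCode();
                    return await response.Content.ReadAsStringAsync();
                }
                catch (Exception ex) when (retryCount < 2 && responseStatus != HttpStatusCode.NotFound)
                {
                    retryCount++;
                    Console.WriteLine($"Request failed -- Attempt: {retryCount} ({ex.Message})");
                    await Task.Delay(TimeSpan.FromSeconds(1));
                }
            }
        }
    }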
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.Runtime.Serialization;
 using GitHub.Runner.Common;
@@ -46,7 +46,7 @@ namespace GitHub.Runner.Listener.Configuration

 if (!store.HasCredentials())
 {
 throw new InvalidOperationException("Credentials not stored. Must reconfigure.");
 }

 CredentialData credData = store.GetCredentials();
@@ -1,4 +1,4 @@
 #if OS_WINDOWS
 #pragma warning disable CA1416
 using System;
 using System.Collections;
@@ -514,9 +514,25 @@ namespace GitHub.Runner.Listener.Configuration
 failureActions.Add(new FailureAction(RecoverAction.Restart, 60000));

 // Lock the Service Database
-svcLock = LockServiceDatabase(scmHndl);
-if (svcLock.ToInt64() <= 0)
+int svcLockRetries = 10;
+int svcLockRetryTimeout = 5000;
+while (true)
 {
+svcLock = LockServiceDatabase(scmHndl);
+if (svcLock.ToInt64() > 0)
+{
+break;
+}
+
+_term.WriteLine("Retrying Lock Service Database...");
+
+svcLockRetries--;
+if (svcLockRetries > 0)
+{
+Thread.Sleep(svcLockRetryTimeout);
+continue;
+}
+
 throw new Exception("Failed to Lock Service Database for Write");
 }

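The service-configuration hunk replaces the single LockServiceDatabase call with a bounded retry loop. The same shape, generalized below; the acquire and validate delegates stand in for the Win32 LockServiceDatabase call and the handle check:

    using System;
    using System.Threading;

    public static class LockRetrySketch
    {
        // Retry an acquire call a fixed number of times, sleeping between attempts,
        // and fail loudly once retries run out.
        public static T AcquireWithRetry<T>(Func<T> acquire, Func<T, bool> acquired,
                                            int retries = 10, int delayMs = 5000)
        {
            while (true)
            {
                var handle = acquire();
                if (acquired(handle))
                {
                    return handle;
                }

                Console.WriteLine("Retrying Lock Service Database...");

                retries--;
                if (retries > 0)
                {
                    Thread.Sleep(delayMs);
                    continue;
                }

                throw new Exception("Failed to Lock Service Database for Write");
            }
        }
    }

A hypothetical call site mirroring the hunk would be AcquireWithRetry(() => LockServiceDatabase(scmHndl), h => h.ToInt64() > 0).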
@@ -1,4 +1,4 @@
 using GitHub.Runner.Common;
 using GitHub.Runner.Sdk;
 using System;

@@ -1,4 +1,4 @@
 #if OS_WINDOWS
 using System.IO;
 using System.Security.Cryptography;
 using System.Text;
@@ -1,4 +1,4 @@
 #if OS_LINUX || OS_OSX
 using System;
 using System.IO;
 using System.Security.Cryptography;
@@ -1,4 +1,4 @@
 using System;
 using System.Linq;
 using System.Text.RegularExpressions;
 using GitHub.Runner.Common;
@@ -68,7 +68,7 @@ namespace GitHub.Runner.Listener.Configuration
 // Lets add a suffix with a random number to reduce the chance of collisions between runner names once we truncate
 var random = new Random();
 var num = random.Next(1000, 9999).ToString();
-runnerNameSubstring +=$"-{num}";
+runnerNameSubstring += $"-{num}";
 serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgNameSubstring, runnerNameSubstring);
 }

@@ -76,12 +76,12 @@ namespace GitHub.Runner.Listener.Configuration

 Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration.");
 }
 #if (OS_LINUX || OS_OSX)
 const int MaxServiceNameLength = 150;
 const int MaxRepoOrgCharacters = 70;
 #elif OS_WINDOWS
 const int MaxServiceNameLength = 80;
 const int MaxRepoOrgCharacters = 45;
 #endif
 }
 }
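The naming hunks keep per-OS budgets for the repo/org part (70 characters on Linux/macOS, 45 on Windows) and append a random four-digit suffix to the truncated runner name to reduce collisions. An illustrative sketch only; the final service-name pattern and the exact truncation rules are assumptions, and just the suffix and budgets come from the hunks:

    using System;

    public static class ServiceNameSketch
    {
        // Truncate the repo/org part to the per-OS budget, then append a random
        // four-digit suffix to the runner part to reduce name collisions.
        public static string Build(string repoOrOrg, string runnerName, bool isWindows)
        {
            int maxRepoOrg = isWindows ? 45 : 70;
            var repoPart = repoOrOrg.Length > maxRepoOrg ? repoOrOrg.Substring(0, maxRepoOrg) : repoOrOrg;

            var random = new Random();
            var num = random.Next(1000, 9999).ToString();
            var runnerPart = $"{runnerName}-{num}";

            return $"actions.runner.{repoPart}.{runnerPart}"; // assumed pattern
        }
    }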
@@ -1,4 +1,4 @@
 #if OS_LINUX
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -1,4 +1,4 @@
 using GitHub.Runner.Common.Util;
 using GitHub.Runner.Sdk;
 using System;
 using System.Linq;
@@ -1,4 +1,4 @@
 #if OS_WINDOWS
 #pragma warning disable CA1416
 using System;
 using System.IO;
@@ -1,3 +1,3 @@
 using System.Runtime.CompilerServices;

 [assembly: InternalsVisibleTo("Test")]
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
 using System.IO;
@@ -15,6 +15,7 @@ using GitHub.Runner.Sdk;
 using GitHub.Services.Common;
 using GitHub.Services.WebApi;
 using GitHub.Services.WebApi.Jwt;
+using Sdk.RSWebApi.Contracts;
 using Pipelines = GitHub.DistributedTask.Pipelines;

 namespace GitHub.Runner.Listener
@@ -372,6 +373,8 @@ namespace GitHub.Runner.Listener
 TaskCompletionSource<int> firstJobRequestRenewed = new();
 var notification = HostContext.GetService<IJobNotification>();

+var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
+
 // lock renew cancellation token.
 using (var lockRenewalTokenSource = new CancellationTokenSource())
 using (var workerProcessCancelTokenSource = new CancellationTokenSource())
@@ -379,8 +382,6 @@ namespace GitHub.Runner.Listener
 long requestId = message.RequestId;
 Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat

-var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
-
 // start renew job request
 Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
 Task renewJobRequest = RenewJobRequestAsync(message, systemConnection, _poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
@@ -405,7 +406,7 @@ namespace GitHub.Runner.Listener
 await renewJobRequest;

 // complete job request with result Cancelled
-await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
+await CompleteJobRequestAsync(_poolId, message, systemConnection, lockToken, TaskResult.Canceled);
 return;
 }

@@ -544,7 +545,6 @@ namespace GitHub.Runner.Listener
 detailInfo = string.Join(Environment.NewLine, workerOutput);
 Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");

-
 var jobServer = await InitializeJobServerAsync(systemConnection);
 await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);

@@ -552,7 +552,7 @@ namespace GitHub.Runner.Listener
 if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
 {
 Trace.Info($"Finish job with result 'Failed' due to IOException.");
-await ForceFailJob(jobServer, message);
+await ForceFailJob(jobServer, message, detailInfo);
 }
 }

@@ -567,7 +567,7 @@ namespace GitHub.Runner.Listener
 await renewJobRequest;

 // complete job request
-await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
+await CompleteJobRequestAsync(_poolId, message, systemConnection, lockToken, result, detailInfo);

 // print out unhandled exception happened in worker after we complete job request.
 // when we run out of disk space, report back to server has higher priority.
@@ -664,7 +664,7 @@ namespace GitHub.Runner.Listener
 await renewJobRequest;

 // complete job request
-await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
+await CompleteJobRequestAsync(_poolId, message, systemConnection, lockToken, resultOnAbandonOrCancel);
 }
 finally
 {
@@ -1065,7 +1065,7 @@ namespace GitHub.Runner.Listener
 }
 }

-private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
+private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, Guid lockToken, TaskResult result, string detailInfo = null)
 {
 Trace.Entering();

@@ -1077,7 +1077,7 @@ namespace GitHub.Runner.Listener

 if (this._isRunServiceJob)
 {
-Trace.Verbose($"Skip FinishAgentRequest call from Listener because MessageType is {message.MessageType}");
+Trace.Verbose($"Skip CompleteJobRequestAsync call from Listener because it's RunService job");
 return;
 }

@@ -1117,7 +1117,7 @@ namespace GitHub.Runner.Listener
 }

 // log an error issue to job level timeline record
-private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string errorMessage)
+private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
 {
 if (server is IJobServer jobServer)
 {
@@ -1129,34 +1129,11 @@ namespace GitHub.Runner.Listener
 TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
 ArgUtil.NotNull(jobRecord, nameof(jobRecord));

-try
-{
-if (!string.IsNullOrEmpty(errorMessage) &&
-message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
-StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
-{
-// the trace should be best effort and not affect any job result
-var match = _invalidJsonRegex.Match(errorMessage);
-if (match.Success &&
-match.Groups.Count == 2)
-{
-var jsonPosition = int.Parse(match.Groups[1].Value);
-var serializedJobMessage = JsonUtility.ToString(message);
-var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
-errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
-}
-}
-}
-catch (Exception ex)
-{
-Trace.Error(ex);
-errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
-}
-
-var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
+var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
 unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
 jobRecord.ErrorCount++;
 jobRecord.Issues.Add(unhandledExceptionIssue);

 await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
 }
 catch (Exception ex)
@@ -1167,13 +1144,13 @@ namespace GitHub.Runner.Listener
 }
 else
 {
-Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", errorMessage);
+Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", detailInfo);
 return;
 }
 }

 // raise job completed event to fail the job.
-private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message)
+private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
 {
 if (server is IJobServer jobServer)
 {
@@ -1192,7 +1169,15 @@ namespace GitHub.Runner.Listener
 {
 try
 {
-await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, CancellationToken.None);
+var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
+var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
+var jobAnnotations = new List<Annotation>();
+if (unhandledAnnotation.HasValue)
+{
+jobAnnotations.Add(unhandledAnnotation.Value);
+}
+
+await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None);
 }
 catch (Exception ex)
 {
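ForceFailJob now builds an Issue from the worker's crash output, converts it with ToAnnotation(), and forwards any resulting annotation to CompleteJobAsync. Issue, Annotation, and ToAnnotation() are runner-internal types, so the sketch below uses stand-ins purely to show the nullable-conversion-then-collect shape:

    using System;
    using System.Collections.Generic;

    public readonly struct AnnotationSketch
    {
        public AnnotationSketch(string message) => Message = message;
        public string Message { get; }
    }

    public sealed class IssueSketch
    {
        public string Message { get; set; }

        // Stand-in for Issue.ToAnnotation(): null when there is nothing to convert.
        public AnnotationSketch? ToAnnotation() =>
            string.IsNullOrEmpty(Message) ? (AnnotationSketch?)null : new AnnotationSketch(Message);
    }

    public static class ForceFailSketch
    {
        // Only forward the annotation when the conversion actually produced one.
        public static List<AnnotationSketch> BuildJobAnnotations(string detailInfo)
        {
            var issue = new IssueSketch { Message = detailInfo };
            var annotation = issue.ToAnnotation();
            var jobAnnotations = new List<AnnotationSketch>();
            if (annotation.HasValue)
            {
                jobAnnotations.Add(annotation.Value);
            }

            return jobAnnotations;
        }
    }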
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
@@ -123,8 +123,15 @@ namespace GitHub.Runner.Listener
 Trace.Error("Catch exception during create session.");
 Trace.Error(ex);

-if (ex is VssOAuthTokenRequestException && creds.Federated is VssOAuthCredential vssOAuthCred)
+if (ex is VssOAuthTokenRequestException vssOAuthEx && creds.Federated is VssOAuthCredential vssOAuthCred)
 {
+// "invalid_client" means the runner registration has been deleted from the server.
+if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
+{
+_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
+return false;
+}
+
 // Check whether we get 401 because the runner registration already removed by the service.
 // If the runner registration get deleted, we can't exchange oauth token.
 Trace.Error("Test oauth app registration.");
@@ -132,7 +139,7 @@ namespace GitHub.Runner.Listener
 var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
 if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
 {
-_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
+_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
 return false;
 }
 }
@@ -245,7 +252,7 @@ namespace GitHub.Runner.Listener
 _accessTokenRevoked = true;
 throw;
 }
-catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
+catch (AccessDeniedException e) when (e.ErrorCode == 1)
 {
 throw;
 }
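The session hunk adds a fast path: when the token exchange itself fails with the OAuth error code "invalid_client", the runner reports that its registration was deleted and gives up instead of probing further. VssOAuthTokenRequestException belongs to the runner's service SDK, so a stand-in exception is used here just to show the check:

    using System;

    // Stand-in for VssOAuthTokenRequestException, which exposes the OAuth "error" code.
    public sealed class OAuthTokenRequestExceptionSketch : Exception
    {
        public OAuthTokenRequestExceptionSketch(string error, string message) : base(message) => Error = error;
        public string Error { get; }
    }

    public static class SessionErrorSketch
    {
        // "invalid_client" during the token exchange means the runner registration
        // no longer exists on the server.
        public static bool IsRegistrationDeleted(Exception ex) =>
            ex is OAuthTokenRequestExceptionSketch oauthEx &&
            string.Equals(oauthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase);
    }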
@@ -1,4 +1,4 @@
 using GitHub.Runner.Common;
 using GitHub.Runner.Sdk;
 using System;
 using System.Globalization;
@@ -138,7 +138,7 @@ namespace GitHub.Runner.Listener
 }

 }
-catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
+catch (AccessDeniedException e) when (e.ErrorCode == 1)
 {
 terminal.WriteError($"An error occured: {e.Message}");
 trace.Error(e);
@@ -1,4 +1,4 @@
 <Project Sdk="Microsoft.NET.Sdk">

 <PropertyGroup>
 <TargetFramework>net6.0</TargetFramework>
@@ -19,7 +19,7 @@

 <ItemGroup>
 <PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
-<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
+<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
 <PackageReference Include="System.IO.FileSystem.AccessControl" Version="4.4.0" />
 <PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
 <PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
@@ -549,7 +549,17 @@ namespace GitHub.Runner.Listener
 {
 var runServer = HostContext.CreateService<IRunServer>();
 await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), creds);
-jobRequestMessage = await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
+try
+{
+jobRequestMessage =
+await runServer.GetJobMessageAsync(messageRef.RunnerRequestId,
+messageQueueLoopTokenSource.Token);
+}
+catch (TaskOrchestrationJobAlreadyAcquiredException)
+{
+Trace.Info("Job is already acquired, skip this message.");
+continue;
+}
 }

 jobDispatcher.Run(jobRequestMessage, runOnce);
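The Runner.cs hunk wraps GetJobMessageAsync in a try/catch so that a TaskOrchestrationJobAlreadyAcquiredException (another runner picked the job up first) simply skips the message and keeps polling. A generic sketch of that loop with stand-in delegates and exception type:

    using System;
    using System.Threading.Tasks;

    // Stand-in for TaskOrchestrationJobAlreadyAcquiredException.
    public sealed class JobAlreadyAcquiredExceptionSketch : Exception { }

    public static class MessageLoopSketch
    {
        // If another runner already acquired the job, skip the message and keep polling
        // instead of failing the listener loop.
        public static async Task RunAsync(Func<Task<string>> getJobMessage, Func<string, Task> dispatch)
        {
            while (true)
            {
                string jobRequestMessage;
                try
                {
                    jobRequestMessage = await getJobMessage();
                }
                catch (JobAlreadyAcquiredExceptionSketch)
                {
                    Console.WriteLine("Job is already acquired, skip this message.");
                    continue;
                }

                await dispatch(jobRequestMessage);
            }
        }
    }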
@@ -673,7 +683,8 @@ Config Options:
 --token string       Registration token. Required if unattended
 --name string        Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
 --runnergroup string Name of the runner group to add this runner to (defaults to the default runner group)
---labels string      Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
+--labels string      Custom labels that will be added to the runner. This option is mandatory if --no-default-labels is used.
+--no-default-labels  Disables adding the default labels: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
 --local              Removes the runner config files from your local machine. Used as an option to the remove command
 --work string        Relative runner work directory (default {Constants.Path.WorkDirectory})
 --replace            Replace any existing runner with the same name (default false)
@@ -1,4 +1,4 @@
 using System.Runtime.Serialization;

 namespace GitHub.Runner.Listener
 {
@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -38,7 +38,7 @@ namespace GitHub.Runner.Listener
 private ITerminal _terminal;
 private IRunnerServer _runnerServer;
 private int _poolId;
-private int _agentId;
+private ulong _agentId;
 private readonly ConcurrentQueue<string> _updateTrace = new();
 private Task _cloneAndCalculateContentHashTask;
 private string _dotnetRuntimeCloneDirectory;
@@ -1,4 +1,4 @@
 using System;
 using System.Globalization;
 using System.IO;
 using System.Reflection;
@@ -682,4 +682,4 @@ namespace GitHub.Runner.Plugins.Artifact
 : base(message, inner)
 { }
 }
 }
Some files were not shown because too many files have changed in this diff.