Compare commits


6 Commits

Author          SHA1        Message                                                           Date
Ryan van Zeben  ab7aa2e431  Merge branch 'main' into feature/vanzeben/automated-hash-update  2023-09-18 16:15:00 -04:00
Ryan van Zeben  d0300c34f2  Remove unused variable                                            2023-09-01 10:29:55 -04:00
Ryan van Zeben  ee0ba3616c  Switch to numerical update check as bools aren't processing correct  2023-08-25 16:34:59 -04:00
Ryan van Zeben  1d1aaed09a  Explicitly check true or false                                     2023-08-25 16:23:11 -04:00
Ryan van Zeben  7c4b0f6e88  Update to allow cross OS and fork compatibility                    2023-08-25 16:10:35 -04:00
Ryan van Zeben  7d3cbb0494  Automatically update hashes if new version exists                  2023-08-25 14:41:56 -04:00
61 changed files with 699 additions and 1211 deletions

View File

@@ -4,7 +4,7 @@
"features": { "features": {
"ghcr.io/devcontainers/features/docker-in-docker:1": {}, "ghcr.io/devcontainers/features/docker-in-docker:1": {},
"ghcr.io/devcontainers/features/dotnet": { "ghcr.io/devcontainers/features/dotnet": {
"version": "6.0.415" "version": "6.0.414"
}, },
"ghcr.io/devcontainers/features/node:1": { "ghcr.io/devcontainers/features/node:1": {
"version": "16" "version": "16"

View File

@@ -17,6 +17,7 @@ on:
jobs: jobs:
build: build:
strategy: strategy:
fail-fast: false
matrix: matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ] runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
include: include:
@@ -60,6 +61,8 @@ jobs:
# Check runtime/externals hash # Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash - name: Compute/Compare runtime and externals Hash
id: compute-hash
continue-on-error: true
shell: bash shell: bash
run: | run: |
echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH" echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
@@ -68,25 +71,74 @@ jobs:
NeedUpdate=0 NeedUpdate=0
if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT
NeedUpdate=1 NeedUpdate=1
fi fi
if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT
NeedUpdate=1 NeedUpdate=1
fi fi
exit $NeedUpdate echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT
env: env:
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}} DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}} EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
- name: update hash
if: ${{ ! github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
shell: bash
run: |
ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}
if [ -n "$ExternalHash" ]; then
echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }}
fi
if [ -n "$DotNetRuntimeHash" ]; then
echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
fi
- name: cache updated hashes
if: ${{ ! github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
uses: actions/cache/save@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/${{ matrix.runtime }}
./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Create a warning annotation if computed hashes will automatically be updated
if: ${{ ! github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
shell: bash
run: echo "::warning ::Computed hashes do not match, we will automatically update these for you, you can safely ignore the errors on this job" && exit 1
- name: Create an error annotation if computed hashes need to be updated for a fork
if: ${{ github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
shell: bash
run: |
ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}
if [ -n "$ExternalHash" ]; then
echo "::error ::Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $ExternalHash"
fi
if [ -n "$DotNetRuntimeHash" ]; then
echo "::error ::Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DotNetRuntimeHash"
fi
if [[ -n "$ExternalHash" || -n "$DotNetRuntimeHash" ]]; then
exit 1
fi
# Run tests # Run tests
- name: L0 - name: L0
run: | run: |
${{ matrix.devScript }} test ${{ matrix.devScript }} test
working-directory: src working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'osx-arm64' && matrix.runtime != 'win-arm64' if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 0 && matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'osx-arm64' && matrix.runtime != 'win-arm64' }}
# Create runner package tar.gz/zip # Create runner package tar.gz/zip
- name: Package Release - name: Package Release
@@ -106,3 +158,90 @@ jobs:
_package_trims/trim_externals _package_trims/trim_externals
_package_trims/trim_runtime _package_trims/trim_runtime
_package_trims/trim_runtime_externals _package_trims/trim_runtime_externals
hash-update:
needs: [build]
# only run this if we get a failure from the build step - most likely meaning we need a hash update
if: ${{ always() && contains(needs.build.result, 'failure') && github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.head_ref }}
- name: Restore cached hashes - linux-x64
id: cache-restore-linux-x64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/linux-x64
./src/Misc/contentHash/dotnetRuntime/linux-x64
key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - linux-arm64
id: cache-restore-linux-arm64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/linux-arm64
./src/Misc/contentHash/dotnetRuntime/linux-arm64
key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - linux-arm
id: cache-restore-linux-arm
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/linux-arm
./src/Misc/contentHash/dotnetRuntime/linux-arm
key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - osx-x64
id: cache-restore-osx-x64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/osx-x64
./src/Misc/contentHash/dotnetRuntime/osx-x64
key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - osx-arm64
id: cache-restore-osx-arm64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/osx-arm64
./src/Misc/contentHash/dotnetRuntime/osx-arm64
key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - win-x64
id: cache-restore-win-x64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/win-x64
./src/Misc/contentHash/dotnetRuntime/win-x64
key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - win-arm64
id: cache-restore-win-arm64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/win-arm64
./src/Misc/contentHash/dotnetRuntime/win-arm64
key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Fetch cached computed hashes
if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' ||
steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' ||
steps.cache-restore-linux-arm.outputs.cache-hit == 'true' ||
steps.cache-restore-win-x64.outputs.cache-hit == 'true' ||
steps.cache-restore-win-arm64.outputs.cache-hit == 'true' ||
steps.cache-restore-osx-x64.outputs.cache-hit == 'true' ||
steps.cache-restore-osx-arm64.outputs.cache-hit == 'true'
shell: bash
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -a -m "Update computed hashes"
git push
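
For orientation, the workflow changes above amount to: recompute digests of the trimmed runtime/externals layouts, compare them with the files checked in under ./src/Misc/contentHash/, expose the results as step outputs instead of failing the job outright, and (for non-fork pull requests) stash any rewritten hash files in a per-runtime cache. A rough local equivalent of the check-and-update loop might look like the sketch below; it is only illustrative, combines file digests with plain sha256sum (not the exact hashFiles() algorithm GitHub uses), and the layout paths and hash_dir helper are assumptions:

#!/usr/bin/env bash
# Illustrative sketch only: approximates the compute-hash / update-hash steps
# for a single runtime. Paths and the hash_dir helper are assumptions, and the
# combined digest is not byte-for-byte what GitHub's hashFiles() produces.
set -euo pipefail

runtime="linux-x64"   # corresponds to matrix.runtime in the workflow

hash_dir() {
  # Fold the SHA-256 of every file under a directory into one digest.
  find "$1" -type f -print0 | sort -z | xargs -0 sha256sum | sha256sum | awk '{print $1}'
}

need_update=0
check_and_update() {
  local computed="$1" stored="$2"
  if [ "$computed" != "$(cat "$stored")" ]; then
    echo "Hash mismatch, updating ${stored} to ${computed}"
    echo "$computed" > "$stored"
    need_update=1
  fi
}

check_and_update "$(hash_dir _layout_trims/externals)" "./src/Misc/contentHash/externals/${runtime}"
check_and_update "$(hash_dir _layout_trims/runtime)"   "./src/Misc/contentHash/dotnetRuntime/${runtime}"

# Non-zero exit signals "hashes changed", mirroring the NEED_UPDATE step output.
exit $need_update

In the workflow itself the compute and commit halves are split across jobs: each build matrix leg saves its rewritten hash files with actions/cache/save, and the separate hash-update job (which only runs on non-fork pull requests after a build failure) restores each runtime's cache and pushes a single "Update computed hashes" commit back to the PR branch.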

View File

@@ -4,9 +4,8 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build
ARG TARGETOS ARG TARGETOS
ARG TARGETARCH ARG TARGETARCH
ARG RUNNER_VERSION ARG RUNNER_VERSION
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.4.0 ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.2
ARG DOCKER_VERSION=24.0.6 ARG DOCKER_VERSION=23.0.6
ARG BUILDX_VERSION=0.11.2
RUN apt update -y && apt install curl unzip -y RUN apt update -y && apt install curl unzip -y
@@ -26,11 +25,7 @@ RUN export RUNNER_ARCH=${TARGETARCH} \
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \ && if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
&& curl -fLo docker.tgz https://download.docker.com/${TARGETOS}/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \ && curl -fLo docker.tgz https://download.docker.com/${TARGETOS}/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
&& tar zxvf docker.tgz \ && tar zxvf docker.tgz \
&& rm -rf docker.tgz \ && rm -rf docker.tgz
&& mkdir -p /usr/local/lib/docker/cli-plugins \
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy

View File

@@ -1,20 +1,37 @@
## What's Changed ## What's Changed
* Trim whitespace in `./Misc/contentHash/dotnetRuntime/*` by @TingluoHuang in https://github.com/actions/runner/pull/2915 * Bump @types/node from 12.12.14 to 20.4.10 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2759
* Send os and arch during long poll by @luketomlinson in https://github.com/actions/runner/pull/2913 * Trace x-github-request-id when download action tarball. by @TingluoHuang in https://github.com/actions/runner/pull/2755
* Revert "Update default version to node20 (#2844)" by @takost in https://github.com/actions/runner/pull/2918 * Fix typo by @kyanny in https://github.com/actions/runner/pull/2741
* Fix telemetry publish from JobServerQueue. by @TingluoHuang in https://github.com/actions/runner/pull/2919 * Bump prettier from 3.0.1 to 3.0.2 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2772
* Use block blob instead of append blob by @yacaovsnc in https://github.com/actions/runner/pull/2924 * Bump @types/node from 20.4.10 to 20.5.0 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2773
* Provide detail info on untar failures. by @TingluoHuang in https://github.com/actions/runner/pull/2939 * Revert "Fixed a bug where a misplaced `=` character could bypass here… by @cory-miller in https://github.com/actions/runner/pull/2774
* Bump node.js to 20.8.1 by @TingluoHuang in https://github.com/actions/runner/pull/2945 * Filter NODE_OPTIONS from env for file output by @cory-miller in https://github.com/actions/runner/pull/2775
* Update dotnet sdk to latest version @6.0.415 by @github-actions in https://github.com/actions/runner/pull/2929 * Bump @types/node from 20.5.0 to 20.5.1 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2781
* Fix typo in log strings by @rajbos in https://github.com/actions/runner/pull/2695 * Update Docker Version in Images by @ajschmidt8 in https://github.com/actions/runner/pull/2694
* feat: add support of arm64 arch runners in service creation script by @tuxity in https://github.com/actions/runner/pull/2606 * Bump @types/node from 20.5.1 to 20.5.4 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2789
* Add `buildx` to images by @ajschmidt8 in https://github.com/actions/runner/pull/2901 * Bump @typescript-eslint/parser from 6.4.0 to 6.4.1 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2785
* Bump Microsoft.AspNet.WebApi.Client from 5.2.4 to 5.2.9 in /src by @dependabot in https://github.com/actions/runner/pull/2751
* Bump System.Buffers from 4.3.0 to 4.5.1 in /src by @dependabot in https://github.com/actions/runner/pull/2749
* Bump dotnet/runtime-deps from 6.0-jammy to 7.0-jammy in /images by @dependabot in https://github.com/actions/runner/pull/2745
* Remove need to manually compile JS binary for hashFiles utility by @vanZeben in https://github.com/actions/runner/pull/2770
* Revert "Bump dotnet/runtime-deps from 6.0-jammy to 7.0-jammy in /images" by @TingluoHuang in https://github.com/actions/runner/pull/2790
* Query runner by name on server side. by @TingluoHuang in https://github.com/actions/runner/pull/2771
* Bump typescript from 5.1.6 to 5.2.2 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2795
* Bump @types/node from 20.5.4 to 20.5.6 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2796
* Bump Newtonsoft.Json from 13.0.1 to 13.0.3 in /src by @dependabot in https://github.com/actions/runner/pull/2797
* Support replacing runners in v2 flow by @luketomlinson in https://github.com/actions/runner/pull/2791
* Delegating handler for Http redirects by @paveliak in https://github.com/actions/runner/pull/2814
* Add references to the firewall requirements docs by @paveliak in https://github.com/actions/runner/pull/2815
* Create automated workflow that will auto-generate dotnet sdk patches by @vanZeben in https://github.com/actions/runner/pull/2776
* Fixes minor issues with using proper output varaibles by @vanZeben in https://github.com/actions/runner/pull/2818
* Throw NonRetryableException on GetNextMessage from broker as needed. by @TingluoHuang in https://github.com/actions/runner/pull/2828
* Mark action download failures as infra failures by @cory-miller in https://github.com/actions/runner/pull/2827
## New Contributors ## New Contributors
* @tuxity made their first contribution in https://github.com/actions/runner/pull/2606 * @kyanny made their first contribution in https://github.com/actions/runner/pull/2741
* @ajschmidt8 made their first contribution in https://github.com/actions/runner/pull/2694
**Full Changelog**: https://github.com/actions/runner/compare/v2.310.2...v2.311.0 **Full Changelog**: https://github.com/actions/runner/compare/v2.308.0...v2.309.0
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet. _Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository. To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.

View File

@@ -1 +1 @@
2.311.0 <Update to ./src/runnerversion when creating release>

View File

@@ -8,7 +8,7 @@ set -e
# Configures it as a service more secure # Configures it as a service more secure
# Should be used on VMs and not containers # Should be used on VMs and not containers
# Works on OSX and Linux # Works on OSX and Linux
# Assumes x64 arch (support arm64) # Assumes x64 arch
# See EXAMPLES below # See EXAMPLES below
flags_found=false flags_found=false
@@ -87,9 +87,6 @@ sudo echo
runner_plat=linux runner_plat=linux
[ ! -z "$(which sw_vers)" ] && runner_plat=osx; [ ! -z "$(which sw_vers)" ] && runner_plat=osx;
runner_arch=x64
[ ! -z "$(arch | grep arm64)" ] && runner_arch=arm64
function fatal() function fatal()
{ {
echo "error: $1" >&2 echo "error: $1" >&2
@@ -142,7 +139,7 @@ echo "Downloading latest runner ..."
# For the GHES Alpha, download the runner from github.com # For the GHES Alpha, download the runner from github.com
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name') latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
latest_version=$(echo ${latest_version_label:1}) latest_version=$(echo ${latest_version_label:1})
runner_file="actions-runner-${runner_plat}-${runner_arch}-${latest_version}.tar.gz" runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
if [ -f "${runner_file}" ]; then if [ -f "${runner_file}" ]; then
echo "${runner_file} exists. skipping download." echo "${runner_file} exists. skipping download."

View File

@@ -1 +1 @@
531b31914e525ecb12cc5526415bc70a112ebc818f877347af1a231011f539c5 7539d33c35b0bc94ee67e3c0de1a6bac5ef89ce8e8efaa110131fa0520a54fb4

View File

@@ -1 +1 @@
722dd5fa5ecc207fcccf67f6e502d689f2119d8117beff2041618fba17dc66a4 d71a31f9a17e1a41d6e1edea596edfa68a0db5948ed160e86f2154a547f4dd10

View File

@@ -1 +1 @@
8ca75c76e15ab9dc7fe49a66c5c74e171e7fabd5d26546fda8931bd11bff30f9 3c2f700d8a995efe7895614ee07d9c7880f872d214b45983ad6163e1931870ab

View File

@@ -1 +1 @@
70496eb1c99b39b3373b5088c95a35ebbaac1098e6c47c8aab94771f3ffbf501 b2d85c95ecad13d352f4c7d31c64dbb0d9c6381b48fa5874c4c72a43a025a8a1

View File

@@ -1 +1 @@
4f8d48727d535daabcaec814e0dafb271c10625366c78e7e022ca7477a73023f 417d835c1a108619886b4bb5d25988cb6c138eb7b4c00320b1d9455c5630bff9

View File

@@ -1 +1 @@
d54d7428f2b9200a0030365a6a4e174e30a1b29b922f8254dffb2924bd09549d 8f35aaecfb53426ea10816442e23065142bab9dd0fb712a29e0fc471d13c44ac

View File

@@ -1 +1 @@
eaa939c45307f46b7003902255b3a2a09287215d710984107667e03ac493eb26 811c7debdfc54d074385b063b83c997e5360c8a9160cd20fe777713968370063

View File

@@ -1 +1 @@
4bf3e1af0d482af1b2eaf9f08250248a8c1aea8ec20a3c5be116d58cdd930009 5bdddd32bab1e57af252b470579083049496e9e39b6e4f50de01232581f9a2d8

View File

@@ -1 +1 @@
ec1719a8cb4d8687328aa64f4aa7c4e3498a715d8939117874782e3e6e63a14b 54b3b3a72da93db0fa38708c759fceadddb70cacdd3620a079084a242126dd78

View File

@@ -1 +1 @@
50538de29f173bb73f708c4ed2c8328a62b8795829b97b2a6cb57197e2305287 e7f2da271abb174285c3a757503538b3e9792e9d731b0382b6d1f21bb59a79ba

View File

@@ -1 +1 @@
a0a96cbb7593643b69e669bf14d7b29b7f27800b3a00bb3305aebe041456c701 2481c5b0d06b2b5621635f2568b86a43b0e5b259fed1298167ba4f33d4c464c7

View File

@@ -1 +1 @@
6255b22692779467047ecebd60ad46984866d75cdfe10421d593a7b51d620b09 85de7677165e65ec69b8a9e344c0811efa51b7fe5376a1aa083505c560ea6f57

View File

@@ -1 +1 @@
6ff1abd055dc35bfbf06f75c2f08908f660346f66ad1d8f81c910068e9ba029d 763d18de11c11fd299c0e75e98fefc8a0e6605ae0ad6aba3bbc110db2262ab41

View File

@@ -1 +1 @@
433a6d748742d12abd20dc2a79b62ac3d9718ae47ef26f8e84dc8c180eea3659 16f3cc545dfe10e84df43746073fc64d3c44d1891782532805aeb2118869a55d

View File

@@ -12,16 +12,16 @@
"@actions/glob": "^0.4.0" "@actions/glob": "^0.4.0"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^20.6.2", "@types/node": "^20.5.6",
"@typescript-eslint/eslint-plugin": "^6.7.2", "@typescript-eslint/eslint-plugin": "^6.4.0",
"@typescript-eslint/parser": "^6.7.2", "@typescript-eslint/parser": "^6.4.1",
"@vercel/ncc": "^0.38.0", "@vercel/ncc": "^0.36.1",
"eslint": "^8.47.0", "eslint": "^8.47.0",
"eslint-plugin-github": "^4.10.0", "eslint-plugin-github": "^4.9.2",
"eslint-plugin-prettier": "^5.0.0", "eslint-plugin-prettier": "^5.0.0",
"husky": "^8.0.3", "husky": "^8.0.3",
"lint-staged": "^14.0.0", "lint-staged": "^14.0.0",
"prettier": "^3.0.3", "prettier": "^3.0.1",
"typescript": "^5.2.2" "typescript": "^5.2.2"
} }
}, },
@@ -223,9 +223,9 @@
} }
}, },
"node_modules/@types/json-schema": { "node_modules/@types/json-schema": {
"version": "7.0.13", "version": "7.0.12",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.13.tgz", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.12.tgz",
"integrity": "sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==", "integrity": "sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA==",
"dev": true "dev": true
}, },
"node_modules/@types/json5": { "node_modules/@types/json5": {
@@ -235,28 +235,28 @@
"dev": true "dev": true
}, },
"node_modules/@types/node": { "node_modules/@types/node": {
"version": "20.6.2", "version": "20.5.6",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.6.2.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.5.6.tgz",
"integrity": "sha512-Y+/1vGBHV/cYk6OI1Na/LHzwnlNCAfU3ZNGrc1LdRe/LAIbdDPTTv/HU3M7yXN448aTVDq3eKRm2cg7iKLb8gw==", "integrity": "sha512-Gi5wRGPbbyOTX+4Y2iULQ27oUPrefaB0PxGQJnfyWN3kvEDGM3mIB5M/gQLmitZf7A9FmLeaqxD3L1CXpm3VKQ==",
"dev": true "dev": true
}, },
"node_modules/@types/semver": { "node_modules/@types/semver": {
"version": "7.5.2", "version": "7.5.0",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.2.tgz", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz",
"integrity": "sha512-7aqorHYgdNO4DM36stTiGO3DvKoex9TQRwsJU6vMaFGyqpBA1MNZkz+PG3gaNUPpTAOYhT1WR7M1JyA3fbS9Cw==", "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==",
"dev": true "dev": true
}, },
"node_modules/@typescript-eslint/eslint-plugin": { "node_modules/@typescript-eslint/eslint-plugin": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.4.1.tgz",
"integrity": "sha512-ooaHxlmSgZTM6CHYAFRlifqh1OAr3PAQEwi7lhYhaegbnXrnh7CDcHmc3+ihhbQC7H0i4JF0psI5ehzkF6Yl6Q==", "integrity": "sha512-3F5PtBzUW0dYlq77Lcqo13fv+58KDwUib3BddilE8ajPJT+faGgxmI9Sw+I8ZS22BYwoir9ZhNXcLi+S+I2bkw==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"@eslint-community/regexpp": "^4.5.1", "@eslint-community/regexpp": "^4.5.1",
"@typescript-eslint/scope-manager": "6.7.2", "@typescript-eslint/scope-manager": "6.4.1",
"@typescript-eslint/type-utils": "6.7.2", "@typescript-eslint/type-utils": "6.4.1",
"@typescript-eslint/utils": "6.7.2", "@typescript-eslint/utils": "6.4.1",
"@typescript-eslint/visitor-keys": "6.7.2", "@typescript-eslint/visitor-keys": "6.4.1",
"debug": "^4.3.4", "debug": "^4.3.4",
"graphemer": "^1.4.0", "graphemer": "^1.4.0",
"ignore": "^5.2.4", "ignore": "^5.2.4",
@@ -282,15 +282,15 @@
} }
}, },
"node_modules/@typescript-eslint/parser": { "node_modules/@typescript-eslint/parser": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.4.1.tgz",
"integrity": "sha512-KA3E4ox0ws+SPyxQf9iSI25R6b4Ne78ORhNHeVKrPQnoYsb9UhieoiRoJgrzgEeKGOXhcY1i8YtOeCHHTDa6Fw==", "integrity": "sha512-610G6KHymg9V7EqOaNBMtD1GgpAmGROsmfHJPXNLCU9bfIuLrkdOygltK784F6Crboyd5tBFayPB7Sf0McrQwg==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"@typescript-eslint/scope-manager": "6.7.2", "@typescript-eslint/scope-manager": "6.4.1",
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"@typescript-eslint/typescript-estree": "6.7.2", "@typescript-eslint/typescript-estree": "6.4.1",
"@typescript-eslint/visitor-keys": "6.7.2", "@typescript-eslint/visitor-keys": "6.4.1",
"debug": "^4.3.4" "debug": "^4.3.4"
}, },
"engines": { "engines": {
@@ -309,14 +309,88 @@
} }
} }
}, },
"node_modules/@typescript-eslint/scope-manager": { "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.4.1.tgz",
"integrity": "sha512-bgi6plgyZjEqapr7u2mhxGR6E8WCzKNUFWNh6fkpVe9+yzRZeYtDTbsIBzKbcxI+r1qVWt6VIoMSNZ4r2A+6Yw==", "integrity": "sha512-p/OavqOQfm4/Hdrr7kvacOSFjwQ2rrDVJRPxt/o0TOWdFnjJptnjnZ+sYDR7fi4OimvIuKp+2LCkc+rt9fIW+A==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"@typescript-eslint/visitor-keys": "6.7.2" "@typescript-eslint/visitor-keys": "6.4.1"
},
"engines": {
"node": "^16.0.0 || >=18.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
"version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.4.1.tgz",
"integrity": "sha512-zAAopbNuYu++ijY1GV2ylCsQsi3B8QvfPHVqhGdDcbx/NK5lkqMnCGU53amAjccSpk+LfeONxwzUhDzArSfZJg==",
"dev": true,
"engines": {
"node": "^16.0.0 || >=18.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
"version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.4.1.tgz",
"integrity": "sha512-xF6Y7SatVE/OyV93h1xGgfOkHr2iXuo8ip0gbfzaKeGGuKiAnzS+HtVhSPx8Www243bwlW8IF7X0/B62SzFftg==",
"dev": true,
"dependencies": {
"@typescript-eslint/types": "6.4.1",
"@typescript-eslint/visitor-keys": "6.4.1",
"debug": "^4.3.4",
"globby": "^11.1.0",
"is-glob": "^4.0.3",
"semver": "^7.5.4",
"ts-api-utils": "^1.0.1"
},
"engines": {
"node": "^16.0.0 || >=18.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependenciesMeta": {
"typescript": {
"optional": true
}
}
},
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
"version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.4.1.tgz",
"integrity": "sha512-y/TyRJsbZPkJIZQXrHfdnxVnxyKegnpEvnRGNam7s3TRR2ykGefEWOhaef00/UUN3IZxizS7BTO3svd3lCOJRQ==",
"dev": true,
"dependencies": {
"@typescript-eslint/types": "6.4.1",
"eslint-visitor-keys": "^3.4.1"
},
"engines": {
"node": "^16.0.0 || >=18.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.4.1.tgz",
"integrity": "sha512-p/OavqOQfm4/Hdrr7kvacOSFjwQ2rrDVJRPxt/o0TOWdFnjJptnjnZ+sYDR7fi4OimvIuKp+2LCkc+rt9fIW+A==",
"dev": true,
"dependencies": {
"@typescript-eslint/types": "6.4.1",
"@typescript-eslint/visitor-keys": "6.4.1"
}, },
"engines": { "engines": {
"node": "^16.0.0 || >=18.0.0" "node": "^16.0.0 || >=18.0.0"
@@ -327,13 +401,13 @@
} }
}, },
"node_modules/@typescript-eslint/type-utils": { "node_modules/@typescript-eslint/type-utils": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.4.1.tgz",
"integrity": "sha512-36F4fOYIROYRl0qj95dYKx6kybddLtsbmPIYNK0OBeXv2j9L5nZ17j9jmfy+bIDHKQgn2EZX+cofsqi8NPATBQ==", "integrity": "sha512-7ON8M8NXh73SGZ5XvIqWHjgX2f+vvaOarNliGhjrJnv1vdjG0LVIz+ToYfPirOoBi56jxAKLfsLm40+RvxVVXA==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"@typescript-eslint/typescript-estree": "6.7.2", "@typescript-eslint/typescript-estree": "6.4.1",
"@typescript-eslint/utils": "6.7.2", "@typescript-eslint/utils": "6.4.1",
"debug": "^4.3.4", "debug": "^4.3.4",
"ts-api-utils": "^1.0.1" "ts-api-utils": "^1.0.1"
}, },
@@ -354,9 +428,9 @@
} }
}, },
"node_modules/@typescript-eslint/types": { "node_modules/@typescript-eslint/types": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.4.1.tgz",
"integrity": "sha512-flJYwMYgnUNDAN9/GAI3l8+wTmvTYdv64fcH8aoJK76Y+1FCZ08RtI5zDerM/FYT5DMkAc+19E4aLmd5KqdFyg==", "integrity": "sha512-zAAopbNuYu++ijY1GV2ylCsQsi3B8QvfPHVqhGdDcbx/NK5lkqMnCGU53amAjccSpk+LfeONxwzUhDzArSfZJg==",
"dev": true, "dev": true,
"engines": { "engines": {
"node": "^16.0.0 || >=18.0.0" "node": "^16.0.0 || >=18.0.0"
@@ -367,13 +441,13 @@
} }
}, },
"node_modules/@typescript-eslint/typescript-estree": { "node_modules/@typescript-eslint/typescript-estree": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.4.1.tgz",
"integrity": "sha512-kiJKVMLkoSciGyFU0TOY0fRxnp9qq1AzVOHNeN1+B9erKFCJ4Z8WdjAkKQPP+b1pWStGFqezMLltxO+308dJTQ==", "integrity": "sha512-xF6Y7SatVE/OyV93h1xGgfOkHr2iXuo8ip0gbfzaKeGGuKiAnzS+HtVhSPx8Www243bwlW8IF7X0/B62SzFftg==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"@typescript-eslint/visitor-keys": "6.7.2", "@typescript-eslint/visitor-keys": "6.4.1",
"debug": "^4.3.4", "debug": "^4.3.4",
"globby": "^11.1.0", "globby": "^11.1.0",
"is-glob": "^4.0.3", "is-glob": "^4.0.3",
@@ -394,17 +468,17 @@
} }
}, },
"node_modules/@typescript-eslint/utils": { "node_modules/@typescript-eslint/utils": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.4.1.tgz",
"integrity": "sha512-ZCcBJug/TS6fXRTsoTkgnsvyWSiXwMNiPzBUani7hDidBdj1779qwM1FIAmpH4lvlOZNF3EScsxxuGifjpLSWQ==", "integrity": "sha512-F/6r2RieNeorU0zhqZNv89s9bDZSovv3bZQpUNOmmQK1L80/cV4KEu95YUJWi75u5PhboFoKUJBnZ4FQcoqhDw==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"@eslint-community/eslint-utils": "^4.4.0", "@eslint-community/eslint-utils": "^4.4.0",
"@types/json-schema": "^7.0.12", "@types/json-schema": "^7.0.12",
"@types/semver": "^7.5.0", "@types/semver": "^7.5.0",
"@typescript-eslint/scope-manager": "6.7.2", "@typescript-eslint/scope-manager": "6.4.1",
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"@typescript-eslint/typescript-estree": "6.7.2", "@typescript-eslint/typescript-estree": "6.4.1",
"semver": "^7.5.4" "semver": "^7.5.4"
}, },
"engines": { "engines": {
@@ -419,12 +493,12 @@
} }
}, },
"node_modules/@typescript-eslint/visitor-keys": { "node_modules/@typescript-eslint/visitor-keys": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.4.1.tgz",
"integrity": "sha512-uVw9VIMFBUTz8rIeaUT3fFe8xIUx8r4ywAdlQv1ifH+6acn/XF8Y6rwJ7XNmkNMDrTW+7+vxFFPIF40nJCVsMQ==", "integrity": "sha512-y/TyRJsbZPkJIZQXrHfdnxVnxyKegnpEvnRGNam7s3TRR2ykGefEWOhaef00/UUN3IZxizS7BTO3svd3lCOJRQ==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"eslint-visitor-keys": "^3.4.1" "eslint-visitor-keys": "^3.4.1"
}, },
"engines": { "engines": {
@@ -436,9 +510,9 @@
} }
}, },
"node_modules/@vercel/ncc": { "node_modules/@vercel/ncc": {
"version": "0.38.0", "version": "0.36.1",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.0.tgz", "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.1.tgz",
"integrity": "sha512-B4YKZMm/EqMptKSFyAq4q2SlgJe+VCmEH6Y8gf/E1pTlWbsUJpuH1ymik2Ex3aYO5mCWwV1kaSYHSQOT8+4vHA==", "integrity": "sha512-S4cL7Taa9yb5qbv+6wLgiKVZ03Qfkc4jGRuiUQMQ8HGBD5pcNRnHeYM33zBvJE4/zJGjJJ8GScB+WmTsn9mORw==",
"dev": true, "dev": true,
"bin": { "bin": {
"ncc": "dist/ncc/cli.js" "ncc": "dist/ncc/cli.js"
@@ -1358,9 +1432,9 @@
} }
}, },
"node_modules/eslint-plugin-github": { "node_modules/eslint-plugin-github": {
"version": "4.10.0", "version": "4.9.2",
"resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-4.10.0.tgz", "resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-4.9.2.tgz",
"integrity": "sha512-YKtqBtFbjih1wZNTwZjtLPEG6B/4ySMa38fgOo/rbMJpNKO3+OaKzwwOYkeKx/FapM/4MsTP9ExqUcDV+dkixA==", "integrity": "sha512-osez6Sio/fLr/3QkW5HE1wbCOcmYG5030/6QIa9IcKyyfchewlecdnYcsbeUMUtdIiU9lWqhroQp2H/O7auxBA==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"@github/browserslist-config": "^1.0.0", "@github/browserslist-config": "^1.0.0",
@@ -3107,9 +3181,9 @@
} }
}, },
"node_modules/prettier": { "node_modules/prettier": {
"version": "3.0.3", "version": "3.0.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.3.tgz", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.2.tgz",
"integrity": "sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg==", "integrity": "sha512-o2YR9qtniXvwEZlOKbveKfDQVyqxbEIWn48Z8m3ZJjBjcCmUy3xZGIv+7AkaeuaTr6yPXJjwv07ZWlsWbEy1rQ==",
"dev": true, "dev": true,
"bin": { "bin": {
"prettier": "bin/prettier.cjs" "prettier": "bin/prettier.cjs"
@@ -4258,9 +4332,9 @@
} }
}, },
"@types/json-schema": { "@types/json-schema": {
"version": "7.0.13", "version": "7.0.12",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.13.tgz", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.12.tgz",
"integrity": "sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==", "integrity": "sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA==",
"dev": true "dev": true
}, },
"@types/json5": { "@types/json5": {
@@ -4270,28 +4344,28 @@
"dev": true "dev": true
}, },
"@types/node": { "@types/node": {
"version": "20.6.2", "version": "20.5.6",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.6.2.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.5.6.tgz",
"integrity": "sha512-Y+/1vGBHV/cYk6OI1Na/LHzwnlNCAfU3ZNGrc1LdRe/LAIbdDPTTv/HU3M7yXN448aTVDq3eKRm2cg7iKLb8gw==", "integrity": "sha512-Gi5wRGPbbyOTX+4Y2iULQ27oUPrefaB0PxGQJnfyWN3kvEDGM3mIB5M/gQLmitZf7A9FmLeaqxD3L1CXpm3VKQ==",
"dev": true "dev": true
}, },
"@types/semver": { "@types/semver": {
"version": "7.5.2", "version": "7.5.0",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.2.tgz", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz",
"integrity": "sha512-7aqorHYgdNO4DM36stTiGO3DvKoex9TQRwsJU6vMaFGyqpBA1MNZkz+PG3gaNUPpTAOYhT1WR7M1JyA3fbS9Cw==", "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==",
"dev": true "dev": true
}, },
"@typescript-eslint/eslint-plugin": { "@typescript-eslint/eslint-plugin": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.4.1.tgz",
"integrity": "sha512-ooaHxlmSgZTM6CHYAFRlifqh1OAr3PAQEwi7lhYhaegbnXrnh7CDcHmc3+ihhbQC7H0i4JF0psI5ehzkF6Yl6Q==", "integrity": "sha512-3F5PtBzUW0dYlq77Lcqo13fv+58KDwUib3BddilE8ajPJT+faGgxmI9Sw+I8ZS22BYwoir9ZhNXcLi+S+I2bkw==",
"dev": true, "dev": true,
"requires": { "requires": {
"@eslint-community/regexpp": "^4.5.1", "@eslint-community/regexpp": "^4.5.1",
"@typescript-eslint/scope-manager": "6.7.2", "@typescript-eslint/scope-manager": "6.4.1",
"@typescript-eslint/type-utils": "6.7.2", "@typescript-eslint/type-utils": "6.4.1",
"@typescript-eslint/utils": "6.7.2", "@typescript-eslint/utils": "6.4.1",
"@typescript-eslint/visitor-keys": "6.7.2", "@typescript-eslint/visitor-keys": "6.4.1",
"debug": "^4.3.4", "debug": "^4.3.4",
"graphemer": "^1.4.0", "graphemer": "^1.4.0",
"ignore": "^5.2.4", "ignore": "^5.2.4",
@@ -4301,54 +4375,97 @@
} }
}, },
"@typescript-eslint/parser": { "@typescript-eslint/parser": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.4.1.tgz",
"integrity": "sha512-KA3E4ox0ws+SPyxQf9iSI25R6b4Ne78ORhNHeVKrPQnoYsb9UhieoiRoJgrzgEeKGOXhcY1i8YtOeCHHTDa6Fw==", "integrity": "sha512-610G6KHymg9V7EqOaNBMtD1GgpAmGROsmfHJPXNLCU9bfIuLrkdOygltK784F6Crboyd5tBFayPB7Sf0McrQwg==",
"dev": true, "dev": true,
"requires": { "requires": {
"@typescript-eslint/scope-manager": "6.7.2", "@typescript-eslint/scope-manager": "6.4.1",
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"@typescript-eslint/typescript-estree": "6.7.2", "@typescript-eslint/typescript-estree": "6.4.1",
"@typescript-eslint/visitor-keys": "6.7.2", "@typescript-eslint/visitor-keys": "6.4.1",
"debug": "^4.3.4" "debug": "^4.3.4"
},
"dependencies": {
"@typescript-eslint/scope-manager": {
"version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.4.1.tgz",
"integrity": "sha512-p/OavqOQfm4/Hdrr7kvacOSFjwQ2rrDVJRPxt/o0TOWdFnjJptnjnZ+sYDR7fi4OimvIuKp+2LCkc+rt9fIW+A==",
"dev": true,
"requires": {
"@typescript-eslint/types": "6.4.1",
"@typescript-eslint/visitor-keys": "6.4.1"
}
},
"@typescript-eslint/types": {
"version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.4.1.tgz",
"integrity": "sha512-zAAopbNuYu++ijY1GV2ylCsQsi3B8QvfPHVqhGdDcbx/NK5lkqMnCGU53amAjccSpk+LfeONxwzUhDzArSfZJg==",
"dev": true
},
"@typescript-eslint/typescript-estree": {
"version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.4.1.tgz",
"integrity": "sha512-xF6Y7SatVE/OyV93h1xGgfOkHr2iXuo8ip0gbfzaKeGGuKiAnzS+HtVhSPx8Www243bwlW8IF7X0/B62SzFftg==",
"dev": true,
"requires": {
"@typescript-eslint/types": "6.4.1",
"@typescript-eslint/visitor-keys": "6.4.1",
"debug": "^4.3.4",
"globby": "^11.1.0",
"is-glob": "^4.0.3",
"semver": "^7.5.4",
"ts-api-utils": "^1.0.1"
}
},
"@typescript-eslint/visitor-keys": {
"version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.4.1.tgz",
"integrity": "sha512-y/TyRJsbZPkJIZQXrHfdnxVnxyKegnpEvnRGNam7s3TRR2ykGefEWOhaef00/UUN3IZxizS7BTO3svd3lCOJRQ==",
"dev": true,
"requires": {
"@typescript-eslint/types": "6.4.1",
"eslint-visitor-keys": "^3.4.1"
}
}
} }
}, },
"@typescript-eslint/scope-manager": { "@typescript-eslint/scope-manager": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.4.1.tgz",
"integrity": "sha512-bgi6plgyZjEqapr7u2mhxGR6E8WCzKNUFWNh6fkpVe9+yzRZeYtDTbsIBzKbcxI+r1qVWt6VIoMSNZ4r2A+6Yw==", "integrity": "sha512-p/OavqOQfm4/Hdrr7kvacOSFjwQ2rrDVJRPxt/o0TOWdFnjJptnjnZ+sYDR7fi4OimvIuKp+2LCkc+rt9fIW+A==",
"dev": true, "dev": true,
"requires": { "requires": {
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"@typescript-eslint/visitor-keys": "6.7.2" "@typescript-eslint/visitor-keys": "6.4.1"
} }
}, },
"@typescript-eslint/type-utils": { "@typescript-eslint/type-utils": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.4.1.tgz",
"integrity": "sha512-36F4fOYIROYRl0qj95dYKx6kybddLtsbmPIYNK0OBeXv2j9L5nZ17j9jmfy+bIDHKQgn2EZX+cofsqi8NPATBQ==", "integrity": "sha512-7ON8M8NXh73SGZ5XvIqWHjgX2f+vvaOarNliGhjrJnv1vdjG0LVIz+ToYfPirOoBi56jxAKLfsLm40+RvxVVXA==",
"dev": true, "dev": true,
"requires": { "requires": {
"@typescript-eslint/typescript-estree": "6.7.2", "@typescript-eslint/typescript-estree": "6.4.1",
"@typescript-eslint/utils": "6.7.2", "@typescript-eslint/utils": "6.4.1",
"debug": "^4.3.4", "debug": "^4.3.4",
"ts-api-utils": "^1.0.1" "ts-api-utils": "^1.0.1"
} }
}, },
"@typescript-eslint/types": { "@typescript-eslint/types": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.4.1.tgz",
"integrity": "sha512-flJYwMYgnUNDAN9/GAI3l8+wTmvTYdv64fcH8aoJK76Y+1FCZ08RtI5zDerM/FYT5DMkAc+19E4aLmd5KqdFyg==", "integrity": "sha512-zAAopbNuYu++ijY1GV2ylCsQsi3B8QvfPHVqhGdDcbx/NK5lkqMnCGU53amAjccSpk+LfeONxwzUhDzArSfZJg==",
"dev": true "dev": true
}, },
"@typescript-eslint/typescript-estree": { "@typescript-eslint/typescript-estree": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.4.1.tgz",
"integrity": "sha512-kiJKVMLkoSciGyFU0TOY0fRxnp9qq1AzVOHNeN1+B9erKFCJ4Z8WdjAkKQPP+b1pWStGFqezMLltxO+308dJTQ==", "integrity": "sha512-xF6Y7SatVE/OyV93h1xGgfOkHr2iXuo8ip0gbfzaKeGGuKiAnzS+HtVhSPx8Www243bwlW8IF7X0/B62SzFftg==",
"dev": true, "dev": true,
"requires": { "requires": {
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"@typescript-eslint/visitor-keys": "6.7.2", "@typescript-eslint/visitor-keys": "6.4.1",
"debug": "^4.3.4", "debug": "^4.3.4",
"globby": "^11.1.0", "globby": "^11.1.0",
"is-glob": "^4.0.3", "is-glob": "^4.0.3",
@@ -4357,34 +4474,34 @@
} }
}, },
"@typescript-eslint/utils": { "@typescript-eslint/utils": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.4.1.tgz",
"integrity": "sha512-ZCcBJug/TS6fXRTsoTkgnsvyWSiXwMNiPzBUani7hDidBdj1779qwM1FIAmpH4lvlOZNF3EScsxxuGifjpLSWQ==", "integrity": "sha512-F/6r2RieNeorU0zhqZNv89s9bDZSovv3bZQpUNOmmQK1L80/cV4KEu95YUJWi75u5PhboFoKUJBnZ4FQcoqhDw==",
"dev": true, "dev": true,
"requires": { "requires": {
"@eslint-community/eslint-utils": "^4.4.0", "@eslint-community/eslint-utils": "^4.4.0",
"@types/json-schema": "^7.0.12", "@types/json-schema": "^7.0.12",
"@types/semver": "^7.5.0", "@types/semver": "^7.5.0",
"@typescript-eslint/scope-manager": "6.7.2", "@typescript-eslint/scope-manager": "6.4.1",
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"@typescript-eslint/typescript-estree": "6.7.2", "@typescript-eslint/typescript-estree": "6.4.1",
"semver": "^7.5.4" "semver": "^7.5.4"
} }
}, },
"@typescript-eslint/visitor-keys": { "@typescript-eslint/visitor-keys": {
"version": "6.7.2", "version": "6.4.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.2.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.4.1.tgz",
"integrity": "sha512-uVw9VIMFBUTz8rIeaUT3fFe8xIUx8r4ywAdlQv1ifH+6acn/XF8Y6rwJ7XNmkNMDrTW+7+vxFFPIF40nJCVsMQ==", "integrity": "sha512-y/TyRJsbZPkJIZQXrHfdnxVnxyKegnpEvnRGNam7s3TRR2ykGefEWOhaef00/UUN3IZxizS7BTO3svd3lCOJRQ==",
"dev": true, "dev": true,
"requires": { "requires": {
"@typescript-eslint/types": "6.7.2", "@typescript-eslint/types": "6.4.1",
"eslint-visitor-keys": "^3.4.1" "eslint-visitor-keys": "^3.4.1"
} }
}, },
"@vercel/ncc": { "@vercel/ncc": {
"version": "0.38.0", "version": "0.36.1",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.0.tgz", "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.1.tgz",
"integrity": "sha512-B4YKZMm/EqMptKSFyAq4q2SlgJe+VCmEH6Y8gf/E1pTlWbsUJpuH1ymik2Ex3aYO5mCWwV1kaSYHSQOT8+4vHA==", "integrity": "sha512-S4cL7Taa9yb5qbv+6wLgiKVZ03Qfkc4jGRuiUQMQ8HGBD5pcNRnHeYM33zBvJE4/zJGjJJ8GScB+WmTsn9mORw==",
"dev": true "dev": true
}, },
"acorn": { "acorn": {
@@ -5044,9 +5161,9 @@
} }
}, },
"eslint-plugin-github": { "eslint-plugin-github": {
"version": "4.10.0", "version": "4.9.2",
"resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-4.10.0.tgz", "resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-4.9.2.tgz",
"integrity": "sha512-YKtqBtFbjih1wZNTwZjtLPEG6B/4ySMa38fgOo/rbMJpNKO3+OaKzwwOYkeKx/FapM/4MsTP9ExqUcDV+dkixA==", "integrity": "sha512-osez6Sio/fLr/3QkW5HE1wbCOcmYG5030/6QIa9IcKyyfchewlecdnYcsbeUMUtdIiU9lWqhroQp2H/O7auxBA==",
"dev": true, "dev": true,
"requires": { "requires": {
"@github/browserslist-config": "^1.0.0", "@github/browserslist-config": "^1.0.0",
@@ -6280,9 +6397,9 @@
"dev": true "dev": true
}, },
"prettier": { "prettier": {
"version": "3.0.3", "version": "3.0.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.3.tgz", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.2.tgz",
"integrity": "sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg==", "integrity": "sha512-o2YR9qtniXvwEZlOKbveKfDQVyqxbEIWn48Z8m3ZJjBjcCmUy3xZGIv+7AkaeuaTr6yPXJjwv07ZWlsWbEy1rQ==",
"dev": true "dev": true
}, },
"prettier-linter-helpers": { "prettier-linter-helpers": {

View File

@@ -36,14 +36,14 @@
"@actions/glob": "^0.4.0" "@actions/glob": "^0.4.0"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^20.6.2", "@types/node": "^20.5.6",
"@typescript-eslint/eslint-plugin": "^6.7.2", "@typescript-eslint/eslint-plugin": "^6.4.0",
"@typescript-eslint/parser": "^6.7.2", "@typescript-eslint/parser": "^6.4.1",
"@vercel/ncc": "^0.38.0", "@vercel/ncc": "^0.36.1",
"eslint": "^8.47.0", "eslint": "^8.47.0",
"eslint-plugin-github": "^4.10.0", "eslint-plugin-github": "^4.9.2",
"eslint-plugin-prettier": "^5.0.0", "eslint-plugin-prettier": "^5.0.0",
"prettier": "^3.0.3", "prettier": "^3.0.1",
"typescript": "^5.2.2", "typescript": "^5.2.2",
"husky": "^8.0.3", "husky": "^8.0.3",
"lint-staged": "^14.0.0" "lint-staged": "^14.0.0"

View File

@@ -4,9 +4,8 @@ PRECACHE=$2
NODE_URL=https://nodejs.org/dist NODE_URL=https://nodejs.org/dist
UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download NODE16_VERSION="16.20.1"
NODE16_VERSION="16.20.2" NODE20_VERSION="20.5.0"
NODE20_VERSION="20.8.1"
# used only for win-arm64, remove node16 unofficial version when official version is available # used only for win-arm64, remove node16 unofficial version when official version is available
NODE16_UNOFFICIAL_VERSION="16.20.0" NODE16_UNOFFICIAL_VERSION="16.20.0"
@@ -176,9 +175,9 @@ fi
# Download the external tools for Linux PACKAGERUNTIMEs. # Download the external tools for Linux PACKAGERUNTIMEs.
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
acquireExternalTool "$NODE_ALPINE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE20_VERSION}/alpine/x64/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
fi fi
if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then

View File

@@ -2,7 +2,7 @@
SET UPDATEFILE=update.finished SET UPDATEFILE=update.finished
"%~dp0\bin\Runner.Listener.exe" run %* "%~dp0\bin\Runner.Listener.exe" run %*
rem using `if %ERRORLEVEL% EQU N` instead of `if ERRORLEVEL N` rem using `if %ERRORLEVEL% EQU N` insterad of `if ERRORLEVEL N`
rem `if ERRORLEVEL N` means: error level is N or MORE rem `if ERRORLEVEL N` means: error level is N or MORE
if %ERRORLEVEL% EQU 0 ( if %ERRORLEVEL% EQU 0 (

View File

@@ -77,7 +77,6 @@ mscordaccore_arm64_arm64_6.0.522.21309.dll
mscordaccore_amd64_amd64_6.0.1322.58009.dll mscordaccore_amd64_amd64_6.0.1322.58009.dll
mscordaccore_amd64_amd64_6.0.2023.32017.dll mscordaccore_amd64_amd64_6.0.2023.32017.dll
mscordaccore_amd64_amd64_6.0.2223.42425.dll mscordaccore_amd64_amd64_6.0.2223.42425.dll
mscordaccore_amd64_amd64_6.0.2323.48002.dll
mscordbi.dll mscordbi.dll
mscorlib.dll mscorlib.dll
mscorrc.debug.dll mscorrc.debug.dll

View File

@@ -17,7 +17,7 @@ namespace GitHub.Runner.Common
{ {
Task ConnectAsync(Uri serverUrl, VssCredentials credentials); Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version, string os, string architecture); Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version);
} }
public sealed class BrokerServer : RunnerService, IBrokerServer public sealed class BrokerServer : RunnerService, IBrokerServer
@@ -44,11 +44,11 @@ namespace GitHub.Runner.Common
} }
} }
public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version, string os, string architecture) public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version)
{ {
CheckConnection(); CheckConnection();
var jobMessage = RetryRequest<TaskAgentMessage>( var jobMessage = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, os, architecture, cancellationToken), cancellationToken); async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, cancellationToken), cancellationToken);
return jobMessage; return jobMessage;
} }

View File

@@ -69,8 +69,6 @@ namespace GitHub.Runner.Common
public static readonly OSPlatform Platform = OSPlatform.OSX; public static readonly OSPlatform Platform = OSPlatform.OSX;
#elif OS_WINDOWS #elif OS_WINDOWS
public static readonly OSPlatform Platform = OSPlatform.Windows; public static readonly OSPlatform Platform = OSPlatform.Windows;
#else
public static readonly OSPlatform Platform = OSPlatform.Linux;
#endif #endif
#if X86 #if X86
@@ -81,8 +79,6 @@ namespace GitHub.Runner.Common
public static readonly Architecture PlatformArchitecture = Architecture.Arm; public static readonly Architecture PlatformArchitecture = Architecture.Arm;
#elif ARM64 #elif ARM64
public static readonly Architecture PlatformArchitecture = Architecture.Arm64; public static readonly Architecture PlatformArchitecture = Architecture.Arm64;
#else
public static readonly Architecture PlatformArchitecture = Architecture.X64;
#endif #endif
public static readonly TimeSpan ExitOnUnloadTimeout = TimeSpan.FromSeconds(30); public static readonly TimeSpan ExitOnUnloadTimeout = TimeSpan.FromSeconds(30);
@@ -175,9 +171,6 @@ namespace GitHub.Runner.Common
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`."; public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary"; public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary"; public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string DetectedNodeAfterEndOfLifeMessage = "Node.js 16 actions are deprecated. Please update the following actions to use Node.js 20: {0}. For more information see: https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/.";
public static readonly string DeprecatedNodeDetectedAfterEndOfLifeActions = "DeprecatedNodeActionsMessageWarnings";
public static readonly string DeprecatedNodeVersion = "node16";
public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/"; public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/";
public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings"; public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings";
} }
@@ -261,7 +254,6 @@ namespace GitHub.Runner.Common
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION"; public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION"; public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT"; public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
public static readonly string ActionArchiveCacheDirectory = "ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE";
} }
public static class System public static class System
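A note on how the message constants above are consumed: later in this comparison, JobRunner joins the set of offending action names stored under DeprecatedNodeDetectedAfterEndOfLifeActions and formats them into DetectedNodeAfterEndOfLifeMessage as a single job warning. A minimal, self-contained sketch of that formatting step follows; the class, Main method and action names are illustrative scaffolding, not runner code.

using System;
using System.Collections.Generic;

class NodeDeprecationWarningSketch
{
    // Copied from Constants.Runner.DetectedNodeAfterEndOfLifeMessage in the hunk above.
    const string DetectedNodeAfterEndOfLifeMessage =
        "Node.js 16 actions are deprecated. Please update the following actions to use Node.js 20: {0}. " +
        "For more information see: https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/.";

    static void Main()
    {
        // Actions observed running on the deprecated node version during the job (example values).
        var warningActions = new HashSet<string> { "actions/checkout@v3", "actions/setup-node@v3" };

        // Emit one consolidated warning at the end of the job, as JobRunner does further down in this diff.
        string warning = string.Format(DetectedNodeAfterEndOfLifeMessage, string.Join(", ", warningActions));
        Console.WriteLine(warning);
    }
}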

View File

@@ -1,7 +1,6 @@
using System; using System;
using System.Collections.Concurrent; using System.Collections.Concurrent;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading; using System.Threading;
@@ -15,11 +14,10 @@ namespace GitHub.Runner.Common
[ServiceLocator(Default = typeof(JobServerQueue))] [ServiceLocator(Default = typeof(JobServerQueue))]
public interface IJobServerQueue : IRunnerService, IThrottlingReporter public interface IJobServerQueue : IRunnerService, IThrottlingReporter
{ {
IList<JobTelemetry> JobTelemetries { get; }
TaskCompletionSource<int> JobRecordUpdated { get; } TaskCompletionSource<int> JobRecordUpdated { get; }
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling; event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
Task ShutdownAsync(); Task ShutdownAsync();
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false); void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null); void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource); void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines); void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
@@ -71,18 +69,13 @@ namespace GitHub.Runner.Common
private Task[] _allDequeueTasks; private Task[] _allDequeueTasks;
private readonly TaskCompletionSource<int> _jobCompletionSource = new(); private readonly TaskCompletionSource<int> _jobCompletionSource = new();
private readonly TaskCompletionSource<int> _jobRecordUpdated = new(); private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
private readonly List<JobTelemetry> _jobTelemetries = new();
private bool _queueInProcess = false; private bool _queueInProcess = false;
private bool _resultsServiceOnly = false; private bool _resultsServiceOnly = false;
private Stopwatch _resultsUploadTimer = new();
private Stopwatch _actionsUploadTimer = new();
public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated; public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;
public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling; public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
public IList<JobTelemetry> JobTelemetries => _jobTelemetries;
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds). // Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
// Then the dequeue will happen every 500ms. // Then the dequeue will happen every 500ms.
// In this way, customer still can get instance live console output on job start, // In this way, customer still can get instance live console output on job start,
@@ -94,7 +87,6 @@ namespace GitHub.Runner.Common
private bool _firstConsoleOutputs = true; private bool _firstConsoleOutputs = true;
private bool _resultsClientInitiated = false; private bool _resultsClientInitiated = false;
private bool _enableTelemetry = false;
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file); private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
public override void Initialize(IHostContext hostContext) public override void Initialize(IHostContext hostContext)
@@ -104,15 +96,14 @@ namespace GitHub.Runner.Common
_resultsServer = hostContext.GetService<IResultsServer>(); _resultsServer = hostContext.GetService<IResultsServer>();
} }
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false) public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false)
{ {
Trace.Entering(); Trace.Entering();
_resultsServiceOnly = resultsServiceOnly; _resultsServiceOnly = resultServiceOnly;
_enableTelemetry = enableTelemetry;
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (!resultsServiceOnly) if (!resultServiceOnly)
{ {
_jobServer.InitializeWebsocketClient(serviceEndPoint); _jobServer.InitializeWebsocketClient(serviceEndPoint);
} }
@@ -128,7 +119,7 @@ namespace GitHub.Runner.Common
{ {
string liveConsoleFeedUrl = null; string liveConsoleFeedUrl = null;
Trace.Info("Initializing results client"); Trace.Info("Initializing results client");
if (resultsServiceOnly if (resultServiceOnly
&& serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
&& !string.IsNullOrEmpty(feedStreamUrl)) && !string.IsNullOrEmpty(feedStreamUrl))
{ {
@@ -220,12 +211,6 @@ namespace GitHub.Runner.Common
await _resultsServer.DisposeAsync(); await _resultsServer.DisposeAsync();
Trace.Info("All queue process tasks have been stopped, and all queues are drained."); Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
if (_enableTelemetry)
{
var uploadTimeComparison = $"Actions upload time: {_actionsUploadTimer.ElapsedMilliseconds} ms, Result upload time: {_resultsUploadTimer.ElapsedMilliseconds} ms";
Trace.Info(uploadTimeComparison);
_jobTelemetries.Add(new JobTelemetry() { Type = JobTelemetryType.General, Message = uploadTimeComparison });
}
} }
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber) public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
@@ -471,10 +456,6 @@ namespace GitHub.Runner.Common
{ {
try try
{ {
if (_enableTelemetry)
{
_actionsUploadTimer.Start();
}
await UploadFile(file); await UploadFile(file);
} }
catch (Exception ex) catch (Exception ex)
@@ -490,13 +471,6 @@ namespace GitHub.Runner.Common
// _fileUploadQueue.Enqueue(file); // _fileUploadQueue.Enqueue(file);
//} //}
} }
finally
{
if (_enableTelemetry)
{
_actionsUploadTimer.Stop();
}
}
} }
Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount); Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
@@ -543,10 +517,6 @@ namespace GitHub.Runner.Common
{ {
try try
{ {
if (_enableTelemetry)
{
_resultsUploadTimer.Start();
}
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase)) if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
{ {
await UploadSummaryFile(file); await UploadSummaryFile(file);
@@ -571,19 +541,10 @@ namespace GitHub.Runner.Common
Trace.Error(ex); Trace.Error(ex);
errorCount++; errorCount++;
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs // If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
if (!_resultsServiceOnly) _resultsClientInitiated = false;
{
_resultsClientInitiated = false; SendResultsTelemetry(ex);
SendResultsTelemetry(ex);
}
}
finally
{
if (_enableTelemetry)
{
_resultsUploadTimer.Stop();
}
} }
} }
@@ -699,11 +660,9 @@ namespace GitHub.Runner.Common
{ {
Trace.Info("Catch exception during update steps, skip update Results."); Trace.Info("Catch exception during update steps, skip update Results.");
Trace.Error(e); Trace.Error(e);
if (!_resultsServiceOnly) _resultsClientInitiated = false;
{
_resultsClientInitiated = false; SendResultsTelemetry(e);
SendResultsTelemetry(e);
}
} }
if (_bufferedRetryRecords.Remove(update.TimelineId)) if (_bufferedRetryRecords.Remove(update.TimelineId))
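The telemetry removed from this file wrapped each upload in a shared Stopwatch guarded by try/finally, so the timer stops even when an upload throws. A small stand-alone sketch of that timing pattern using only BCL types; the method and field names are placeholders rather than the runner's.

using System;
using System.Diagnostics;
using System.Threading.Tasks;

class UploadTimingSketch
{
    static readonly Stopwatch _uploadTimer = new();
    static readonly bool _enableTelemetry = true;

    static async Task UploadWithTimingAsync(Func<Task> upload)
    {
        try
        {
            if (_enableTelemetry)
            {
                _uploadTimer.Start();   // accumulate time across all uploads
            }
            await upload();
        }
        finally
        {
            if (_enableTelemetry)
            {
                _uploadTimer.Stop();    // stop even if the upload threw
            }
        }
    }

    static async Task Main()
    {
        await UploadWithTimingAsync(() => Task.Delay(50));   // stand-in for an actual file upload
        Console.WriteLine($"Upload time: {_uploadTimer.ElapsedMilliseconds} ms");
    }
}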

View File

@@ -1,7 +1,6 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers; using System.Net.Http.Headers;
using System.Net.WebSockets; using System.Net.WebSockets;
using System.Security; using System.Security;
@@ -53,8 +52,8 @@ namespace GitHub.Runner.Common
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token) public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
{ {
this._resultsClient = CreateHttpClient(uri, token); var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
_token = token; _token = token;
if (!string.IsNullOrEmpty(liveConsoleFeedUrl)) if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
{ {
@@ -63,26 +62,6 @@ namespace GitHub.Runner.Common
} }
} }
public ResultsHttpClient CreateHttpClient(Uri uri, string token)
{
// Using default 100 timeout
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
// Create retry handler
IEnumerable<DelegatingHandler> delegatingHandlers = new List<DelegatingHandler>();
if (settings.MaxRetryRequest > 0)
{
delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) };
}
// Setup RawHttpMessageHandler without credentials
var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings);
var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true);
}
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file, public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken) CancellationToken cancellationToken)
{ {
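The CreateHttpClient method shown on the right side assembled the results client from a message handler plus an optional VssHttpRetryMessageHandler, while the left side hands a handler from HostContext.CreateHttpClientHandler() straight to ResultsHttpClient. As a rough illustration of the underlying handler-chaining pattern only, here is a sketch with plain System.Net.Http types; SimpleRetryHandler is an invented stand-in, not the runner's retry handler.

using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

// A minimal retry handler: re-send the request a few times while the response is unsuccessful.
class SimpleRetryHandler : DelegatingHandler
{
    private readonly int _maxRetries;
    public SimpleRetryHandler(int maxRetries) => _maxRetries = maxRetries;

    protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        for (int attempt = 0; ; attempt++)
        {
            // Re-sending here is safe because the request carries no content body.
            HttpResponseMessage response = await base.SendAsync(request, cancellationToken);
            if (response.IsSuccessStatusCode || attempt >= _maxRetries)
            {
                return response;
            }
            response.Dispose();   // discard the failed response before retrying
        }
    }
}

class RetryPipelineSketch
{
    static async Task Main()
    {
        // Chain: HttpClient -> SimpleRetryHandler -> HttpClientHandler (the actual transport).
        var pipeline = new SimpleRetryHandler(maxRetries: 3) { InnerHandler = new HttpClientHandler() };
        using var client = new HttpClient(pipeline, disposeHandler: true);
        client.DefaultRequestHeaders.UserAgent.ParseAdd("retry-pipeline-sketch");
        using var response = await client.GetAsync("https://api.github.com");
        Console.WriteLine((int)response.StatusCode);
    }
}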

View File

@@ -224,7 +224,7 @@ namespace GitHub.Runner.Common
} }
catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound) catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
{ {
Trace.Error($"{errorMessage} -- Attempt: {retry}"); Trace.Error($"{errorMessage} -- Atempt: {retry}");
Trace.Error(ex); Trace.Error(ex);
} }
} }

View File

@@ -38,7 +38,7 @@ namespace GitHub.Runner.Common
Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken); Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken);
Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken); Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken);
Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken); Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken);
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, CancellationToken cancellationToken); Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken);
// job request // job request
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken); Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
@@ -272,10 +272,10 @@ namespace GitHub.Runner.Common
return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken); return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken);
} }
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, CancellationToken cancellationToken) public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken)
{ {
CheckConnection(RunnerConnectionType.MessageQueue); CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, os, architecture, cancellationToken: cancellationToken); return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, cancellationToken: cancellationToken);
} }
//----------------------------------------------------------------- //-----------------------------------------------------------------

View File

@@ -73,7 +73,7 @@ namespace GitHub.Runner.Listener
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token); _getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
try try
{ {
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version, VarUtil.OS, VarUtil.OSArchitecture); message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version);
if (message == null) if (message == null)
{ {

View File

@@ -248,7 +248,7 @@ namespace GitHub.Runner.Listener.Configuration
} }
else else
{ {
agents = await _runnerServer.GetAgentsAsync(runnerSettings.AgentName); agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
} }
Trace.Verbose("Returns {0} agents", agents.Count); Trace.Verbose("Returns {0} agents", agents.Count);
@@ -744,7 +744,7 @@ namespace GitHub.Runner.Listener.Configuration
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound) catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
{ {
retryCount++; retryCount++;
Trace.Error($"Failed to get JIT runner token -- Attempt: {retryCount}"); Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
Trace.Error(ex); Trace.Error(ex);
} }
} }
@@ -807,7 +807,7 @@ namespace GitHub.Runner.Listener.Configuration
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound) catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
{ {
retryCount++; retryCount++;
Trace.Error($"Failed to get tenant credentials -- Attempt: {retryCount}"); Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
Trace.Error(ex); Trace.Error(ex);
} }
} }

View File

@@ -46,7 +46,7 @@ namespace GitHub.Runner.Listener.Configuration
if (!store.HasCredentials()) if (!store.HasCredentials())
{ {
throw new InvalidOperationException("Credentials not stored. Must reconfigure."); throw new InvalidOperationException("Credentials not stored. Must reconfigure.");
} }
CredentialData credData = store.GetCredentials(); CredentialData credData = store.GetCredentials();

View File

@@ -514,25 +514,9 @@ namespace GitHub.Runner.Listener.Configuration
failureActions.Add(new FailureAction(RecoverAction.Restart, 60000)); failureActions.Add(new FailureAction(RecoverAction.Restart, 60000));
// Lock the Service Database // Lock the Service Database
int svcLockRetries = 10; svcLock = LockServiceDatabase(scmHndl);
int svcLockRetryTimeout = 5000; if (svcLock.ToInt64() <= 0)
while (true)
{ {
svcLock = LockServiceDatabase(scmHndl);
if (svcLock.ToInt64() > 0)
{
break;
}
_term.WriteLine("Retrying Lock Service Database...");
svcLockRetries--;
if (svcLockRetries > 0)
{
Thread.Sleep(svcLockRetryTimeout);
continue;
}
throw new Exception("Failed to Lock Service Database for Write"); throw new Exception("Failed to Lock Service Database for Write");
} }

View File

@@ -98,7 +98,7 @@ namespace GitHub.Runner.Listener
Guid dispatchedJobId = _jobDispatchedQueue.Dequeue(); Guid dispatchedJobId = _jobDispatchedQueue.Dequeue();
if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch)) if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
{ {
Trace.Verbose($"Retrive previous WorkerDispatcher for job {currentDispatch.JobId}."); Trace.Verbose($"Retrive previous WorkerDispather for job {currentDispatch.JobId}.");
} }
} }
@@ -162,12 +162,12 @@ namespace GitHub.Runner.Listener
dispatchedJobId = _jobDispatchedQueue.Dequeue(); dispatchedJobId = _jobDispatchedQueue.Dequeue();
if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch)) if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
{ {
Trace.Verbose($"Retrive previous WorkerDispatcher for job {currentDispatch.JobId}."); Trace.Verbose($"Retrive previous WorkerDispather for job {currentDispatch.JobId}.");
} }
} }
else else
{ {
Trace.Verbose($"There is no running WorkerDispatcher needs to await."); Trace.Verbose($"There is no running WorkerDispather needs to await.");
} }
if (currentDispatch != null) if (currentDispatch != null)
@@ -176,7 +176,7 @@ namespace GitHub.Runner.Listener
{ {
try try
{ {
Trace.Info($"Waiting WorkerDispatcher for job {currentDispatch.JobId} run to finish."); Trace.Info($"Waiting WorkerDispather for job {currentDispatch.JobId} run to finish.");
await currentDispatch.WorkerDispatch; await currentDispatch.WorkerDispatch;
Trace.Info($"Job request {currentDispatch.JobId} processed succeed."); Trace.Info($"Job request {currentDispatch.JobId} processed succeed.");
} }
@@ -190,7 +190,7 @@ namespace GitHub.Runner.Listener
WorkerDispatcher workerDispatcher; WorkerDispatcher workerDispatcher;
if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher)) if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
{ {
Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}."); Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
workerDispatcher.Dispose(); workerDispatcher.Dispose();
} }
} }
@@ -209,7 +209,7 @@ namespace GitHub.Runner.Listener
{ {
try try
{ {
Trace.Info($"Ensure WorkerDispatcher for job {currentDispatch.JobId} run to finish, cancel any running job."); Trace.Info($"Ensure WorkerDispather for job {currentDispatch.JobId} run to finish, cancel any running job.");
await EnsureDispatchFinished(currentDispatch, cancelRunningJob: true); await EnsureDispatchFinished(currentDispatch, cancelRunningJob: true);
} }
catch (Exception ex) catch (Exception ex)
@@ -222,7 +222,7 @@ namespace GitHub.Runner.Listener
WorkerDispatcher workerDispatcher; WorkerDispatcher workerDispatcher;
if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher)) if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
{ {
Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}."); Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
workerDispatcher.Dispose(); workerDispatcher.Dispose();
} }
} }
@@ -327,7 +327,7 @@ namespace GitHub.Runner.Listener
WorkerDispatcher workerDispatcher; WorkerDispatcher workerDispatcher;
if (_jobInfos.TryRemove(jobDispatch.JobId, out workerDispatcher)) if (_jobInfos.TryRemove(jobDispatch.JobId, out workerDispatcher))
{ {
Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {jobDispatch.JobId}."); Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {jobDispatch.JobId}.");
workerDispatcher.Dispose(); workerDispatcher.Dispose();
} }
} }

View File

@@ -9,7 +9,6 @@ using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Configuration; using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Services.Common; using GitHub.Services.Common;
@@ -129,7 +128,7 @@ namespace GitHub.Runner.Listener
// "invalid_client" means the runner registration has been deleted from the server. // "invalid_client" means the runner registration has been deleted from the server.
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase)) if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
{ {
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently."); _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
return false; return false;
} }
@@ -140,7 +139,7 @@ namespace GitHub.Runner.Listener
var authError = await oauthTokenProvider.ValidateCredentialAsync(token); var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase)) if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{ {
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently."); _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
return false; return false;
} }
} }
@@ -220,8 +219,6 @@ namespace GitHub.Runner.Listener
_lastMessageId, _lastMessageId,
runnerStatus, runnerStatus,
BuildConstants.RunnerPackage.Version, BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
_getMessagesTokenSource.Token); _getMessagesTokenSource.Token);
// Decrypt the message body if the session is using encryption // Decrypt the message body if the session is using encryption

View File

@@ -6,8 +6,6 @@ using System.Linq;
using System.Security.Cryptography; using System.Security.Cryptography;
using System.Text; using System.Text;
using System.Threading; using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Common;
namespace GitHub.Runner.Sdk namespace GitHub.Runner.Sdk
{ {
@@ -74,25 +72,6 @@ namespace GitHub.Runner.Sdk
} }
} }
public static async Task<string> GetFileContentSha256HashAsync(string path)
{
if (!File.Exists(path))
{
return string.Empty;
}
using (FileStream stream = File.OpenRead(path))
{
using (SHA256 sha256 = SHA256.Create())
{
byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
return hash;
}
}
}
public static void Delete(string path, CancellationToken cancellationToken) public static void Delete(string path, CancellationToken cancellationToken)
{ {
DeleteDirectory(path, cancellationToken); DeleteDirectory(path, cancellationToken);
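GetFileContentSha256HashAsync, removed above, streams a file through SHA256 and returns the digest as a hex string; elsewhere in this comparison the hash is used to annotate untar failures. An equivalent stand-alone sketch using only BCL types, where Convert.ToHexString stands in for the SDK's ConvertToHexString helper and the input path is hypothetical.

using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading.Tasks;

class FileHashSketch
{
    static async Task<string> GetFileContentSha256HashAsync(string path)
    {
        if (!File.Exists(path))
        {
            return string.Empty;
        }
        using FileStream stream = File.OpenRead(path);
        using SHA256 sha256 = SHA256.Create();
        byte[] hashBytes = await sha256.ComputeHashAsync(stream);   // stream the file, do not load it whole
        return Convert.ToHexString(hashBytes);                      // hex digest, e.g. "E3B0C4..."
    }

    static async Task Main(string[] args)
    {
        string path = args.Length > 0 ? args[0] : "archive.tar.gz"; // placeholder input file
        Console.WriteLine(await GetFileContentSha256HashAsync(path));
    }
}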

View File

@@ -85,35 +85,6 @@ namespace GitHub.Runner.Sdk
VssCredentials credentials, VssCredentials credentials,
IEnumerable<DelegatingHandler> additionalDelegatingHandler = null, IEnumerable<DelegatingHandler> additionalDelegatingHandler = null,
TimeSpan? timeout = null) TimeSpan? timeout = null)
{
RawClientHttpRequestSettings settings = GetHttpRequestSettings(timeout);
RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.Federated, settings), additionalDelegatingHandler);
return connection;
}
public static VssCredentials GetVssCredential(ServiceEndpoint serviceEndpoint)
{
ArgUtil.NotNull(serviceEndpoint, nameof(serviceEndpoint));
ArgUtil.NotNull(serviceEndpoint.Authorization, nameof(serviceEndpoint.Authorization));
ArgUtil.NotNullOrEmpty(serviceEndpoint.Authorization.Scheme, nameof(serviceEndpoint.Authorization.Scheme));
if (serviceEndpoint.Authorization.Parameters.Count == 0)
{
throw new ArgumentOutOfRangeException(nameof(serviceEndpoint));
}
VssCredentials credentials = null;
string accessToken;
if (serviceEndpoint.Authorization.Scheme == EndpointAuthorizationSchemes.OAuth &&
serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out accessToken))
{
credentials = new VssCredentials(new VssOAuthAccessTokenCredential(accessToken), CredentialPromptType.DoNotPrompt);
}
return credentials;
}
public static RawClientHttpRequestSettings GetHttpRequestSettings(TimeSpan? timeout = null)
{ {
RawClientHttpRequestSettings settings = RawClientHttpRequestSettings.Default.Clone(); RawClientHttpRequestSettings settings = RawClientHttpRequestSettings.Default.Clone();
@@ -145,7 +116,30 @@ namespace GitHub.Runner.Sdk
// settings are applied to an HttpRequestMessage. // settings are applied to an HttpRequestMessage.
settings.AcceptLanguages.Remove(CultureInfo.InvariantCulture); settings.AcceptLanguages.Remove(CultureInfo.InvariantCulture);
return settings; RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.Federated, settings), additionalDelegatingHandler);
return connection;
}
public static VssCredentials GetVssCredential(ServiceEndpoint serviceEndpoint)
{
ArgUtil.NotNull(serviceEndpoint, nameof(serviceEndpoint));
ArgUtil.NotNull(serviceEndpoint.Authorization, nameof(serviceEndpoint.Authorization));
ArgUtil.NotNullOrEmpty(serviceEndpoint.Authorization.Scheme, nameof(serviceEndpoint.Authorization.Scheme));
if (serviceEndpoint.Authorization.Parameters.Count == 0)
{
throw new ArgumentOutOfRangeException(nameof(serviceEndpoint));
}
VssCredentials credentials = null;
string accessToken;
if (serviceEndpoint.Authorization.Scheme == EndpointAuthorizationSchemes.OAuth &&
serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out accessToken))
{
credentials = new VssCredentials(new VssOAuthAccessTokenCredential(accessToken), CredentialPromptType.DoNotPrompt);
}
return credentials;
} }
} }
} }

View File

@@ -114,128 +114,6 @@ namespace GitHub.Runner.Sdk
} }
} }
#if OS_WINDOWS
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'Path' environment variable.");
#else
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'PATH' environment variable.");
#endif
if (require)
{
throw new FileNotFoundException(
message: $"{command}: command not found",
fileName: command);
}
return null;
}
public static string Which2(string command, bool require = false, ITraceWriter trace = null, string prependPath = null)
{
ArgUtil.NotNullOrEmpty(command, nameof(command));
trace?.Info($"Which2: '{command}'");
if (Path.IsPathFullyQualified(command) && File.Exists(command))
{
trace?.Info($"Fully qualified path: '{command}'");
return command;
}
string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
if (string.IsNullOrEmpty(path))
{
trace?.Info("PATH environment variable not defined.");
path = path ?? string.Empty;
}
if (!string.IsNullOrEmpty(prependPath))
{
path = PathUtil.PrependPath(prependPath, path);
}
string[] pathSegments = path.Split(new Char[] { Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries);
for (int i = 0; i < pathSegments.Length; i++)
{
pathSegments[i] = Environment.ExpandEnvironmentVariables(pathSegments[i]);
}
foreach (string pathSegment in pathSegments)
{
if (!string.IsNullOrEmpty(pathSegment) && Directory.Exists(pathSegment))
{
#if OS_WINDOWS
string pathExt = Environment.GetEnvironmentVariable("PATHEXT");
if (string.IsNullOrEmpty(pathExt))
{
// XP's system default value for PATHEXT system variable
pathExt = ".com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh";
}
string[] pathExtSegments = pathExt.Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
// if command already has an extension.
if (pathExtSegments.Any(ext => command.EndsWith(ext, StringComparison.OrdinalIgnoreCase)))
{
try
{
foreach (var file in Directory.EnumerateFiles(pathSegment, command))
{
if (IsPathValid(file, trace))
{
trace?.Info($"Location: '{file}'");
return file;
}
}
}
catch (UnauthorizedAccessException ex)
{
trace?.Info("Ignore UnauthorizedAccess exception during Which.");
trace?.Verbose(ex.ToString());
}
}
else
{
string searchPattern;
searchPattern = StringUtil.Format($"{command}.*");
try
{
foreach (var file in Directory.EnumerateFiles(pathSegment, searchPattern))
{
// add extension.
for (int i = 0; i < pathExtSegments.Length; i++)
{
string fullPath = Path.Combine(pathSegment, $"{command}{pathExtSegments[i]}");
if (string.Equals(file, fullPath, StringComparison.OrdinalIgnoreCase) && IsPathValid(fullPath, trace))
{
trace?.Info($"Location: '{fullPath}'");
return fullPath;
}
}
}
}
catch (UnauthorizedAccessException ex)
{
trace?.Info("Ignore UnauthorizedAccess exception during Which.");
trace?.Verbose(ex.ToString());
}
}
#else
try
{
foreach (var file in Directory.EnumerateFiles(pathSegment, command))
{
if (IsPathValid(file, trace))
{
trace?.Info($"Location: '{file}'");
return file;
}
}
}
catch (UnauthorizedAccessException ex)
{
trace?.Info("Ignore UnauthorizedAccess exception during Which.");
trace?.Verbose(ex.ToString());
}
#endif
}
}
#if OS_WINDOWS #if OS_WINDOWS
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'Path' environment variable."); trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'Path' environment variable.");
#else #else
@@ -256,12 +134,7 @@ namespace GitHub.Runner.Sdk
{ {
var fileInfo = new FileInfo(path); var fileInfo = new FileInfo(path);
var linkTargetFullPath = fileInfo.Directory?.FullName + Path.DirectorySeparatorChar + fileInfo.LinkTarget; var linkTargetFullPath = fileInfo.Directory?.FullName + Path.DirectorySeparatorChar + fileInfo.LinkTarget;
if (fileInfo.LinkTarget == null || if (fileInfo.LinkTarget == null || File.Exists(linkTargetFullPath) || File.Exists(fileInfo.LinkTarget)) return true;
File.Exists(linkTargetFullPath) ||
File.Exists(fileInfo.LinkTarget))
{
return true;
}
trace?.Info($"the target '{fileInfo.LinkTarget}' of the symbolic link '{path}', does not exist"); trace?.Info($"the target '{fileInfo.LinkTarget}' of the symbolic link '{path}', does not exist");
return false; return false;
} }
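Which2, shown above, walks every PATH segment, expands environment variables, honours PATHEXT on Windows and validates symlink targets before returning a match. A deliberately trimmed sketch of the same PATH lookup idea, without the PATHEXT and symlink handling, to show the core loop; the helper name and behaviour are simplified assumptions.

using System;
using System.IO;

class WhichSketch
{
    // Return the first file named 'command' found on PATH, or null when there is no match.
    static string Which(string command)
    {
        string path = Environment.GetEnvironmentVariable("PATH") ?? string.Empty;
        foreach (string segment in path.Split(Path.PathSeparator, StringSplitOptions.RemoveEmptyEntries))
        {
            string expanded = Environment.ExpandEnvironmentVariables(segment);
            if (!Directory.Exists(expanded))
            {
                continue;   // skip stale PATH entries
            }
            string candidate = Path.Combine(expanded, command);
            if (File.Exists(candidate))
            {
                return candidate;
            }
        }
        return null;   // the real helper optionally throws FileNotFoundException when require == true
    }

    static void Main()
    {
        Console.WriteLine(Which(OperatingSystem.IsWindows() ? "cmd.exe" : "sh") ?? "not found");
    }
}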

View File

@@ -6,7 +6,6 @@ using System.Linq;
using System.Net; using System.Net;
using System.Net.Http; using System.Net.Http;
using System.Net.Http.Headers; using System.Net.Http.Headers;
using System.Security.Cryptography;
using System.Text; using System.Text;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
@@ -53,6 +52,7 @@ namespace GitHub.Runner.Worker
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k). //81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
private const int _defaultCopyBufferSize = 81920; private const int _defaultCopyBufferSize = 81920;
private const string _dotcomApiUrl = "https://api.github.com";
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new(); private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new();
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers; public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
@@ -739,7 +739,10 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos)); ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions)); ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
var apiUrl = GetApiUrl(executionContext);
var defaultAccessToken = executionContext.GetGitHubContext("token"); var defaultAccessToken = executionContext.GetGitHubContext("token");
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values) foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
{ {
@@ -763,8 +766,6 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNull(downloadInfo, nameof(downloadInfo)); ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner)); ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref)); ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.ResolvedNameWithOwner));
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.ResolvedSha));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref); string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
string watermarkFile = GetWatermarkFilePath(destDirectory); string watermarkFile = GetWatermarkFilePath(destDirectory);
@@ -781,6 +782,31 @@ namespace GitHub.Runner.Worker
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' (SHA:{downloadInfo.ResolvedSha})"); executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' (SHA:{downloadInfo.ResolvedSha})");
} }
await DownloadRepositoryActionAsync(executionContext, downloadInfo, destDirectory);
}
private string GetApiUrl(IExecutionContext executionContext)
{
string apiUrl = executionContext.GetGitHubContext("api_url");
if (!string.IsNullOrEmpty(apiUrl))
{
return apiUrl;
}
// Once the api_url is set for hosted, we can remove this fallback (it doesn't make sense for GHES)
return _dotcomApiUrl;
}
private static string BuildLinkToActionArchive(string apiUrl, string repository, string @ref)
{
#if OS_WINDOWS
return $"{apiUrl}/repos/{repository}/zipball/{@ref}";
#else
return $"{apiUrl}/repos/{repository}/tarball/{@ref}";
#endif
}
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
{
//download and extract action in a temp folder and rename it on success //download and extract action in a temp folder and rename it on success
string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid()); string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
Directory.CreateDirectory(tempDirectory); Directory.CreateDirectory(tempDirectory);
@@ -793,50 +819,97 @@ namespace GitHub.Runner.Worker
string link = downloadInfo?.TarballUrl; string link = downloadInfo?.TarballUrl;
#endif #endif
Trace.Info($"Save archive '{link}' into {archiveFile}.");
try try
{ {
var useActionArchiveCache = false; int retryCount = 0;
if (executionContext.Global.Variables.GetBoolean("DistributedTask.UseActionArchiveCache") == true)
// Allow up to 20 * 60s for any action to be downloaded from github graph.
int timeoutSeconds = 20 * 60;
while (retryCount < 3)
{ {
var hasActionArchiveCache = false; using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
var actionArchiveCacheDir = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory); using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken))
if (!string.IsNullOrEmpty(actionArchiveCacheDir) &&
Directory.Exists(actionArchiveCacheDir))
{ {
hasActionArchiveCache = true; try
Trace.Info($"Check if action archive '{downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha}' already exists in cache directory '{actionArchiveCacheDir}'");
#if OS_WINDOWS
var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.zip");
#else
var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.tar.gz");
#endif
if (File.Exists(cacheArchiveFile))
{ {
try //open zip stream in async mode
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{ {
Trace.Info($"Found action archive '{cacheArchiveFile}' in cache directory '{actionArchiveCacheDir}'"); httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
File.Copy(cacheArchiveFile, archiveFile);
useActionArchiveCache = true; httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
executionContext.Debug($"Copied action archive '{cacheArchiveFile}' to '{archiveFile}'"); using (var response = await httpClient.GetAsync(link))
{
var requestId = UrlUtil.GetGitHubRequestId(response.Headers);
if (!string.IsNullOrEmpty(requestId))
{
Trace.Info($"Request URL: {link} X-GitHub-Request-Id: {requestId} Http Status: {response.StatusCode}");
}
if (response.IsSuccessStatusCode)
{
using (var result = await response.Content.ReadAsStreamAsync())
{
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
await fs.FlushAsync(actionDownloadCancellation.Token);
// download succeed, break out the retry loop.
break;
}
}
else if (response.StatusCode == HttpStatusCode.NotFound)
{
// It doesn't make sense to retry in this case, so just stop
throw new ActionNotFoundException(new Uri(link), requestId);
}
else
{
// Something else bad happened, let's go to our retry logic
response.EnsureSuccessStatusCode();
}
}
} }
catch (Exception ex) }
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
{
Trace.Info("Action download has been cancelled.");
throw;
}
catch (ActionNotFoundException)
{
Trace.Info($"The action at '{link}' does not exist");
throw;
}
catch (Exception ex) when (retryCount < 2)
{
retryCount++;
Trace.Error($"Fail to download archive '{link}' -- Attempt: {retryCount}");
Trace.Error(ex);
if (actionDownloadTimeout.Token.IsCancellationRequested)
{ {
Trace.Error($"Failed to copy action archive '{cacheArchiveFile}' to '{archiveFile}'. Error: {ex}"); // action download didn't finish within timeout
executionContext.Warning($"Action '{link}' didn't finish download within {timeoutSeconds} seconds.");
}
else
{
executionContext.Warning($"Failed to download action '{link}'. Error: {ex.Message}");
} }
} }
} }
executionContext.Global.JobTelemetry.Add(new JobTelemetry() if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
{ {
Type = JobTelemetryType.General, var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
Message = $"Action archive cache usage: {downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha} use cache {useActionArchiveCache} has cache {hasActionArchiveCache}" executionContext.Warning($"Back off {backOff.TotalSeconds} seconds before retry.");
}); await Task.Delay(backOff);
}
} }
if (!useActionArchiveCache) ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
{ executionContext.Debug($"Download '{link}' to '{archiveFile}'");
await DownloadRepositoryArchive(executionContext, link, downloadInfo.Authentication?.Token, archiveFile);
}
var stagingDirectory = Path.Combine(tempDirectory, "_staging"); var stagingDirectory = Path.Combine(tempDirectory, "_staging");
Directory.CreateDirectory(stagingDirectory); Directory.CreateDirectory(stagingDirectory);
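The hunk above (left column) inlines the archive download: up to three attempts, each bounded by a 20 minute CancellationTokenSource linked to the job token, a 404 treated as terminal, and a random 10 to 30 second back-off between attempts unless _GITHUB_ACTION_DOWNLOAD_NO_BACKOFF is set. A compact sketch of that retry, timeout and back-off shape with plain BCL types; the URL, file name and exception types are stand-ins, not the runner's ActionNotFoundException or BackoffTimerHelper.

using System;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

class ArchiveDownloadSketch
{
    static async Task DownloadAsync(string url, string archiveFile, CancellationToken jobToken)
    {
        var random = new Random();
        for (int attempt = 1; attempt <= 3; attempt++)
        {
            // Each attempt is bounded by a 20 minute timeout linked to the job's cancellation token.
            using var timeout = new CancellationTokenSource(TimeSpan.FromMinutes(20));
            using var linked = CancellationTokenSource.CreateLinkedTokenSource(timeout.Token, jobToken);
            try
            {
                using var handler = new HttpClientHandler();
                using var client = new HttpClient(handler);
                client.DefaultRequestHeaders.UserAgent.ParseAdd("archive-download-sketch");
                using var response = await client.GetAsync(url, linked.Token);
                if (response.StatusCode == HttpStatusCode.NotFound)
                {
                    // A missing ref or repository will not appear on retry, so treat 404 as terminal.
                    throw new FileNotFoundException($"Action archive not found: {url}");
                }
                response.EnsureSuccessStatusCode();
                await using var fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, 4096, useAsync: true);
                await response.Content.CopyToAsync(fs, linked.Token);
                return;   // success, stop retrying
            }
            catch (OperationCanceledException) when (jobToken.IsCancellationRequested)
            {
                throw;    // the whole job was cancelled, do not retry
            }
            catch (Exception ex) when (attempt < 3 && ex is not FileNotFoundException)
            {
                Console.WriteLine($"Attempt {attempt} failed: {ex.Message}");
                // Random back-off before the next attempt, in the spirit of GetRandomBackoff(10s, 30s).
                await Task.Delay(TimeSpan.FromSeconds(random.Next(10, 31)), jobToken);
            }
        }
    }

    static Task Main() => DownloadAsync(
        "https://api.github.com/repos/actions/checkout/tarball/v4", "action.tar.gz", CancellationToken.None);
}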
@@ -856,13 +929,11 @@ namespace GitHub.Runner.Worker
// tar -xzf // tar -xzf
using (var processInvoker = HostContext.CreateService<IProcessInvoker>()) using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{ {
var tarOutputs = new List<string>();
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) => processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
{ {
if (!string.IsNullOrEmpty(args.Data)) if (!string.IsNullOrEmpty(args.Data))
{ {
Trace.Info(args.Data); Trace.Info(args.Data);
tarOutputs.Add($"STDOUT: {args.Data}");
} }
}); });
@@ -871,23 +942,13 @@ namespace GitHub.Runner.Worker
if (!string.IsNullOrEmpty(args.Data)) if (!string.IsNullOrEmpty(args.Data))
{ {
Trace.Error(args.Data); Trace.Error(args.Data);
tarOutputs.Add($"STDERR: {args.Data}");
} }
}); });
int exitCode = await processInvoker.ExecuteAsync(stagingDirectory, tar, $"-xzf \"{archiveFile}\"", null, executionContext.CancellationToken); int exitCode = await processInvoker.ExecuteAsync(stagingDirectory, tar, $"-xzf \"{archiveFile}\"", null, executionContext.CancellationToken);
if (exitCode != 0) if (exitCode != 0)
{ {
if (executionContext.Global.Variables.GetBoolean("DistributedTask.DetailUntarFailure") == true) throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
{
var fileInfo = new FileInfo(archiveFile);
var sha256hash = await IOUtil.GetFileContentSha256HashAsync(archiveFile);
throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile} (SHA256 '{sha256hash}', size '{fileInfo.Length}' bytes, tar outputs '{string.Join(' ', tarOutputs)}'). Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
}
else
{
throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
}
} }
} }
#endif #endif
@@ -905,6 +966,7 @@ namespace GitHub.Runner.Worker
} }
Trace.Verbose("Create watermark file indicate action download succeed."); Trace.Verbose("Create watermark file indicate action download succeed.");
string watermarkFile = GetWatermarkFilePath(destDirectory);
File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());
executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'."); executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'.");
@@ -929,6 +991,29 @@ namespace GitHub.Runner.Worker
} }
} }
private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
{
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
if (string.IsNullOrEmpty(authToken))
{
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
authToken = executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN");
}
if (!string.IsNullOrEmpty(authToken))
{
HostContext.SecretMasker.AddValue(authToken);
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
else
{
var accessToken = executionContext.GetGitHubContext("token");
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
}
private string GetWatermarkFilePath(string directory) => directory + ".completed"; private string GetWatermarkFilePath(string directory) => directory + ".completed";
private ActionSetupInfo PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction) private ActionSetupInfo PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
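ConfigureAuthorizationFromContext, added in the hunk above, ultimately builds a Basic authorization header by Base64 encoding "x-access-token:<token>" (or "PAT:<token>" for the override path) after registering the secret with the masker. The header construction on its own is a one-liner; a tiny sketch with a placeholder token.

using System;
using System.Net.Http.Headers;
using System.Text;

class AuthHeaderSketch
{
    static AuthenticationHeaderValue CreateBasicAuthHeader(string accessToken)
    {
        // "x-access-token:<token>" Base64-encoded, as in ConfigureAuthorizationFromContext above.
        string base64Token = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
        return new AuthenticationHeaderValue("Basic", base64Token);
    }

    static void Main()
    {
        // Placeholder token for illustration only; the runner masks the real value before logging.
        Console.WriteLine(CreateBasicAuthHeader("ghs_exampletoken"));
    }
}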
@@ -1101,6 +1186,13 @@ namespace GitHub.Runner.Worker
return $"{repositoryReference.Name}@{repositoryReference.Ref}"; return $"{repositoryReference.Name}@{repositoryReference.Ref}";
} }
private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
{
ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
return $"{info.NameWithOwner}@{info.Ref}";
}
private AuthenticationHeaderValue CreateAuthHeader(string token) private AuthenticationHeaderValue CreateAuthHeader(string token)
{ {
if (string.IsNullOrEmpty(token)) if (string.IsNullOrEmpty(token))
@@ -1112,104 +1204,6 @@ namespace GitHub.Runner.Worker
HostContext.SecretMasker.AddValue(base64EncodingToken); HostContext.SecretMasker.AddValue(base64EncodingToken);
return new AuthenticationHeaderValue("Basic", base64EncodingToken); return new AuthenticationHeaderValue("Basic", base64EncodingToken);
} }
private async Task DownloadRepositoryArchive(IExecutionContext executionContext, string downloadUrl, string downloadAuthToken, string archiveFile)
{
Trace.Info($"Save archive '{downloadUrl}' into {archiveFile}.");
int retryCount = 0;
// Allow up to 20 * 60s for any action to be downloaded from github graph.
int timeoutSeconds = 20 * 60;
while (retryCount < 3)
{
using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken))
{
try
{
//open zip stream in async mode
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadAuthToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
using (var response = await httpClient.GetAsync(downloadUrl))
{
var requestId = UrlUtil.GetGitHubRequestId(response.Headers);
if (!string.IsNullOrEmpty(requestId))
{
Trace.Info($"Request URL: {downloadUrl} X-GitHub-Request-Id: {requestId} Http Status: {response.StatusCode}");
}
if (response.IsSuccessStatusCode)
{
using (var result = await response.Content.ReadAsStreamAsync())
{
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
await fs.FlushAsync(actionDownloadCancellation.Token);
// download succeed, break out the retry loop.
break;
}
}
else if (response.StatusCode == HttpStatusCode.NotFound)
{
// It doesn't make sense to retry in this case, so just stop
throw new ActionNotFoundException(new Uri(downloadUrl), requestId);
}
else
{
// Something else bad happened, let's go to our retry logic
response.EnsureSuccessStatusCode();
}
}
}
}
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
{
Trace.Info("Action download has been cancelled.");
throw;
}
catch (OperationCanceledException ex) when (!executionContext.CancellationToken.IsCancellationRequested && retryCount >= 2)
{
Trace.Info($"Action download final retry timeout after {timeoutSeconds} seconds.");
throw new TimeoutException($"Action '{downloadUrl}' download has timed out. Error: {ex.Message}");
}
catch (ActionNotFoundException)
{
Trace.Info($"The action at '{downloadUrl}' does not exist");
throw;
}
catch (Exception ex) when (retryCount < 2)
{
retryCount++;
Trace.Error($"Fail to download archive '{downloadUrl}' -- Attempt: {retryCount}");
Trace.Error(ex);
if (actionDownloadTimeout.Token.IsCancellationRequested)
{
// action download didn't finish within timeout
executionContext.Warning($"Action '{downloadUrl}' didn't finish download within {timeoutSeconds} seconds.");
}
else
{
executionContext.Warning($"Failed to download action '{downloadUrl}'. Error: {ex.Message}");
}
}
}
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
{
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
executionContext.Warning($"Back off {backOff.TotalSeconds} seconds before retry.");
await Task.Delay(backOff);
}
}
ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
executionContext.Debug($"Download '{downloadUrl}' to '{archiveFile}'");
}
} }
public sealed class Definition public sealed class Definition
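Earlier in this file the left column consults an on-disk archive cache before downloading: the directory named by ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE is checked for <owner_repo>/<resolved sha>.tar.gz (.zip on Windows), and a hit is copied instead of fetched. A stand-alone sketch of that lookup using the same separator replacement; the repository name and sha in Main are placeholders.

using System;
using System.IO;

class ActionArchiveCacheSketch
{
    // Returns the cached archive path when it exists, otherwise null.
    static string TryGetCachedArchive(string resolvedNameWithOwner, string resolvedSha)
    {
        string cacheDir = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE");
        if (string.IsNullOrEmpty(cacheDir) || !Directory.Exists(cacheDir))
        {
            return null;   // cache not configured on this machine
        }
        // "actions/checkout" becomes "actions_checkout", mirroring the separator replacement above.
        string repoSegment = resolvedNameWithOwner
            .Replace(Path.DirectorySeparatorChar, '_')
            .Replace(Path.AltDirectorySeparatorChar, '_');
        string extension = OperatingSystem.IsWindows() ? ".zip" : ".tar.gz";
        string cacheArchiveFile = Path.Combine(cacheDir, repoSegment, $"{resolvedSha}{extension}");
        return File.Exists(cacheArchiveFile) ? cacheArchiveFile : null;
    }

    static void Main()
    {
        string cached = TryGetCachedArchive("actions/checkout", "0000000000000000000000000000000000000000");
        Console.WriteLine(cached ?? "cache miss, fall back to download");
    }
}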

View File

@@ -134,28 +134,6 @@ namespace GitHub.Runner.Worker.Handlers
// Remove environment variable that may cause conflicts with the node within the runner. // Remove environment variable that may cause conflicts with the node within the runner.
Environment.Remove("NODE_ICU_DATA"); // https://github.com/actions/runner/issues/795 Environment.Remove("NODE_ICU_DATA"); // https://github.com/actions/runner/issues/795
if (string.Equals(Data.NodeVersion, Constants.Runner.DeprecatedNodeVersion, StringComparison.OrdinalIgnoreCase) && (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.Node16Warning) ?? false))
{
var repoAction = Action as RepositoryPathReference;
var warningActions = new HashSet<string>();
if (ExecutionContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings))
{
warningActions = StringUtil.ConvertFromJson<HashSet<string>>(deprecatedNodeWarnings);
}
if (string.IsNullOrEmpty(repoAction.Name))
{
// local actions don't have a 'Name'
warningActions.Add(repoAction.Path);
}
else
{
warningActions.Add($"{repoAction.Name}/{repoAction.Path ?? string.Empty}".TrimEnd('/') + $"@{repoAction.Ref}");
}
ExecutionContext.Global.Variables.Set(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, StringUtil.ConvertToJson(warningActions));
}
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager)) using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager))
using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager)) using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager))
{ {

View File

@@ -83,40 +83,19 @@ namespace GitHub.Runner.Worker.Handlers
shellCommand = "pwsh"; shellCommand = "pwsh";
if (validateShellOnHost) if (validateShellOnHost)
{ {
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
{
shellCommandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath);
}
else
{
shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
}
if (string.IsNullOrEmpty(shellCommandPath)) if (string.IsNullOrEmpty(shellCommandPath))
{ {
shellCommand = "powershell"; shellCommand = "powershell";
Trace.Info($"Defaulting to {shellCommand}"); Trace.Info($"Defaulting to {shellCommand}");
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
{
shellCommandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath);
}
else
{
shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
}
} }
} }
#else #else
shellCommand = "sh"; shellCommand = "sh";
if (validateShellOnHost) if (validateShellOnHost)
{ {
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
{
shellCommandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath);
}
else
{
shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
}
} }
#endif #endif
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand); argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
@@ -127,14 +106,7 @@ namespace GitHub.Runner.Worker.Handlers
shellCommand = parsed.shellCommand; shellCommand = parsed.shellCommand;
if (validateShellOnHost) if (validateShellOnHost)
{ {
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath);
{
shellCommandPath = WhichUtil.Which2(parsed.shellCommand, true, Trace, prependPath);
}
else
{
shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath);
}
} }
argFormat = $"{parsed.shellArgs}".TrimStart(); argFormat = $"{parsed.shellArgs}".TrimStart();
@@ -216,38 +188,17 @@ namespace GitHub.Runner.Worker.Handlers
{ {
#if OS_WINDOWS #if OS_WINDOWS
shellCommand = "pwsh"; shellCommand = "pwsh";
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
{
commandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
}
if (string.IsNullOrEmpty(commandPath)) if (string.IsNullOrEmpty(commandPath))
{ {
shellCommand = "powershell"; shellCommand = "powershell";
Trace.Info($"Defaulting to {shellCommand}"); Trace.Info($"Defaulting to {shellCommand}");
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
{
commandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
}
} }
ArgUtil.NotNullOrEmpty(commandPath, "Default Shell"); ArgUtil.NotNullOrEmpty(commandPath, "Default Shell");
#else #else
shellCommand = "sh"; shellCommand = "sh";
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
{
commandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
}
#endif #endif
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand); argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
} }
@@ -258,14 +209,7 @@ namespace GitHub.Runner.Worker.Handlers
if (!IsActionStep && systemShells.Contains(shell)) if (!IsActionStep && systemShells.Contains(shell))
{ {
shellCommand = shell; shellCommand = shell;
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath);
{
commandPath = WhichUtil.Which2(shell, !isContainerStepHost, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath);
}
if (shell == "bash") if (shell == "bash")
{ {
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat("sh"); argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat("sh");
@@ -280,14 +224,7 @@ namespace GitHub.Runner.Worker.Handlers
var parsed = ScriptHandlerHelpers.ParseShellOptionString(shell); var parsed = ScriptHandlerHelpers.ParseShellOptionString(shell);
shellCommand = parsed.shellCommand; shellCommand = parsed.shellCommand;
// For non-ContainerStepHost, the command must be located on the host by Which // For non-ContainerStepHost, the command must be located on the host by Which
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
{
commandPath = WhichUtil.Which2(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
}
argFormat = $"{parsed.shellArgs}".TrimStart(); argFormat = $"{parsed.shellArgs}".TrimStart();
if (string.IsNullOrEmpty(argFormat)) if (string.IsNullOrEmpty(argFormat))
{ {
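The two columns above differ only in how the shell binary is located (a feature-flagged Which2 on one side versus a single Which call on the other); the fallback chain itself is the same on both sides. A minimal, self-contained sketch of that fallback follows, assuming the lookup returns null for a missing tool when require is false and throws when require is true; the delegate stands in for WhichUtil.Which and is an illustration, not the runner's API.

    using System;

    static class DefaultShellSketch
    {
        // which(tool, require): resolved path, or null when the tool is missing
        // and require == false; throws when require == true and the tool is missing.
        public static string Resolve(Func<string, bool, string> which, bool isWindows)
        {
            if (isWindows)
            {
                // Prefer pwsh; fall back to Windows PowerShell when pwsh is not on PATH.
                return which("pwsh", false) ?? which("powershell", true);
            }

            // Prefer bash; fall back to sh, which is required to exist on POSIX hosts.
            return which("bash", false) ?? which("sh", true);
        }
    }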

View File

@@ -51,13 +51,6 @@ namespace GitHub.Runner.Worker
HostContext.UserAgents.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value)); HostContext.UserAgents.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value));
} }
var jobServerQueueTelemetry = false;
if (message.Variables.TryGetValue("DistributedTask.EnableJobServerQueueTelemetry", out VariableValue enableJobServerQueueTelemetry) &&
!string.IsNullOrEmpty(enableJobServerQueueTelemetry?.Value))
{
jobServerQueueTelemetry = StringUtil.ConvertToBoolean(enableJobServerQueueTelemetry.Value);
}
ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (MessageUtil.IsRunServiceJob(message.MessageType)) if (MessageUtil.IsRunServiceJob(message.MessageType))
{ {
@@ -79,7 +72,7 @@ namespace GitHub.Runner.Worker
launchServer.InitializeLaunchClient(new Uri(launchReceiverEndpoint), accessToken); launchServer.InitializeLaunchClient(new Uri(launchReceiverEndpoint), accessToken);
} }
_jobServerQueue = HostContext.GetService<IJobServerQueue>(); _jobServerQueue = HostContext.GetService<IJobServerQueue>();
_jobServerQueue.Start(message, resultsServiceOnly: true, enableTelemetry: jobServerQueueTelemetry); _jobServerQueue.Start(message, resultServiceOnly: true);
} }
else else
{ {
@@ -101,7 +94,7 @@ namespace GitHub.Runner.Worker
VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, delegatingHandlers); VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, delegatingHandlers);
await jobServer.ConnectAsync(jobConnection); await jobServer.ConnectAsync(jobConnection);
_jobServerQueue.Start(message, enableTelemetry: jobServerQueueTelemetry); _jobServerQueue.Start(message);
server = jobServer; server = jobServer;
} }
@@ -283,12 +276,6 @@ namespace GitHub.Runner.Worker
{ {
jobContext.Debug($"Finishing: {message.JobDisplayName}"); jobContext.Debug($"Finishing: {message.JobDisplayName}");
TaskResult result = jobContext.Complete(taskResult); TaskResult result = jobContext.Complete(taskResult);
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings))
{
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(deprecatedNodeWarnings));
jobContext.Warning(string.Format(Constants.Runner.DetectedNodeAfterEndOfLifeMessage, actions));
}
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings)) if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings))
{ {
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node16ForceWarnings)); var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node16ForceWarnings));
@@ -388,12 +375,6 @@ namespace GitHub.Runner.Worker
} }
} }
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings))
{
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(deprecatedNodeWarnings));
jobContext.Warning(string.Format(Constants.Runner.DetectedNodeAfterEndOfLifeMessage, actions));
}
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings)) if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings))
{ {
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node16ForceWarnings)); var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node16ForceWarnings));
@@ -402,12 +383,7 @@ namespace GitHub.Runner.Worker
try try
{ {
var jobQueueTelemetry = await ShutdownQueue(throwOnFailure: true); await ShutdownQueue(throwOnFailure: true);
// include any job telemetry from the background upload process.
if (jobQueueTelemetry.Count > 0)
{
jobContext.Global.JobTelemetry.AddRange(jobQueueTelemetry);
}
} }
catch (Exception ex) catch (Exception ex)
{ {
@@ -509,7 +485,7 @@ namespace GitHub.Runner.Worker
} }
} }
private async Task<IList<JobTelemetry>> ShutdownQueue(bool throwOnFailure) private async Task ShutdownQueue(bool throwOnFailure)
{ {
if (_jobServerQueue != null) if (_jobServerQueue != null)
{ {
@@ -517,7 +493,6 @@ namespace GitHub.Runner.Worker
{ {
Trace.Info("Shutting down the job server queue."); Trace.Info("Shutting down the job server queue.");
await _jobServerQueue.ShutdownAsync(); await _jobServerQueue.ShutdownAsync();
return _jobServerQueue.JobTelemetries;
} }
catch (Exception ex) when (!throwOnFailure) catch (Exception ex) when (!throwOnFailure)
{ {
@@ -529,8 +504,6 @@ namespace GitHub.Runner.Worker
_jobServerQueue = null; // Prevent multiple attempts. _jobServerQueue = null; // Prevent multiple attempts.
} }
} }
return Array.Empty<JobTelemetry>();
} }
} }
} }
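The left-hand column reads an opt-in flag from the job message before starting the queue and merges the queue's telemetry back into the job context on shutdown. A minimal, self-contained sketch of the opt-in pattern follows; bool.TryParse stands in for StringUtil.ConvertToBoolean and a plain dictionary stands in for message.Variables, both simplifications rather than the runner's types.

    using System.Collections.Generic;

    static class MessageFlagSketch
    {
        // The flag turns on only when the variable exists, is non-empty, and parses as true.
        public static bool IsEnabled(IDictionary<string, string> variables, string name)
        {
            return variables.TryGetValue(name, out var value)
                && !string.IsNullOrEmpty(value)
                && bool.TryParse(value, out var enabled)
                && enabled;
        }
    }

    // Hypothetical usage:
    //   IsEnabled(vars, "DistributedTask.EnableJobServerQueueTelemetry") -> true only when vars maps it to "true"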

View File

@@ -1,22 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace GitHub.Services.Common
{
// Set of classes used to bypass token operations
// Results Service and External services follow a different auth model but
// we are required to pass in a credentials object to create a RawHttpMessageHandler
public class NoOpCredentials : FederatedCredential
{
public NoOpCredentials(IssuedToken initialToken) : base(initialToken)
{
}
public override VssCredentialsType CredentialType { get; }
protected override IssuedTokenProvider OnCreateTokenProvider(Uri serverUrl, IHttpResponse response)
{
return null;
}
}
}

View File

@@ -109,7 +109,7 @@ namespace GitHub.Services.Common
lock (m_thisLock) lock (m_thisLock)
{ {
// Ensure that we attempt to use the most appropriate authentication mechanism by default. // Ensure that we attempt to use the most appropriate authentication mechanism by default.
if (m_tokenProvider == null && !(this.Credentials is NoOpCredentials)) if (m_tokenProvider == null)
{ {
m_tokenProvider = this.Credentials.CreateTokenProvider(request.RequestUri, null, null); m_tokenProvider = this.Credentials.CreateTokenProvider(request.RequestUri, null, null);
} }
@@ -121,8 +121,7 @@ namespace GitHub.Services.Common
HttpResponseMessageWrapper responseWrapper; HttpResponseMessageWrapper responseWrapper;
Boolean lastResponseDemandedProxyAuth = false; Boolean lastResponseDemandedProxyAuth = false;
// do not retry if we cannot recreate tokens Int32 retries = m_maxAuthRetries;
Int32 retries = this.Credentials is NoOpCredentials ? 0 : m_maxAuthRetries;
try try
{ {
tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
@@ -139,12 +138,8 @@ namespace GitHub.Services.Common
} }
// Let's start with sending a token // Let's start with sending a token
IssuedToken token = null; IssuedToken token = await m_tokenProvider.GetTokenAsync(null, tokenSource.Token).ConfigureAwait(false);
if (m_tokenProvider != null) ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
{
token = await m_tokenProvider.GetTokenAsync(null, tokenSource.Token).ConfigureAwait(false);
ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
}
// The WinHttpHandler will chunk any content that does not have a computed length which is // The WinHttpHandler will chunk any content that does not have a computed length which is
// not what we want. By loading into a buffer up-front we bypass this behavior and there is // not what we want. By loading into a buffer up-front we bypass this behavior and there is

View File

@@ -461,8 +461,6 @@ namespace GitHub.DistributedTask.WebApi
long? lastMessageId = null, long? lastMessageId = null,
TaskAgentStatus? status = null, TaskAgentStatus? status = null,
string runnerVersion = null, string runnerVersion = null,
string os = null,
string architecture = null,
object userState = null, object userState = null,
CancellationToken cancellationToken = default) CancellationToken cancellationToken = default)
{ {
@@ -485,16 +483,6 @@ namespace GitHub.DistributedTask.WebApi
queryParams.Add("runnerVersion", runnerVersion); queryParams.Add("runnerVersion", runnerVersion);
} }
if (os != null)
{
queryParams.Add("os", os);
}
if (architecture != null)
{
queryParams.Add("architecture", architecture);
}
return SendAsync<TaskAgentMessage>( return SendAsync<TaskAgentMessage>(
httpMethod, httpMethod,
locationId, locationId,

View File

@@ -123,11 +123,8 @@ namespace GitHub.DistributedTask.Logging
var secretSection = string.Empty; var secretSection = string.Empty;
if (value.Contains("&+")) if (value.Contains("&+"))
{ {
if (value.Length > value.IndexOf("&+") + "&+".Length + 1) // +1 to skip the letter that got colored
{ secretSection = value.Substring(value.IndexOf("&+") + "&+".Length + 1);
// +1 to skip the letter that got colored
secretSection = value.Substring(value.IndexOf("&+") + "&+".Length + 1);
}
} }
else else
{ {
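The two columns differ in whether the Substring is guarded by a length check. A small, self-contained sketch with hypothetical inputs shows why the guard matters; as in the code above, the value is assumed to contain "&+", and the second example matches the "secret&secret&+" case exercised by the masking test further down.

    static class SecretSectionSketch
    {
        public static string GetSecretSection(string value)
        {
            const string marker = "&+";
            int start = value.IndexOf(marker) + marker.Length + 1; // +1 skips the letter that got colored
            return value.Length > start ? value.Substring(start) : string.Empty;
        }
    }

    // GetSecretSection("secret&+secret")  -> "ecret"
    // GetSecretSection("secret&secret&+") -> ""  (unguarded, Substring(16) on a 15-character string throws)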

View File

@@ -2,7 +2,6 @@
namespace GitHub.DistributedTask.WebApi namespace GitHub.DistributedTask.WebApi
{ {
// do NOT add new enum values since doing so will break backward compatibility with GHES
public enum JobTelemetryType public enum JobTelemetryType
{ {
[EnumMember] [EnumMember]

View File

@@ -22,8 +22,6 @@
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" /> <PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
<PackageReference Include="Minimatch" Version="2.0.0" /> <PackageReference Include="Minimatch" Version="2.0.0" />
<PackageReference Include="YamlDotNet.Signed" Version="5.3.0" /> <PackageReference Include="YamlDotNet.Signed" Version="5.3.0" />
<PackageReference Include="System.Net.Http" Version="4.3.4" />
<PackageReference Include="System.Text.RegularExpressions" Version="4.3.1" />
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>

View File

@@ -59,8 +59,6 @@ namespace GitHub.Actions.RunService.WebApi
public async Task<TaskAgentMessage> GetRunnerMessageAsync( public async Task<TaskAgentMessage> GetRunnerMessageAsync(
string runnerVersion, string runnerVersion,
TaskAgentStatus? status, TaskAgentStatus? status,
string os = null,
string architecture = null,
CancellationToken cancellationToken = default CancellationToken cancellationToken = default
) )
{ {
@@ -77,16 +75,6 @@ namespace GitHub.Actions.RunService.WebApi
queryParams.Add("runnerVersion", runnerVersion); queryParams.Add("runnerVersion", runnerVersion);
} }
if (os != null)
{
queryParams.Add("os", os);
}
if (architecture != null)
{
queryParams.Add("architecture", architecture);
}
var result = await SendAsync<TaskAgentMessage>( var result = await SendAsync<TaskAgentMessage>(
new HttpMethod("GET"), new HttpMethod("GET"),
requestUri: requestUri, requestUri: requestUri,

View File

@@ -258,38 +258,6 @@ namespace GitHub.Services.Results.Client
await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken); await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken);
} }
private async Task<HttpResponseMessage> UploadLogFile(string file, bool finalize, bool firstBlock, string sasUrl, string blobStorageType,
CancellationToken cancellationToken)
{
HttpResponseMessage response;
if (firstBlock && finalize)
{
// This is the one and only block, just use a block blob
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
response = await UploadBlockFileAsync(sasUrl, blobStorageType, fileStream, cancellationToken);
}
}
else
{
// Either this is not the first block (an append blob is already in use), or it is the first block and more blocks will follow. Use an append blob in either case.
// Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(sasUrl, blobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
response = await UploadAppendFileAsync(sasUrl, blobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
}
return response;
}
// Handle file upload for step log // Handle file upload for step log
public async Task UploadResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken) public async Task UploadResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{ {
@@ -300,7 +268,18 @@ namespace GitHub.Services.Results.Client
throw new Exception("Failed to get step log upload url"); throw new Exception("Failed to get step log upload url");
} }
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken); // Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
// Update metadata // Update metadata
if (finalize) if (finalize)
@@ -320,7 +299,18 @@ namespace GitHub.Services.Results.Client
throw new Exception("Failed to get job log upload url"); throw new Exception("Failed to get job log upload url");
} }
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken); // Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
// Update metadata // Update metadata
if (finalize) if (finalize)
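The left-hand column routes both step and job logs through a shared UploadLogFile helper, and the decision that helper encodes is small enough to sketch. The stand-in below uses simplified signatures, not the client's real ones: the actual calls also carry the SAS URL, blob storage type, file stream, file size, and cancellation token.

    using System;
    using System.Threading.Tasks;

    static class LogUploadSketch
    {
        public static async Task UploadChunkAsync(
            bool firstBlock, bool finalize,
            Func<Task> uploadBlockBlob,      // stands in for UploadBlockFileAsync
            Func<Task> createAppendBlob,     // stands in for CreateAppendFileAsync
            Func<bool, Task> appendChunk)    // stands in for UploadAppendFileAsync
        {
            if (firstBlock && finalize)
            {
                // One and only chunk: a single block blob is enough.
                await uploadBlockBlob();
            }
            else
            {
                // Several chunks: create the append blob once on the first chunk,
                // then append each chunk, sealing the blob when the final chunk arrives.
                if (firstBlock)
                {
                    await createAppendBlob();
                }
                await appendChunk(finalize);
            }
        }
    }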

View File

@@ -120,7 +120,6 @@ namespace GitHub.Runner.Common.Tests
[InlineData("secret&+secret&secret", "secret&+\x0033[96ms\x0033[0mecret&secret", "***\x0033[96ms\x0033[0m***")] [InlineData("secret&+secret&secret", "secret&+\x0033[96ms\x0033[0mecret&secret", "***\x0033[96ms\x0033[0m***")]
[InlineData("secret&+secret&+secret", "secret&+\x0033[96ms\x0033[0mecret&+secret", "***\x0033[96ms\x0033[0m***")] [InlineData("secret&+secret&+secret", "secret&+\x0033[96ms\x0033[0mecret&+secret", "***\x0033[96ms\x0033[0m***")]
[InlineData("secret&+secret&secret&+secret", "secret&+\x0033[96ms\x0033[0mecret&secret&+secret", "***\x0033[96ms\x0033[0m***")] [InlineData("secret&+secret&secret&+secret", "secret&+\x0033[96ms\x0033[0mecret&secret&+secret", "***\x0033[96ms\x0033[0m***")]
[InlineData("secret&secret&+", "secret&secret&+\x0033[96m\x0033[0m", "***\x0033[96m\x0033[0m")]
[Trait("Level", "L0")] [Trait("Level", "L0")]
[Trait("Category", "Common")] [Trait("Category", "Common")]
public void SecretSectionMasking(string secret, string rawOutput, string maskedOutput) public void SecretSectionMasking(string secret, string rawOutput, string maskedOutput)

View File

@@ -110,7 +110,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
_runnerServer.Setup(x => x.GetAgentPoolsAsync(It.IsAny<string>(), It.IsAny<TaskAgentPoolType>())).Returns(Task.FromResult(expectedPools)); _runnerServer.Setup(x => x.GetAgentPoolsAsync(It.IsAny<string>(), It.IsAny<TaskAgentPoolType>())).Returns(Task.FromResult(expectedPools));
var expectedAgents = new List<TaskAgent>(); var expectedAgents = new List<TaskAgent>();
_runnerServer.Setup(x => x.GetAgentsAsync(It.IsAny<string>())).Returns(Task.FromResult(expectedAgents)); _runnerServer.Setup(x => x.GetAgentsAsync(It.IsAny<int>(), It.IsAny<string>())).Returns(Task.FromResult(expectedAgents));
_runnerServer.Setup(x => x.AddAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent)); _runnerServer.Setup(x => x.AddAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
_runnerServer.Setup(x => x.ReplaceAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent)); _runnerServer.Setup(x => x.ReplaceAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));

View File

@@ -192,8 +192,8 @@ namespace GitHub.Runner.Common.Tests.Listener
_runnerServer _runnerServer
.Setup(x => x.GetAgentMessageAsync( .Setup(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>())) _settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()))
.Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, CancellationToken cancellationToken) => .Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken) =>
{ {
await Task.Yield(); await Task.Yield();
return messages.Dequeue(); return messages.Dequeue();
@@ -208,7 +208,7 @@ namespace GitHub.Runner.Common.Tests.Listener
//Assert //Assert
_runnerServer _runnerServer
.Verify(x => x.GetAgentMessageAsync( .Verify(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(arMessages.Length)); _settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(arMessages.Length));
} }
} }
@@ -293,7 +293,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_runnerServer _runnerServer
.Setup(x => x.GetAgentMessageAsync( .Setup(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>())) _settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()))
.Throws(new TaskAgentAccessTokenExpiredException("test")); .Throws(new TaskAgentAccessTokenExpiredException("test"));
try try
{ {
@@ -311,7 +311,7 @@ namespace GitHub.Runner.Common.Tests.Listener
//Assert //Assert
_runnerServer _runnerServer
.Verify(x => x.GetAgentMessageAsync( .Verify(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Once); _settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Once);
_runnerServer _runnerServer
.Verify(x => x.DeleteAgentSessionAsync( .Verify(x => x.DeleteAgentSessionAsync(

View File

@@ -273,29 +273,5 @@ namespace GitHub.Runner.Common.Tests
Assert.True(string.Equals(hashResult, File.ReadAllText(externalsHashFile).Trim()), $"Hash mismatch for externals. You might need to update `Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently."); Assert.True(string.Equals(hashResult, File.ReadAllText(externalsHashFile).Trim()), $"Hash mismatch for externals. You might need to update `Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently.");
} }
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public Task RunnerLayoutParts_ContentHashFilesNoNewline()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
var dotnetRuntimeHash = File.ReadAllText(dotnetRuntimeHashFile);
trace.Info($"Current hash: {dotnetRuntimeHash}");
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
var externalsHash = File.ReadAllText(externalsHashFile);
trace.Info($"Current hash: {externalsHash}");
Assert.False(externalsHash.Any(x => char.IsWhiteSpace(x)), $"Found whitespace in externals hash file.");
Assert.False(dotnetRuntimeHash.Any(x => char.IsWhiteSpace(x)), $"Found whitespace in dotnet runtime hash file.");
return Task.CompletedTask;
}
}
} }
} }

View File

@@ -212,210 +212,5 @@ namespace GitHub.Runner.Common.Tests.Util
File.Delete(brokenSymlink); File.Delete(brokenSymlink);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue); Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void UseWhich2FindGit()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
string gitPath = WhichUtil.Which2("git", trace: trace);
trace.Info($"Which(\"git\") returns: {gitPath ?? string.Empty}");
// Assert.
Assert.True(!string.IsNullOrEmpty(gitPath) && File.Exists(gitPath), $"Unable to find Git through: {nameof(WhichUtil.Which)}");
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2ReturnsNullWhenNotFound()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
string nosuch = WhichUtil.Which2("no-such-file-cf7e351f", trace: trace);
trace.Info($"result: {nosuch ?? string.Empty}");
// Assert.
Assert.True(string.IsNullOrEmpty(nosuch), "Path should not be resolved");
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2ThrowsWhenRequireAndNotFound()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
try
{
WhichUtil.Which2("no-such-file-cf7e351f", require: true, trace: trace);
throw new Exception("which should have thrown");
}
catch (FileNotFoundException ex)
{
Assert.Equal("no-such-file-cf7e351f", ex.FileName);
}
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2HandleFullyQualifiedPath()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
var gitPath = WhichUtil.Which2("git", require: true, trace: trace);
var gitPath2 = WhichUtil.Which2(gitPath, require: true, trace: trace);
// Assert.
Assert.Equal(gitPath, gitPath2);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2HandlesSymlinkToTargetFullPath()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}.exe";
string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}.exe";
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}";
string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}";
#endif
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);
using (File.Create(target))
{
File.CreateSymbolicLink(symlink, target);
// Act.
var result = WhichUtil.Which2(symlinkName, require: true, trace: trace);
// Assert
Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find symlink through: {nameof(WhichUtil.Which)}");
}
// Cleanup
File.Delete(symlink);
File.Delete(target);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2HandlesSymlinkToTargetRelativePath()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}.exe";
string targetName = $"target-{Guid.NewGuid()}.exe";
string target = Path.GetTempPath() + targetName;
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}";
string targetName = $"target-{Guid.NewGuid()}";
string target = Path.GetTempPath() + targetName;
#endif
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);
using (File.Create(target))
{
File.CreateSymbolicLink(symlink, targetName);
// Act.
var result = WhichUtil.Which2(symlinkName, require: true, trace: trace);
// Assert
Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find {symlinkName} through: {nameof(WhichUtil.Which)}");
}
// Cleanup
File.Delete(symlink);
File.Delete(target);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2ThrowsWhenSymlinkBroken()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}";
string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}.exe";
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}";
string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}";
#endif
string target = "no-such-file-cf7e351f";
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);
File.CreateSymbolicLink(brokenSymlink, target);
// Act.
var exception = Assert.Throws<FileNotFoundException>(() => WhichUtil.Which2(brokenSymlinkName, require: true, trace: trace));
// Assert
Assert.Equal(brokenSymlinkName, exception.FileName);
// Cleanup
File.Delete(brokenSymlink);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
} }
} }

View File

@@ -293,118 +293,6 @@ namespace GitHub.Runner.Common.Tests.Worker
} }
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async void PrepareActions_DownloadActionFromGraph_UseCache()
{
try
{
//Arrange
Setup();
Directory.CreateDirectory(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache"));
Directory.CreateDirectory(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache", "actions_download-artifact"));
Directory.CreateDirectory(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact"));
Environment.SetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory, Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache"));
const string Content = @"
# Container action
name: '1ae80bcb-c1df-4362-bdaa-54f729c60281'
description: 'Greet the world and record the time'
author: 'GitHub'
inputs:
greeting: # id of input
description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout'
required: true
default: 'Hello'
entryPoint: # id of input
description: 'optional docker entrypoint overwrite.'
required: false
outputs:
time: # id of output
description: 'The time we did the greeting'
icon: 'hello.svg' # vector art to display in the GitHub Marketplace
color: 'green' # optional, decorates the entry in the GitHub Marketplace
runs:
using: 'node12'
main: 'task.js'
";
await File.WriteAllTextAsync(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact", "action.yml"), Content);
#if OS_WINDOWS
ZipFile.CreateFromDirectory(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact"), Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache", "actions_download-artifact", "master-sha.zip"), CompressionLevel.Fastest, true);
#else
string tar = WhichUtil.Which("tar", require: true, trace: _hc.GetTrace());
// tar -xzf
using (var processInvoker = new ProcessInvokerWrapper())
{
processInvoker.Initialize(_hc);
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
_hc.GetTrace().Info(args.Data);
}
});
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
_hc.GetTrace().Error(args.Data);
}
});
string cwd = Path.GetDirectoryName(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact"));
string inputDirectory = Path.GetFileName(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact"));
string archiveFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache", "actions_download-artifact", "master-sha.tar.gz");
int exitCode = await processInvoker.ExecuteAsync(_hc.GetDirectory(WellKnownDirectory.Bin), tar, $"-czf \"{archiveFile}\" -C \"{cwd}\" \"{inputDirectory}\"", null, CancellationToken.None);
if (exitCode != 0)
{
throw new NotSupportedException($"Can't use 'tar -czf' to create archive file: {archiveFile}. return code: {exitCode}.");
}
}
#endif
var actionId = Guid.NewGuid();
var actions = new List<Pipelines.ActionStep>
{
new Pipelines.ActionStep()
{
Name = "action",
Id = actionId,
Reference = new Pipelines.RepositoryPathReference()
{
Name = "actions/download-artifact",
Ref = "master",
RepositoryType = "GitHub"
}
}
};
_ec.Object.Global.Variables.Set("DistributedTask.UseActionArchiveCache", bool.TrueString);
//Act
await _actionManager.PrepareActionsAsync(_ec.Object, actions);
//Assert
var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/download-artifact", "master.completed");
Assert.True(File.Exists(watermarkFile));
var actionYamlFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/download-artifact", "master", "action.yml");
Assert.True(File.Exists(actionYamlFile));
_hc.GetTrace().Info(File.ReadAllText(actionYamlFile));
Assert.Contains("1ae80bcb-c1df-4362-bdaa-54f729c60281", File.ReadAllText(actionYamlFile));
}
finally
{
Environment.SetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory, null);
Teardown();
}
}
[Fact] [Fact]
[Trait("Level", "L0")] [Trait("Level", "L0")]
[Trait("Category", "Worker")] [Trait("Category", "Worker")]
@@ -2384,7 +2272,6 @@ runs:
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>()); _ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
_ec.Object.Global.FileTable = new List<String>(); _ec.Object.Global.FileTable = new List<String>();
_ec.Object.Global.Plan = new TaskOrchestrationPlanReference(); _ec.Object.Global.Plan = new TaskOrchestrationPlanReference();
_ec.Object.Global.JobTelemetry = new List<JobTelemetry>();
_ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { _hc.GetTrace().Info($"[{tag}]{message}"); }); _ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { _hc.GetTrace().Info($"[{tag}]{message}"); });
_ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<ExecutionContextLogOptions>())).Callback((Issue issue, ExecutionContextLogOptions logOptions) => { _hc.GetTrace().Info($"[{issue.Type}]{logOptions.LogMessageOverride ?? issue.Message}"); }); _ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<ExecutionContextLogOptions>())).Callback((Issue issue, ExecutionContextLogOptions logOptions) => { _hc.GetTrace().Info($"[{issue.Type}]{logOptions.LogMessageOverride ?? issue.Message}"); });
_ec.Setup(x => x.GetGitHubContext("workspace")).Returns(Path.Combine(_workFolder, "actions", "actions")); _ec.Setup(x => x.GetGitHubContext("workspace")).Returns(Path.Combine(_workFolder, "actions", "actions"));
@@ -2407,8 +2294,6 @@ runs:
{ {
NameWithOwner = action.NameWithOwner, NameWithOwner = action.NameWithOwner,
Ref = action.Ref, Ref = action.Ref,
ResolvedNameWithOwner = action.NameWithOwner,
ResolvedSha = $"{action.Ref}-sha",
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}", TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}", ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
}; };
@@ -2428,8 +2313,6 @@ runs:
{ {
NameWithOwner = action.NameWithOwner, NameWithOwner = action.NameWithOwner,
Ref = action.Ref, Ref = action.Ref,
ResolvedNameWithOwner = action.NameWithOwner,
ResolvedSha = $"{action.Ref}-sha",
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}", TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}", ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
}; };

View File

@@ -22,7 +22,7 @@ DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x"
PACKAGE_DIR="$SCRIPT_DIR/../_package" PACKAGE_DIR="$SCRIPT_DIR/../_package"
PACKAGE_TRIMS_DIR="$SCRIPT_DIR/../_package_trims" PACKAGE_TRIMS_DIR="$SCRIPT_DIR/../_package_trims"
DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk" DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
DOTNETSDK_VERSION="6.0.415" DOTNETSDK_VERSION="6.0.414"
DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION" DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"
RUNNER_VERSION=$(cat runnerversion) RUNNER_VERSION=$(cat runnerversion)

View File

@@ -1,5 +1,5 @@
{ {
"sdk": { "sdk": {
"version": "6.0.415" "version": "6.0.414"
} }
} }

View File

@@ -1 +1 @@
2.311.0 2.309.0