Mirror of https://github.com/actions/runner.git, synced 2025-12-10 12:36:23 +00:00

Compare commits: v2.312.0...feature/va (6 commits)
Commits in this compare (author and date columns did not survive extraction):

- ab7aa2e431
- d0300c34f2
- ee0ba3616c
- 1d1aaed09a
- 7c4b0f6e88
- 7d3cbb0494
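To reproduce this compare locally, a minimal sketch (assuming the `v2.312.0` tag and the `feature/va` branch are reachable from the fetched remote):

```bash
# List the commits that are on feature/va but not in v2.312.0,
# which is what this compare view shows.
git clone https://github.com/actions/runner.git
cd runner
git fetch origin feature/va
git log --oneline v2.312.0..FETCH_HEAD

# Show the workflow changes that make up the bulk of this compare.
git diff v2.312.0...FETCH_HEAD -- .github/workflows/
```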
.devcontainer/devcontainer.json

```
@@ -4,7 +4,7 @@
    "features": {
        "ghcr.io/devcontainers/features/docker-in-docker:1": {},
        "ghcr.io/devcontainers/features/dotnet": {
            "version": "6.0.418"
            "version": "6.0.414"
        },
        "ghcr.io/devcontainers/features/node:1": {
            "version": "16"
```
.github/workflows/build.yml (vendored, 167 changed lines)
```
@@ -17,6 +17,7 @@ on:
jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
        include:
```
```
@@ -58,12 +59,86 @@ jobs:
          ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
        working-directory: src

      # Check runtime/externals hash
      - name: Compute/Compare runtime and externals Hash
        id: compute-hash
        continue-on-error: true
        shell: bash
        run: |
          echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
          echo "Current Externals hash result: $EXTERNALS_HASH"

          NeedUpdate=0
          if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH

            echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT
            NeedUpdate=1
          fi

          if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH

            echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT
            NeedUpdate=1
          fi

          echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT
        env:
          DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
          EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
```
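`hashFiles()` is evaluated by the workflow expression engine, so the exact digests above cannot be reproduced from a plain terminal. The sketch below is only a rough local stand-in for the same comparison, useful as a "did the externals layout drift from the recorded value" signal; the `RUNTIME` value and the sha256-over-file-list approach are assumptions for illustration, not the workflow's algorithm.

```bash
#!/usr/bin/env bash
# Rough local approximation of the workflow's externals-hash comparison.
# NOTE: hashFiles() uses its own hashing scheme, so this digest will differ
# from the value stored in src/Misc/contentHash; treat a mismatch only as a hint.
set -euo pipefail

RUNTIME="linux-x64"                                   # assumed example runtime
recorded="$(cat ./src/Misc/contentHash/externals/${RUNTIME})"

# Hash every file under the trimmed externals layout, then hash the resulting list.
current="$(find ./_layout_trims/externals -type f -print0 \
  | sort -z \
  | xargs -0 sha256sum \
  | sha256sum \
  | awk '{ print $1 }')"

if [ "$current" != "$recorded" ]; then
  echo "Externals content changed for ${RUNTIME}; the recorded hash likely needs an update."
else
  echo "Externals content matches the recorded hash for ${RUNTIME}."
fi
```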
```
      - name: update hash
        if: ${{ ! github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
        shell: bash
        run: |
          ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
          DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}

          if [ -n "$ExternalHash" ]; then
            echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }}
          fi

          if [ -n "$DotNetRuntimeHash" ]; then
            echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
          fi
      - name: cache updated hashes
        if: ${{ ! github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
        uses: actions/cache/save@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/${{ matrix.runtime }}
            ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
          key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
```
```
      - name: Create an warning annotation if computed hashes will automatically be updated
        if: ${{ ! github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
        shell: bash
        run: echo "::warning ::Computed hashes do not match, we will automatically update these for you, you can safely ignore the errors on this job" && exit 1
      - name: Create an error annotation if computed hashes need to be updated for a fork
        if: ${{ github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
        shell: bash
        run: |
          ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
          DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}

          if [ -n "$ExternalHash" ]; then
            echo "::error ::Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $ExternalHash"
          fi

          if [ -n "$DotNetRuntimeHash" ]; then
            echo "::error ::Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DotNetRuntimeHash"
          fi

          if [[ -n "$ExternalHash" || -n "$DotNetRuntimeHash" ]]; then
            exit 1
          fi
```
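Both annotation steps lean on two standard runner mechanisms: `::warning` / `::error` workflow commands printed to stdout, and step outputs written to `$GITHUB_OUTPUT`. A minimal sketch of how any `run:` script uses them (the values are placeholders):

```bash
# 1. Emit annotations that surface on the job summary and PR checks UI.
echo "::warning ::Computed hashes do not match; they will be updated automatically"
echo "::error ::Hash mismatch; update src/Misc/contentHash manually for fork PRs"

# 2. Publish step outputs that later steps read via steps.<step-id>.outputs.<name>.
echo "NEED_UPDATE=1" >> "$GITHUB_OUTPUT"
echo "EXTERNAL_HASH=0123abcd" >> "$GITHUB_OUTPUT"   # placeholder value

# 3. A non-zero exit marks the step as failed
#    (and the job too, unless continue-on-error is set on the step).
exit 1
```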
```
      # Run tests
      - name: L0
        run: |
          ${{ matrix.devScript }} test
        working-directory: src
        if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'osx-arm64' && matrix.runtime != 'win-arm64'
        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 0 && matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'osx-arm64' && matrix.runtime != 'win-arm64' }}

      # Create runner package tar.gz/zip
      - name: Package Release
```
```
@@ -80,3 +155,93 @@
          name: runner-package-${{ matrix.runtime }}
          path: |
            _package
            _package_trims/trim_externals
            _package_trims/trim_runtime
            _package_trims/trim_runtime_externals

  hash-update:
    needs: [build]
    # only run this if we get a failure from the build step - most likely meaning we need a hash update
    if: ${{ always() && contains(needs.build.result, 'failure') && github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.head_ref }}
      - name: Restore cached hashes - linux-x64
        id: cache-restore-linux-x64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/linux-x64
            ./src/Misc/contentHash/dotnetRuntime/linux-x64
          key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - linux-arm64
        id: cache-restore-linux-arm64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/linux-arm64
            ./src/Misc/contentHash/dotnetRuntime/linux-arm64
          key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - linux-arm
        id: cache-restore-linux-arm
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/linux-arm
            ./src/Misc/contentHash/dotnetRuntime/linux-arm
          key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - osx-x64
        id: cache-restore-osx-x64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/osx-x64
            ./src/Misc/contentHash/dotnetRuntime/osx-x64
          key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - osx-arm64
        id: cache-restore-osx-arm64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/osx-arm64
            ./src/Misc/contentHash/dotnetRuntime/osx-arm64
          key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - win-x64
        id: cache-restore-win-x64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/win-x64
            ./src/Misc/contentHash/dotnetRuntime/win-x64
          key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - win-arm64
        id: cache-restore-win-arm64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/win-arm64
            ./src/Misc/contentHash/dotnetRuntime/win-arm64
          key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Fetch cached computed hashes
        if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' ||
            steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' ||
            steps.cache-restore-linux-arm.outputs.cache-hit == 'true' ||
            steps.cache-restore-win-x64.outputs.cache-hit == 'true' ||
            steps.cache-restore-win-arm64.outputs.cache-hit == 'true' ||
            steps.cache-restore-osx-x64.outputs.cache-hit == 'true' ||
            steps.cache-restore-osx-arm64.outputs.cache-hit == 'true'
        shell: bash
        run: |
          git config --global user.name "github-actions[bot]"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -a -m "Update computed hashes"
          git push
```
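The `hash-update` job commits whatever the restored caches dropped into `src/Misc/contentHash`. A defensive local sketch of the same commit-and-push flow, with an added guard so nothing is committed when the restored files are unchanged (the guard is an illustration, not part of the workflow above):

```bash
#!/usr/bin/env bash
set -euo pipefail

git config --global user.name "github-actions[bot]"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"

# Only commit if the restored caches actually changed a tracked hash file.
if [ -n "$(git status --porcelain -- src/Misc/contentHash)" ]; then
  git commit -a -m "Update computed hashes"
  git push
else
  echo "No hash files changed; nothing to commit."
fi
```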
.github/workflows/close-bugs-bot.yml (vendored, 17 changed lines)

```
@@ -1,17 +0,0 @@
name: Close Bugs Bot
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *' # every day at midnight
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
          exempt-issue-labels: "keep"
          stale-issue-label: "actions-bug"
          only-labels: "actions-bug"
          days-before-stale: 0
          days-before-close: 1
```
.github/workflows/close-features-bot.yml (vendored, 17 changed lines)

```
@@ -1,17 +0,0 @@
name: Close Features Bot
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *' # every day at midnight
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v8
        with:
          close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions-and-packages) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
          exempt-issue-labels: "keep"
          stale-issue-label: "actions-feature"
          only-labels: "actions-feature"
          days-before-stale: 0
          days-before-close: 1
```
.github/workflows/dotnet-upgrade.yml (vendored, 211 changed lines)
```
@@ -84,20 +84,221 @@ jobs:
          git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
          git push --set-upstream origin $branch_name

  create-pr:
    needs: [dotnet-update]
  build-hashes:
    if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
    needs: [dotnet-update]
    outputs:
      # pass outputs from this job to create-pr for use
      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
      NEEDS_HASH_UPDATE: ${{ steps.compute-hash.outputs.NEED_UPDATE }}
    strategy:
      fail-fast: false
      matrix:
        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
        include:
          - runtime: linux-x64
            os: ubuntu-latest
            devScript: ./dev.sh

          - runtime: linux-arm64
            os: ubuntu-latest
            devScript: ./dev.sh

          - runtime: linux-arm
            os: ubuntu-latest
            devScript: ./dev.sh

          - runtime: osx-x64
            os: macOS-latest
            devScript: ./dev.sh

          - runtime: osx-arm64
            os: macOS-latest
            devScript: ./dev.sh

          - runtime: win-x64
            os: windows-2019
            devScript: ./dev

          - runtime: win-arm64
            os: windows-latest
            devScript: ./dev
```
```
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
        with:
          ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}

      # Build runner layout
      - name: Build & Layout Release
        run: |
          ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
        working-directory: src

      # Check runtime/externals hash
      - name: Compute/Compare runtime and externals Hash
        id: compute-hash
        continue-on-error: true
        shell: bash
        run: |
          echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
          echo "Current Externals hash result: $EXTERNALS_HASH"

          NeedUpdate=0
          if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH

            echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT
            NeedUpdate=1
          fi

          if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH

            echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT
            NeedUpdate=1
          fi

          echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT
        env:
          DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
          EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
```
```
      - name: update hash
        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
        shell: bash
        run: |
          ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
          DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}

          if [ -n "$ExternalHash" ]; then
            echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }}
          fi

          if [ -n "$DotNetRuntimeHash" ]; then
            echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
          fi
      - name: cache updated hashes
        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
        uses: actions/cache/save@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/${{ matrix.runtime }}
            ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
          key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
```
```
  hash-update:
    needs: [build-hashes]
    if: ${{ needs.build-hashes.outputs.NEEDS_HASH_UPDATE == 1 }}
    outputs:
      # pass outputs from this job to create-pr for use
      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.build-hashes.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      - name: Restore cached hashes - linux-x64
        id: cache-restore-linux-x64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/linux-x64
            ./src/Misc/contentHash/dotnetRuntime/linux-x64
          key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - linux-arm64
        id: cache-restore-linux-arm64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/linux-arm64
            ./src/Misc/contentHash/dotnetRuntime/linux-arm64
          key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - linux-arm
        id: cache-restore-linux-arm
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/linux-arm
            ./src/Misc/contentHash/dotnetRuntime/linux-arm
          key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - osx-x64
        id: cache-restore-osx-x64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/osx-x64
            ./src/Misc/contentHash/dotnetRuntime/osx-x64
          key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - osx-arm64
        id: cache-restore-osx-arm64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/osx-arm64
            ./src/Misc/contentHash/dotnetRuntime/osx-arm64
          key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - win-x64
        id: cache-restore-win-x64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/win-x64
            ./src/Misc/contentHash/dotnetRuntime/win-x64
          key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - win-arm64
        id: cache-restore-win-arm64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/win-arm64
            ./src/Misc/contentHash/dotnetRuntime/win-arm64
          key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Fetch cached computed hashes
        if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' ||
            steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' ||
            steps.cache-restore-linux-arm.outputs.cache-hit == 'true' ||
            steps.cache-restore-win-x64.outputs.cache-hit == 'true' ||
            steps.cache-restore-win-arm64.outputs.cache-hit == 'true' ||
            steps.cache-restore-osx-x64.outputs.cache-hit == 'true' ||
            steps.cache-restore-osx-arm64.outputs.cache-hit == 'true'
        shell: bash
        run: |
          Environments=( "linux-x64" "linux-arm64" "linux-arm" "win-x64" "win-arm64" "osx-x64" "osx-arm64" )

          git config --global user.name "github-actions[bot]"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -a -m "Update computed hashes"
          git push --set-upstream origin feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
```
```
  create-pr:
    needs: [hash-update]
    outputs:
      # pass outputs from this job to run-tests for use
      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
          ref: feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      - name: Create Pull Request
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
          https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
          gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
          https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version


          ---
```
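The PR body above links the channel's `latest.version` feed. A minimal sketch of checking that feed from a shell and comparing it with the SDK version pinned in the repository (the `6.0` channel and the `src/global.json` path are assumptions for illustration):

```bash
#!/usr/bin/env bash
set -euo pipefail

CHANNEL="6.0"   # assumed major.minor channel

# Latest patch version published for the channel, per the feed referenced by the workflow.
latest="$(curl -fsSL "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${CHANNEL}/latest.version")"

# Version currently pinned in the repository (path assumed for illustration).
current="$(jq -r '.sdk.version' src/global.json)"

echo "pinned: ${current}  latest: ${latest}"
if [ "$current" != "$latest" ]; then
  echo "An SDK upgrade PR would be opened for ${latest}."
fi
```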
.github/workflows/release.yml (vendored, 419 changed lines)
```
@@ -53,6 +53,27 @@
      win-arm64-sha: ${{ steps.sha.outputs.win-arm64-sha256 }}
      osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
      osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }}
      linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
      linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
      linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
      win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
      win-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-arm64-sha256 }}
      osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
      osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }}
      linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
      linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
      linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
      win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
      win-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-arm64-sha256 }}
      osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
      osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }}
      linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
      linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
      linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
      win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
      win-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-arm64-sha256 }}
      osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
      osx-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }}
    strategy:
      matrix:
        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64, win-arm64 ]
```
```
@@ -115,6 +136,76 @@
        id: sha
        name: Compute SHA256
        working-directory: _package
      - run: |
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
          echo "sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha_noexternals
        name: Compute SHA256
        working-directory: _package_trims/trim_externals
      - run: |
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
          echo "sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha_noruntime
        name: Compute SHA256
        working-directory: _package_trims/trim_runtime
      - run: |
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
          echo "sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha_noruntime_noexternals
        name: Compute SHA256
        working-directory: _package_trims/trim_runtime_externals
```
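Each of the `Compute SHA256` steps above hashes the single archive in its working directory. A standalone sketch of the same computation outside the workflow (the directory choice is a placeholder):

```bash
# Compute the SHA-256 of a runner package the same way the workflow does.
cd _package                                   # or one of the _package_trims/* directories
file=$(ls)                                    # each directory holds a single archive
sha=$(sha256sum "$file" | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
```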
```
      - name: Create trimmedpackages.json for ${{ matrix.runtime }}
        if: matrix.runtime == 'win-x64' || matrix.runtime == 'win-arm64'
        uses: actions/github-script@0.3.0
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
            const core = require('@actions/core')
            const fs = require('fs');
            const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
            var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
            trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
            trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')

            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
            trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')

            console.log(trimmedPackages)
            fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)

      - name: Create trimmedpackages.json for ${{ matrix.runtime }}
        if: matrix.runtime != 'win-x64' && matrix.runtime != 'win-arm64'
        uses: actions/github-script@0.3.0
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
            const core = require('@actions/core')
            const fs = require('fs');
            const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
            var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
            trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
            trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')

            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
            trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')

            console.log(trimmedPackages)
            fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
```
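The `github-script` steps above are plain string templating over `src/Misc/trimmedpackages_zip.json` and `trimmedpackages_targz.json`. A rough shell equivalent, shown only to illustrate the substitution; the hash values and runtime are placeholders:

```bash
#!/usr/bin/env bash
set -euo pipefail

runner_version="$(tr -d '\n' < src/runnerversion)"
runtime="linux-x64"                 # placeholder for ${{ matrix.runtime }}
runtime_hash="aaaa"                 # placeholders for the hashFiles()/sha256 step outputs
externals_hash="bbbb"
no_runtime_externals_sha="cccc"
no_runtime_sha="dddd"
no_externals_sha="eeee"

sed -e "s/<RUNNER_VERSION>/${runner_version}/g" \
    -e "s/<RUNNER_PLATFORM>/${runtime}/g" \
    -e "s/<RUNTIME_HASH>/${runtime_hash}/g" \
    -e "s/<EXTERNALS_HASH>/${externals_hash}/g" \
    -e "s/<NO_RUNTIME_EXTERNALS_HASH>/${no_runtime_externals_sha}/g" \
    -e "s/<NO_RUNTIME_HASH>/${no_runtime_sha}/g" \
    -e "s/<NO_EXTERNALS_HASH>/${no_externals_sha}/g" \
    src/Misc/trimmedpackages_targz.json > "${runtime}-trimmedpackages.json"
```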
```
      # Upload runner package tar.gz/zip as artifact.
      # Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
@@ -125,6 +216,10 @@
          name: runner-packages
          path: |
            _package
            _package_trims/trim_externals
            _package_trims/trim_runtime
            _package_trims/trim_runtime_externals
            ${{ matrix.runtime }}-trimmedpackages.json

  release:
    needs: build
```
```
@@ -158,11 +253,33 @@
            releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
            releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-arm64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
            console.log(releaseNote)
            core.setOutput('version', runnerVersion);
            core.setOutput('note', releaseNote);

      - name: Validate Packages HASH
        working-directory: _package
        run: |
          ls -l
          echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
```
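The validation step pipes an expected digest into `shasum -c`. The same pattern works for verifying a downloaded runner package against the value published in the release notes; the digest below is a placeholder:

```bash
# Verify a downloaded package against its published SHA-256.
expected_sha="0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"  # placeholder
echo "${expected_sha}  actions-runner-win-x64-2.312.0.zip" | shasum -a 256 -c
```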
```
@@ -192,7 +309,7 @@
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
          asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
          asset_content_type: application/octet-stream

@@ -202,7 +319,7 @@
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
          asset_path: ${{ github.workspace }}/_package/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
          asset_content_type: application/octet-stream

@@ -212,7 +329,7 @@
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream

@@ -222,7 +339,7 @@
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream

@@ -232,7 +349,7 @@
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream

@@ -242,7 +359,7 @@
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream
```
```
@@ -252,10 +369,298 @@
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream

      # Upload release assets (trim externals)
      - name: Upload Release Asset (win-x64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
          asset_content_type: application/octet-stream

      # Upload release assets (trim externals)
      - name: Upload Release Asset (win-arm64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-x64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-x64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-arm64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream
```
```
      # Upload release assets (trim runtime)
      - name: Upload Release Asset (win-x64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
          asset_content_type: application/octet-stream

      # Upload release assets (trim runtime)
      - name: Upload Release Asset (win-arm64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-x64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-x64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-arm64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream
```
```
      # Upload release assets (trim runtime and externals)
      - name: Upload Release Asset (win-x64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
          asset_content_type: application/octet-stream

      # Upload release assets (trim runtime and externals)
      - name: Upload Release Asset (win-arm64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-x64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-x64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-arm64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream
```
```
      # Upload release assets (trimmedpackages.json)
      - name: Upload Release Asset (win-x64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      # Upload release assets (trimmedpackages.json)
      - name: Upload Release Asset (win-arm64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/win-arm64-trimmedpackages.json
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-x64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-x64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-arm64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

  publish-image:
    needs: release
    runs-on: ubuntu-latest
```
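Every asset in the release workflow above goes through its own `actions/upload-release-asset` step. For comparison only, not what the workflow does, the same set of files could be attached to an existing release in one batch with the GitHub CLI; the tag name is a placeholder:

```bash
# Upload all packages and trimmed variants to an existing release in one command.
gh release upload v2.312.0 \
  _package/* \
  _package_trims/trim_externals/* \
  _package_trims/trim_runtime/* \
  _package_trims/trim_runtime_externals/* \
  ./*-trimmedpackages.json \
  --clobber
```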
@@ -7,10 +7,8 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

- For GitHub.com
  - The runner needs to access `https://api.github.com` for downloading actions.
  - The runner needs to access `https://codeload.github.com` for downloading actions tar.gz/zip.
  - The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
  - The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
  - The runner needs to access `https://results-receiver.actions.githubusercontent.com/.../` for reporting progress and uploading logs during a workflow job execution.

---

**NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).

@@ -18,15 +16,12 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

```
curl -v https://api.github.com/zen
curl -v https://codeload.github.com/_ping
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
curl -v https://results-receiver.actions.githubusercontent.com/health
```
- For GitHub Enterprise Server
  - The runner needs to access `https://[hostname]/api/v3` for downloading actions.
  - The runner needs to access `https://codeload.[hostname]/_ping` for downloading actions tar.gz/zip.
  - The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
  - The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.

@@ -34,7 +29,6 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

```
curl -v https://[hostname]/api/v3/zen
curl -v https://codeload.[hostname]/_ping
curl -v https://[hostname]/_services/vstoken/_apis/health
curl -v https://[hostname]/_services/pipelines/_apis/health
```
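The curl probes above only confirm reachability. To also check the response headers that the checklist below calls out, a quick sketch against the GitHub.com hostnames listed earlier:

```bash
# github.com endpoints should answer with an X-GitHub-Request-Id header...
curl -sS -o /dev/null -D - https://api.github.com/zen | grep -i '^x-github-request-id'
curl -sS -o /dev/null -D - https://codeload.github.com/_ping | grep -i '^x-github-request-id'

# ...while the token and pipelines services answer with x-vss-e2eid.
curl -sS -o /dev/null -D - https://vstoken.actions.githubusercontent.com/_apis/health | grep -i '^x-vss-e2eid'
curl -sS -o /dev/null -D - https://pipelines.actions.githubusercontent.com/_apis/health | grep -i '^x-vss-e2eid'
```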
@@ -50,10 +44,6 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

- Ping api.github.com or myGHES.com using dotnet
- Make HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, check response headers contains `X-GitHub-Request-Id`
---
- DNS lookup for codeload.github.com or codeload.myGHES.com using dotnet
- Ping codeload.github.com or codeload.myGHES.com using dotnet
- Make HTTP GET to https://codeload.github.com/_ping or https://codeload.myGHES.com/_ping using dotnet, check response headers contains `X-GitHub-Request-Id`
---
- DNS lookup for vstoken.actions.githubusercontent.com using dotnet
- Ping vstoken.actions.githubusercontent.com using dotnet
- Make HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, check response headers contains `x-vss-e2eid`

@@ -62,10 +52,6 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

- Ping pipelines.actions.githubusercontent.com using dotnet
- Make HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
- Make HTTP POST to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
---
- DNS lookup for results-receiver.actions.githubusercontent.com using dotnet
- Ping results-receiver.actions.githubusercontent.com using dotnet
- Make HTTP GET to https://results-receiver.actions.githubusercontent.com/health using dotnet, check response headers contains `X-GitHub-Request-Id`

## How to fix the issue?
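The checklist above describes probes the runner performs with dotnet. A rough shell approximation of the same three checks for one host, useful when the runner binary is not at hand; the host is an example taken from the list:

```bash
#!/usr/bin/env bash
set -euo pipefail
host="results-receiver.actions.githubusercontent.com"   # example host from the checklist

# DNS lookup
nslookup "$host"

# Ping (ICMP may be blocked even when HTTPS works, so treat a failure only as a hint)
ping -c 3 "$host" || true

# HTTP GET, checking for the expected response header
curl -sS -o /dev/null -D - "https://${host}/health" | grep -i '^x-github-request-id'
```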
@@ -42,7 +42,6 @@ If you are having trouble connecting, try these steps:
- https://api.github.com/
- https://vstoken.actions.githubusercontent.com/_apis/health
- https://pipelines.actions.githubusercontent.com/_apis/health
- https://results-receiver.actions.githubusercontent.com/health
- For GHES/GHAE
- https://myGHES.com/_services/vstoken/_apis/health
- https://myGHES.com/_services/pipelines/_apis/health

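To exercise the whole list in one pass, a small loop that prints the HTTP status of each endpoint is usually enough to spot which hop is blocked. A sketch for the github.com endpoints; swap in the GHES URLs as needed:

```
for url in \
  https://api.github.com/ \
  https://vstoken.actions.githubusercontent.com/_apis/health \
  https://pipelines.actions.githubusercontent.com/_apis/health \
  https://results-receiver.actions.githubusercontent.com/health; do
  # A status of 000 means the connection itself failed (DNS, proxy, or firewall).
  printf '%s %s\n' "$(curl -s -o /dev/null -w '%{http_code}' "$url")" "$url"
done
```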
@@ -5,9 +5,9 @@
## Supported Distributions and Versions

x64
- Red Hat Enterprise Linux 7+
- CentOS 7+
- Oracle Linux 7+
- Red Hat Enterprise Linux 7
- CentOS 7
- Oracle Linux 7
- Fedora 29+
- Debian 9+
- Ubuntu 16.04+

@@ -4,9 +4,8 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build
ARG TARGETOS
ARG TARGETARCH
ARG RUNNER_VERSION
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.5.0
ARG DOCKER_VERSION=24.0.6
ARG BUILDX_VERSION=0.11.2
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.2
ARG DOCKER_VERSION=23.0.6

RUN apt update -y && apt install curl unzip -y

@@ -26,18 +25,13 @@ RUN export RUNNER_ARCH=${TARGETARCH} \
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
&& curl -fLo docker.tgz https://download.docker.com/${TARGETOS}/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
&& tar zxvf docker.tgz \
&& rm -rf docker.tgz \
&& mkdir -p /usr/local/lib/docker/cli-plugins \
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
&& rm -rf docker.tgz

FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy

ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
ENV ImageOS=ubuntu22

RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \
@@ -55,7 +49,6 @@ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
WORKDIR /home/runner

COPY --chown=runner:docker --from=build /actions-runner .
COPY --from=build /usr/local/lib/docker/cli-plugins/docker-buildx /usr/local/lib/docker/cli-plugins/docker-buildx

RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker

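Since the build stage above is driven entirely by `ARG`s, image variants can be produced by overriding them at build time. An illustrative invocation, not part of this diff (the build-context path and image tag are assumptions; `TARGETOS`/`TARGETARCH` are filled in automatically from `--platform` by BuildKit):

```
docker buildx build \
  --platform linux/amd64 \
  --build-arg RUNNER_VERSION=2.312.0 \
  -t my-org/actions-runner:2.312.0 \
  images/
```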
@@ -1,26 +1,37 @@
## What's Changed
* Fix `buildx` installation by @ajschmidt8 in https://github.com/actions/runner/pull/2952
* Create close-features and close-bugs bot for runner issues by @ruvceskistefan in https://github.com/actions/runner/pull/2909
* Send disableUpdate as query parameter by @luketomlinson in https://github.com/actions/runner/pull/2970
* Handle SelfUpdate Flow when Package is provided in Message by @luketomlinson in https://github.com/actions/runner/pull/2926
* Bump container hook version to 0.5.0 in runner image by @nikola-jokic in https://github.com/actions/runner/pull/3003
* Set `ImageOS` environment variable in runner images by @int128 in https://github.com/actions/runner/pull/2878
* Mark job as failed on worker crash. by @TingluoHuang in https://github.com/actions/runner/pull/3006
* Include whether http proxy configured as part of UserAgent. by @TingluoHuang in https://github.com/actions/runner/pull/3009
* Add codeload to the list of service we check during '--check'. by @TingluoHuang in https://github.com/actions/runner/pull/3011
* close reason update by @ruvceskistefan in https://github.com/actions/runner/pull/3027
* Update envlinux.md by @adjn in https://github.com/actions/runner/pull/3040
* Extend `--check` to check Results-Receiver service. by @TingluoHuang in https://github.com/actions/runner/pull/3078
* Use Azure SDK to upload files to Azure Blob by @yacaovsnc in https://github.com/actions/runner/pull/3033
* Remove code in runner for handling trimmed packages. by @TingluoHuang in https://github.com/actions/runner/pull/3074
* Update dotnet sdk to latest version @6.0.418 by @github-actions in https://github.com/actions/runner/pull/3085
* Patch Curl to no longer use -k by @thboop in https://github.com/actions/runner/pull/3091
* Bump @types/node from 12.12.14 to 20.4.10 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2759
* Trace x-github-request-id when download action tarball. by @TingluoHuang in https://github.com/actions/runner/pull/2755
* Fix typo by @kyanny in https://github.com/actions/runner/pull/2741
* Bump prettier from 3.0.1 to 3.0.2 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2772
* Bump @types/node from 20.4.10 to 20.5.0 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2773
* Revert "Fixed a bug where a misplaced `=` character could bypass here… by @cory-miller in https://github.com/actions/runner/pull/2774
* Filter NODE_OPTIONS from env for file output by @cory-miller in https://github.com/actions/runner/pull/2775
* Bump @types/node from 20.5.0 to 20.5.1 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2781
* Update Docker Version in Images by @ajschmidt8 in https://github.com/actions/runner/pull/2694
* Bump @types/node from 20.5.1 to 20.5.4 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2789
* Bump @typescript-eslint/parser from 6.4.0 to 6.4.1 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2785
* Bump Microsoft.AspNet.WebApi.Client from 5.2.4 to 5.2.9 in /src by @dependabot in https://github.com/actions/runner/pull/2751
* Bump System.Buffers from 4.3.0 to 4.5.1 in /src by @dependabot in https://github.com/actions/runner/pull/2749
* Bump dotnet/runtime-deps from 6.0-jammy to 7.0-jammy in /images by @dependabot in https://github.com/actions/runner/pull/2745
* Remove need to manually compile JS binary for hashFiles utility by @vanZeben in https://github.com/actions/runner/pull/2770
* Revert "Bump dotnet/runtime-deps from 6.0-jammy to 7.0-jammy in /images" by @TingluoHuang in https://github.com/actions/runner/pull/2790
* Query runner by name on server side. by @TingluoHuang in https://github.com/actions/runner/pull/2771
* Bump typescript from 5.1.6 to 5.2.2 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2795
* Bump @types/node from 20.5.4 to 20.5.6 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2796
* Bump Newtonsoft.Json from 13.0.1 to 13.0.3 in /src by @dependabot in https://github.com/actions/runner/pull/2797
* Support replacing runners in v2 flow by @luketomlinson in https://github.com/actions/runner/pull/2791
* Delegating handler for Http redirects by @paveliak in https://github.com/actions/runner/pull/2814
* Add references to the firewall requirements docs by @paveliak in https://github.com/actions/runner/pull/2815
* Create automated workflow that will auto-generate dotnet sdk patches by @vanZeben in https://github.com/actions/runner/pull/2776
* Fixes minor issues with using proper output varaibles by @vanZeben in https://github.com/actions/runner/pull/2818
* Throw NonRetryableException on GetNextMessage from broker as needed. by @TingluoHuang in https://github.com/actions/runner/pull/2828
* Mark action download failures as infra failures by @cory-miller in https://github.com/actions/runner/pull/2827

## New Contributors
* @int128 made their first contribution in https://github.com/actions/runner/pull/2878
* @adjn made their first contribution in https://github.com/actions/runner/pull/3040
* @kyanny made their first contribution in https://github.com/actions/runner/pull/2741
* @ajschmidt8 made their first contribution in https://github.com/actions/runner/pull/2694

**Full Changelog**: https://github.com/actions/runner/compare/v2.311.0...v2.312.0
**Full Changelog**: https://github.com/actions/runner/compare/v2.308.0...v2.309.0

_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
@@ -125,3 +136,27 @@ The SHA-256 checksums for the packages included in this build are shown below:
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->

- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-arm64_noexternals --><WIN_ARM64_SHA_NOEXTERNALS><!-- END SHA win-arm64_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noexternals --><OSX_ARM64_SHA_NOEXTERNALS><!-- END SHA osx-arm64_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->

- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-arm64_noruntime --><WIN_ARM64_SHA_NORUNTIME><!-- END SHA win-arm64_noruntime -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-arm64_noruntime --><OSX_ARM64_SHA_NORUNTIME><!-- END SHA osx-arm64_noruntime -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->

- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-arm64_noruntime_noexternals --><WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-arm64_noruntime_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noruntime_noexternals --><OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-arm64_noruntime_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->

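The placeholders above are filled in at release time. Once a package and its checksum are published, the download can be verified locally before installation; a minimal sketch using `shasum` (replace the placeholder hash and file name with the published values for your platform):

```
# Compare the downloaded archive against the published SHA-256 value.
echo "<LINUX_X64_SHA>  actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz" | shasum -a 256 -c
```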
@@ -1 +1 @@
2.312.0
<Update to ./src/runnerversion when creating release>

@@ -8,7 +8,7 @@ set -e
# Configures it as a service more secure
# Should be used on VMs and not containers
# Works on OSX and Linux
# Assumes x64 arch (support arm64)
# Assumes x64 arch
# See EXAMPLES below

flags_found=false
@@ -87,9 +87,6 @@ sudo echo
runner_plat=linux
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;

runner_arch=x64
[ ! -z "$(arch | grep arm64)" ] && runner_arch=arm64

function fatal()
{
echo "error: $1" >&2
@@ -142,7 +139,7 @@ echo "Downloading latest runner ..."
# For the GHES Alpha, download the runner from github.com
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
latest_version=$(echo ${latest_version_label:1})
runner_file="actions-runner-${runner_plat}-${runner_arch}-${latest_version}.tar.gz"
runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"

if [ -f "${runner_file}" ]; then
echo "${runner_file} exists. skipping download."

@@ -1 +1 @@
|
||||
54d95a44d118dba852395991224a6b9c1abe916858c87138656f80c619e85331
|
||||
7539d33c35b0bc94ee67e3c0de1a6bac5ef89ce8e8efaa110131fa0520a54fb4
|
||||
|
||||
@@ -1 +1 @@
|
||||
68015af17f06a824fa478e62ae7393766ce627fd5599ab916432a14656a19a52
|
||||
d71a31f9a17e1a41d6e1edea596edfa68a0db5948ed160e86f2154a547f4dd10
|
||||
|
||||
@@ -1 +1 @@
|
||||
a2628119ca419cb54e279103ffae7986cdbd0814d57c73ff0dc74c38be08b9ae
|
||||
3c2f700d8a995efe7895614ee07d9c7880f872d214b45983ad6163e1931870ab
|
||||
|
||||
@@ -1 +1 @@
|
||||
de71ca09ead807e1a2ce9df0a5b23eb7690cb71fff51169a77e4c3992be53dda
|
||||
b2d85c95ecad13d352f4c7d31c64dbb0d9c6381b48fa5874c4c72a43a025a8a1
|
||||
|
||||
@@ -1 +1 @@
|
||||
d009e05e6b26d614d65be736a15d1bd151932121c16a9ff1b986deadecc982b9
|
||||
417d835c1a108619886b4bb5d25988cb6c138eb7b4c00320b1d9455c5630bff9
|
||||
|
||||
@@ -1 +1 @@
|
||||
f730db39c2305800b4653795360ba9c10c68f384a46b85d808f1f9f0ed3c42e4
|
||||
8f35aaecfb53426ea10816442e23065142bab9dd0fb712a29e0fc471d13c44ac
|
||||
|
||||
@@ -1 +1 @@
|
||||
a35b5722375490e9473cdcccb5e18b41eba3dbf4344fe31abc9821e21f18ea5a
|
||||
811c7debdfc54d074385b063b83c997e5360c8a9160cd20fe777713968370063
|
||||
|
||||
2 src/Misc/contentHash/externals/linux-arm vendored
@@ -1 +1 @@
|
||||
4bf3e1af0d482af1b2eaf9f08250248a8c1aea8ec20a3c5be116d58cdd930009
|
||||
5bdddd32bab1e57af252b470579083049496e9e39b6e4f50de01232581f9a2d8
|
||||
2 src/Misc/contentHash/externals/linux-arm64 vendored
@@ -1 +1 @@
|
||||
ec1719a8cb4d8687328aa64f4aa7c4e3498a715d8939117874782e3e6e63a14b
|
||||
54b3b3a72da93db0fa38708c759fceadddb70cacdd3620a079084a242126dd78
|
||||
2 src/Misc/contentHash/externals/linux-x64 vendored
@@ -1 +1 @@
|
||||
50538de29f173bb73f708c4ed2c8328a62b8795829b97b2a6cb57197e2305287
|
||||
e7f2da271abb174285c3a757503538b3e9792e9d731b0382b6d1f21bb59a79ba
|
||||
2 src/Misc/contentHash/externals/osx-arm64 vendored
@@ -1 +1 @@
|
||||
a0a96cbb7593643b69e669bf14d7b29b7f27800b3a00bb3305aebe041456c701
|
||||
2481c5b0d06b2b5621635f2568b86a43b0e5b259fed1298167ba4f33d4c464c7
|
||||
2 src/Misc/contentHash/externals/osx-x64 vendored
@@ -1 +1 @@
|
||||
6255b22692779467047ecebd60ad46984866d75cdfe10421d593a7b51d620b09
|
||||
85de7677165e65ec69b8a9e344c0811efa51b7fe5376a1aa083505c560ea6f57
|
||||
2 src/Misc/contentHash/externals/win-arm64 vendored
@@ -1 +1 @@
|
||||
6ff1abd055dc35bfbf06f75c2f08908f660346f66ad1d8f81c910068e9ba029d
|
||||
763d18de11c11fd299c0e75e98fefc8a0e6605ae0ad6aba3bbc110db2262ab41
|
||||
2 src/Misc/contentHash/externals/win-x64 vendored
@@ -1 +1 @@
|
||||
433a6d748742d12abd20dc2a79b62ac3d9718ae47ef26f8e84dc8c180eea3659
|
||||
16f3cc545dfe10e84df43746073fc64d3c44d1891782532805aeb2118869a55d
|
||||
371 src/Misc/expressionFunc/hashFiles/package-lock.json generated
@@ -12,16 +12,16 @@
|
||||
"@actions/glob": "^0.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.6.2",
|
||||
"@typescript-eslint/eslint-plugin": "^6.7.2",
|
||||
"@typescript-eslint/parser": "^6.7.2",
|
||||
"@vercel/ncc": "^0.38.0",
|
||||
"@types/node": "^20.5.6",
|
||||
"@typescript-eslint/eslint-plugin": "^6.4.0",
|
||||
"@typescript-eslint/parser": "^6.4.1",
|
||||
"@vercel/ncc": "^0.36.1",
|
||||
"eslint": "^8.47.0",
|
||||
"eslint-plugin-github": "^4.10.0",
|
||||
"eslint-plugin-github": "^4.9.2",
|
||||
"eslint-plugin-prettier": "^5.0.0",
|
||||
"husky": "^8.0.3",
|
||||
"lint-staged": "^14.0.0",
|
||||
"prettier": "^3.0.3",
|
||||
"prettier": "^3.0.1",
|
||||
"typescript": "^5.2.2"
|
||||
}
|
||||
},
|
||||
@@ -223,9 +223,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@types/json-schema": {
|
||||
"version": "7.0.13",
|
||||
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.13.tgz",
|
||||
"integrity": "sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==",
|
||||
"version": "7.0.12",
|
||||
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.12.tgz",
|
||||
"integrity": "sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/json5": {
|
||||
@@ -235,28 +235,28 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "20.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.6.2.tgz",
|
||||
"integrity": "sha512-Y+/1vGBHV/cYk6OI1Na/LHzwnlNCAfU3ZNGrc1LdRe/LAIbdDPTTv/HU3M7yXN448aTVDq3eKRm2cg7iKLb8gw==",
|
||||
"version": "20.5.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.5.6.tgz",
|
||||
"integrity": "sha512-Gi5wRGPbbyOTX+4Y2iULQ27oUPrefaB0PxGQJnfyWN3kvEDGM3mIB5M/gQLmitZf7A9FmLeaqxD3L1CXpm3VKQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/semver": {
|
||||
"version": "7.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.2.tgz",
|
||||
"integrity": "sha512-7aqorHYgdNO4DM36stTiGO3DvKoex9TQRwsJU6vMaFGyqpBA1MNZkz+PG3gaNUPpTAOYhT1WR7M1JyA3fbS9Cw==",
|
||||
"version": "7.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz",
|
||||
"integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.7.2.tgz",
|
||||
"integrity": "sha512-ooaHxlmSgZTM6CHYAFRlifqh1OAr3PAQEwi7lhYhaegbnXrnh7CDcHmc3+ihhbQC7H0i4JF0psI5ehzkF6Yl6Q==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.4.1.tgz",
|
||||
"integrity": "sha512-3F5PtBzUW0dYlq77Lcqo13fv+58KDwUib3BddilE8ajPJT+faGgxmI9Sw+I8ZS22BYwoir9ZhNXcLi+S+I2bkw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@eslint-community/regexpp": "^4.5.1",
|
||||
"@typescript-eslint/scope-manager": "6.7.2",
|
||||
"@typescript-eslint/type-utils": "6.7.2",
|
||||
"@typescript-eslint/utils": "6.7.2",
|
||||
"@typescript-eslint/visitor-keys": "6.7.2",
|
||||
"@typescript-eslint/scope-manager": "6.4.1",
|
||||
"@typescript-eslint/type-utils": "6.4.1",
|
||||
"@typescript-eslint/utils": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1",
|
||||
"debug": "^4.3.4",
|
||||
"graphemer": "^1.4.0",
|
||||
"ignore": "^5.2.4",
|
||||
@@ -282,15 +282,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.7.2.tgz",
|
||||
"integrity": "sha512-KA3E4ox0ws+SPyxQf9iSI25R6b4Ne78ORhNHeVKrPQnoYsb9UhieoiRoJgrzgEeKGOXhcY1i8YtOeCHHTDa6Fw==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.4.1.tgz",
|
||||
"integrity": "sha512-610G6KHymg9V7EqOaNBMtD1GgpAmGROsmfHJPXNLCU9bfIuLrkdOygltK784F6Crboyd5tBFayPB7Sf0McrQwg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/scope-manager": "6.7.2",
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/typescript-estree": "6.7.2",
|
||||
"@typescript-eslint/visitor-keys": "6.7.2",
|
||||
"@typescript-eslint/scope-manager": "6.4.1",
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/typescript-estree": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1",
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"engines": {
|
||||
@@ -309,14 +309,88 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.7.2.tgz",
|
||||
"integrity": "sha512-bgi6plgyZjEqapr7u2mhxGR6E8WCzKNUFWNh6fkpVe9+yzRZeYtDTbsIBzKbcxI+r1qVWt6VIoMSNZ4r2A+6Yw==",
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.4.1.tgz",
|
||||
"integrity": "sha512-p/OavqOQfm4/Hdrr7kvacOSFjwQ2rrDVJRPxt/o0TOWdFnjJptnjnZ+sYDR7fi4OimvIuKp+2LCkc+rt9fIW+A==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/visitor-keys": "6.7.2"
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.4.1.tgz",
|
||||
"integrity": "sha512-zAAopbNuYu++ijY1GV2ylCsQsi3B8QvfPHVqhGdDcbx/NK5lkqMnCGU53amAjccSpk+LfeONxwzUhDzArSfZJg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.4.1.tgz",
|
||||
"integrity": "sha512-xF6Y7SatVE/OyV93h1xGgfOkHr2iXuo8ip0gbfzaKeGGuKiAnzS+HtVhSPx8Www243bwlW8IF7X0/B62SzFftg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1",
|
||||
"debug": "^4.3.4",
|
||||
"globby": "^11.1.0",
|
||||
"is-glob": "^4.0.3",
|
||||
"semver": "^7.5.4",
|
||||
"ts-api-utils": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"typescript": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.4.1.tgz",
|
||||
"integrity": "sha512-y/TyRJsbZPkJIZQXrHfdnxVnxyKegnpEvnRGNam7s3TRR2ykGefEWOhaef00/UUN3IZxizS7BTO3svd3lCOJRQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"eslint-visitor-keys": "^3.4.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.4.1.tgz",
|
||||
"integrity": "sha512-p/OavqOQfm4/Hdrr7kvacOSFjwQ2rrDVJRPxt/o0TOWdFnjJptnjnZ+sYDR7fi4OimvIuKp+2LCkc+rt9fIW+A==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
@@ -327,13 +401,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.7.2.tgz",
|
||||
"integrity": "sha512-36F4fOYIROYRl0qj95dYKx6kybddLtsbmPIYNK0OBeXv2j9L5nZ17j9jmfy+bIDHKQgn2EZX+cofsqi8NPATBQ==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.4.1.tgz",
|
||||
"integrity": "sha512-7ON8M8NXh73SGZ5XvIqWHjgX2f+vvaOarNliGhjrJnv1vdjG0LVIz+ToYfPirOoBi56jxAKLfsLm40+RvxVVXA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/typescript-estree": "6.7.2",
|
||||
"@typescript-eslint/utils": "6.7.2",
|
||||
"@typescript-eslint/typescript-estree": "6.4.1",
|
||||
"@typescript-eslint/utils": "6.4.1",
|
||||
"debug": "^4.3.4",
|
||||
"ts-api-utils": "^1.0.1"
|
||||
},
|
||||
@@ -354,9 +428,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/types": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.7.2.tgz",
|
||||
"integrity": "sha512-flJYwMYgnUNDAN9/GAI3l8+wTmvTYdv64fcH8aoJK76Y+1FCZ08RtI5zDerM/FYT5DMkAc+19E4aLmd5KqdFyg==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.4.1.tgz",
|
||||
"integrity": "sha512-zAAopbNuYu++ijY1GV2ylCsQsi3B8QvfPHVqhGdDcbx/NK5lkqMnCGU53amAjccSpk+LfeONxwzUhDzArSfZJg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "^16.0.0 || >=18.0.0"
|
||||
@@ -367,13 +441,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.2.tgz",
|
||||
"integrity": "sha512-kiJKVMLkoSciGyFU0TOY0fRxnp9qq1AzVOHNeN1+B9erKFCJ4Z8WdjAkKQPP+b1pWStGFqezMLltxO+308dJTQ==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.4.1.tgz",
|
||||
"integrity": "sha512-xF6Y7SatVE/OyV93h1xGgfOkHr2iXuo8ip0gbfzaKeGGuKiAnzS+HtVhSPx8Www243bwlW8IF7X0/B62SzFftg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/visitor-keys": "6.7.2",
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1",
|
||||
"debug": "^4.3.4",
|
||||
"globby": "^11.1.0",
|
||||
"is-glob": "^4.0.3",
|
||||
@@ -394,17 +468,17 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/utils": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.7.2.tgz",
|
||||
"integrity": "sha512-ZCcBJug/TS6fXRTsoTkgnsvyWSiXwMNiPzBUani7hDidBdj1779qwM1FIAmpH4lvlOZNF3EScsxxuGifjpLSWQ==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.4.1.tgz",
|
||||
"integrity": "sha512-F/6r2RieNeorU0zhqZNv89s9bDZSovv3bZQpUNOmmQK1L80/cV4KEu95YUJWi75u5PhboFoKUJBnZ4FQcoqhDw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.4.0",
|
||||
"@types/json-schema": "^7.0.12",
|
||||
"@types/semver": "^7.5.0",
|
||||
"@typescript-eslint/scope-manager": "6.7.2",
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/typescript-estree": "6.7.2",
|
||||
"@typescript-eslint/scope-manager": "6.4.1",
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/typescript-estree": "6.4.1",
|
||||
"semver": "^7.5.4"
|
||||
},
|
||||
"engines": {
|
||||
@@ -419,12 +493,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.2.tgz",
|
||||
"integrity": "sha512-uVw9VIMFBUTz8rIeaUT3fFe8xIUx8r4ywAdlQv1ifH+6acn/XF8Y6rwJ7XNmkNMDrTW+7+vxFFPIF40nJCVsMQ==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.4.1.tgz",
|
||||
"integrity": "sha512-y/TyRJsbZPkJIZQXrHfdnxVnxyKegnpEvnRGNam7s3TRR2ykGefEWOhaef00/UUN3IZxizS7BTO3svd3lCOJRQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"eslint-visitor-keys": "^3.4.1"
|
||||
},
|
||||
"engines": {
|
||||
@@ -436,9 +510,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@vercel/ncc": {
|
||||
"version": "0.38.0",
|
||||
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.0.tgz",
|
||||
"integrity": "sha512-B4YKZMm/EqMptKSFyAq4q2SlgJe+VCmEH6Y8gf/E1pTlWbsUJpuH1ymik2Ex3aYO5mCWwV1kaSYHSQOT8+4vHA==",
|
||||
"version": "0.36.1",
|
||||
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.1.tgz",
|
||||
"integrity": "sha512-S4cL7Taa9yb5qbv+6wLgiKVZ03Qfkc4jGRuiUQMQ8HGBD5pcNRnHeYM33zBvJE4/zJGjJJ8GScB+WmTsn9mORw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"ncc": "dist/ncc/cli.js"
|
||||
@@ -1358,9 +1432,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-plugin-github": {
|
||||
"version": "4.10.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-4.10.0.tgz",
|
||||
"integrity": "sha512-YKtqBtFbjih1wZNTwZjtLPEG6B/4ySMa38fgOo/rbMJpNKO3+OaKzwwOYkeKx/FapM/4MsTP9ExqUcDV+dkixA==",
|
||||
"version": "4.9.2",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-4.9.2.tgz",
|
||||
"integrity": "sha512-osez6Sio/fLr/3QkW5HE1wbCOcmYG5030/6QIa9IcKyyfchewlecdnYcsbeUMUtdIiU9lWqhroQp2H/O7auxBA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@github/browserslist-config": "^1.0.0",
|
||||
@@ -3107,9 +3181,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/prettier": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.3.tgz",
|
||||
"integrity": "sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg==",
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.2.tgz",
|
||||
"integrity": "sha512-o2YR9qtniXvwEZlOKbveKfDQVyqxbEIWn48Z8m3ZJjBjcCmUy3xZGIv+7AkaeuaTr6yPXJjwv07ZWlsWbEy1rQ==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"prettier": "bin/prettier.cjs"
|
||||
@@ -4258,9 +4332,9 @@
|
||||
}
|
||||
},
|
||||
"@types/json-schema": {
|
||||
"version": "7.0.13",
|
||||
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.13.tgz",
|
||||
"integrity": "sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==",
|
||||
"version": "7.0.12",
|
||||
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.12.tgz",
|
||||
"integrity": "sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/json5": {
|
||||
@@ -4270,28 +4344,28 @@
|
||||
"dev": true
|
||||
},
|
||||
"@types/node": {
|
||||
"version": "20.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.6.2.tgz",
|
||||
"integrity": "sha512-Y+/1vGBHV/cYk6OI1Na/LHzwnlNCAfU3ZNGrc1LdRe/LAIbdDPTTv/HU3M7yXN448aTVDq3eKRm2cg7iKLb8gw==",
|
||||
"version": "20.5.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.5.6.tgz",
|
||||
"integrity": "sha512-Gi5wRGPbbyOTX+4Y2iULQ27oUPrefaB0PxGQJnfyWN3kvEDGM3mIB5M/gQLmitZf7A9FmLeaqxD3L1CXpm3VKQ==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/semver": {
|
||||
"version": "7.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.2.tgz",
|
||||
"integrity": "sha512-7aqorHYgdNO4DM36stTiGO3DvKoex9TQRwsJU6vMaFGyqpBA1MNZkz+PG3gaNUPpTAOYhT1WR7M1JyA3fbS9Cw==",
|
||||
"version": "7.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz",
|
||||
"integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==",
|
||||
"dev": true
|
||||
},
|
||||
"@typescript-eslint/eslint-plugin": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.7.2.tgz",
|
||||
"integrity": "sha512-ooaHxlmSgZTM6CHYAFRlifqh1OAr3PAQEwi7lhYhaegbnXrnh7CDcHmc3+ihhbQC7H0i4JF0psI5ehzkF6Yl6Q==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.4.1.tgz",
|
||||
"integrity": "sha512-3F5PtBzUW0dYlq77Lcqo13fv+58KDwUib3BddilE8ajPJT+faGgxmI9Sw+I8ZS22BYwoir9ZhNXcLi+S+I2bkw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@eslint-community/regexpp": "^4.5.1",
|
||||
"@typescript-eslint/scope-manager": "6.7.2",
|
||||
"@typescript-eslint/type-utils": "6.7.2",
|
||||
"@typescript-eslint/utils": "6.7.2",
|
||||
"@typescript-eslint/visitor-keys": "6.7.2",
|
||||
"@typescript-eslint/scope-manager": "6.4.1",
|
||||
"@typescript-eslint/type-utils": "6.4.1",
|
||||
"@typescript-eslint/utils": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1",
|
||||
"debug": "^4.3.4",
|
||||
"graphemer": "^1.4.0",
|
||||
"ignore": "^5.2.4",
|
||||
@@ -4301,54 +4375,97 @@
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/parser": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.7.2.tgz",
|
||||
"integrity": "sha512-KA3E4ox0ws+SPyxQf9iSI25R6b4Ne78ORhNHeVKrPQnoYsb9UhieoiRoJgrzgEeKGOXhcY1i8YtOeCHHTDa6Fw==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.4.1.tgz",
|
||||
"integrity": "sha512-610G6KHymg9V7EqOaNBMtD1GgpAmGROsmfHJPXNLCU9bfIuLrkdOygltK784F6Crboyd5tBFayPB7Sf0McrQwg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/scope-manager": "6.7.2",
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/typescript-estree": "6.7.2",
|
||||
"@typescript-eslint/visitor-keys": "6.7.2",
|
||||
"@typescript-eslint/scope-manager": "6.4.1",
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/typescript-estree": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1",
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@typescript-eslint/scope-manager": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.4.1.tgz",
|
||||
"integrity": "sha512-p/OavqOQfm4/Hdrr7kvacOSFjwQ2rrDVJRPxt/o0TOWdFnjJptnjnZ+sYDR7fi4OimvIuKp+2LCkc+rt9fIW+A==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/types": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.4.1.tgz",
|
||||
"integrity": "sha512-zAAopbNuYu++ijY1GV2ylCsQsi3B8QvfPHVqhGdDcbx/NK5lkqMnCGU53amAjccSpk+LfeONxwzUhDzArSfZJg==",
|
||||
"dev": true
|
||||
},
|
||||
"@typescript-eslint/typescript-estree": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.4.1.tgz",
|
||||
"integrity": "sha512-xF6Y7SatVE/OyV93h1xGgfOkHr2iXuo8ip0gbfzaKeGGuKiAnzS+HtVhSPx8Www243bwlW8IF7X0/B62SzFftg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1",
|
||||
"debug": "^4.3.4",
|
||||
"globby": "^11.1.0",
|
||||
"is-glob": "^4.0.3",
|
||||
"semver": "^7.5.4",
|
||||
"ts-api-utils": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/visitor-keys": {
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.4.1.tgz",
|
||||
"integrity": "sha512-y/TyRJsbZPkJIZQXrHfdnxVnxyKegnpEvnRGNam7s3TRR2ykGefEWOhaef00/UUN3IZxizS7BTO3svd3lCOJRQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"eslint-visitor-keys": "^3.4.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/scope-manager": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.7.2.tgz",
|
||||
"integrity": "sha512-bgi6plgyZjEqapr7u2mhxGR6E8WCzKNUFWNh6fkpVe9+yzRZeYtDTbsIBzKbcxI+r1qVWt6VIoMSNZ4r2A+6Yw==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.4.1.tgz",
|
||||
"integrity": "sha512-p/OavqOQfm4/Hdrr7kvacOSFjwQ2rrDVJRPxt/o0TOWdFnjJptnjnZ+sYDR7fi4OimvIuKp+2LCkc+rt9fIW+A==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/visitor-keys": "6.7.2"
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/type-utils": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.7.2.tgz",
|
||||
"integrity": "sha512-36F4fOYIROYRl0qj95dYKx6kybddLtsbmPIYNK0OBeXv2j9L5nZ17j9jmfy+bIDHKQgn2EZX+cofsqi8NPATBQ==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.4.1.tgz",
|
||||
"integrity": "sha512-7ON8M8NXh73SGZ5XvIqWHjgX2f+vvaOarNliGhjrJnv1vdjG0LVIz+ToYfPirOoBi56jxAKLfsLm40+RvxVVXA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/typescript-estree": "6.7.2",
|
||||
"@typescript-eslint/utils": "6.7.2",
|
||||
"@typescript-eslint/typescript-estree": "6.4.1",
|
||||
"@typescript-eslint/utils": "6.4.1",
|
||||
"debug": "^4.3.4",
|
||||
"ts-api-utils": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/types": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.7.2.tgz",
|
||||
"integrity": "sha512-flJYwMYgnUNDAN9/GAI3l8+wTmvTYdv64fcH8aoJK76Y+1FCZ08RtI5zDerM/FYT5DMkAc+19E4aLmd5KqdFyg==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.4.1.tgz",
|
||||
"integrity": "sha512-zAAopbNuYu++ijY1GV2ylCsQsi3B8QvfPHVqhGdDcbx/NK5lkqMnCGU53amAjccSpk+LfeONxwzUhDzArSfZJg==",
|
||||
"dev": true
|
||||
},
|
||||
"@typescript-eslint/typescript-estree": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.2.tgz",
|
||||
"integrity": "sha512-kiJKVMLkoSciGyFU0TOY0fRxnp9qq1AzVOHNeN1+B9erKFCJ4Z8WdjAkKQPP+b1pWStGFqezMLltxO+308dJTQ==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.4.1.tgz",
|
||||
"integrity": "sha512-xF6Y7SatVE/OyV93h1xGgfOkHr2iXuo8ip0gbfzaKeGGuKiAnzS+HtVhSPx8Www243bwlW8IF7X0/B62SzFftg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/visitor-keys": "6.7.2",
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/visitor-keys": "6.4.1",
|
||||
"debug": "^4.3.4",
|
||||
"globby": "^11.1.0",
|
||||
"is-glob": "^4.0.3",
|
||||
@@ -4357,34 +4474,34 @@
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/utils": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.7.2.tgz",
|
||||
"integrity": "sha512-ZCcBJug/TS6fXRTsoTkgnsvyWSiXwMNiPzBUani7hDidBdj1779qwM1FIAmpH4lvlOZNF3EScsxxuGifjpLSWQ==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.4.1.tgz",
|
||||
"integrity": "sha512-F/6r2RieNeorU0zhqZNv89s9bDZSovv3bZQpUNOmmQK1L80/cV4KEu95YUJWi75u5PhboFoKUJBnZ4FQcoqhDw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@eslint-community/eslint-utils": "^4.4.0",
|
||||
"@types/json-schema": "^7.0.12",
|
||||
"@types/semver": "^7.5.0",
|
||||
"@typescript-eslint/scope-manager": "6.7.2",
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/typescript-estree": "6.7.2",
|
||||
"@typescript-eslint/scope-manager": "6.4.1",
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"@typescript-eslint/typescript-estree": "6.4.1",
|
||||
"semver": "^7.5.4"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/visitor-keys": {
|
||||
"version": "6.7.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.2.tgz",
|
||||
"integrity": "sha512-uVw9VIMFBUTz8rIeaUT3fFe8xIUx8r4ywAdlQv1ifH+6acn/XF8Y6rwJ7XNmkNMDrTW+7+vxFFPIF40nJCVsMQ==",
|
||||
"version": "6.4.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.4.1.tgz",
|
||||
"integrity": "sha512-y/TyRJsbZPkJIZQXrHfdnxVnxyKegnpEvnRGNam7s3TRR2ykGefEWOhaef00/UUN3IZxizS7BTO3svd3lCOJRQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "6.7.2",
|
||||
"@typescript-eslint/types": "6.4.1",
|
||||
"eslint-visitor-keys": "^3.4.1"
|
||||
}
|
||||
},
|
||||
"@vercel/ncc": {
|
||||
"version": "0.38.0",
|
||||
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.0.tgz",
|
||||
"integrity": "sha512-B4YKZMm/EqMptKSFyAq4q2SlgJe+VCmEH6Y8gf/E1pTlWbsUJpuH1ymik2Ex3aYO5mCWwV1kaSYHSQOT8+4vHA==",
|
||||
"version": "0.36.1",
|
||||
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.1.tgz",
|
||||
"integrity": "sha512-S4cL7Taa9yb5qbv+6wLgiKVZ03Qfkc4jGRuiUQMQ8HGBD5pcNRnHeYM33zBvJE4/zJGjJJ8GScB+WmTsn9mORw==",
|
||||
"dev": true
|
||||
},
|
||||
"acorn": {
|
||||
@@ -5044,9 +5161,9 @@
|
||||
}
|
||||
},
|
||||
"eslint-plugin-github": {
|
||||
"version": "4.10.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-4.10.0.tgz",
|
||||
"integrity": "sha512-YKtqBtFbjih1wZNTwZjtLPEG6B/4ySMa38fgOo/rbMJpNKO3+OaKzwwOYkeKx/FapM/4MsTP9ExqUcDV+dkixA==",
|
||||
"version": "4.9.2",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-4.9.2.tgz",
|
||||
"integrity": "sha512-osez6Sio/fLr/3QkW5HE1wbCOcmYG5030/6QIa9IcKyyfchewlecdnYcsbeUMUtdIiU9lWqhroQp2H/O7auxBA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@github/browserslist-config": "^1.0.0",
|
||||
@@ -6280,9 +6397,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"prettier": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.3.tgz",
|
||||
"integrity": "sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg==",
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.2.tgz",
|
||||
"integrity": "sha512-o2YR9qtniXvwEZlOKbveKfDQVyqxbEIWn48Z8m3ZJjBjcCmUy3xZGIv+7AkaeuaTr6yPXJjwv07ZWlsWbEy1rQ==",
|
||||
"dev": true
|
||||
},
|
||||
"prettier-linter-helpers": {
|
||||
|
||||
@@ -36,14 +36,14 @@
|
||||
"@actions/glob": "^0.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.6.2",
|
||||
"@typescript-eslint/eslint-plugin": "^6.7.2",
|
||||
"@typescript-eslint/parser": "^6.7.2",
|
||||
"@vercel/ncc": "^0.38.0",
|
||||
"@types/node": "^20.5.6",
|
||||
"@typescript-eslint/eslint-plugin": "^6.4.0",
|
||||
"@typescript-eslint/parser": "^6.4.1",
|
||||
"@vercel/ncc": "^0.36.1",
|
||||
"eslint": "^8.47.0",
|
||||
"eslint-plugin-github": "^4.10.0",
|
||||
"eslint-plugin-github": "^4.9.2",
|
||||
"eslint-plugin-prettier": "^5.0.0",
|
||||
"prettier": "^3.0.3",
|
||||
"prettier": "^3.0.1",
|
||||
"typescript": "^5.2.2",
|
||||
"husky": "^8.0.3",
|
||||
"lint-staged": "^14.0.0"
|
||||
|
||||
@@ -4,9 +4,8 @@ PRECACHE=$2

NODE_URL=https://nodejs.org/dist
UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
NODE16_VERSION="16.20.2"
NODE20_VERSION="20.8.1"
NODE16_VERSION="16.20.1"
NODE20_VERSION="20.5.0"
# used only for win-arm64, remove node16 unofficial version when official version is available
NODE16_UNOFFICIAL_VERSION="16.20.0"

@@ -63,16 +62,17 @@ function acquireExternalTool() {
echo "Curl version: $CURL_VERSION"

# curl -f Fail silently (no output at all) on HTTP errors (H)
# -k Allow connections to SSL sites without certs (H)
# -S Show error. With -s, make curl show errors when they occur
# -L Follow redirects (H)
# -o FILE Write to FILE instead of stdout
# --retry 3 Retries transient errors 3 times (timeouts, 5xx)
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
# Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag
curl -fSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
else
# Curl version is greater than 7.71.0, running curl with --retry-all-errors flag
curl -fSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
fi

# Move the partial file to the download target.
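The `sort -V` test above is a common shell idiom for gating a flag on a minimum tool version: print both versions, sort them in version order, and see which one comes out first. A standalone sketch of the same idea (the helper name is illustrative, not part of this script):

```
# Succeeds when $1 is greater than or equal to $2 in version ordering.
version_ge() {
  [ "$(printf '%s\n' "$2" "$1" | sort -V | head -n1)" = "$2" ]
}

CURL_VERSION=$(curl --version | head -n1 | awk '{print $2}')
retry_flags=(--retry 3)
if version_ge "$CURL_VERSION" "7.71.0"; then
  # --retry-all-errors first shipped in curl 7.71.0.
  retry_flags+=(--retry-all-errors)
fi
echo "curl ${CURL_VERSION}: using ${retry_flags[*]}"
```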
@@ -175,9 +175,9 @@ fi
# Download the external tools for Linux PACKAGERUNTIMEs.
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
acquireExternalTool "$NODE_ALPINE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE20_VERSION}/alpine/x64/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
fi

if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then

@@ -2,7 +2,7 @@
SET UPDATEFILE=update.finished
"%~dp0\bin\Runner.Listener.exe" run %*

rem using `if %ERRORLEVEL% EQU N` instead of `if ERRORLEVEL N`
rem using `if %ERRORLEVEL% EQU N` insterad of `if ERRORLEVEL N`
rem `if ERRORLEVEL N` means: error level is N or MORE

if %ERRORLEVEL% EQU 0 (

57 src/Misc/runnercoreassets Normal file
@@ -0,0 +1,57 @@
|
||||
actions.runner.plist.template
|
||||
actions.runner.service.template
|
||||
checkScripts/downloadCert.js
|
||||
checkScripts/makeWebRequest.js
|
||||
darwin.svc.sh.template
|
||||
hashFiles/index.js
|
||||
installdependencies.sh
|
||||
macos-run-invoker.js
|
||||
Microsoft.IdentityModel.Logging.dll
|
||||
Microsoft.IdentityModel.Tokens.dll
|
||||
Minimatch.dll
|
||||
Newtonsoft.Json.Bson.dll
|
||||
Newtonsoft.Json.dll
|
||||
Runner.Common.deps.json
|
||||
Runner.Common.dll
|
||||
Runner.Common.pdb
|
||||
Runner.Listener
|
||||
Runner.Listener.deps.json
|
||||
Runner.Listener.dll
|
||||
Runner.Listener.exe
|
||||
Runner.Listener.pdb
|
||||
Runner.Listener.runtimeconfig.json
|
||||
Runner.PluginHost
|
||||
Runner.PluginHost.deps.json
|
||||
Runner.PluginHost.dll
|
||||
Runner.PluginHost.exe
|
||||
Runner.PluginHost.pdb
|
||||
Runner.PluginHost.runtimeconfig.json
|
||||
Runner.Plugins.deps.json
|
||||
Runner.Plugins.dll
|
||||
Runner.Plugins.pdb
|
||||
Runner.Sdk.deps.json
|
||||
Runner.Sdk.dll
|
||||
Runner.Sdk.pdb
|
||||
Runner.Worker
|
||||
Runner.Worker.deps.json
|
||||
Runner.Worker.dll
|
||||
Runner.Worker.exe
|
||||
Runner.Worker.pdb
|
||||
Runner.Worker.runtimeconfig.json
|
||||
RunnerService.exe
|
||||
RunnerService.exe.config
|
||||
RunnerService.js
|
||||
RunnerService.pdb
|
||||
runsvc.sh
|
||||
Sdk.deps.json
|
||||
Sdk.dll
|
||||
Sdk.pdb
|
||||
System.IdentityModel.Tokens.Jwt.dll
|
||||
System.Net.Http.Formatting.dll
|
||||
System.Security.Cryptography.Pkcs.dll
|
||||
System.Security.Cryptography.ProtectedData.dll
|
||||
System.ServiceProcess.ServiceController.dll
|
||||
systemd.svc.sh.template
|
||||
update.cmd.template
|
||||
update.sh.template
|
||||
YamlDotNet.dll
|
||||
269 src/Misc/runnerdotnetruntimeassets Normal file
@@ -0,0 +1,269 @@
|
||||
api-ms-win-core-console-l1-1-0.dll
|
||||
api-ms-win-core-console-l1-2-0.dll
|
||||
api-ms-win-core-datetime-l1-1-0.dll
|
||||
api-ms-win-core-debug-l1-1-0.dll
|
||||
api-ms-win-core-errorhandling-l1-1-0.dll
|
||||
api-ms-win-core-fibers-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-2-0.dll
|
||||
api-ms-win-core-file-l2-1-0.dll
|
||||
api-ms-win-core-handle-l1-1-0.dll
|
||||
api-ms-win-core-heap-l1-1-0.dll
|
||||
api-ms-win-core-interlocked-l1-1-0.dll
|
||||
api-ms-win-core-libraryloader-l1-1-0.dll
|
||||
api-ms-win-core-localization-l1-2-0.dll
|
||||
api-ms-win-core-memory-l1-1-0.dll
|
||||
api-ms-win-core-namedpipe-l1-1-0.dll
|
||||
api-ms-win-core-processenvironment-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-1.dll
|
||||
api-ms-win-core-profile-l1-1-0.dll
|
||||
api-ms-win-core-rtlsupport-l1-1-0.dll
|
||||
api-ms-win-core-string-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-2-0.dll
|
||||
api-ms-win-core-sysinfo-l1-1-0.dll
|
||||
api-ms-win-core-timezone-l1-1-0.dll
|
||||
api-ms-win-core-util-l1-1-0.dll
|
||||
api-ms-win-crt-conio-l1-1-0.dll
|
||||
api-ms-win-crt-convert-l1-1-0.dll
|
||||
api-ms-win-crt-environment-l1-1-0.dll
|
||||
api-ms-win-crt-filesystem-l1-1-0.dll
|
||||
api-ms-win-crt-heap-l1-1-0.dll
|
||||
api-ms-win-crt-locale-l1-1-0.dll
|
||||
api-ms-win-crt-math-l1-1-0.dll
|
||||
api-ms-win-crt-multibyte-l1-1-0.dll
|
||||
api-ms-win-crt-private-l1-1-0.dll
|
||||
api-ms-win-crt-process-l1-1-0.dll
|
||||
api-ms-win-crt-runtime-l1-1-0.dll
|
||||
api-ms-win-crt-stdio-l1-1-0.dll
|
||||
api-ms-win-crt-string-l1-1-0.dll
|
||||
api-ms-win-crt-time-l1-1-0.dll
|
||||
api-ms-win-crt-utility-l1-1-0.dll
|
||||
clrcompression.dll
|
||||
clretwrc.dll
|
||||
clrjit.dll
|
||||
coreclr.dll
|
||||
createdump
|
||||
createdump.exe
|
||||
dbgshim.dll
|
||||
hostfxr.dll
|
||||
hostpolicy.dll
|
||||
libclrjit.dylib
|
||||
libclrjit.so
|
||||
libcoreclr.dylib
|
||||
libcoreclr.so
|
||||
libcoreclrtraceptprovider.so
|
||||
libdbgshim.dylib
|
||||
libdbgshim.so
|
||||
libhostfxr.dylib
|
||||
libhostfxr.so
|
||||
libhostpolicy.dylib
|
||||
libhostpolicy.so
|
||||
libmscordaccore.dylib
|
||||
libmscordaccore.so
|
||||
libmscordbi.dylib
|
||||
libmscordbi.so
|
||||
Microsoft.CSharp.dll
|
||||
Microsoft.DiaSymReader.Native.amd64.dll
|
||||
Microsoft.DiaSymReader.Native.arm64.dll
|
||||
Microsoft.VisualBasic.Core.dll
|
||||
Microsoft.VisualBasic.dll
|
||||
Microsoft.Win32.Primitives.dll
|
||||
Microsoft.Win32.Registry.dll
|
||||
mscordaccore.dll
|
||||
mscordaccore_amd64_amd64_6.0.522.21309.dll
|
||||
mscordaccore_arm64_arm64_6.0.522.21309.dll
|
||||
mscordaccore_amd64_amd64_6.0.1322.58009.dll
mscordaccore_amd64_amd64_6.0.2023.32017.dll
mscordaccore_amd64_amd64_6.0.2223.42425.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll
mscorrc.dll
msquic.dll
netstandard.dll
SOS_README.md
System.AppContext.dll
System.Buffers.dll
System.Collections.Concurrent.dll
System.Collections.dll
System.Collections.Immutable.dll
System.Collections.NonGeneric.dll
System.Collections.Specialized.dll
System.ComponentModel.Annotations.dll
System.ComponentModel.DataAnnotations.dll
System.ComponentModel.dll
System.ComponentModel.EventBasedAsync.dll
System.ComponentModel.Primitives.dll
System.ComponentModel.TypeConverter.dll
System.Configuration.dll
System.Console.dll
System.Core.dll
System.Data.Common.dll
System.Data.DataSetExtensions.dll
System.Data.dll
System.Diagnostics.Contracts.dll
System.Diagnostics.Debug.dll
System.Diagnostics.DiagnosticSource.dll
System.Diagnostics.FileVersionInfo.dll
System.Diagnostics.Process.dll
System.Diagnostics.StackTrace.dll
System.Diagnostics.TextWriterTraceListener.dll
System.Diagnostics.Tools.dll
System.Diagnostics.TraceSource.dll
System.Diagnostics.Tracing.dll
System.dll
System.Drawing.dll
System.Drawing.Primitives.dll
System.Dynamic.Runtime.dll
System.Formats.Asn1.dll
System.Globalization.Calendars.dll
System.Globalization.dll
System.Globalization.Extensions.dll
System.Globalization.Native.dylib
System.Globalization.Native.so
System.IO.Compression.Brotli.dll
System.IO.Compression.dll
System.IO.Compression.FileSystem.dll
System.IO.Compression.Native.a
System.IO.Compression.Native.dll
System.IO.Compression.Native.dylib
System.IO.Compression.Native.so
System.IO.Compression.ZipFile.dll
System.IO.dll
System.IO.FileSystem.AccessControl.dll
System.IO.FileSystem.dll
System.IO.FileSystem.DriveInfo.dll
System.IO.FileSystem.Primitives.dll
System.IO.FileSystem.Watcher.dll
System.IO.IsolatedStorage.dll
System.IO.MemoryMappedFiles.dll
System.IO.Pipes.AccessControl.dll
System.IO.Pipes.dll
System.IO.UnmanagedMemoryStream.dll
System.Linq.dll
System.Linq.Expressions.dll
System.Linq.Parallel.dll
System.Linq.Queryable.dll
System.Memory.dll
System.Native.a
System.Native.dylib
System.Native.so
System.Net.dll
System.Net.Http.dll
System.Net.Http.Json.dll
System.Net.Http.Native.a
System.Net.Http.Native.dylib
System.Net.Http.Native.so
System.Net.HttpListener.dll
System.Net.Mail.dll
System.Net.NameResolution.dll
System.Net.NetworkInformation.dll
System.Net.Ping.dll
System.Net.Primitives.dll
System.Net.Quic.dll
System.Net.Requests.dll
System.Net.Security.dll
System.Net.Security.Native.a
System.Net.Security.Native.dylib
System.Net.Security.Native.so
System.Net.ServicePoint.dll
System.Net.Sockets.dll
System.Net.WebClient.dll
System.Net.WebHeaderCollection.dll
System.Net.WebProxy.dll
System.Net.WebSockets.Client.dll
System.Net.WebSockets.dll
System.Numerics.dll
System.Numerics.Vectors.dll
System.ObjectModel.dll
System.Private.CoreLib.dll
System.Private.DataContractSerialization.dll
System.Private.Uri.dll
System.Private.Xml.dll
System.Private.Xml.Linq.dll
System.Reflection.DispatchProxy.dll
System.Reflection.dll
System.Reflection.Emit.dll
System.Reflection.Emit.ILGeneration.dll
System.Reflection.Emit.Lightweight.dll
System.Reflection.Extensions.dll
System.Reflection.Metadata.dll
System.Reflection.Primitives.dll
System.Reflection.TypeExtensions.dll
System.Resources.Reader.dll
System.Resources.ResourceManager.dll
System.Resources.Writer.dll
System.Runtime.CompilerServices.Unsafe.dll
System.Runtime.CompilerServices.VisualC.dll
System.Runtime.dll
System.Runtime.Extensions.dll
System.Runtime.Handles.dll
System.Runtime.InteropServices.dll
System.Runtime.InteropServices.RuntimeInformation.dll
System.Runtime.InteropServices.WindowsRuntime.dll
System.Runtime.Intrinsics.dll
System.Runtime.Loader.dll
System.Runtime.Numerics.dll
System.Runtime.Serialization.dll
System.Runtime.Serialization.Formatters.dll
System.Runtime.Serialization.Json.dll
System.Runtime.Serialization.Primitives.dll
System.Runtime.Serialization.Xml.dll
System.Runtime.WindowsRuntime.dll
System.Runtime.WindowsRuntime.UI.Xaml.dll
System.Security.AccessControl.dll
System.Security.Claims.dll
System.Security.Cryptography.Algorithms.dll
System.Security.Cryptography.Cng.dll
System.Security.Cryptography.Csp.dll
System.Security.Cryptography.Encoding.dll
System.Security.Cryptography.Native.Apple.a
System.Security.Cryptography.Native.Apple.dylib
System.Security.Cryptography.Native.OpenSsl.a
System.Security.Cryptography.Native.OpenSsl.dylib
System.Security.Cryptography.Native.OpenSsl.so
System.Security.Cryptography.OpenSsl.dll
System.Security.Cryptography.Primitives.dll
System.Security.Cryptography.X509Certificates.dll
System.Security.Cryptography.XCertificates.dll
System.Security.dll
System.Security.Principal.dll
System.Security.Principal.Windows.dll
System.Security.SecureString.dll
System.ServiceModel.Web.dll
System.ServiceProcess.dll
System.Text.Encoding.CodePages.dll
System.Text.Encoding.dll
System.Text.Encoding.Extensions.dll
System.Text.Encodings.Web.dll
System.Text.Json.dll
System.Text.RegularExpressions.dll
System.Threading.Channels.dll
System.Threading.dll
System.Threading.Overlapped.dll
System.Threading.Tasks.Dataflow.dll
System.Threading.Tasks.dll
System.Threading.Tasks.Extensions.dll
System.Threading.Tasks.Parallel.dll
System.Threading.Thread.dll
System.Threading.ThreadPool.dll
System.Threading.Timer.dll
System.Transactions.dll
System.Transactions.Local.dll
System.ValueTuple.dll
System.Web.dll
System.Web.HttpUtility.dll
System.Windows.dll
System.Xml.dll
System.Xml.Linq.dll
System.Xml.ReaderWriter.dll
System.Xml.Serialization.dll
System.Xml.XDocument.dll
System.Xml.XmlDocument.dll
System.Xml.XmlSerializer.dll
System.Xml.XPath.dll
System.Xml.XPath.XDocument.dll
ucrtbase.dll
WindowsBase.dll
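The listing above enumerates the .NET runtime assets bundled with the runner. Later in this comparison, the self-update code hashes such directories and filters them against an embedded asset list (see CloneDotnetRuntime) using path-suffix matching. A minimal, hypothetical sketch of that matching idea, assuming nothing beyond standard .NET APIs (the class and method names here are illustrative, not from the repository):

using System;
using System.IO;
using System.Linq;

static class AssetListCheck
{
    // Returns files under 'directory' that are NOT covered by the asset list.
    // Suffix matching mirrors the EndsWith check used by CloneDotnetRuntime later in this diff.
    public static string[] FindUnlistedFiles(string directory, string[] assetList)
    {
        return Directory.EnumerateFiles(directory, "*", SearchOption.AllDirectories)
            .Where(file => !assetList.Any(asset =>
                file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar)
                    .EndsWith(asset.Trim(), StringComparison.OrdinalIgnoreCase)))
            .ToArray();
    }
}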
24  src/Misc/trimmedpackages_targz.json  Normal file
@@ -0,0 +1,24 @@
[
  {
    "HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
    "TrimmedContents": {
      "dotnetRuntime": "<RUNTIME_HASH>",
      "externals": "<EXTERNALS_HASH>"
    }
  },
  {
    "HashValue": "<NO_RUNTIME_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
    "TrimmedContents": {
      "dotnetRuntime": "<RUNTIME_HASH>"
    }
  },
  {
    "HashValue": "<NO_EXTERNALS_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
    "TrimmedContents": {
      "externals": "<EXTERNALS_HASH>"
    }
  }
]
24  src/Misc/trimmedpackages_zip.json  Normal file
@@ -0,0 +1,24 @@
[
  {
    "HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
    "TrimmedContents": {
      "dotnetRuntime": "<RUNTIME_HASH>",
      "externals": "<EXTERNALS_HASH>"
    }
  },
  {
    "HashValue": "<NO_RUNTIME_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
    "TrimmedContents": {
      "dotnetRuntime": "<RUNTIME_HASH>"
    }
  },
  {
    "HashValue": "<NO_EXTERNALS_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
    "TrimmedContents": {
      "externals": "<EXTERNALS_HASH>"
    }
  }
]
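The two metadata files above share the same shape and differ only in archive extension; placeholders such as <RUNNER_VERSION>, <RUNNER_PLATFORM>, and the *_HASH tokens are substituted at packaging time. A minimal sketch of how a consumer could model and load these entries. The type and property names below are assumptions for illustration; the runner's own PackageMetadata types are the authoritative shapes.

using System.Collections.Generic;
using System.IO;
using System.Text.Json;

public sealed class TrimmedPackageEntry
{
    public string HashValue { get; set; }
    public string DownloadUrl { get; set; }
    public Dictionary<string, string> TrimmedContents { get; set; }
}

public static class TrimmedPackageMetadata
{
    // Reads one of the trimmedpackages_*.json files into a list of entries.
    public static List<TrimmedPackageEntry> Load(string path)
    {
        var json = File.ReadAllText(path);
        return JsonSerializer.Deserialize<List<TrimmedPackageEntry>>(json);
    }
}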
@@ -17,7 +17,7 @@ namespace GitHub.Runner.Common
    {
        Task ConnectAsync(Uri serverUrl, VssCredentials credentials);

        Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate);
        Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version);
    }

    public sealed class BrokerServer : RunnerService, IBrokerServer
@@ -44,11 +44,11 @@ namespace GitHub.Runner.Common
            }
        }

        public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate)
        public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version)
        {
            CheckConnection();
            var jobMessage = RetryRequest<TaskAgentMessage>(
                async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken);
                async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, cancellationToken), cancellationToken);

            return jobMessage;
        }
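For context, the simplified broker call above is consumed from the listener's long-poll loop (a later hunk in this comparison shows the actual call site). A hedged sketch of such a loop, written against the types that appear in this diff; it is a call-site fragment for illustration, not repository code:

// Hypothetical listener loop using the simplified broker API shown above.
private async Task ListenAsync(IBrokerServer brokerServer, CancellationToken token)
{
    while (!token.IsCancellationRequested)
    {
        TaskAgentMessage message = await brokerServer.GetRunnerMessageAsync(
            token, TaskAgentStatus.Online, BuildConstants.RunnerPackage.Version);

        if (message == null)
        {
            continue; // long poll returned without a message; poll again
        }

        // ... hand the message to the job dispatcher ...
    }
}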
@@ -69,8 +69,6 @@ namespace GitHub.Runner.Common
|
||||
public static readonly OSPlatform Platform = OSPlatform.OSX;
|
||||
#elif OS_WINDOWS
|
||||
public static readonly OSPlatform Platform = OSPlatform.Windows;
|
||||
#else
|
||||
public static readonly OSPlatform Platform = OSPlatform.Linux;
|
||||
#endif
|
||||
|
||||
#if X86
|
||||
@@ -81,8 +79,6 @@ namespace GitHub.Runner.Common
|
||||
public static readonly Architecture PlatformArchitecture = Architecture.Arm;
|
||||
#elif ARM64
|
||||
public static readonly Architecture PlatformArchitecture = Architecture.Arm64;
|
||||
#else
|
||||
public static readonly Architecture PlatformArchitecture = Architecture.X64;
|
||||
#endif
|
||||
|
||||
public static readonly TimeSpan ExitOnUnloadTimeout = TimeSpan.FromSeconds(30);
|
||||
@@ -175,9 +171,6 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
|
||||
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
|
||||
public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
|
||||
public static readonly string DetectedNodeAfterEndOfLifeMessage = "Node.js 16 actions are deprecated. Please update the following actions to use Node.js 20: {0}. For more information see: https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/.";
|
||||
public static readonly string DeprecatedNodeDetectedAfterEndOfLifeActions = "DeprecatedNodeActionsMessageWarnings";
|
||||
public static readonly string DeprecatedNodeVersion = "node16";
|
||||
public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/";
|
||||
public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings";
|
||||
}
|
||||
@@ -261,7 +254,6 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
|
||||
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
|
||||
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
|
||||
public static readonly string ActionArchiveCacheDirectory = "ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE";
|
||||
}
|
||||
|
||||
public static class System
|
||||
|
||||
@@ -200,10 +200,6 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
|
||||
}
|
||||
else
|
||||
{
|
||||
_userAgents.Add(new ProductInfoHeaderValue("HttpProxyConfigured", bool.TrueString));
|
||||
}
|
||||
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||
{
|
||||
|
||||
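The hunk above is truncated right after the GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY check. The usual effect of such a switch is to disable server-certificate validation on the runner's HTTP handler; the sketch below shows that generic pattern under that assumption and is not the repository's exact implementation:

using System;
using System.Net.Http;

static HttpClientHandler CreateHandler()
{
    var handler = new HttpClientHandler();

    // WARNING: only for debugging against servers with untrusted certificates.
    if (string.Equals(
            Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY"),
            "true",
            StringComparison.OrdinalIgnoreCase))
    {
        handler.ServerCertificateCustomValidationCallback = (message, cert, chain, errors) => true;
    }

    return handler;
}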
@@ -1,7 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
@@ -15,11 +14,10 @@ namespace GitHub.Runner.Common
|
||||
[ServiceLocator(Default = typeof(JobServerQueue))]
|
||||
public interface IJobServerQueue : IRunnerService, IThrottlingReporter
|
||||
{
|
||||
IList<JobTelemetry> JobTelemetries { get; }
|
||||
TaskCompletionSource<int> JobRecordUpdated { get; }
|
||||
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
||||
Task ShutdownAsync();
|
||||
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false);
|
||||
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false);
|
||||
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
|
||||
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
|
||||
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
|
||||
@@ -71,18 +69,13 @@ namespace GitHub.Runner.Common
|
||||
private Task[] _allDequeueTasks;
|
||||
private readonly TaskCompletionSource<int> _jobCompletionSource = new();
|
||||
private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
|
||||
private readonly List<JobTelemetry> _jobTelemetries = new();
|
||||
private bool _queueInProcess = false;
|
||||
private bool _resultsServiceOnly = false;
|
||||
private Stopwatch _resultsUploadTimer = new();
|
||||
private Stopwatch _actionsUploadTimer = new();
|
||||
|
||||
public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;
|
||||
|
||||
public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
||||
|
||||
public IList<JobTelemetry> JobTelemetries => _jobTelemetries;
|
||||
|
||||
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
|
||||
// Then the dequeue will happen every 500ms.
|
||||
// In this way, customer still can get instance live console output on job start,
|
||||
@@ -94,7 +87,6 @@ namespace GitHub.Runner.Common
|
||||
private bool _firstConsoleOutputs = true;
|
||||
|
||||
private bool _resultsClientInitiated = false;
|
||||
private bool _enableTelemetry = false;
|
||||
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
@@ -104,15 +96,14 @@ namespace GitHub.Runner.Common
|
||||
_resultsServer = hostContext.GetService<IResultsServer>();
|
||||
}
|
||||
|
||||
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false)
|
||||
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false)
|
||||
{
|
||||
Trace.Entering();
|
||||
_resultsServiceOnly = resultsServiceOnly;
|
||||
_enableTelemetry = enableTelemetry;
|
||||
_resultsServiceOnly = resultServiceOnly;
|
||||
|
||||
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
if (!resultsServiceOnly)
|
||||
if (!resultServiceOnly)
|
||||
{
|
||||
_jobServer.InitializeWebsocketClient(serviceEndPoint);
|
||||
}
|
||||
@@ -128,14 +119,14 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
string liveConsoleFeedUrl = null;
|
||||
Trace.Info("Initializing results client");
|
||||
if (resultsServiceOnly
|
||||
if (resultServiceOnly
|
||||
&& serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
|
||||
&& !string.IsNullOrEmpty(feedStreamUrl))
|
||||
{
|
||||
liveConsoleFeedUrl = feedStreamUrl;
|
||||
}
|
||||
jobRequest.Variables.TryGetValue("system.github.results_upload_with_sdk", out VariableValue resultsUseSdkVariable);
|
||||
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken, StringUtil.ConvertToBoolean(resultsUseSdkVariable?.Value));
|
||||
|
||||
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
|
||||
_resultsClientInitiated = true;
|
||||
}
|
||||
|
||||
@@ -220,12 +211,6 @@ namespace GitHub.Runner.Common
|
||||
await _resultsServer.DisposeAsync();
|
||||
|
||||
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
var uploadTimeComparison = $"Actions upload time: {_actionsUploadTimer.ElapsedMilliseconds} ms, Result upload time: {_resultsUploadTimer.ElapsedMilliseconds} ms";
|
||||
Trace.Info(uploadTimeComparison);
|
||||
_jobTelemetries.Add(new JobTelemetry() { Type = JobTelemetryType.General, Message = uploadTimeComparison });
|
||||
}
|
||||
}
|
||||
|
||||
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
|
||||
@@ -471,10 +456,6 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
try
|
||||
{
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
_actionsUploadTimer.Start();
|
||||
}
|
||||
await UploadFile(file);
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -490,13 +471,6 @@ namespace GitHub.Runner.Common
|
||||
// _fileUploadQueue.Enqueue(file);
|
||||
//}
|
||||
}
|
||||
finally
|
||||
{
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
_actionsUploadTimer.Stop();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
|
||||
@@ -543,10 +517,6 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
try
|
||||
{
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
_resultsUploadTimer.Start();
|
||||
}
|
||||
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
await UploadSummaryFile(file);
|
||||
@@ -571,19 +541,10 @@ namespace GitHub.Runner.Common
|
||||
Trace.Error(ex);
|
||||
errorCount++;
|
||||
|
||||
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs
|
||||
if (!_resultsServiceOnly)
|
||||
{
|
||||
_resultsClientInitiated = false;
|
||||
SendResultsTelemetry(ex);
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
if (_enableTelemetry)
|
||||
{
|
||||
_resultsUploadTimer.Stop();
|
||||
}
|
||||
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
|
||||
_resultsClientInitiated = false;
|
||||
|
||||
SendResultsTelemetry(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -699,11 +660,9 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Trace.Info("Catch exception during update steps, skip update Results.");
|
||||
Trace.Error(e);
|
||||
if (!_resultsServiceOnly)
|
||||
{
|
||||
_resultsClientInitiated = false;
|
||||
SendResultsTelemetry(e);
|
||||
}
|
||||
_resultsClientInitiated = false;
|
||||
|
||||
SendResultsTelemetry(e);
|
||||
}
|
||||
|
||||
if (_bufferedRetryRecords.Remove(update.TimelineId))
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Net.WebSockets;
|
||||
using System.Security;
|
||||
@@ -19,7 +18,7 @@ namespace GitHub.Runner.Common
|
||||
[ServiceLocator(Default = typeof(ResultServer))]
|
||||
public interface IResultsServer : IRunnerService, IAsyncDisposable
|
||||
{
|
||||
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk);
|
||||
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);
|
||||
|
||||
Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
|
||||
|
||||
@@ -51,10 +50,10 @@ namespace GitHub.Runner.Common
|
||||
private String _liveConsoleFeedUrl;
|
||||
private string _token;
|
||||
|
||||
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk)
|
||||
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
|
||||
{
|
||||
this._resultsClient = CreateHttpClient(uri, token, useSdk);
|
||||
|
||||
var httpMessageHandler = HostContext.CreateHttpClientHandler();
|
||||
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
|
||||
_token = token;
|
||||
if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
|
||||
{
|
||||
@@ -63,26 +62,6 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public ResultsHttpClient CreateHttpClient(Uri uri, string token, bool useSdk)
|
||||
{
|
||||
// Using default 100 timeout
|
||||
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
|
||||
|
||||
// Create retry handler
|
||||
IEnumerable<DelegatingHandler> delegatingHandlers = new List<DelegatingHandler>();
|
||||
if (settings.MaxRetryRequest > 0)
|
||||
{
|
||||
delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) };
|
||||
}
|
||||
|
||||
// Setup RawHttpMessageHandler without credentials
|
||||
var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings);
|
||||
|
||||
var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
|
||||
|
||||
return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true, useSdk: useSdk);
|
||||
}
|
||||
|
||||
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
|
||||
@@ -224,7 +224,7 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
Trace.Error($"{errorMessage} -- Attempt: {retry}");
|
||||
Trace.Error($"{errorMessage} -- Atempt: {retry}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -38,7 +38,7 @@ namespace GitHub.Runner.Common
|
||||
Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken);
|
||||
Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken);
|
||||
Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken);
|
||||
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken);
|
||||
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken);
|
||||
|
||||
// job request
|
||||
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
|
||||
@@ -272,10 +272,10 @@ namespace GitHub.Runner.Common
|
||||
return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
|
||||
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.MessageQueue);
|
||||
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, os, architecture, disableUpdate, cancellationToken: cancellationToken);
|
||||
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------
|
||||
|
||||
@@ -73,12 +73,7 @@ namespace GitHub.Runner.Listener
|
||||
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
||||
try
|
||||
{
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token,
|
||||
runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
_settings.DisableUpdate);
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version);
|
||||
|
||||
if (message == null)
|
||||
{
|
||||
|
||||
@@ -39,7 +39,6 @@ namespace GitHub.Runner.Listener.Check
|
||||
string githubApiUrl = null;
|
||||
string actionsTokenServiceUrl = null;
|
||||
string actionsPipelinesServiceUrl = null;
|
||||
string resultsReceiverServiceUrl = null;
|
||||
var urlBuilder = new UriBuilder(url);
|
||||
if (UrlUtil.IsHostedServer(urlBuilder))
|
||||
{
|
||||
@@ -48,7 +47,6 @@ namespace GitHub.Runner.Listener.Check
|
||||
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
|
||||
actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
|
||||
resultsReceiverServiceUrl = "https://results-receiver.actions.githubusercontent.com/health";
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -58,31 +56,13 @@ namespace GitHub.Runner.Listener.Check
|
||||
actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
urlBuilder.Path = "_services/pipelines/_apis/health";
|
||||
actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
resultsReceiverServiceUrl = string.Empty; // we don't have Results service in GHES yet.
|
||||
}
|
||||
|
||||
var codeLoadUrlBuilder = new UriBuilder(url);
|
||||
codeLoadUrlBuilder.Host = $"codeload.{codeLoadUrlBuilder.Host}";
|
||||
codeLoadUrlBuilder.Path = "_ping";
|
||||
|
||||
// check github api
|
||||
checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
// check github codeload
|
||||
checkTasks.Add(CheckUtil.CheckDns(codeLoadUrlBuilder.Uri.AbsoluteUri));
|
||||
checkTasks.Add(CheckUtil.CheckPing(codeLoadUrlBuilder.Uri.AbsoluteUri));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(codeLoadUrlBuilder.Uri.AbsoluteUri, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
// check results-receiver service
|
||||
if (!string.IsNullOrEmpty(resultsReceiverServiceUrl))
|
||||
{
|
||||
checkTasks.Add(CheckUtil.CheckDns(resultsReceiverServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(resultsReceiverServiceUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(resultsReceiverServiceUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
}
|
||||
|
||||
// check actions token service
|
||||
checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));
|
||||
|
||||
@@ -248,7 +248,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
else
|
||||
{
|
||||
agents = await _runnerServer.GetAgentsAsync(runnerSettings.AgentName);
|
||||
agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
|
||||
}
|
||||
|
||||
Trace.Verbose("Returns {0} agents", agents.Count);
|
||||
@@ -744,7 +744,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Failed to get JIT runner token -- Attempt: {retryCount}");
|
||||
Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
@@ -807,7 +807,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Failed to get tenant credentials -- Attempt: {retryCount}");
|
||||
Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
if (!store.HasCredentials())
|
||||
{
|
||||
throw new InvalidOperationException("Credentials not stored. Must reconfigure.");
|
||||
throw new InvalidOperationException("Credentials not stored. Must reconfigure.");
|
||||
}
|
||||
|
||||
CredentialData credData = store.GetCredentials();
|
||||
|
||||
@@ -514,25 +514,9 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
failureActions.Add(new FailureAction(RecoverAction.Restart, 60000));
|
||||
|
||||
// Lock the Service Database
|
||||
int svcLockRetries = 10;
|
||||
int svcLockRetryTimeout = 5000;
|
||||
while (true)
|
||||
svcLock = LockServiceDatabase(scmHndl);
|
||||
if (svcLock.ToInt64() <= 0)
|
||||
{
|
||||
svcLock = LockServiceDatabase(scmHndl);
|
||||
if (svcLock.ToInt64() > 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
_term.WriteLine("Retrying Lock Service Database...");
|
||||
|
||||
svcLockRetries--;
|
||||
if (svcLockRetries > 0)
|
||||
{
|
||||
Thread.Sleep(svcLockRetryTimeout);
|
||||
continue;
|
||||
}
|
||||
|
||||
throw new Exception("Failed to Lock Service Database for Write");
|
||||
}
|
||||
|
||||
|
||||
@@ -98,7 +98,7 @@ namespace GitHub.Runner.Listener
|
||||
Guid dispatchedJobId = _jobDispatchedQueue.Dequeue();
|
||||
if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
|
||||
{
|
||||
Trace.Verbose($"Retrive previous WorkerDispatcher for job {currentDispatch.JobId}.");
|
||||
Trace.Verbose($"Retrive previous WorkerDispather for job {currentDispatch.JobId}.");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,12 +162,12 @@ namespace GitHub.Runner.Listener
|
||||
dispatchedJobId = _jobDispatchedQueue.Dequeue();
|
||||
if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
|
||||
{
|
||||
Trace.Verbose($"Retrive previous WorkerDispatcher for job {currentDispatch.JobId}.");
|
||||
Trace.Verbose($"Retrive previous WorkerDispather for job {currentDispatch.JobId}.");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Verbose($"There is no running WorkerDispatcher needs to await.");
|
||||
Trace.Verbose($"There is no running WorkerDispather needs to await.");
|
||||
}
|
||||
|
||||
if (currentDispatch != null)
|
||||
@@ -176,7 +176,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
try
|
||||
{
|
||||
Trace.Info($"Waiting WorkerDispatcher for job {currentDispatch.JobId} run to finish.");
|
||||
Trace.Info($"Waiting WorkerDispather for job {currentDispatch.JobId} run to finish.");
|
||||
await currentDispatch.WorkerDispatch;
|
||||
Trace.Info($"Job request {currentDispatch.JobId} processed succeed.");
|
||||
}
|
||||
@@ -190,7 +190,7 @@ namespace GitHub.Runner.Listener
|
||||
WorkerDispatcher workerDispatcher;
|
||||
if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
|
||||
{
|
||||
Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
|
||||
Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
|
||||
workerDispatcher.Dispose();
|
||||
}
|
||||
}
|
||||
@@ -209,7 +209,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
try
|
||||
{
|
||||
Trace.Info($"Ensure WorkerDispatcher for job {currentDispatch.JobId} run to finish, cancel any running job.");
|
||||
Trace.Info($"Ensure WorkerDispather for job {currentDispatch.JobId} run to finish, cancel any running job.");
|
||||
await EnsureDispatchFinished(currentDispatch, cancelRunningJob: true);
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -222,7 +222,7 @@ namespace GitHub.Runner.Listener
|
||||
WorkerDispatcher workerDispatcher;
|
||||
if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
|
||||
{
|
||||
Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
|
||||
Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
|
||||
workerDispatcher.Dispose();
|
||||
}
|
||||
}
|
||||
@@ -327,7 +327,7 @@ namespace GitHub.Runner.Listener
|
||||
WorkerDispatcher workerDispatcher;
|
||||
if (_jobInfos.TryRemove(jobDispatch.JobId, out workerDispatcher))
|
||||
{
|
||||
Trace.Verbose($"Remove WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {jobDispatch.JobId}.");
|
||||
Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {jobDispatch.JobId}.");
|
||||
workerDispatcher.Dispose();
|
||||
}
|
||||
}
|
||||
@@ -1134,15 +1134,6 @@ namespace GitHub.Runner.Listener
|
||||
jobRecord.ErrorCount++;
|
||||
jobRecord.Issues.Add(unhandledExceptionIssue);
|
||||
|
||||
if (message.Variables.TryGetValue("DistributedTask.MarkJobAsFailedOnWorkerCrash", out var markJobAsFailedOnWorkerCrash) &&
|
||||
StringUtil.ConvertToBoolean(markJobAsFailedOnWorkerCrash?.Value))
|
||||
{
|
||||
Trace.Info("Mark the job as failed since the worker crashed");
|
||||
jobRecord.Result = TaskResult.Failed;
|
||||
// mark the job as completed so service will pickup the result
|
||||
jobRecord.State = TimelineRecordState.Completed;
|
||||
}
|
||||
|
||||
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
|
||||
@@ -9,7 +9,6 @@ using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
@@ -129,7 +128,7 @@ namespace GitHub.Runner.Listener
|
||||
// "invalid_client" means the runner registration has been deleted from the server.
|
||||
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -140,7 +139,7 @@ namespace GitHub.Runner.Listener
|
||||
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
|
||||
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -220,9 +219,6 @@ namespace GitHub.Runner.Listener
|
||||
_lastMessageId,
|
||||
runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
_settings.DisableUpdate,
|
||||
_getMessagesTokenSource.Token);
|
||||
|
||||
// Decrypt the message body if the session is using encryption
|
||||
|
||||
@@ -25,6 +25,12 @@
|
||||
<PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<EmbeddedResource Include="..\Misc\runnercoreassets">
|
||||
<LogicalName>GitHub.Runner.Listener.runnercoreassets</LogicalName>
|
||||
</EmbeddedResource>
|
||||
</ItemGroup>
|
||||
|
||||
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
|
||||
<DebugType>portable</DebugType>
|
||||
</PropertyGroup>
|
||||
|
||||
@@ -457,13 +457,22 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
message = await getNextMessage; //get next message
|
||||
HostContext.WritePerfCounter($"MessageReceived_{message.MessageType}");
|
||||
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
|
||||
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(message.MessageType, RunnerRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (autoUpdateInProgress == false)
|
||||
{
|
||||
autoUpdateInProgress = true;
|
||||
AgentRefreshMessage runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
|
||||
|
||||
AgentRefreshMessage runnerUpdateMessage = null;
|
||||
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
|
||||
}
|
||||
else
|
||||
{
|
||||
var brokerRunnerUpdateMessage = JsonUtility.FromString<RunnerRefreshMessage>(message.Body);
|
||||
runnerUpdateMessage = new AgentRefreshMessage(brokerRunnerUpdateMessage.RunnerId, brokerRunnerUpdateMessage.TargetVersion, TimeSpan.FromSeconds(brokerRunnerUpdateMessage.TimeoutInSeconds));
|
||||
}
|
||||
#if DEBUG
|
||||
// Can mock the update for testing
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
|
||||
@@ -494,22 +503,6 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info("Refresh message received, skip autoupdate since a previous autoupdate is already running.");
|
||||
}
|
||||
}
|
||||
else if (string.Equals(message.MessageType, RunnerRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (autoUpdateInProgress == false)
|
||||
{
|
||||
autoUpdateInProgress = true;
|
||||
RunnerRefreshMessage brokerRunnerUpdateMessage = JsonUtility.FromString<RunnerRefreshMessage>(message.Body);
|
||||
|
||||
var selfUpdater = HostContext.GetService<ISelfUpdaterV2>();
|
||||
selfUpdateTask = selfUpdater.SelfUpdate(brokerRunnerUpdateMessage, jobDispatcher, false, HostContext.RunnerShutdownToken);
|
||||
Trace.Info("Refresh message received, kick-off selfupdate background process.");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info("Refresh message received, skip autoupdate since a previous autoupdate is already running.");
|
||||
}
|
||||
}
|
||||
else if (string.Equals(message.MessageType, JobRequestMessageTypes.PipelineAgentJobRequest, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (autoUpdateInProgress || runOnceJobReceived)
|
||||
|
||||
@@ -6,11 +6,13 @@ using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Reflection;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
@@ -28,6 +30,9 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
private static string _packageType = "agent";
|
||||
private static string _platform = BuildConstants.RunnerPackage.PackageName;
|
||||
private static string _dotnetRuntime = "dotnetRuntime";
|
||||
private static string _externals = "externals";
|
||||
private readonly Dictionary<string, string> _contentHashes = new();
|
||||
|
||||
private PackageMetadata _targetPackage;
|
||||
private ITerminal _terminal;
|
||||
@@ -35,6 +40,10 @@ namespace GitHub.Runner.Listener
|
||||
private int _poolId;
|
||||
private ulong _agentId;
|
||||
private readonly ConcurrentQueue<string> _updateTrace = new();
|
||||
private Task _cloneAndCalculateContentHashTask;
|
||||
private string _dotnetRuntimeCloneDirectory;
|
||||
private string _externalsCloneDirectory;
|
||||
|
||||
public bool Busy { get; private set; }
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
@@ -47,6 +56,8 @@ namespace GitHub.Runner.Listener
|
||||
var settings = configStore.GetSettings();
|
||||
_poolId = settings.PoolId;
|
||||
_agentId = settings.AgentId;
|
||||
_dotnetRuntimeCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__dotnet_runtime__");
|
||||
_externalsCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals__");
|
||||
}
|
||||
|
||||
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
|
||||
@@ -56,6 +67,13 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
var totalUpdateTime = Stopwatch.StartNew();
|
||||
|
||||
// Copy dotnet runtime and externals of current runner to a temp folder
|
||||
// So we can re-use them with trimmed runner package, if possible.
|
||||
// This process is best effort, if we can't use trimmed runner package,
|
||||
// we will just go with the full package.
|
||||
var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
||||
_cloneAndCalculateContentHashTask = CloneAndCalculateAssetsHash(_dotnetRuntimeCloneDirectory, _externalsCloneDirectory, linkedTokenSource.Token);
|
||||
|
||||
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
|
||||
{
|
||||
Trace.Info($"Can't find available update package.");
|
||||
@@ -69,6 +87,24 @@ namespace GitHub.Runner.Listener
|
||||
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
|
||||
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
|
||||
|
||||
if (_targetPackage.TrimmedPackages?.Count > 0)
|
||||
{
|
||||
// wait for cloning assets task to finish only if we have trimmed packages
|
||||
await _cloneAndCalculateContentHashTask;
|
||||
}
|
||||
else
|
||||
{
|
||||
linkedTokenSource.Cancel();
|
||||
try
|
||||
{
|
||||
await _cloneAndCalculateContentHashTask;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Info($"Ingore errors after cancelling cloning assets task: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
await DownloadLatestRunner(token, updateMessage.TargetVersion);
|
||||
Trace.Info($"Download latest runner and unzip into runner root.");
|
||||
|
||||
@@ -182,8 +218,54 @@ namespace GitHub.Runner.Listener
|
||||
string archiveFile = null;
|
||||
var packageDownloadUrl = _targetPackage.DownloadUrl;
|
||||
var packageHashValue = _targetPackage.HashValue;
|
||||
var runtimeTrimmed = false;
|
||||
var externalsTrimmed = false;
|
||||
var fallbackToFullPackage = false;
|
||||
|
||||
// Only try trimmed package if sever sends them and we have calculated hash value of the current runtime/externals.
|
||||
if (_contentHashes.Count == 2 &&
|
||||
_contentHashes.ContainsKey(_dotnetRuntime) &&
|
||||
_contentHashes.ContainsKey(_externals) &&
|
||||
_targetPackage.TrimmedPackages?.Count > 0)
|
||||
{
|
||||
Trace.Info($"Current runner content hash: {StringUtil.ConvertToJson(_contentHashes)}");
|
||||
Trace.Info($"Trimmed packages info from service: {StringUtil.ConvertToJson(_targetPackage.TrimmedPackages)}");
|
||||
// Try to see whether we can use any size trimmed down package to speed up runner updates.
|
||||
foreach (var trimmedPackage in _targetPackage.TrimmedPackages)
|
||||
{
|
||||
if (trimmedPackage.TrimmedContents.Count == 2 &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_dotnetRuntime, out var trimmedRuntimeHash) &&
|
||||
trimmedRuntimeHash == _contentHashes[_dotnetRuntime] &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_externals, out var trimmedExternalsHash) &&
|
||||
trimmedExternalsHash == _contentHashes[_externals])
|
||||
{
|
||||
Trace.Info($"Use trimmed (runtime+externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
|
||||
packageDownloadUrl = trimmedPackage.DownloadUrl;
|
||||
packageHashValue = trimmedPackage.HashValue;
|
||||
runtimeTrimmed = true;
|
||||
externalsTrimmed = true;
|
||||
break;
|
||||
}
|
||||
else if (trimmedPackage.TrimmedContents.Count == 1 &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_externals, out trimmedExternalsHash) &&
|
||||
trimmedExternalsHash == _contentHashes[_externals])
|
||||
{
|
||||
Trace.Info($"Use trimmed (externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
|
||||
packageDownloadUrl = trimmedPackage.DownloadUrl;
|
||||
packageHashValue = trimmedPackage.HashValue;
|
||||
externalsTrimmed = true;
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Can't use trimmed package from '{trimmedPackage.DownloadUrl}' since the current runner does not carry those trimmed content (Hash mismatch).");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
_updateTrace.Enqueue($"RuntimeTrimmed: {runtimeTrimmed}");
|
||||
_updateTrace.Enqueue($"ExternalsTrimmed: {externalsTrimmed}");
|
||||
|
||||
try
|
||||
{
|
||||
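The selection logic in the hunk above boils down to: prefer a runtime+externals-trimmed package when both local content hashes match, otherwise accept an externals-only trimmed package when the externals hash matches, otherwise keep the full package. A condensed, hypothetical helper expressing a more general form of that rule, reusing the TrimmedPackageEntry shape sketched earlier (the actual code only accepts the two combinations named above):

using System.Collections.Generic;

static class TrimmedPackageSelector
{
    // Returns the first trimmed package whose trimmed contents all exist locally
    // with identical hashes; null means "fall back to the full package".
    public static TrimmedPackageEntry Select(
        IList<TrimmedPackageEntry> trimmedPackages,
        IReadOnlyDictionary<string, string> localHashes)
    {
        foreach (var package in trimmedPackages)
        {
            var allMatch = true;
            foreach (var content in package.TrimmedContents)
            {
                if (!localHashes.TryGetValue(content.Key, out var hash) || hash != content.Value)
                {
                    allMatch = false;
                    break;
                }
            }

            if (allMatch)
            {
                return package;
            }
        }

        return null;
    }
}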
@@ -241,6 +323,12 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
catch (Exception ex) when (runtimeTrimmed || externalsTrimmed)
|
||||
{
|
||||
// if anything failed when we use trimmed package (download/validatehase/extract), try again with the full runner package.
|
||||
Trace.Error($"Fail to download latest runner using trimmed package: {ex}");
|
||||
fallbackToFullPackage = true;
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
@@ -259,6 +347,74 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
var trimmedPackageRestoreTasks = new List<Task<bool>>();
|
||||
if (!fallbackToFullPackage)
|
||||
{
|
||||
// Skip restoring externals and runtime if we are going to fullback to the full package.
|
||||
if (externalsTrimmed)
|
||||
{
|
||||
trimmedPackageRestoreTasks.Add(RestoreTrimmedExternals(latestRunnerDirectory, token));
|
||||
}
|
||||
if (runtimeTrimmed)
|
||||
{
|
||||
trimmedPackageRestoreTasks.Add(RestoreTrimmedDotnetRuntime(latestRunnerDirectory, token));
|
||||
}
|
||||
}
|
||||
|
||||
if (trimmedPackageRestoreTasks.Count > 0)
|
||||
{
|
||||
var restoreResults = await Task.WhenAll(trimmedPackageRestoreTasks);
|
||||
if (restoreResults.Any(x => x == false))
|
||||
{
|
||||
// if any of the restore failed, fallback to full package.
|
||||
fallbackToFullPackage = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (fallbackToFullPackage)
|
||||
{
|
||||
Trace.Error("Something wrong with the trimmed runner package, failback to use the full package for runner updates.");
|
||||
_updateTrace.Enqueue($"FallbackToFullPackage: {fallbackToFullPackage}");
|
||||
|
||||
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
|
||||
Directory.CreateDirectory(latestRunnerDirectory);
|
||||
|
||||
packageDownloadUrl = _targetPackage.DownloadUrl;
|
||||
packageHashValue = _targetPackage.HashValue;
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
|
||||
try
|
||||
{
|
||||
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
|
||||
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
|
||||
}
|
||||
|
||||
await ValidateRunnerHash(archiveFile, packageHashValue);
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
// delete .zip file
|
||||
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
|
||||
{
|
||||
Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile);
|
||||
IOUtil.DeleteFile(archiveFile);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
//it is not critical if we fail to delete the .zip file
|
||||
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await CopyLatestRunnerToRoot(latestRunnerDirectory, token);
|
||||
}
|
||||
|
||||
@@ -639,5 +795,330 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Catch exception during report update state, ignore this error and continue auto-update.");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> RestoreTrimmedExternals(string downloadDirectory, CancellationToken token)
|
||||
{
|
||||
// Copy the current runner's externals if we are using a externals trimmed package
|
||||
// Execute the node.js to make sure the copied externals is working.
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Copy {_externalsCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory)}.");
|
||||
IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token);
|
||||
|
||||
// try run node.js to see if current node.js works fine after copy over to new location.
|
||||
var nodeVersions = NodeUtil.BuiltInNodeVersions;
|
||||
foreach (var nodeVersion in nodeVersions)
|
||||
{
|
||||
var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}");
|
||||
if (File.Exists(newNodeBinary))
|
||||
{
|
||||
using (var p = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
var outputs = "";
|
||||
p.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Error(data.Data);
|
||||
}
|
||||
};
|
||||
p.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
outputs = data.Data;
|
||||
}
|
||||
};
|
||||
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newNodeBinary, $"-e \"console.log('{nameof(RestoreTrimmedExternals)}')\"", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"{newNodeBinary} -e \"console.log()\" failed with exit code {exitCode}");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(outputs, nameof(RestoreTrimmedExternals), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Error($"{newNodeBinary} -e \"console.log()\" did not output expected content.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to restore externals for trimmed package: {ex}");
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(RestoreTrimmedExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> RestoreTrimmedDotnetRuntime(string downloadDirectory, CancellationToken token)
|
||||
{
|
||||
// Copy the current runner's dotnet runtime if we are using a dotnet runtime trimmed package
|
||||
// Execute the runner.listener to make sure the copied runtime is working.
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Copy {_dotnetRuntimeCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.BinDirectory)}.");
|
||||
IOUtil.CopyDirectory(_dotnetRuntimeCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.BinDirectory), token);
|
||||
|
||||
// try run the runner executable to see if current dotnet runtime + future runner binary works fine.
|
||||
var newRunnerBinary = Path.Combine(downloadDirectory, Constants.Path.BinDirectory, "Runner.Listener");
|
||||
using (var p = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
p.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Error(data.Data);
|
||||
}
|
||||
};
|
||||
p.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
}
|
||||
};
|
||||
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newRunnerBinary, "--version", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"{newRunnerBinary} --version failed with exit code {exitCode}");
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to restore dotnet runtime for trimmed package: {ex}");
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(RestoreTrimmedDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task CloneAndCalculateAssetsHash(string dotnetRuntimeCloneDirectory, string externalsCloneDirectory, CancellationToken token)
|
||||
{
|
||||
var runtimeCloneTask = CloneDotnetRuntime(dotnetRuntimeCloneDirectory, token);
|
||||
var externalsCloneTask = CloneExternals(externalsCloneDirectory, token);
|
||||
|
||||
var waitingTasks = new Dictionary<string, Task>()
|
||||
{
|
||||
{nameof(CloneDotnetRuntime), runtimeCloneTask},
|
||||
{nameof(CloneExternals),externalsCloneTask}
|
||||
};
|
||||
|
||||
while (waitingTasks.Count > 0)
|
||||
{
|
||||
Trace.Info($"Waiting for {waitingTasks.Count} tasks to complete.");
|
||||
var complatedTask = await Task.WhenAny(waitingTasks.Values);
|
||||
if (waitingTasks.ContainsKey(nameof(CloneExternals)) &&
|
||||
complatedTask == waitingTasks[nameof(CloneExternals)])
|
||||
{
|
||||
Trace.Info($"Externals clone finished.");
|
||||
waitingTasks.Remove(nameof(CloneExternals));
|
||||
try
|
||||
{
|
||||
if (await externalsCloneTask && !token.IsCancellationRequested)
|
||||
{
|
||||
var externalsHash = await HashFiles(externalsCloneDirectory, token);
|
||||
Trace.Info($"Externals content hash: {externalsHash}");
|
||||
_contentHashes[_externals] = externalsHash;
|
||||
_updateTrace.Enqueue($"ExternalsHash: {_contentHashes[_externals]}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Skip compute hash since clone externals failed/cancelled.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to hash externals content: {ex}");
|
||||
}
|
||||
}
|
||||
else if (waitingTasks.ContainsKey(nameof(CloneDotnetRuntime)) &&
|
||||
complatedTask == waitingTasks[nameof(CloneDotnetRuntime)])
|
||||
{
|
||||
Trace.Info($"Dotnet runtime clone finished.");
|
||||
waitingTasks.Remove(nameof(CloneDotnetRuntime));
|
||||
try
|
||||
{
|
||||
if (await runtimeCloneTask && !token.IsCancellationRequested)
|
||||
{
|
||||
var runtimeHash = await HashFiles(dotnetRuntimeCloneDirectory, token);
|
||||
Trace.Info($"Runtime content hash: {runtimeHash}");
|
||||
_contentHashes[_dotnetRuntime] = runtimeHash;
|
||||
_updateTrace.Enqueue($"DotnetRuntimeHash: {_contentHashes[_dotnetRuntime]}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Skip compute hash since clone dotnet runtime failed/cancelled.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to hash runtime content: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Still waiting for {waitingTasks.Count} tasks to complete.");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> CloneDotnetRuntime(string runtimeDir, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Cloning dotnet runtime to {runtimeDir}");
|
||||
IOUtil.DeleteDirectory(runtimeDir, CancellationToken.None);
|
||||
Directory.CreateDirectory(runtimeDir);
|
||||
|
||||
var assembly = Assembly.GetExecutingAssembly();
|
||||
var assetsContent = default(string);
|
||||
using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Listener.runnercoreassets"))
|
||||
using (var streamReader = new StreamReader(stream))
|
||||
{
|
||||
assetsContent = await streamReader.ReadToEndAsync();
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(assetsContent))
|
||||
{
|
||||
var runnerCoreAssets = assetsContent.Split(new[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
|
||||
if (runnerCoreAssets.Length > 0)
|
||||
{
|
||||
var binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
|
||||
IOUtil.CopyDirectory(binDir, runtimeDir, token);
|
||||
|
||||
var clonedFile = 0;
|
||||
foreach (var file in Directory.EnumerateFiles(runtimeDir, "*", SearchOption.AllDirectories))
|
||||
{
|
||||
token.ThrowIfCancellationRequested();
|
||||
if (runnerCoreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x.Trim())))
|
||||
{
|
||||
Trace.Verbose($"{file} is part of the runner core, delete from cloned runtime directory.");
|
||||
IOUtil.DeleteFile(file);
|
||||
}
|
||||
else
|
||||
{
|
||||
clonedFile++;
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Successfully cloned dotnet runtime to {runtimeDir}. Total files: {clonedFile}");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to clone dotnet runtime to {runtimeDir}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(CloneDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
        private Task<bool> CloneExternals(string externalsDir, CancellationToken token)
        {
            var stopWatch = Stopwatch.StartNew();
            try
            {
                Trace.Info($"Cloning externals to {externalsDir}");
                IOUtil.DeleteDirectory(externalsDir, CancellationToken.None);
                Directory.CreateDirectory(externalsDir);
                IOUtil.CopyDirectory(HostContext.GetDirectory(WellKnownDirectory.Externals), externalsDir, token);
                Trace.Info($"Successfully cloned externals to {externalsDir}.");
                return Task.FromResult(true);
            }
            catch (Exception ex)
            {
                Trace.Error($"Fail to clone externals to {externalsDir}");
                Trace.Error(ex);
            }
            finally
            {
                stopWatch.Stop();
                _updateTrace.Enqueue($"{nameof(CloneExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
            }

            return Task.FromResult(false);
        }

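        // HashFiles shells out to the bundled Node interpreter running the "hashFiles" script from
        // the bin directory, with the env variable "patterns" set to "**" so every file under
        // {fileFolder} is included. The script reports the combined hash on stderr wrapped in
        // "__OUTPUT__" markers, which the ErrorDataReceived handler below strips off.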
        private async Task<string> HashFiles(string fileFolder, CancellationToken token)
        {
            Trace.Info($"Calculating hash for {fileFolder}");

            var stopWatch = Stopwatch.StartNew();
            string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
            string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
            string hashFilesScript = Path.Combine(binDir, "hashFiles");
            var hashResult = string.Empty;

            using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
            {
                processInvoker.ErrorDataReceived += (_, data) =>
                {
                    if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
                    {
                        hashResult = data.Data.Substring(10, data.Data.Length - 20);
                        Trace.Info($"Hash result: '{hashResult}'");
                    }
                    else
                    {
                        Trace.Info(data.Data);
                    }
                };

                processInvoker.OutputDataReceived += (_, data) =>
                {
                    Trace.Verbose(data.Data);
                };

                var env = new Dictionary<string, string>
                {
                    ["patterns"] = "**"
                };

                int exitCode = await processInvoker.ExecuteAsync(workingDirectory: fileFolder,
                                                                 fileName: node,
                                                                 arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
                                                                 environment: env,
                                                                 requireExitCodeZero: false,
                                                                 outputEncoding: null,
                                                                 killProcessOnCancel: true,
                                                                 cancellationToken: token);

                if (exitCode != 0)
                {
                    Trace.Error($"hashFiles returns '{exitCode}' failed. Fail to hash files under directory '{fileFolder}'");
                }

                stopWatch.Stop();
                _updateTrace.Enqueue($"{nameof(HashFiles)}{Path.GetFileName(fileFolder)}Time: {stopWatch.ElapsedMilliseconds}ms");
                return hashResult;
            }
        }
    }
}

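For reference, here is a minimal, hypothetical sketch (not the runner's actual hashFiles script, and not guaranteed to produce identical values) of how a combined content hash over a directory could be computed directly in C#: hash each file with SHA-256, then hash the concatenation of the per-file digests in sorted relative-path order. The DirectoryHashSketch name and the path in the usage comment are made up for illustration.

using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;

public static class DirectoryHashSketch
{
    // Hash every file under rootDir with SHA-256, then hash the concatenated
    // per-file digests (ordered by relative path) into a single hex string.
    public static string ComputeCombinedHash(string rootDir)
    {
        using var sha256 = SHA256.Create();
        var files = Directory.EnumerateFiles(rootDir, "*", SearchOption.AllDirectories)
                             .OrderBy(f => Path.GetRelativePath(rootDir, f), StringComparer.Ordinal);

        using var combined = new MemoryStream();
        foreach (var file in files)
        {
            using var stream = File.OpenRead(file);
            byte[] fileDigest = sha256.ComputeHash(stream);
            combined.Write(fileDigest, 0, fileDigest.Length);
        }

        combined.Position = 0;
        byte[] finalDigest = sha256.ComputeHash(combined);
        return Convert.ToHexString(finalDigest).ToLowerInvariant();
    }

    // Usage (hypothetical path):
    //   var hash = DirectoryHashSketch.ComputeCombinedHash("/tmp/runner_dotnet_runtime_clone");
}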
@@ -1,568 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Reflection;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
// This class is a fork of SelfUpdater.cs and is intended to only be used for the
|
||||
// new self-update flow where the PackageMetadata is sent in the message directly.
|
||||
// Forking the class prevents us from accidentally breaking the old flow while it's still in production
|
||||
|
||||
[ServiceLocator(Default = typeof(SelfUpdaterV2))]
|
||||
public interface ISelfUpdaterV2 : IRunnerService
|
||||
{
|
||||
bool Busy { get; }
|
||||
Task<bool> SelfUpdate(RunnerRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token);
|
||||
}
|
||||
public class SelfUpdaterV2 : RunnerService, ISelfUpdaterV2
|
||||
{
|
||||
private static string _platform = BuildConstants.RunnerPackage.PackageName;
|
||||
private ITerminal _terminal;
|
||||
private IRunnerServer _runnerServer;
|
||||
private int _poolId;
|
||||
private ulong _agentId;
|
||||
|
||||
private const int _numberOfOldVersionsToKeep = 1;
|
||||
|
||||
private readonly ConcurrentQueue<string> _updateTrace = new();
|
||||
public bool Busy { get; private set; }
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
|
||||
_terminal = hostContext.GetService<ITerminal>();
|
||||
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
var configStore = HostContext.GetService<IConfigurationStore>();
|
||||
var settings = configStore.GetSettings();
|
||||
_poolId = settings.PoolId;
|
||||
_agentId = settings.AgentId;
|
||||
}
|
||||
|
||||
public async Task<bool> SelfUpdate(RunnerRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
|
||||
{
|
||||
Busy = true;
|
||||
try
|
||||
{
|
||||
var totalUpdateTime = Stopwatch.StartNew();
|
||||
|
||||
Trace.Info($"An update is available.");
|
||||
_updateTrace.Enqueue($"RunnerPlatform: {updateMessage.OS}");
|
||||
|
||||
// Print a console line warning the user not to shut down the runner.
_terminal.WriteLine("Runner update in progress, do not shutdown runner.");
|
||||
_terminal.WriteLine($"Downloading {updateMessage.TargetVersion} runner");
|
||||
|
||||
await DownloadLatestRunner(token, updateMessage.TargetVersion, updateMessage.DownloadUrl, updateMessage.SHA256Checksum, updateMessage.OS);
|
||||
Trace.Info($"Download latest runner and unzip into runner root.");
|
||||
|
||||
// wait until all running jobs finish
_terminal.WriteLine("Waiting for current job finish running.");
|
||||
|
||||
await jobDispatcher.WaitAsync(token);
|
||||
Trace.Info($"All running job has exited.");
|
||||
|
||||
// We need to keep runner backup around for macOS until we fixed https://github.com/actions/runner/issues/743
|
||||
// delete runner backup
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
DeletePreviousVersionRunnerBackup(token, updateMessage.TargetVersion);
|
||||
Trace.Info($"Delete old version runner backup.");
|
||||
stopWatch.Stop();
|
||||
// generate update script from template
|
||||
_updateTrace.Enqueue($"DeleteRunnerBackupTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
_terminal.WriteLine("Generate and execute update script.");
|
||||
|
||||
string updateScript = GenerateUpdateScript(restartInteractiveRunner, updateMessage.TargetVersion);
|
||||
Trace.Info($"Generate update script into: {updateScript}");
|
||||
|
||||
|
||||
#if DEBUG
|
||||
// For L0, we will skip execute update script.
|
||||
if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_EXECUTE_UPDATE_SCRIPT")))
|
||||
#endif
|
||||
{
|
||||
string flagFile = "update.finished";
|
||||
IOUtil.DeleteFile(flagFile);
|
||||
// kick off update script
|
||||
Process invokeScript = new();
|
||||
#if OS_WINDOWS
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
|
||||
#elif (OS_OSX || OS_LINUX)
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
|
||||
#endif
|
||||
invokeScript.Start();
|
||||
Trace.Info($"Update script start running");
|
||||
}
|
||||
|
||||
totalUpdateTime.Stop();
|
||||
|
||||
_updateTrace.Enqueue($"TotalUpdateTime: {totalUpdateTime.ElapsedMilliseconds}ms");
|
||||
_terminal.WriteLine("Runner will exit shortly for update, should be back online within 10 seconds.");
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_updateTrace.Enqueue(ex.ToString());
|
||||
throw;
|
||||
}
|
||||
finally
|
||||
{
|
||||
_terminal.WriteLine("Runner update process finished.");
|
||||
Busy = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// _work
|
||||
/// \_update
|
||||
/// \bin
|
||||
/// \externals
|
||||
/// \run.sh
|
||||
/// \run.cmd
|
||||
/// \package.zip //temp download .zip/.tar.gz
|
||||
/// </summary>
|
||||
/// <param name="token"></param>
|
||||
/// <returns></returns>
|
||||
private async Task DownloadLatestRunner(CancellationToken token, string targetVersion, string packageDownloadUrl, string packageHashValue, string targetPlatform)
|
||||
{
|
||||
string latestRunnerDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), Constants.Path.UpdateDirectory);
|
||||
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
|
||||
Directory.CreateDirectory(latestRunnerDirectory);
|
||||
|
||||
string archiveFile = null;
|
||||
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
|
||||
try
|
||||
{
|
||||
#if DEBUG
|
||||
// Much of the update process (targetVersion, archive) is server-side, this is a way to control it from here for testing specific update scenarios
|
||||
// Add files like 'runner2.281.2.tar.gz' or 'runner2.283.0.zip' (depending on your platform) to your runner root folder
|
||||
// Note that runners still need to be older than the server's runner version in order to receive an 'AgentRefreshMessage' and trigger this update
|
||||
// Wrapped in #if DEBUG as this should not be in the RELEASE build
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
|
||||
{
|
||||
var waitForDebugger = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE_WAIT_FOR_DEBUGGER"));
|
||||
if (waitForDebugger)
|
||||
{
|
||||
int waitInSeconds = 20;
|
||||
while (!Debugger.IsAttached && waitInSeconds-- > 0)
|
||||
{
|
||||
await Task.Delay(1000);
|
||||
}
|
||||
Debugger.Break();
|
||||
}
|
||||
|
||||
if (targetPlatform.StartsWith("win"))
|
||||
{
|
||||
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.zip");
|
||||
}
|
||||
else
|
||||
{
|
||||
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.tar.gz");
|
||||
}
|
||||
|
||||
if (File.Exists(archiveFile))
|
||||
{
|
||||
_updateTrace.Enqueue($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
|
||||
_terminal.WriteLine($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
|
||||
}
|
||||
else
|
||||
{
|
||||
archiveFile = null;
|
||||
_terminal.WriteLine($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
|
||||
_updateTrace.Enqueue($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
|
||||
}
|
||||
}
|
||||
#endif
|
||||
// archiveFile is not null only if we mocked it above
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, targetPlatform, token);
|
||||
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
|
||||
}
|
||||
await ValidateRunnerHash(archiveFile, packageHashValue);
|
||||
}
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
// delete .zip file
|
||||
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
|
||||
{
|
||||
Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile);
|
||||
IOUtil.DeleteFile(archiveFile);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
//it is not critical if we fail to delete the .zip file
|
||||
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
|
||||
}
|
||||
}
|
||||
|
||||
await CopyLatestRunnerToRoot(latestRunnerDirectory, targetVersion, token);
|
||||
}
|
||||
|
||||
private async Task<string> DownLoadRunner(string downloadDirectory, string packageDownloadUrl, string packageHashValue, string packagePlatform, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
int runnerSuffix = 1;
|
||||
string archiveFile = null;
|
||||
bool downloadSucceeded = false;
|
||||
|
||||
// Download the runner, using multiple attempts in order to be resilient against any networking/CDN issues
|
||||
for (int attempt = 1; attempt <= Constants.RunnerDownloadRetryMaxAttempts; attempt++)
|
||||
{
|
||||
// Generate an available package name, and make a best effort to clean up stale local zip files
|
||||
while (true)
|
||||
{
|
||||
if (packagePlatform.StartsWith("win"))
|
||||
{
|
||||
archiveFile = Path.Combine(downloadDirectory, $"runner{runnerSuffix}.zip");
|
||||
}
|
||||
else
|
||||
{
|
||||
archiveFile = Path.Combine(downloadDirectory, $"runner{runnerSuffix}.tar.gz");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
// delete .zip file
|
||||
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
|
||||
{
|
||||
Trace.Verbose("Deleting latest runner package zip '{0}'", archiveFile);
|
||||
IOUtil.DeleteFile(archiveFile);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// couldn't delete the file for whatever reason, so generate another name
|
||||
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
|
||||
runnerSuffix++;
|
||||
}
|
||||
}
|
||||
|
||||
// Allow a 15-minute package download timeout, which is good enough to update the runner from a 1 Mbit/s ADSL connection.
|
||||
if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_DOWNLOAD_TIMEOUT") ?? string.Empty, out int timeoutSeconds))
|
||||
{
|
||||
timeoutSeconds = 15 * 60;
|
||||
}
|
||||
|
||||
Trace.Info($"Attempt {attempt}: save latest runner into {archiveFile}.");
|
||||
|
||||
using (var downloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
|
||||
using (var downloadCts = CancellationTokenSource.CreateLinkedTokenSource(downloadTimeout.Token, token))
|
||||
{
|
||||
try
|
||||
{
|
||||
Trace.Info($"Download runner: begin download");
|
||||
long downloadSize = 0;
|
||||
|
||||
//open zip stream in async mode
|
||||
using (HttpClient httpClient = new(HostContext.CreateHttpClientHandler()))
|
||||
{
|
||||
Trace.Info($"Downloading {packageDownloadUrl}");
|
||||
|
||||
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
|
||||
using (Stream result = await httpClient.GetStreamAsync(packageDownloadUrl))
|
||||
{
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
await result.CopyToAsync(fs, 81920, downloadCts.Token);
|
||||
await fs.FlushAsync(downloadCts.Token);
|
||||
downloadSize = fs.Length;
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Download runner: finished download");
|
||||
downloadSucceeded = true;
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"PackageDownloadTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
_updateTrace.Enqueue($"Attempts: {attempt}");
|
||||
_updateTrace.Enqueue($"PackageSize: {downloadSize / 1024 / 1024}MB");
|
||||
break;
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info($"Runner download has been cancelled.");
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
if (downloadCts.Token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Warning($"Runner download has timed out after {timeoutSeconds} seconds");
|
||||
}
|
||||
|
||||
Trace.Warning($"Failed to get package '{archiveFile}' from '{packageDownloadUrl}'. Exception {ex}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (downloadSucceeded)
|
||||
{
|
||||
return archiveFile;
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
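        // ValidateRunnerHash computes the SHA-256 of the downloaded archive and compares it with
        // the checksum from the refresh message; note that when no expected hash is supplied the
        // check is silently skipped rather than treated as an error.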
|
||||
|
||||
private async Task ValidateRunnerHash(string archiveFile, string packageHashValue)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
// Validate Hash Matches if it is provided
|
||||
using (FileStream stream = File.OpenRead(archiveFile))
|
||||
{
|
||||
if (!string.IsNullOrEmpty(packageHashValue))
|
||||
{
|
||||
using (SHA256 sha256 = SHA256.Create())
|
||||
{
|
||||
byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
|
||||
var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
|
||||
if (hash != packageHashValue)
|
||||
{
|
||||
// Hash did not match, we can't recover from this, just throw
|
||||
throw new Exception($"Computed runner hash {hash} did not match expected Runner Hash {packageHashValue} for {archiveFile}");
|
||||
}
|
||||
|
||||
stopWatch.Stop();
|
||||
Trace.Info($"Validated Runner Hash matches {archiveFile} : {packageHashValue}");
|
||||
_updateTrace.Enqueue($"ValidateHashTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
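        // ExtractRunnerPackage unpacks .zip archives with ZipFile.ExtractToDirectory and .tar.gz
        // archives by invoking the system 'tar -xzf'; any other extension (or a non-zero tar exit
        // code) surfaces as a NotSupportedException.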
|
||||
|
||||
private async Task ExtractRunnerPackage(string archiveFile, string extractDirectory, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
|
||||
if (archiveFile.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
ZipFile.ExtractToDirectory(archiveFile, extractDirectory);
|
||||
}
|
||||
else if (archiveFile.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
string tar = WhichUtil.Which("tar", trace: Trace);
|
||||
|
||||
if (string.IsNullOrEmpty(tar))
|
||||
{
|
||||
throw new NotSupportedException($"tar -xzf");
|
||||
}
|
||||
|
||||
// tar -xzf
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
Trace.Info(args.Data);
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
Trace.Error(args.Data);
|
||||
}
|
||||
});
|
||||
|
||||
int exitCode = await processInvoker.ExecuteAsync(extractDirectory, tar, $"-xzf \"{archiveFile}\"", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
throw new NotSupportedException($"Can't use 'tar -xzf' to extract archive file: {archiveFile}. return code: {exitCode}.");
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new NotSupportedException($"{archiveFile}");
|
||||
}
|
||||
|
||||
stopWatch.Stop();
|
||||
Trace.Info($"Finished getting latest runner package at: {extractDirectory}.");
|
||||
_updateTrace.Enqueue($"PackageExtractTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
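        // CopyLatestRunnerToRoot stages the downloaded payload next to the live install:
        // bin -> bin.{targetVersion}, externals -> externals.{targetVersion}, and the top-level
        // .sh/.cmd files are replaced in place (delete-then-copy to avoid permission issues).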
|
||||
|
||||
private Task CopyLatestRunnerToRoot(string latestRunnerDirectory, string targetVersion, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
// copy latest runner into runner root folder
|
||||
// copy bin from _work/_update -> bin.version under root
|
||||
string binVersionDir = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"{Constants.Path.BinDirectory}.{targetVersion}");
|
||||
Directory.CreateDirectory(binVersionDir);
|
||||
Trace.Info($"Copy {Path.Combine(latestRunnerDirectory, Constants.Path.BinDirectory)} to {binVersionDir}.");
|
||||
IOUtil.CopyDirectory(Path.Combine(latestRunnerDirectory, Constants.Path.BinDirectory), binVersionDir, token);
|
||||
|
||||
// copy externals from _work/_update -> externals.version under root
|
||||
string externalsVersionDir = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"{Constants.Path.ExternalsDirectory}.{targetVersion}");
|
||||
Directory.CreateDirectory(externalsVersionDir);
|
||||
Trace.Info($"Copy {Path.Combine(latestRunnerDirectory, Constants.Path.ExternalsDirectory)} to {externalsVersionDir}.");
|
||||
IOUtil.CopyDirectory(Path.Combine(latestRunnerDirectory, Constants.Path.ExternalsDirectory), externalsVersionDir, token);
|
||||
|
||||
// copy and replace all .sh/.cmd files
|
||||
Trace.Info($"Copy any remaining .sh/.cmd files into runner root.");
|
||||
foreach (FileInfo file in new DirectoryInfo(latestRunnerDirectory).GetFiles() ?? new FileInfo[0])
|
||||
{
|
||||
string destination = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), file.Name);
|
||||
|
||||
// Removing the file instead of just trying to overwrite it works around permissions issues on linux.
|
||||
// https://github.com/actions/runner/issues/981
|
||||
Trace.Info($"Copy {file.FullName} to {destination}");
|
||||
IOUtil.DeleteFile(destination);
|
||||
file.CopyTo(destination, true);
|
||||
}
|
||||
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"CopyRunnerToRootTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
private void DeletePreviousVersionRunnerBackup(CancellationToken token, string targetVersion)
|
||||
{
|
||||
// delete previous runner backup (back compat, can be removed after several sprints)
// bin.bak.2.99.0
|
||||
// externals.bak.2.99.0
|
||||
foreach (string existBackUp in Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "*.bak.*"))
|
||||
{
|
||||
Trace.Info($"Delete existing runner backup at {existBackUp}.");
|
||||
try
|
||||
{
|
||||
IOUtil.DeleteDirectory(existBackUp, token);
|
||||
}
|
||||
catch (Exception ex) when (!(ex is OperationCanceledException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
Trace.Info($"Catch exception during delete backup folder {existBackUp}, ignore this error try delete the backup folder on next auto-update.");
|
||||
}
|
||||
}
|
||||
|
||||
// delete old bin.2.99.0 folder, only leave the current version and the latest download version
|
||||
var allBinDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "bin.*");
|
||||
if (allBinDirs.Length > _numberOfOldVersionsToKeep)
|
||||
{
|
||||
// there are more than {_numberOfOldVersionsToKeep} bin.version folder.
|
||||
// delete older bin.version folders.
|
||||
foreach (var oldBinDir in allBinDirs)
|
||||
{
|
||||
if (string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin"), StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin.{BuildConstants.RunnerPackage.Version}"), StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin.{targetVersion}"), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// skip for current runner version
|
||||
continue;
|
||||
}
|
||||
|
||||
Trace.Info($"Delete runner bin folder's backup at {oldBinDir}.");
|
||||
try
|
||||
{
|
||||
IOUtil.DeleteDirectory(oldBinDir, token);
|
||||
}
|
||||
catch (Exception ex) when (!(ex is OperationCanceledException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
Trace.Info($"Catch exception during delete backup folder {oldBinDir}, ignore this error try delete the backup folder on next auto-update.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// delete old externals.2.99.0 folder, only leave the current version and the latest download version
|
||||
var allExternalsDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "externals.*");
|
||||
if (allExternalsDirs.Length > _numberOfOldVersionsToKeep)
|
||||
{
|
||||
// there are more than {_numberOfOldVersionsToKeep} externals.version folder.
|
||||
// delete older externals.version folders.
|
||||
foreach (var oldExternalDir in allExternalsDirs)
|
||||
{
|
||||
if (string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals"), StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals.{BuildConstants.RunnerPackage.Version}"), StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals.{targetVersion}"), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// skip for current runner version
|
||||
continue;
|
||||
}
|
||||
|
||||
Trace.Info($"Delete runner externals folder's backup at {oldExternalDir}.");
|
||||
try
|
||||
{
|
||||
IOUtil.DeleteDirectory(oldExternalDir, token);
|
||||
}
|
||||
catch (Exception ex) when (!(ex is OperationCanceledException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
Trace.Info($"Catch exception during delete backup folder {oldExternalDir}, ignore this error try delete the backup folder on next auto-update.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
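        // GenerateUpdateScript fills the update.sh/update.cmd template from the freshly staged
        // bin.{targetVersion} directory (process id, runner root, old/new versions, log path,
        // restart flag) and writes it as _update.sh/_update.cmd under the work directory.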
|
||||
|
||||
private string GenerateUpdateScript(bool restartInteractiveRunner, string targetVersion)
|
||||
{
|
||||
int processId = Process.GetCurrentProcess().Id;
|
||||
string updateLog = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), $"SelfUpdate-{DateTime.UtcNow.ToString("yyyyMMdd-HHmmss")}.log");
|
||||
string runnerRoot = HostContext.GetDirectory(WellKnownDirectory.Root);
|
||||
|
||||
#if OS_WINDOWS
|
||||
string templateName = "update.cmd.template";
|
||||
#else
|
||||
string templateName = "update.sh.template";
|
||||
#endif
|
||||
|
||||
string templatePath = Path.Combine(runnerRoot, $"bin.{targetVersion}", templateName);
|
||||
string template = File.ReadAllText(templatePath);
|
||||
|
||||
template = template.Replace("_PROCESS_ID_", processId.ToString());
|
||||
template = template.Replace("_RUNNER_PROCESS_NAME_", $"Runner.Listener{IOUtil.ExeExtension}");
|
||||
template = template.Replace("_ROOT_FOLDER_", runnerRoot);
|
||||
template = template.Replace("_EXIST_RUNNER_VERSION_", BuildConstants.RunnerPackage.Version);
|
||||
template = template.Replace("_DOWNLOAD_RUNNER_VERSION_", targetVersion);
|
||||
template = template.Replace("_UPDATE_LOG_", updateLog);
|
||||
template = template.Replace("_RESTART_INTERACTIVE_RUNNER_", restartInteractiveRunner ? "1" : "0");
|
||||
|
||||
#if OS_WINDOWS
|
||||
string scriptName = "_update.cmd";
|
||||
#else
|
||||
string scriptName = "_update.sh";
|
||||
#endif
|
||||
|
||||
string updateScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), scriptName);
|
||||
if (File.Exists(updateScript))
|
||||
{
|
||||
IOUtil.DeleteFile(updateScript);
|
||||
}
|
||||
|
||||
File.WriteAllText(updateScript, template);
|
||||
return updateScript;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6,8 +6,6 @@ using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Services.Common;
|
||||
|
||||
namespace GitHub.Runner.Sdk
|
||||
{
|
||||
@@ -74,25 +72,6 @@ namespace GitHub.Runner.Sdk
|
||||
}
|
||||
}
|
||||
|
||||
public static async Task<string> GetFileContentSha256HashAsync(string path)
|
||||
{
|
||||
if (!File.Exists(path))
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
using (FileStream stream = File.OpenRead(path))
|
||||
{
|
||||
using (SHA256 sha256 = SHA256.Create())
|
||||
{
|
||||
byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
|
||||
var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
|
||||
return hash;
|
||||
}
|
||||
|
||||
}
|
||||
}
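        // GetFileContentSha256HashAsync returns an empty string for a missing file instead of
        // throwing; a typical call looks like (hypothetical path):
        //   var sha = await IOUtil.GetFileContentSha256HashAsync("/tmp/runner-package.tar.gz");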
|
||||
|
||||
public static void Delete(string path, CancellationToken cancellationToken)
|
||||
{
|
||||
DeleteDirectory(path, cancellationToken);
|
||||
|
||||
@@ -85,35 +85,6 @@ namespace GitHub.Runner.Sdk
|
||||
VssCredentials credentials,
|
||||
IEnumerable<DelegatingHandler> additionalDelegatingHandler = null,
|
||||
TimeSpan? timeout = null)
|
||||
{
|
||||
RawClientHttpRequestSettings settings = GetHttpRequestSettings(timeout);
|
||||
RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.Federated, settings), additionalDelegatingHandler);
|
||||
return connection;
|
||||
}
|
||||
|
||||
public static VssCredentials GetVssCredential(ServiceEndpoint serviceEndpoint)
|
||||
{
|
||||
ArgUtil.NotNull(serviceEndpoint, nameof(serviceEndpoint));
|
||||
ArgUtil.NotNull(serviceEndpoint.Authorization, nameof(serviceEndpoint.Authorization));
|
||||
ArgUtil.NotNullOrEmpty(serviceEndpoint.Authorization.Scheme, nameof(serviceEndpoint.Authorization.Scheme));
|
||||
|
||||
if (serviceEndpoint.Authorization.Parameters.Count == 0)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(serviceEndpoint));
|
||||
}
|
||||
|
||||
VssCredentials credentials = null;
|
||||
string accessToken;
|
||||
if (serviceEndpoint.Authorization.Scheme == EndpointAuthorizationSchemes.OAuth &&
|
||||
serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out accessToken))
|
||||
{
|
||||
credentials = new VssCredentials(new VssOAuthAccessTokenCredential(accessToken), CredentialPromptType.DoNotPrompt);
|
||||
}
|
||||
|
||||
return credentials;
|
||||
}
|
||||
|
||||
public static RawClientHttpRequestSettings GetHttpRequestSettings(TimeSpan? timeout = null)
|
||||
{
|
||||
RawClientHttpRequestSettings settings = RawClientHttpRequestSettings.Default.Clone();
|
||||
|
||||
@@ -145,7 +116,30 @@ namespace GitHub.Runner.Sdk
|
||||
// settings are applied to an HttpRequestMessage.
|
||||
settings.AcceptLanguages.Remove(CultureInfo.InvariantCulture);
|
||||
|
||||
return settings;
|
||||
RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.Federated, settings), additionalDelegatingHandler);
|
||||
return connection;
|
||||
}
|
||||
|
||||
public static VssCredentials GetVssCredential(ServiceEndpoint serviceEndpoint)
|
||||
{
|
||||
ArgUtil.NotNull(serviceEndpoint, nameof(serviceEndpoint));
|
||||
ArgUtil.NotNull(serviceEndpoint.Authorization, nameof(serviceEndpoint.Authorization));
|
||||
ArgUtil.NotNullOrEmpty(serviceEndpoint.Authorization.Scheme, nameof(serviceEndpoint.Authorization.Scheme));
|
||||
|
||||
if (serviceEndpoint.Authorization.Parameters.Count == 0)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(serviceEndpoint));
|
||||
}
|
||||
|
||||
VssCredentials credentials = null;
|
||||
string accessToken;
|
||||
if (serviceEndpoint.Authorization.Scheme == EndpointAuthorizationSchemes.OAuth &&
|
||||
serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out accessToken))
|
||||
{
|
||||
credentials = new VssCredentials(new VssOAuthAccessTokenCredential(accessToken), CredentialPromptType.DoNotPrompt);
|
||||
}
|
||||
|
||||
return credentials;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,128 +114,6 @@ namespace GitHub.Runner.Sdk
|
||||
}
|
||||
}
|
||||
|
||||
#if OS_WINDOWS
|
||||
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'Path' environment variable.");
|
||||
#else
|
||||
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'PATH' environment variable.");
|
||||
#endif
|
||||
if (require)
|
||||
{
|
||||
throw new FileNotFoundException(
|
||||
message: $"{command}: command not found",
|
||||
fileName: command);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public static string Which2(string command, bool require = false, ITraceWriter trace = null, string prependPath = null)
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(command, nameof(command));
|
||||
trace?.Info($"Which2: '{command}'");
|
||||
if (Path.IsPathFullyQualified(command) && File.Exists(command))
|
||||
{
|
||||
trace?.Info($"Fully qualified path: '{command}'");
|
||||
return command;
|
||||
}
|
||||
string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
|
||||
if (string.IsNullOrEmpty(path))
|
||||
{
|
||||
trace?.Info("PATH environment variable not defined.");
|
||||
path = path ?? string.Empty;
|
||||
}
|
||||
if (!string.IsNullOrEmpty(prependPath))
|
||||
{
|
||||
path = PathUtil.PrependPath(prependPath, path);
|
||||
}
|
||||
|
||||
string[] pathSegments = path.Split(new Char[] { Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries);
|
||||
for (int i = 0; i < pathSegments.Length; i++)
|
||||
{
|
||||
pathSegments[i] = Environment.ExpandEnvironmentVariables(pathSegments[i]);
|
||||
}
|
||||
|
||||
foreach (string pathSegment in pathSegments)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(pathSegment) && Directory.Exists(pathSegment))
|
||||
{
|
||||
#if OS_WINDOWS
|
||||
string pathExt = Environment.GetEnvironmentVariable("PATHEXT");
|
||||
if (string.IsNullOrEmpty(pathExt))
|
||||
{
|
||||
// XP's system default value for PATHEXT system variable
|
||||
pathExt = ".com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh";
|
||||
}
|
||||
|
||||
string[] pathExtSegments = pathExt.Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
|
||||
|
||||
// if command already has an extension.
|
||||
if (pathExtSegments.Any(ext => command.EndsWith(ext, StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
try
|
||||
{
|
||||
foreach (var file in Directory.EnumerateFiles(pathSegment, command))
|
||||
{
|
||||
if (IsPathValid(file, trace))
|
||||
{
|
||||
trace?.Info($"Location: '{file}'");
|
||||
return file;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (UnauthorizedAccessException ex)
|
||||
{
|
||||
trace?.Info("Ignore UnauthorizedAccess exception during Which.");
|
||||
trace?.Verbose(ex.ToString());
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
string searchPattern;
|
||||
searchPattern = StringUtil.Format($"{command}.*");
|
||||
try
|
||||
{
|
||||
foreach (var file in Directory.EnumerateFiles(pathSegment, searchPattern))
|
||||
{
|
||||
// add extension.
|
||||
for (int i = 0; i < pathExtSegments.Length; i++)
|
||||
{
|
||||
string fullPath = Path.Combine(pathSegment, $"{command}{pathExtSegments[i]}");
|
||||
if (string.Equals(file, fullPath, StringComparison.OrdinalIgnoreCase) && IsPathValid(fullPath, trace))
|
||||
{
|
||||
trace?.Info($"Location: '{fullPath}'");
|
||||
return fullPath;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (UnauthorizedAccessException ex)
|
||||
{
|
||||
trace?.Info("Ignore UnauthorizedAccess exception during Which.");
|
||||
trace?.Verbose(ex.ToString());
|
||||
}
|
||||
}
|
||||
#else
|
||||
try
|
||||
{
|
||||
foreach (var file in Directory.EnumerateFiles(pathSegment, command))
|
||||
{
|
||||
if (IsPathValid(file, trace))
|
||||
{
|
||||
trace?.Info($"Location: '{file}'");
|
||||
return file;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (UnauthorizedAccessException ex)
|
||||
{
|
||||
trace?.Info("Ignore UnauthorizedAccess exception during Which.");
|
||||
trace?.Verbose(ex.ToString());
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
#if OS_WINDOWS
|
||||
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'Path' environment variable.");
|
||||
#else
|
||||
@@ -256,12 +134,7 @@ namespace GitHub.Runner.Sdk
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
var linkTargetFullPath = fileInfo.Directory?.FullName + Path.DirectorySeparatorChar + fileInfo.LinkTarget;
|
||||
if (fileInfo.LinkTarget == null ||
|
||||
File.Exists(linkTargetFullPath) ||
|
||||
File.Exists(fileInfo.LinkTarget))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
if (fileInfo.LinkTarget == null || File.Exists(linkTargetFullPath) || File.Exists(fileInfo.LinkTarget)) return true;
|
||||
trace?.Info($"the target '{fileInfo.LinkTarget}' of the symbolic link '{path}', does not exist");
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
@@ -53,6 +52,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
private const int _defaultCopyBufferSize = 81920;
|
||||
private const string _dotcomApiUrl = "https://api.github.com";
|
||||
|
||||
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new();
|
||||
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
|
||||
@@ -739,7 +739,10 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
|
||||
ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
|
||||
var apiUrl = GetApiUrl(executionContext);
|
||||
var defaultAccessToken = executionContext.GetGitHubContext("token");
|
||||
var configurationStore = HostContext.GetService<IConfigurationStore>();
|
||||
var runnerSettings = configurationStore.GetSettings();
|
||||
|
||||
foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
|
||||
{
|
||||
@@ -763,8 +766,6 @@ namespace GitHub.Runner.Worker
|
||||
ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
|
||||
ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
|
||||
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
|
||||
ArgUtil.NotNullOrEmpty(downloadInfo.ResolvedNameWithOwner, nameof(downloadInfo.ResolvedNameWithOwner));
ArgUtil.NotNullOrEmpty(downloadInfo.ResolvedSha, nameof(downloadInfo.ResolvedSha));
|
||||
|
||||
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
|
||||
string watermarkFile = GetWatermarkFilePath(destDirectory);
|
||||
@@ -781,6 +782,31 @@ namespace GitHub.Runner.Worker
|
||||
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' (SHA:{downloadInfo.ResolvedSha})");
|
||||
}
|
||||
|
||||
await DownloadRepositoryActionAsync(executionContext, downloadInfo, destDirectory);
|
||||
}
|
||||
|
||||
private string GetApiUrl(IExecutionContext executionContext)
|
||||
{
|
||||
string apiUrl = executionContext.GetGitHubContext("api_url");
|
||||
if (!string.IsNullOrEmpty(apiUrl))
|
||||
{
|
||||
return apiUrl;
|
||||
}
|
||||
// Once the api_url is set for hosted, we can remove this fallback (it doesn't make sense for GHES)
|
||||
return _dotcomApiUrl;
|
||||
}
|
||||
|
||||
private static string BuildLinkToActionArchive(string apiUrl, string repository, string @ref)
|
||||
{
|
||||
#if OS_WINDOWS
|
||||
return $"{apiUrl}/repos/{repository}/zipball/{@ref}";
|
||||
#else
|
||||
return $"{apiUrl}/repos/{repository}/tarball/{@ref}";
|
||||
#endif
|
||||
}
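        // BuildLinkToActionArchive picks zipball on Windows and tarball elsewhere; for example
        // (hypothetical values) apiUrl "https://api.github.com", repository "octocat/hello-world"
        // and ref "v1" would yield https://api.github.com/repos/octocat/hello-world/tarball/v1
        // on Linux/macOS.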
|
||||
|
||||
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
|
||||
{
|
||||
//download and extract action in a temp folder and rename it on success
|
||||
string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
|
||||
Directory.CreateDirectory(tempDirectory);
|
||||
@@ -793,50 +819,97 @@ namespace GitHub.Runner.Worker
|
||||
string link = downloadInfo?.TarballUrl;
|
||||
#endif
|
||||
|
||||
Trace.Info($"Save archive '{link}' into {archiveFile}.");
|
||||
try
|
||||
{
|
||||
var useActionArchiveCache = false;
|
||||
if (executionContext.Global.Variables.GetBoolean("DistributedTask.UseActionArchiveCache") == true)
|
||||
int retryCount = 0;
|
||||
|
||||
// Allow up to 20 * 60s for any action to be downloaded from github graph.
|
||||
int timeoutSeconds = 20 * 60;
|
||||
while (retryCount < 3)
|
||||
{
|
||||
var hasActionArchiveCache = false;
|
||||
var actionArchiveCacheDir = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory);
|
||||
if (!string.IsNullOrEmpty(actionArchiveCacheDir) &&
|
||||
Directory.Exists(actionArchiveCacheDir))
|
||||
using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
|
||||
using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken))
|
||||
{
|
||||
hasActionArchiveCache = true;
|
||||
Trace.Info($"Check if action archive '{downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha}' already exists in cache directory '{actionArchiveCacheDir}'");
|
||||
#if OS_WINDOWS
|
||||
var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.zip");
|
||||
#else
|
||||
var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.tar.gz");
|
||||
#endif
|
||||
if (File.Exists(cacheArchiveFile))
|
||||
try
|
||||
{
|
||||
try
|
||||
//open zip stream in async mode
|
||||
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
Trace.Info($"Found action archive '{cacheArchiveFile}' in cache directory '{actionArchiveCacheDir}'");
|
||||
File.Copy(cacheArchiveFile, archiveFile);
|
||||
useActionArchiveCache = true;
|
||||
executionContext.Debug($"Copied action archive '{cacheArchiveFile}' to '{archiveFile}'");
|
||||
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
|
||||
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
using (var response = await httpClient.GetAsync(link))
|
||||
{
|
||||
var requestId = UrlUtil.GetGitHubRequestId(response.Headers);
|
||||
if (!string.IsNullOrEmpty(requestId))
|
||||
{
|
||||
Trace.Info($"Request URL: {link} X-GitHub-Request-Id: {requestId} Http Status: {response.StatusCode}");
|
||||
}
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
using (var result = await response.Content.ReadAsStreamAsync())
|
||||
{
|
||||
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
|
||||
await fs.FlushAsync(actionDownloadCancellation.Token);
|
||||
|
||||
// download succeeded, break out of the retry loop.
break;
|
||||
}
|
||||
}
|
||||
else if (response.StatusCode == HttpStatusCode.NotFound)
|
||||
{
|
||||
// It doesn't make sense to retry in this case, so just stop
|
||||
throw new ActionNotFoundException(new Uri(link), requestId);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Something else bad happened, let's go to our retry logic
|
||||
response.EnsureSuccessStatusCode();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
}
|
||||
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info("Action download has been cancelled.");
|
||||
throw;
|
||||
}
|
||||
catch (ActionNotFoundException)
|
||||
{
|
||||
Trace.Info($"The action at '{link}' does not exist");
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex) when (retryCount < 2)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Fail to download archive '{link}' -- Attempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
if (actionDownloadTimeout.Token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Error($"Failed to copy action archive '{cacheArchiveFile}' to '{archiveFile}'. Error: {ex}");
|
||||
// action download didn't finish within timeout
|
||||
executionContext.Warning($"Action '{link}' didn't finish download within {timeoutSeconds} seconds.");
|
||||
}
|
||||
else
|
||||
{
|
||||
executionContext.Warning($"Failed to download action '{link}'. Error: {ex.Message}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
executionContext.Global.JobTelemetry.Add(new JobTelemetry()
|
||||
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
|
||||
{
|
||||
Type = JobTelemetryType.General,
|
||||
Message = $"Action archive cache usage: {downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha} use cache {useActionArchiveCache} has cache {hasActionArchiveCache}"
|
||||
});
|
||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
|
||||
executionContext.Warning($"Back off {backOff.TotalSeconds} seconds before retry.");
|
||||
await Task.Delay(backOff);
|
||||
}
|
||||
}
|
||||
|
||||
if (!useActionArchiveCache)
|
||||
{
|
||||
await DownloadRepositoryArchive(executionContext, link, downloadInfo.Authentication?.Token, archiveFile);
|
||||
}
|
||||
ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
|
||||
executionContext.Debug($"Download '{link}' to '{archiveFile}'");
|
||||
|
||||
var stagingDirectory = Path.Combine(tempDirectory, "_staging");
|
||||
Directory.CreateDirectory(stagingDirectory);
|
||||
@@ -856,13 +929,11 @@ namespace GitHub.Runner.Worker
|
||||
// tar -xzf
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
var tarOutputs = new List<string>();
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
Trace.Info(args.Data);
|
||||
tarOutputs.Add($"STDOUT: {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
@@ -871,23 +942,13 @@ namespace GitHub.Runner.Worker
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
Trace.Error(args.Data);
|
||||
tarOutputs.Add($"STDERR: {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
int exitCode = await processInvoker.ExecuteAsync(stagingDirectory, tar, $"-xzf \"{archiveFile}\"", null, executionContext.CancellationToken);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
if (executionContext.Global.Variables.GetBoolean("DistributedTask.DetailUntarFailure") == true)
|
||||
{
|
||||
var fileInfo = new FileInfo(archiveFile);
|
||||
var sha256hash = await IOUtil.GetFileContentSha256HashAsync(archiveFile);
|
||||
throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile} (SHA256 '{sha256hash}', size '{fileInfo.Length}' bytes, tar outputs '{string.Join(' ', tarOutputs)}'). Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
|
||||
}
|
||||
throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
|
||||
}
|
||||
}
|
||||
#endif
|
||||
@@ -905,6 +966,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
Trace.Verbose("Create watermark file indicate action download succeed.");
|
||||
string watermarkFile = GetWatermarkFilePath(destDirectory);
|
||||
File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());
|
||||
|
||||
executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'.");
|
||||
@@ -929,6 +991,29 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
|
||||
{
|
||||
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
|
||||
if (string.IsNullOrEmpty(authToken))
|
||||
{
|
||||
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
|
||||
authToken = executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(authToken))
|
||||
{
|
||||
HostContext.SecretMasker.AddValue(authToken);
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
}
|
||||
else
|
||||
{
|
||||
var accessToken = executionContext.GetGitHubContext("token");
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
}
|
||||
}
|
||||
|
||||
private string GetWatermarkFilePath(string directory) => directory + ".completed";
|
||||
|
||||
private ActionSetupInfo PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
|
||||
@@ -1101,6 +1186,13 @@ namespace GitHub.Runner.Worker
|
||||
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
|
||||
}
|
||||
|
||||
private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
|
||||
ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
|
||||
return $"{info.NameWithOwner}@{info.Ref}";
|
||||
}
|
||||
|
||||
private AuthenticationHeaderValue CreateAuthHeader(string token)
|
||||
{
|
||||
if (string.IsNullOrEmpty(token))
|
||||
@@ -1112,104 +1204,6 @@ namespace GitHub.Runner.Worker
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
}
|
||||
|
||||
private async Task DownloadRepositoryArchive(IExecutionContext executionContext, string downloadUrl, string downloadAuthToken, string archiveFile)
|
||||
{
|
||||
Trace.Info($"Save archive '{downloadUrl}' into {archiveFile}.");
|
||||
int retryCount = 0;
|
||||
|
||||
// Allow up to 20 * 60s for any action to be downloaded from github graph.
|
||||
int timeoutSeconds = 20 * 60;
|
||||
while (retryCount < 3)
|
||||
{
|
||||
using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
|
||||
using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken))
|
||||
{
|
||||
try
|
||||
{
|
||||
//open zip stream in async mode
|
||||
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadAuthToken);
|
||||
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
using (var response = await httpClient.GetAsync(downloadUrl))
|
||||
{
|
||||
var requestId = UrlUtil.GetGitHubRequestId(response.Headers);
|
||||
if (!string.IsNullOrEmpty(requestId))
|
||||
{
|
||||
Trace.Info($"Request URL: {downloadUrl} X-GitHub-Request-Id: {requestId} Http Status: {response.StatusCode}");
|
||||
}
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
using (var result = await response.Content.ReadAsStreamAsync())
|
||||
{
|
||||
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
|
||||
await fs.FlushAsync(actionDownloadCancellation.Token);
|
||||
|
||||
// download succeeded, break out of the retry loop.
break;
|
||||
}
|
||||
}
|
||||
else if (response.StatusCode == HttpStatusCode.NotFound)
|
||||
{
|
||||
// It doesn't make sense to retry in this case, so just stop
|
||||
throw new ActionNotFoundException(new Uri(downloadUrl), requestId);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Something else bad happened, let's go to our retry logic
|
||||
response.EnsureSuccessStatusCode();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info("Action download has been cancelled.");
|
||||
throw;
|
||||
}
|
||||
catch (OperationCanceledException ex) when (!executionContext.CancellationToken.IsCancellationRequested && retryCount >= 2)
|
||||
{
|
||||
Trace.Info($"Action download final retry timeout after {timeoutSeconds} seconds.");
|
||||
throw new TimeoutException($"Action '{downloadUrl}' download has timed out. Error: {ex.Message}");
|
||||
}
|
||||
catch (ActionNotFoundException)
|
||||
{
|
||||
Trace.Info($"The action at '{downloadUrl}' does not exist");
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex) when (retryCount < 2)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Fail to download archive '{downloadUrl}' -- Attempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
if (actionDownloadTimeout.Token.IsCancellationRequested)
|
||||
{
|
||||
// action download didn't finish within timeout
|
||||
executionContext.Warning($"Action '{downloadUrl}' didn't finish download within {timeoutSeconds} seconds.");
|
||||
}
|
||||
else
|
||||
{
|
||||
executionContext.Warning($"Failed to download action '{downloadUrl}'. Error: {ex.Message}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
|
||||
{
|
||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
|
||||
executionContext.Warning($"Back off {backOff.TotalSeconds} seconds before retry.");
|
||||
await Task.Delay(backOff);
|
||||
}
|
||||
}
|
||||
|
||||
ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
|
||||
executionContext.Debug($"Download '{downloadUrl}' to '{archiveFile}'");
|
||||
}
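        // DownloadRepositoryArchive retries up to three times, each attempt bounded by a
        // 20-minute linked cancellation token; between failed attempts it sleeps a random
        // 10-30 second backoff unless _GITHUB_ACTION_DOWNLOAD_NO_BACKOFF is set, and a 404
        // (ActionNotFoundException) short-circuits the retries entirely.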
|
||||
}
|
||||
|
||||
public sealed class Definition
|
||||
|
||||
@@ -134,28 +134,6 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
// Remove environment variable that may cause conflicts with the node within the runner.
|
||||
Environment.Remove("NODE_ICU_DATA"); // https://github.com/actions/runner/issues/795
|
||||
|
||||
if (string.Equals(Data.NodeVersion, Constants.Runner.DeprecatedNodeVersion, StringComparison.OrdinalIgnoreCase) && (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.Node16Warning) ?? false))
|
||||
{
|
||||
var repoAction = Action as RepositoryPathReference;
|
||||
var warningActions = new HashSet<string>();
|
||||
if (ExecutionContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings))
|
||||
{
|
||||
warningActions = StringUtil.ConvertFromJson<HashSet<string>>(deprecatedNodeWarnings);
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(repoAction.Name))
|
||||
{
|
||||
// local actions don't have a 'Name'
|
||||
warningActions.Add(repoAction.Path);
|
||||
}
|
||||
else
|
||||
{
|
||||
warningActions.Add($"{repoAction.Name}/{repoAction.Path ?? string.Empty}".TrimEnd('/') + $"@{repoAction.Ref}");
|
||||
}
|
||||
|
||||
ExecutionContext.Global.Variables.Set(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, StringUtil.ConvertToJson(warningActions));
|
||||
}
|
||||
|
||||
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager))
|
||||
using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager))
|
||||
{
|
||||
|
||||
@@ -83,40 +83,19 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
shellCommand = "pwsh";
|
||||
if (validateShellOnHost)
|
||||
{
|
||||
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
|
||||
{
|
||||
shellCommandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath);
|
||||
}
|
||||
else
|
||||
{
|
||||
shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
|
||||
}
|
||||
shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
|
||||
if (string.IsNullOrEmpty(shellCommandPath))
|
||||
{
|
||||
shellCommand = "powershell";
|
||||
Trace.Info($"Defaulting to {shellCommand}");
|
||||
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
|
||||
{
|
||||
shellCommandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath);
|
||||
}
|
||||
else
|
||||
{
|
||||
shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
|
||||
}
|
||||
shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
|
||||
}
|
||||
}
|
||||
#else
|
||||
shellCommand = "sh";
|
||||
if (validateShellOnHost)
|
||||
{
|
||||
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
|
||||
{
|
||||
shellCommandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath);
|
||||
}
|
||||
else
|
||||
{
|
||||
shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
|
||||
}
|
||||
shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
|
||||
}
|
||||
#endif
|
||||
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
|
||||
@@ -127,14 +106,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
shellCommand = parsed.shellCommand;
|
||||
if (validateShellOnHost)
|
||||
{
|
||||
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
|
||||
{
|
||||
shellCommandPath = WhichUtil.Which2(parsed.shellCommand, true, Trace, prependPath);
|
||||
}
|
||||
else
|
||||
{
|
||||
shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath);
|
||||
}
|
||||
shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath);
|
||||
}
|
||||
|
||||
argFormat = $"{parsed.shellArgs}".TrimStart();
|
||||
@@ -216,38 +188,17 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
#if OS_WINDOWS
|
||||
shellCommand = "pwsh";
|
||||
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
|
||||
{
|
||||
commandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath);
|
||||
}
|
||||
else
|
||||
{
|
||||
commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
|
||||
}
|
||||
commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
|
||||
if (string.IsNullOrEmpty(commandPath))
|
||||
{
|
||||
shellCommand = "powershell";
|
||||
Trace.Info($"Defaulting to {shellCommand}");
|
||||
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
|
||||
{
|
||||
commandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath);
|
||||
}
|
||||
else
|
||||
{
|
||||
commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
|
||||
}
|
||||
commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
|
||||
}
|
||||
ArgUtil.NotNullOrEmpty(commandPath, "Default Shell");
|
||||
#else
shellCommand = "sh";
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
    commandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath);
}
else
{
    commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
}
commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
#endif
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
}
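
For context, the hunks above remove the DistributedTask.UseWhich2 feature-flag branches so the handler always resolves the shell with a single Which lookup, falling back from pwsh to powershell on Windows and from bash to sh elsewhere. A compact sketch of that fallback lookup against PATH (this Which helper is a simplified stand-in for WhichUtil.Which, not the runner's implementation; PATHEXT handling on Windows is omitted):

    using System;
    using System.IO;
    using System.Linq;

    static class ShellResolverSketch
    {
        // Simplified PATH lookup: return the first match on PATH, or null when not found.
        public static string Which(string command, bool require = false)
        {
            var path = Environment.GetEnvironmentVariable("PATH") ?? string.Empty;
            var hit = path.Split(Path.PathSeparator)
                          .Where(dir => !string.IsNullOrEmpty(dir))
                          .Select(dir => Path.Combine(dir, command))
                          .FirstOrDefault(File.Exists);
            if (hit == null && require)
            {
                throw new FileNotFoundException($"'{command}' was not found on PATH.");
            }
            return hit;
        }

        // Default shell resolution on non-Windows hosts: prefer bash, require sh as the last resort.
        public static string ResolveDefaultShell()
        {
            return Which("bash") ?? Which("sh", require: true);
        }
    }
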
@@ -258,14 +209,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
if (!IsActionStep && systemShells.Contains(shell))
|
||||
{
|
||||
shellCommand = shell;
|
||||
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
|
||||
{
|
||||
commandPath = WhichUtil.Which2(shell, !isContainerStepHost, Trace, prependPath);
|
||||
}
|
||||
else
|
||||
{
|
||||
commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath);
|
||||
}
|
||||
commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath);
|
||||
if (shell == "bash")
|
||||
{
|
||||
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat("sh");
|
||||
@@ -280,14 +224,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
var parsed = ScriptHandlerHelpers.ParseShellOptionString(shell);
|
||||
shellCommand = parsed.shellCommand;
|
||||
// For non-ContainerStepHost, the command must be located on the host by Which
|
||||
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
|
||||
{
|
||||
commandPath = WhichUtil.Which2(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
|
||||
}
|
||||
else
|
||||
{
|
||||
commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
|
||||
}
|
||||
commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
|
||||
argFormat = $"{parsed.shellArgs}".TrimStart();
|
||||
if (string.IsNullOrEmpty(argFormat))
|
||||
{
|
||||
|
||||
@@ -51,13 +51,6 @@ namespace GitHub.Runner.Worker
|
||||
HostContext.UserAgents.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value));
|
||||
}
|
||||
|
||||
var jobServerQueueTelemetry = false;
|
||||
if (message.Variables.TryGetValue("DistributedTask.EnableJobServerQueueTelemetry", out VariableValue enableJobServerQueueTelemetry) &&
|
||||
!string.IsNullOrEmpty(enableJobServerQueueTelemetry?.Value))
|
||||
{
|
||||
jobServerQueueTelemetry = StringUtil.ConvertToBoolean(enableJobServerQueueTelemetry.Value);
|
||||
}
|
||||
|
||||
ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||
if (MessageUtil.IsRunServiceJob(message.MessageType))
|
||||
{
|
||||
@@ -79,7 +72,7 @@ namespace GitHub.Runner.Worker
|
||||
launchServer.InitializeLaunchClient(new Uri(launchReceiverEndpoint), accessToken);
|
||||
}
|
||||
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
|
||||
_jobServerQueue.Start(message, resultsServiceOnly: true, enableTelemetry: jobServerQueueTelemetry);
|
||||
_jobServerQueue.Start(message, resultServiceOnly: true);
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -101,7 +94,7 @@ namespace GitHub.Runner.Worker
|
||||
VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, delegatingHandlers);
|
||||
await jobServer.ConnectAsync(jobConnection);
|
||||
|
||||
_jobServerQueue.Start(message, enableTelemetry: jobServerQueueTelemetry);
|
||||
_jobServerQueue.Start(message);
|
||||
server = jobServer;
|
||||
}
|
||||
|
||||
@@ -283,12 +276,6 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
jobContext.Debug($"Finishing: {message.JobDisplayName}");
|
||||
TaskResult result = jobContext.Complete(taskResult);
|
||||
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings))
|
||||
{
|
||||
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(deprecatedNodeWarnings));
|
||||
jobContext.Warning(string.Format(Constants.Runner.DetectedNodeAfterEndOfLifeMessage, actions));
|
||||
}
|
||||
|
||||
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings))
|
||||
{
|
||||
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node16ForceWarnings));
|
||||
@@ -388,12 +375,6 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings))
|
||||
{
|
||||
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(deprecatedNodeWarnings));
|
||||
jobContext.Warning(string.Format(Constants.Runner.DetectedNodeAfterEndOfLifeMessage, actions));
|
||||
}
|
||||
|
||||
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings))
|
||||
{
|
||||
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node16ForceWarnings));
|
||||
@@ -402,12 +383,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
try
|
||||
{
|
||||
var jobQueueTelemetry = await ShutdownQueue(throwOnFailure: true);
|
||||
// include any job telemetry from the background upload process.
|
||||
if (jobQueueTelemetry.Count > 0)
|
||||
{
|
||||
jobContext.Global.JobTelemetry.AddRange(jobQueueTelemetry);
|
||||
}
|
||||
await ShutdownQueue(throwOnFailure: true);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
@@ -509,7 +485,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<IList<JobTelemetry>> ShutdownQueue(bool throwOnFailure)
|
||||
private async Task ShutdownQueue(bool throwOnFailure)
|
||||
{
|
||||
if (_jobServerQueue != null)
|
||||
{
|
||||
@@ -517,7 +493,6 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
Trace.Info("Shutting down the job server queue.");
|
||||
await _jobServerQueue.ShutdownAsync();
|
||||
return _jobServerQueue.JobTelemetries;
|
||||
}
|
||||
catch (Exception ex) when (!throwOnFailure)
|
||||
{
|
||||
@@ -529,8 +504,6 @@ namespace GitHub.Runner.Worker
|
||||
_jobServerQueue = null; // Prevent multiple attempts.
|
||||
}
|
||||
}
|
||||
|
||||
return Array.Empty<JobTelemetry>();
|
||||
}
|
||||
}
|
||||
}
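
For context, the JobRunner hunks above change ShutdownQueue to return any JobTelemetry collected by the background upload process so it can be merged into the job's telemetry before completion. A minimal sketch of that shape, with the queue interface reduced to the two members the pattern needs (the types here are illustrative stand-ins, not the runner's interfaces):

    using System;
    using System.Collections.Generic;
    using System.Threading.Tasks;

    record JobTelemetry(string Type, string Message);

    interface IJobServerQueueSketch
    {
        Task ShutdownAsync();
        IList<JobTelemetry> JobTelemetries { get; }
    }

    class JobRunnerSketch
    {
        private IJobServerQueueSketch _jobServerQueue;

        public JobRunnerSketch(IJobServerQueueSketch queue) => _jobServerQueue = queue;

        // Drain the queue once and hand back whatever telemetry it gathered;
        // swallow failures unless the caller asked to throw.
        private async Task<IList<JobTelemetry>> ShutdownQueueAsync(bool throwOnFailure)
        {
            if (_jobServerQueue != null)
            {
                try
                {
                    await _jobServerQueue.ShutdownAsync();
                    return _jobServerQueue.JobTelemetries;
                }
                catch (Exception) when (!throwOnFailure)
                {
                    // Best-effort shutdown: ignore and fall through.
                }
                finally
                {
                    _jobServerQueue = null; // Prevent multiple attempts.
                }
            }

            return Array.Empty<JobTelemetry>();
        }
    }
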
@@ -1,22 +0,0 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace GitHub.Services.Common
|
||||
{
|
||||
// Set of classes used to bypass token operations
|
||||
// Results Service and External services follow a different auth model but
|
||||
// we are required to pass in a credentials object to create a RawHttpMessageHandler
|
||||
public class NoOpCredentials : FederatedCredential
|
||||
{
|
||||
public NoOpCredentials(IssuedToken initialToken) : base(initialToken)
|
||||
{
|
||||
}
|
||||
|
||||
public override VssCredentialsType CredentialType { get; }
|
||||
protected override IssuedTokenProvider OnCreateTokenProvider(Uri serverUrl, IHttpResponse response)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -109,7 +109,7 @@ namespace GitHub.Services.Common
|
||||
lock (m_thisLock)
|
||||
{
|
||||
// Ensure that we attempt to use the most appropriate authentication mechanism by default.
|
||||
if (m_tokenProvider == null && !(this.Credentials is NoOpCredentials))
|
||||
if (m_tokenProvider == null)
|
||||
{
|
||||
m_tokenProvider = this.Credentials.CreateTokenProvider(request.RequestUri, null, null);
|
||||
}
|
||||
@@ -121,8 +121,7 @@ namespace GitHub.Services.Common
|
||||
HttpResponseMessageWrapper responseWrapper;
|
||||
|
||||
Boolean lastResponseDemandedProxyAuth = false;
|
||||
// do not retry if we cannot recreate tokens
|
||||
Int32 retries = this.Credentials is NoOpCredentials ? 0 : m_maxAuthRetries;
|
||||
Int32 retries = m_maxAuthRetries;
|
||||
try
|
||||
{
|
||||
tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
|
||||
@@ -139,12 +138,8 @@ namespace GitHub.Services.Common
|
||||
}
|
||||
|
||||
// Let's start with sending a token
|
||||
IssuedToken token = null;
|
||||
if (m_tokenProvider != null)
|
||||
{
|
||||
token = await m_tokenProvider.GetTokenAsync(null, tokenSource.Token).ConfigureAwait(false);
|
||||
ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
|
||||
}
|
||||
IssuedToken token = await m_tokenProvider.GetTokenAsync(null, tokenSource.Token).ConfigureAwait(false);
|
||||
ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
|
||||
|
||||
// The WinHttpHandler will chunk any content that does not have a computed length which is
|
||||
// not what we want. By loading into a buffer up-front we bypass this behavior and there is
|
||||
|
||||
@@ -461,9 +461,6 @@ namespace GitHub.DistributedTask.WebApi
|
||||
long? lastMessageId = null,
|
||||
TaskAgentStatus? status = null,
|
||||
string runnerVersion = null,
|
||||
string os = null,
|
||||
string architecture = null,
|
||||
bool? disableUpdate = null,
|
||||
object userState = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
@@ -486,21 +483,6 @@ namespace GitHub.DistributedTask.WebApi
|
||||
queryParams.Add("runnerVersion", runnerVersion);
|
||||
}
|
||||
|
||||
if (os != null)
|
||||
{
|
||||
queryParams.Add("os", os);
|
||||
}
|
||||
|
||||
if (architecture != null)
|
||||
{
|
||||
queryParams.Add("architecture", architecture);
|
||||
}
|
||||
|
||||
if (disableUpdate != null)
|
||||
{
|
||||
queryParams.Add("disableUpdate", disableUpdate.Value.ToString().ToLower());
|
||||
}
|
||||
|
||||
return SendAsync<TaskAgentMessage>(
|
||||
httpMethod,
|
||||
locationId,
|
||||
|
||||
@@ -123,11 +123,8 @@ namespace GitHub.DistributedTask.Logging
var secretSection = string.Empty;
if (value.Contains("&+"))
{
    if (value.Length > value.IndexOf("&+") + "&+".Length + 1)
    {
        // +1 to skip the letter that got colored
        secretSection = value.Substring(value.IndexOf("&+") + "&+".Length + 1);
    }
    // +1 to skip the letter that got colored
    secretSection = value.Substring(value.IndexOf("&+") + "&+".Length + 1);
}
else
{
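
For reference, the '&+' handling above keeps everything after the first '&+' marker and skips one extra character, because that character is the one that received the color escape; the hunk drops the length guard around that substring. A tiny standalone worked example (not the runner's masker):

    using System;

    static class SecretSectionSketch
    {
        // For "secret&+Xecret" the marker is "&+", "X" is the colored letter,
        // so the section that still needs masking is "ecret".
        public static string SecretSectionAfterColorMarker(string value)
        {
            const string marker = "&+";
            var index = value.IndexOf(marker, StringComparison.Ordinal);
            if (index < 0)
            {
                return string.Empty;
            }

            var start = index + marker.Length + 1; // +1 to skip the letter that got colored
            return start <= value.Length ? value.Substring(start) : string.Empty;
        }
    }

    // Example: SecretSectionAfterColorMarker("secret&+Xecret") returns "ecret".
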
@@ -2,7 +2,6 @@
|
||||
|
||||
namespace GitHub.DistributedTask.WebApi
|
||||
{
|
||||
// do NOT add new enum since it will break backward compatibility with GHES
|
||||
public enum JobTelemetryType
|
||||
{
|
||||
[EnumMember]
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using Newtonsoft.Json;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.Serialization;
|
||||
|
||||
|
||||
@@ -16,32 +15,35 @@ namespace GitHub.DistributedTask.WebApi
|
||||
{
|
||||
}
|
||||
|
||||
[DataMember(Name = "target_version")]
|
||||
public RunnerRefreshMessage(
|
||||
ulong runnerId,
|
||||
String targetVersion,
|
||||
int? timeoutInSeconds = null)
|
||||
{
|
||||
this.RunnerId = runnerId;
|
||||
this.TimeoutInSeconds = timeoutInSeconds ?? TimeSpan.FromMinutes(60).Seconds;
|
||||
this.TargetVersion = targetVersion;
|
||||
}
|
||||
|
||||
[DataMember]
|
||||
public ulong RunnerId
|
||||
{
|
||||
get;
|
||||
private set;
|
||||
}
|
||||
|
||||
[DataMember]
|
||||
public int TimeoutInSeconds
|
||||
{
|
||||
get;
|
||||
private set;
|
||||
}
|
||||
|
||||
[DataMember]
|
||||
public String TargetVersion
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(Name = "download_url")]
|
||||
public string DownloadUrl
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(Name = "sha256_checksum")]
|
||||
public string SHA256Checksum
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(Name = "os")]
|
||||
public string OS
|
||||
{
|
||||
get;
|
||||
set;
|
||||
private set;
|
||||
}
|
||||
}
|
||||
}
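
For context, the message contract above maps snake_case wire names such as target_version and download_url onto the properties via [DataMember(Name = ...)]. A minimal round-trip sketch showing that Json.NET honors those attributes on a [DataContract] type (the payload values are made up for illustration):

    using System.Runtime.Serialization;
    using Newtonsoft.Json;

    [DataContract]
    class RefreshMessageSketch
    {
        [DataMember(Name = "target_version")]
        public string TargetVersion { get; set; }

        [DataMember(Name = "download_url")]
        public string DownloadUrl { get; set; }

        [DataMember(Name = "sha256_checksum")]
        public string SHA256Checksum { get; set; }
    }

    static class RefreshMessageDemo
    {
        public static void Main()
        {
            var json = "{\"target_version\":\"2.999.0\",\"download_url\":\"https://example.test/runner.tar.gz\",\"sha256_checksum\":\"abc123\"}";
            var message = JsonConvert.DeserializeObject<RefreshMessageSketch>(json);
            // Properties are populated from the snake_case names declared on the DataMember attributes.
            System.Console.WriteLine($"{message.TargetVersion} from {message.DownloadUrl}");
        }
    }
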
@@ -13,7 +13,6 @@
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Azure.Storage.Blobs" Version="12.19.1" />
|
||||
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
|
||||
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
|
||||
<PackageReference Include="Microsoft.AspNet.WebApi.Client" Version="5.2.9" />
|
||||
@@ -23,8 +22,6 @@
|
||||
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
|
||||
<PackageReference Include="Minimatch" Version="2.0.0" />
|
||||
<PackageReference Include="YamlDotNet.Signed" Version="5.3.0" />
|
||||
<PackageReference Include="System.Net.Http" Version="4.3.4" />
|
||||
<PackageReference Include="System.Text.RegularExpressions" Version="4.3.1" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -59,9 +59,6 @@ namespace GitHub.Actions.RunService.WebApi
|
||||
public async Task<TaskAgentMessage> GetRunnerMessageAsync(
|
||||
string runnerVersion,
|
||||
TaskAgentStatus? status,
|
||||
string os = null,
|
||||
string architecture = null,
|
||||
bool? disableUpdate = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
@@ -78,21 +75,6 @@ namespace GitHub.Actions.RunService.WebApi
|
||||
queryParams.Add("runnerVersion", runnerVersion);
|
||||
}
|
||||
|
||||
if (os != null)
|
||||
{
|
||||
queryParams.Add("os", os);
|
||||
}
|
||||
|
||||
if (architecture != null)
|
||||
{
|
||||
queryParams.Add("architecture", architecture);
|
||||
}
|
||||
|
||||
if (disableUpdate != null)
|
||||
{
|
||||
queryParams.Add("disableUpdate", disableUpdate.Value.ToString().ToLower());
|
||||
}
|
||||
|
||||
var result = await SendAsync<TaskAgentMessage>(
|
||||
new HttpMethod("GET"),
|
||||
requestUri: requestUri,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
@@ -7,11 +8,8 @@ using System.Net.Http.Headers;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Net.Http.Formatting;
|
||||
using Azure;
|
||||
using Azure.Storage.Blobs;
|
||||
using Azure.Storage.Blobs.Models;
|
||||
using Azure.Storage.Blobs.Specialized;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.Results.Contracts;
|
||||
using Sdk.WebApi.WebApi;
|
||||
|
||||
@@ -23,15 +21,13 @@ namespace GitHub.Services.Results.Client
|
||||
Uri baseUrl,
|
||||
HttpMessageHandler pipeline,
|
||||
string token,
|
||||
bool disposeHandler,
|
||||
bool useSdk)
|
||||
bool disposeHandler)
|
||||
: base(baseUrl, pipeline, disposeHandler)
|
||||
{
|
||||
m_token = token;
|
||||
m_resultsServiceUrl = baseUrl;
|
||||
m_formatter = new JsonMediaTypeFormatter();
|
||||
m_changeIdCounter = 1;
|
||||
m_useSdk = useSdk;
|
||||
}
|
||||
|
||||
// Get Sas URL calls
|
||||
@@ -95,6 +91,7 @@ namespace GitHub.Services.Results.Client
|
||||
}
|
||||
|
||||
// Create metadata calls
|
||||
|
||||
private async Task SendRequest<R>(Uri uri, CancellationToken cancellationToken, R request, string timestamp)
|
||||
{
|
||||
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
|
||||
@@ -164,164 +161,73 @@ namespace GitHub.Services.Results.Client
|
||||
await SendRequest<JobLogsMetadataCreate>(createJobLogsMetadataEndpoint, cancellationToken, request, timestamp);
|
||||
}
|
||||
|
||||
private (Uri path, string sas) ParseSasToken(string url)
|
||||
private async Task<HttpResponseMessage> UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
|
||||
{
|
||||
if (String.IsNullOrEmpty(url))
|
||||
// Upload the file to the url
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url)
|
||||
{
|
||||
throw new Exception($"SAS url is empty");
|
||||
}
|
||||
|
||||
var blobUri = new UriBuilder(url);
|
||||
var sasUrl = blobUri.Query.Substring(1); //remove starting "?"
|
||||
blobUri.Query = null; // remove query params
|
||||
return (blobUri.Uri, sasUrl);
|
||||
}
|
||||
|
||||
private BlobClient GetBlobClient(string url)
|
||||
{
|
||||
var blobUri = ParseSasToken(url);
|
||||
|
||||
var opts = new BlobClientOptions
|
||||
{
|
||||
Retry =
|
||||
{
|
||||
MaxRetries = Constants.DefaultBlobUploadRetries,
|
||||
NetworkTimeout = TimeSpan.FromSeconds(Constants.DefaultNetworkTimeoutInSeconds)
|
||||
}
|
||||
Content = new StreamContent(file)
|
||||
};
|
||||
|
||||
return new BlobClient(blobUri.path, new AzureSasCredential(blobUri.sas), opts);
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
|
||||
}
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
|
||||
}
|
||||
return response;
|
||||
}
|
||||
}
|
||||
|
||||
private AppendBlobClient GetAppendBlobClient(string url)
|
||||
private async Task<HttpResponseMessage> CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken)
|
||||
{
|
||||
var blobUri = ParseSasToken(url);
|
||||
|
||||
var opts = new BlobClientOptions
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url)
|
||||
{
|
||||
Retry =
|
||||
Content = new StringContent("")
|
||||
};
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
|
||||
request.Content.Headers.Add("Content-Length", "0");
|
||||
}
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
MaxRetries = Constants.DefaultBlobUploadRetries,
|
||||
NetworkTimeout = TimeSpan.FromSeconds(Constants.DefaultNetworkTimeoutInSeconds)
|
||||
throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
|
||||
}
|
||||
return response;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<HttpResponseMessage> UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
|
||||
{
|
||||
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
|
||||
// Upload the file to the url
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
|
||||
{
|
||||
Content = new StreamContent(file)
|
||||
};
|
||||
|
||||
return new AppendBlobClient(blobUri.path, new AzureSasCredential(blobUri.sas), opts);
|
||||
}
|
||||
|
||||
private async Task UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
|
||||
{
|
||||
if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
var blobClient = GetBlobClient(url);
|
||||
try
|
||||
{
|
||||
await blobClient.UploadAsync(file, cancellationToken);
|
||||
}
|
||||
catch (RequestFailedException e)
|
||||
{
|
||||
throw new Exception($"Failed to upload block to Azure blob: {e.Message}");
|
||||
}
|
||||
request.Content.Headers.Add("Content-Length", fileSize.ToString());
|
||||
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
|
||||
}
|
||||
else
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
// Upload the file to the url
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url)
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
Content = new StreamContent(file)
|
||||
};
|
||||
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
|
||||
}
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async Task CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken)
|
||||
{
|
||||
if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
var appendBlobClient = GetAppendBlobClient(url);
|
||||
try
|
||||
{
|
||||
await appendBlobClient.CreateAsync(cancellationToken: cancellationToken);
|
||||
}
|
||||
catch (RequestFailedException e)
|
||||
{
|
||||
throw new Exception($"Failed to create append blob in Azure blob: {e.Message}");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url)
|
||||
{
|
||||
Content = new StringContent("")
|
||||
};
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
|
||||
request.Content.Headers.Add("Content-Length", "0");
|
||||
}
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async Task UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
|
||||
{
|
||||
if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
var appendBlobClient = GetAppendBlobClient(url);
|
||||
try
|
||||
{
|
||||
await appendBlobClient.AppendBlockAsync(file, cancellationToken: cancellationToken);
|
||||
if (finalize)
|
||||
{
|
||||
await appendBlobClient.SealAsync(cancellationToken: cancellationToken);
|
||||
}
|
||||
}
|
||||
catch (RequestFailedException e)
|
||||
{
|
||||
throw new Exception($"Failed to upload append block in Azure blob: {e.Message}");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
|
||||
// Upload the file to the url
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
|
||||
{
|
||||
Content = new StreamContent(file)
|
||||
};
|
||||
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add("Content-Length", fileSize.ToString());
|
||||
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
|
||||
}
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
|
||||
}
|
||||
throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
|
||||
}
|
||||
return response;
|
||||
}
|
||||
}
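
For context, the hunks above drop the Azure Storage SDK clients and drive the SAS URL with plain HTTP again: a block blob is a single PUT with x-ms-blob-type: BlockBlob, while an append blob is created empty with x-ms-blob-type: AppendBlob and then extended with PUT ...&comp=appendblock calls, adding &seal=true on the final block. A condensed sketch of the append sequence, assuming the caller already holds a writable SAS URL (the helper names are illustrative, and the Content-Length and x-ms-blob-sealed headers set in the diff are omitted for brevity):

    using System;
    using System.IO;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;

    static class AppendBlobUploadSketch
    {
        private static readonly HttpClient Http = new HttpClient();

        // Create an empty append blob at the SAS URL.
        public static async Task CreateAppendBlobAsync(string sasUrl, CancellationToken ct)
        {
            using var request = new HttpRequestMessage(HttpMethod.Put, sasUrl) { Content = new StringContent("") };
            request.Content.Headers.Add("x-ms-blob-type", "AppendBlob");
            using var response = await Http.SendAsync(request, ct);
            response.EnsureSuccessStatusCode();
        }

        // Append one block of content; seal the blob when this is the last block.
        public static async Task AppendBlockAsync(string sasUrl, Stream content, bool finalize, CancellationToken ct)
        {
            var url = sasUrl + (finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock");
            using var request = new HttpRequestMessage(HttpMethod.Put, url) { Content = new StreamContent(content) };
            using var response = await Http.SendAsync(request, ct);
            if (!response.IsSuccessStatusCode)
            {
                throw new Exception($"Append failed: {(int)response.StatusCode} {response.ReasonPhrase}");
            }
        }
    }
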
@@ -345,42 +251,13 @@ namespace GitHub.Services.Results.Client
|
||||
// Upload the file
|
||||
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
|
||||
{
|
||||
await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
|
||||
var response = await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
|
||||
}
|
||||
|
||||
// Send step summary upload complete message
|
||||
await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken);
|
||||
}
|
||||
|
||||
private async Task UploadLogFile(string file, bool finalize, bool firstBlock, string sasUrl, string blobStorageType,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (firstBlock && finalize)
|
||||
{
|
||||
// This is the one and only block, just use a block blob
|
||||
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
|
||||
{
|
||||
await UploadBlockFileAsync(sasUrl, blobStorageType, fileStream, cancellationToken);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// This is either not the first block, which means it's using appendBlob; or first block and need to wait for additional blocks. Using append blob in either case.
|
||||
// Create the Append blob
|
||||
if (firstBlock)
|
||||
{
|
||||
await CreateAppendFileAsync(sasUrl, blobStorageType, cancellationToken);
|
||||
}
|
||||
|
||||
// Upload content
|
||||
var fileSize = new FileInfo(file).Length;
|
||||
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
|
||||
{
|
||||
await UploadAppendFileAsync(sasUrl, blobStorageType, fileStream, finalize, fileSize, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
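
For context, the removed UploadLogFile helper above picks the blob flavor per chunk: when a chunk is both the first and the last block a single block-blob PUT is cheapest, otherwise the first chunk creates an append blob and later chunks append to it, sealing on finalize. A small standalone sketch of that decision, taking the three raw-HTTP upload helpers as delegates (all names are illustrative):

    using System;
    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;

    static class LogUploadSketch
    {
        // createAppendBlob / appendBlock / uploadBlockBlob correspond to the raw-HTTP
        // helpers sketched earlier; they are passed in here to keep this snippet standalone.
        public static async Task UploadLogChunkAsync(
            string file,
            bool firstBlock,
            bool finalize,
            Func<CancellationToken, Task> createAppendBlob,
            Func<Stream, bool, CancellationToken, Task> appendBlock,
            Func<Stream, CancellationToken, Task> uploadBlockBlob,
            CancellationToken ct)
        {
            if (firstBlock && finalize)
            {
                // One and only chunk: a single block-blob PUT is enough.
                using var stream = File.OpenRead(file);
                await uploadBlockBlob(stream, ct);
                return;
            }

            // Otherwise use an append blob: create it on the first chunk, then append,
            // sealing the blob when the final chunk arrives.
            if (firstBlock)
            {
                await createAppendBlob(ct);
            }

            using var fileStream = File.OpenRead(file);
            await appendBlock(fileStream, finalize, ct);
        }
    }
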
// Handle file upload for step log
|
||||
public async Task UploadResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
|
||||
{
|
||||
@@ -391,7 +268,18 @@ namespace GitHub.Services.Results.Client
|
||||
throw new Exception("Failed to get step log upload url");
|
||||
}
|
||||
|
||||
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
|
||||
// Create the Append blob
|
||||
if (firstBlock)
|
||||
{
|
||||
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
|
||||
}
|
||||
|
||||
// Upload content
|
||||
var fileSize = new FileInfo(file).Length;
|
||||
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
|
||||
{
|
||||
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
|
||||
}
|
||||
|
||||
// Update metadata
|
||||
if (finalize)
|
||||
@@ -411,7 +299,18 @@ namespace GitHub.Services.Results.Client
|
||||
throw new Exception("Failed to get job log upload url");
|
||||
}
|
||||
|
||||
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
|
||||
// Create the Append blob
|
||||
if (firstBlock)
|
||||
{
|
||||
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
|
||||
}
|
||||
|
||||
// Upload content
|
||||
var fileSize = new FileInfo(file).Length;
|
||||
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
|
||||
{
|
||||
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
|
||||
}
|
||||
|
||||
// Update metadata
|
||||
if (finalize)
|
||||
@@ -496,7 +395,6 @@ namespace GitHub.Services.Results.Client
|
||||
private Uri m_resultsServiceUrl;
|
||||
private string m_token;
|
||||
private int m_changeIdCounter;
|
||||
private bool m_useSdk;
|
||||
}
|
||||
|
||||
// Constants specific to results
|
||||
@@ -514,9 +412,6 @@ namespace GitHub.Services.Results.Client
|
||||
public static readonly string ResultsProtoApiV1Endpoint = "twirp/github.actions.results.api.v1.WorkflowStepUpdateService/";
|
||||
public static readonly string WorkflowStepsUpdate = ResultsProtoApiV1Endpoint + "WorkflowStepsUpdate";
|
||||
|
||||
public static readonly int DefaultNetworkTimeoutInSeconds = 30;
|
||||
public static readonly int DefaultBlobUploadRetries = 3;
|
||||
|
||||
public static readonly string AzureBlobSealedHeader = "x-ms-blob-sealed";
|
||||
public static readonly string AzureBlobTypeHeader = "x-ms-blob-type";
|
||||
public static readonly string AzureBlockBlob = "BlockBlob";
|
||||
|
||||
@@ -120,7 +120,6 @@ namespace GitHub.Runner.Common.Tests
|
||||
[InlineData("secret&+secret&secret", "secret&+\x0033[96ms\x0033[0mecret&secret", "***\x0033[96ms\x0033[0m***")]
|
||||
[InlineData("secret&+secret&+secret", "secret&+\x0033[96ms\x0033[0mecret&+secret", "***\x0033[96ms\x0033[0m***")]
|
||||
[InlineData("secret&+secret&secret&+secret", "secret&+\x0033[96ms\x0033[0mecret&secret&+secret", "***\x0033[96ms\x0033[0m***")]
|
||||
[InlineData("secret&secret&+", "secret&secret&+\x0033[96m\x0033[0m", "***\x0033[96m\x0033[0m")]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Common")]
|
||||
public void SecretSectionMasking(string secret, string rawOutput, string maskedOutput)
|
||||
|
||||
@@ -110,7 +110,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
|
||||
_runnerServer.Setup(x => x.GetAgentPoolsAsync(It.IsAny<string>(), It.IsAny<TaskAgentPoolType>())).Returns(Task.FromResult(expectedPools));
|
||||
|
||||
var expectedAgents = new List<TaskAgent>();
|
||||
_runnerServer.Setup(x => x.GetAgentsAsync(It.IsAny<string>())).Returns(Task.FromResult(expectedAgents));
|
||||
_runnerServer.Setup(x => x.GetAgentsAsync(It.IsAny<int>(), It.IsAny<string>())).Returns(Task.FromResult(expectedAgents));
|
||||
|
||||
_runnerServer.Setup(x => x.AddAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
|
||||
_runnerServer.Setup(x => x.ReplaceAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
|
||||
|
||||
@@ -192,8 +192,8 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
|
||||
.Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) =>
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()))
|
||||
.Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken) =>
|
||||
{
|
||||
await Task.Yield();
|
||||
return messages.Dequeue();
|
||||
@@ -208,7 +208,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
//Assert
|
||||
_runnerServer
|
||||
.Verify(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()), Times.Exactly(arMessages.Length));
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(arMessages.Length));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -293,7 +293,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()))
|
||||
.Throws(new TaskAgentAccessTokenExpiredException("test"));
|
||||
try
|
||||
{
|
||||
@@ -311,7 +311,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
//Assert
|
||||
_runnerServer
|
||||
.Verify(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()), Times.Once);
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Once);
|
||||
|
||||
_runnerServer
|
||||
.Verify(x => x.DeleteAgentSessionAsync(
|
||||
|
||||
@@ -23,6 +23,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
private Mock<IConfigurationStore> _configStore;
|
||||
private Mock<IJobDispatcher> _jobDispatcher;
|
||||
private AgentRefreshMessage _refreshMessage = new(1, "2.999.0");
|
||||
private List<TrimmedPackageMetadata> _trimmedPackages = new();
|
||||
|
||||
#if !OS_WINDOWS
|
||||
private string _packageUrl = null;
|
||||
@@ -70,6 +71,12 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
}
|
||||
}
|
||||
|
||||
using (var client = new HttpClient())
|
||||
{
|
||||
var json = await client.GetStringAsync($"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}-trimmedpackages.json");
|
||||
_trimmedPackages = StringUtil.ConvertFromJson<List<TrimmedPackageMetadata>>(json);
|
||||
}
|
||||
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl }));
|
||||
|
||||
@@ -84,10 +91,12 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
@@ -143,10 +152,12 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
@@ -194,10 +205,12 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
@@ -247,10 +260,12 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
@@ -290,6 +305,495 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_CloneHash_RuntimeAndExternals()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
|
||||
|
||||
var p1 = new ProcessInvokerWrapper();
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper();
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper();
|
||||
p3.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
updater.Initialize(hc);
|
||||
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = new List<TrimmedPackageMetadata>() { new TrimmedPackageMetadata() } }));
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
Assert.True(result);
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
|
||||
|
||||
FieldInfo contentHashesProperty = updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
|
||||
Assert.NotNull(contentHashesProperty);
|
||||
Dictionary<string, string> contentHashes = (Dictionary<string, string>)contentHashesProperty.GetValue(updater);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes));
|
||||
|
||||
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
|
||||
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
|
||||
|
||||
Assert.Equal(File.ReadAllText(dotnetRuntimeHashFile).Trim(), contentHashes["dotnetRuntime"]);
|
||||
Assert.Equal(File.ReadAllText(externalsHashFile).Trim(), contentHashes["externals"]);
|
||||
}
|
||||
finally
|
||||
{
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_Cancel_CloneHashTask_WhenNotNeeded()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new Mock<IHttpClientHandlerFactory>().Object);
|
||||
|
||||
var p1 = new ProcessInvokerWrapper();
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper();
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper();
|
||||
p3.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
updater.Initialize(hc);
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
|
||||
FieldInfo contentHashesProperty = updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
|
||||
Assert.NotNull(contentHashesProperty);
|
||||
Dictionary<string, string> contentHashes = (Dictionary<string, string>)contentHashesProperty.GetValue(updater);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes));
|
||||
|
||||
Assert.NotEqual(2, contentHashes.Count);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
hc.GetTrace().Error(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_UseExternalsTrimmedPackage()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
|
||||
|
||||
var p1 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper(); // un-tar
|
||||
p3.Initialize(hc);
|
||||
var p4 = new ProcessInvokerWrapper(); // node -v
|
||||
p4.Initialize(hc);
|
||||
var p5 = new ProcessInvokerWrapper(); // node -v
|
||||
p5.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p4);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p5);
|
||||
updater.Initialize(hc);
|
||||
|
||||
var trim = _trimmedPackages.Where(x => !x.TrimmedContents.ContainsKey("dotnetRuntime")).ToList();
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
Assert.True(result);
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
|
||||
}
|
||||
finally
|
||||
{
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
|
||||
}
|
||||
|
||||
var traceFile = Path.GetTempFileName();
|
||||
File.Copy(hc.TraceFileName, traceFile, true);
|
||||
|
||||
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
|
||||
var externalsHash = await File.ReadAllTextAsync(externalsHashFile);
|
||||
|
||||
if (externalsHash == trim[0].TrimmedContents["externals"])
|
||||
{
|
||||
Assert.Contains("Use trimmed (externals) package", File.ReadAllText(traceFile));
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_UseExternalsRuntimeTrimmedPackage()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
|
||||
|
||||
var p1 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper(); // un-tar
|
||||
p3.Initialize(hc);
|
||||
var p4 = new ProcessInvokerWrapper(); // node -v
|
||||
p4.Initialize(hc);
|
||||
var p5 = new ProcessInvokerWrapper(); // node -v
|
||||
p5.Initialize(hc);
|
||||
var p6 = new ProcessInvokerWrapper(); // runner -v
|
||||
p6.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p4);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p5);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p6);
|
||||
updater.Initialize(hc);
|
||||
|
||||
var trim = _trimmedPackages.Where(x => x.TrimmedContents.ContainsKey("dotnetRuntime") && x.TrimmedContents.ContainsKey("externals")).ToList();
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
Assert.True(result);
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
|
||||
}
|
||||
finally
|
||||
{
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
|
||||
}
|
||||
|
||||
var traceFile = Path.GetTempFileName();
|
||||
File.Copy(hc.TraceFileName, traceFile, true);
|
||||
|
||||
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
|
||||
var externalsHash = await File.ReadAllTextAsync(externalsHashFile);
|
||||
|
||||
var runtimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
|
||||
var runtimeHash = await File.ReadAllTextAsync(runtimeHashFile);
|
||||
|
||||
if (externalsHash == trim[0].TrimmedContents["externals"] &&
|
||||
runtimeHash == trim[0].TrimmedContents["dotnetRuntime"])
|
||||
{
|
||||
Assert.Contains("Use trimmed (runtime+externals) package", File.ReadAllText(traceFile));
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_NotUseExternalsRuntimeTrimmedPackageOnHashMismatch()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
|
||||
|
||||
var p1 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper(); // un-tar
|
||||
p3.Initialize(hc);
|
||||
var p4 = new ProcessInvokerWrapper(); // node -v
|
||||
p4.Initialize(hc);
|
||||
var p5 = new ProcessInvokerWrapper(); // node -v
|
||||
p5.Initialize(hc);
|
||||
var p6 = new ProcessInvokerWrapper(); // runner -v
|
||||
p6.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p4);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p5);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p6);
|
||||
updater.Initialize(hc);
|
||||
|
||||
var trim = _trimmedPackages.ToList();
foreach (var package in trim)
{
foreach (var hash in package.TrimmedContents.Keys)
{
package.TrimmedContents[hash] = "mismatch";
}
}

_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));


_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, ulong a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
}

var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_FallbackToFullPackage()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper(); // hashfiles
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper(); // hashfiles
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper(); // un-tar trim
p3.Initialize(hc);
var p4 = new ProcessInvokerWrapper(); // un-tar full
p4.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
hc.EnqueueInstance<IProcessInvoker>(p4);
updater.Initialize(hc);

var trim = _trimmedPackages.ToList();
foreach (var package in trim)
{
package.HashValue = "mismatch";
}

_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, ulong a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
}

var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);
if (File.ReadAllText(traceFile).Contains("Use trimmed (runtime+externals) package"))
{
Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
}
else
{
hc.GetTrace().Warning("Skipping the 'TestSelfUpdateAsync_FallbackToFullPackage' test, as the `externals` or `runtime` hashes have been updated");
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
}
}
#endif

@@ -1,234 +0,0 @@
#if !(OS_WINDOWS && ARM64)
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Reflection;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Sdk;
using Moq;
using Xunit;

namespace GitHub.Runner.Common.Tests.Listener
{
public sealed class SelfUpdaterV2L0
{
private Mock<IRunnerServer> _runnerServer;
private Mock<ITerminal> _term;
private Mock<IConfigurationStore> _configStore;
private Mock<IJobDispatcher> _jobDispatcher;
private AgentRefreshMessage _refreshMessage = new(1, "2.999.0");

#if !OS_WINDOWS
private string _packageUrl = null;
#else
private string _packageUrl = null;
#endif
public SelfUpdaterV2L0()
{
_runnerServer = new Mock<IRunnerServer>();
_term = new Mock<ITerminal>();
_configStore = new Mock<IConfigurationStore>();
_jobDispatcher = new Mock<IJobDispatcher>();
_configStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1, AgentId = 1 });

Environment.SetEnvironmentVariable("_GITHUB_ACTION_EXECUTE_UPDATE_SCRIPT", "1");
}

private async Task FetchLatestRunner()
{
var latestVersion = "";
var httpClientHandler = new HttpClientHandler();
httpClientHandler.AllowAutoRedirect = false;
using (var client = new HttpClient(httpClientHandler))
{
var response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Get, "https://github.com/actions/runner/releases/latest"));
if (response.StatusCode == System.Net.HttpStatusCode.Redirect)
{
var redirectUrl = response.Headers.Location.ToString();
Regex regex = new(@"/runner/releases/tag/v(?<version>\d+\.\d+\.\d+)");
var match = regex.Match(redirectUrl);
if (match.Success)
{
latestVersion = match.Groups["version"].Value;

#if !OS_WINDOWS
_packageUrl = $"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}.tar.gz";
#else
_packageUrl = $"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}.zip";
#endif
}
else
{
throw new Exception("The latest runner version could not be determined so a download URL could not be generated for it. Please check the location header of the redirect response of 'https://github.com/actions/runner/releases/latest'");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);

//Arrange
var updater = new Runner.Listener.SelfUpdaterV2();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

try
{
var message = new RunnerRefreshMessage()
{
TargetVersion = "2.999.0",
OS = BuildConstants.RunnerPackage.PackageName,
DownloadUrl = _packageUrl

};

var result = await updater.SelfUpdate(message, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_DownloadRetry()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);

//Arrange
var updater = new Runner.Listener.SelfUpdaterV2();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

var message = new RunnerRefreshMessage()
{
TargetVersion = "2.999.0",
OS = BuildConstants.RunnerPackage.PackageName,
DownloadUrl = "https://github.com/actions/runner/notexists"
};

var ex = await Assert.ThrowsAsync<TaskCanceledException>(() => updater.SelfUpdate(message, _jobDispatcher.Object, true, hc.RunnerShutdownToken));
Assert.Contains($"failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts", ex.Message);
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_ValidateHash()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);

//Arrange
var updater = new Runner.Listener.SelfUpdaterV2();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

var message = new RunnerRefreshMessage()
{
TargetVersion = "2.999.0",
OS = BuildConstants.RunnerPackage.PackageName,
DownloadUrl = _packageUrl,
SHA256Checksum = "badhash"
};

var ex = await Assert.ThrowsAsync<Exception>(() => updater.SelfUpdate(message, _jobDispatcher.Object, true, hc.RunnerShutdownToken));
Assert.Contains("did not match expected Runner Hash", ex.Message);
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
}
}
#endif
277
src/Test/L0/PackagesTrimL0.cs
Normal file
@@ -0,0 +1,277 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using Xunit;

namespace GitHub.Runner.Common.Tests
{
public sealed class PackagesTrimL0
{

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_NewFilesCrossAll()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets");
var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets");
string layoutBin = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
var newFiles = new List<string>();
if (Directory.Exists(layoutBin))
{
var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile);
var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile);
foreach (var file in Directory.GetFiles(layoutBin, "*", SearchOption.AllDirectories))
{
if (!coreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x)) &&
!runtimeAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x)))
{
newFiles.Add(file);
}
}

if (newFiles.Count > 0)
{
Assert.True(false, $"Found new files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages.");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_OverlapFiles()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets");
var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets");

var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile);
var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile);

foreach (var line in coreAssets)
{
if (runtimeAssets.Contains(line, StringComparer.OrdinalIgnoreCase))
{
Assert.True(false, $"'Misc/runnercoreassets' and 'Misc/runnerdotnetruntimeassets' should not overlap.");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_NewRunnerCoreAssets()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets");
var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile);

string layoutBin = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
var newFiles = new List<string>();
if (Directory.Exists(layoutBin))
{
var binDirs = Directory.GetDirectories(TestUtil.GetSrcPath(), "net6.0", SearchOption.AllDirectories);
foreach (var binDir in binDirs)
{
if (binDir.Contains("Test") || binDir.Contains("obj"))
{
continue;
}

Directory.GetFiles(binDir, "*", SearchOption.TopDirectoryOnly).ToList().ForEach(x =>
{
if (!x.Contains("runtimeconfig.dev.json"))
{
if (!coreAssets.Any(y => x.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(y)))
{
newFiles.Add(x);
}
}
});
}

if (newFiles.Count > 0)
{
Assert.True(false, $"Found new files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages. You might need to update `Misc/runnercoreassets`.");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_NewDotnetRuntimeAssets()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets");
var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile);

string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runnerdotnetruntimeassets");
var newFiles = new List<string>();
if (File.Exists(layoutTrimsRuntimeAssets))
{
var runtimeAssetsCurrent = await File.ReadAllLinesAsync(layoutTrimsRuntimeAssets);
foreach (var runtimeFile in runtimeAssetsCurrent)
{
if (runtimeAssets.Any(x => runtimeFile.EndsWith(x, StringComparison.OrdinalIgnoreCase)))
{
continue;
}
else
{
newFiles.Add(runtimeFile);
}
}

if (newFiles.Count > 0)
{
Assert.True(false, $"Found new dotnet runtime files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages. You might need to update `Misc/runnerdotnetruntimeassets`.");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckDotnetRuntimeHash()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(dotnetRuntimeHashFile)}");
string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runtime");

string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");

#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node16\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node16/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);

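// Note (added comment): the hashFiles helper reports its result on stderr wrapped in __OUTPUT__ markers;
// the handler below strips the 10-character marker from each end to recover the raw hash value.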
p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};

p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};

var env = new Dictionary<string, string>
{
["patterns"] = "**"
};

int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsRuntimeAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);

Assert.True(string.Equals(hashResult, File.ReadAllText(dotnetRuntimeHashFile).Trim()), $"Hash mismatch for dotnet runtime. You might need to update `Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently.");
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckExternalsHash()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(externalsHashFile)}");

string layoutTrimsExternalsAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/externals");

string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");

#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node16\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node16/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);

p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};

p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};

var env = new Dictionary<string, string>
{
["patterns"] = "**"
};

int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsExternalsAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);

Assert.True(string.Equals(hashResult, File.ReadAllText(externalsHashFile).Trim()), $"Hash mismatch for externals. You might need to update `Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently.");
}
}
}
}
@@ -212,210 +212,5 @@ namespace GitHub.Runner.Common.Tests.Util
File.Delete(brokenSymlink);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void UseWhich2FindGit()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();

// Act.
string gitPath = WhichUtil.Which2("git", trace: trace);

trace.Info($"Which(\"git\") returns: {gitPath ?? string.Empty}");

// Assert.
Assert.True(!string.IsNullOrEmpty(gitPath) && File.Exists(gitPath), $"Unable to find Git through: {nameof(WhichUtil.Which)}");
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2ReturnsNullWhenNotFound()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();

// Act.
string nosuch = WhichUtil.Which2("no-such-file-cf7e351f", trace: trace);

trace.Info($"result: {nosuch ?? string.Empty}");

// Assert.
Assert.True(string.IsNullOrEmpty(nosuch), "Path should not be resolved");
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2ThrowsWhenRequireAndNotFound()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();

// Act.
try
{
WhichUtil.Which2("no-such-file-cf7e351f", require: true, trace: trace);
throw new Exception("which should have thrown");
}
catch (FileNotFoundException ex)
{
Assert.Equal("no-such-file-cf7e351f", ex.FileName);
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2HandleFullyQualifiedPath()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();

// Act.
var gitPath = WhichUtil.Which2("git", require: true, trace: trace);
var gitPath2 = WhichUtil.Which2(gitPath, require: true, trace: trace);

// Assert.
Assert.Equal(gitPath, gitPath2);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2HandlesSymlinkToTargetFullPath()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}.exe";
string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}.exe";
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}";
string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}";
#endif

Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);


using (File.Create(target))
{
File.CreateSymbolicLink(symlink, target);

// Act.
var result = WhichUtil.Which2(symlinkName, require: true, trace: trace);

// Assert
Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find symlink through: {nameof(WhichUtil.Which)}");

}


// Cleanup
File.Delete(symlink);
File.Delete(target);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);

}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2HandlesSymlinkToTargetRelativePath()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}.exe";
string targetName = $"target-{Guid.NewGuid()}.exe";
string target = Path.GetTempPath() + targetName;
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}";
string targetName = $"target-{Guid.NewGuid()}";
string target = Path.GetTempPath() + targetName;
#endif
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);


using (File.Create(target))
{
File.CreateSymbolicLink(symlink, targetName);

// Act.
var result = WhichUtil.Which2(symlinkName, require: true, trace: trace);

// Assert
Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find {symlinkName} through: {nameof(WhichUtil.Which)}");
}

// Cleanup
File.Delete(symlink);
File.Delete(target);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);

}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2ThrowsWhenSymlinkBroken()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);

#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}";
string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}.exe";
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}";
string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}";
#endif


string target = "no-such-file-cf7e351f";
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);

File.CreateSymbolicLink(brokenSymlink, target);

// Act.
var exception = Assert.Throws<FileNotFoundException>(() => WhichUtil.Which2(brokenSymlinkName, require: true, trace: trace));

// Assert
Assert.Equal(brokenSymlinkName, exception.FileName);

// Cleanup
File.Delete(brokenSymlink);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
}
}

@@ -293,118 +293,6 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async void PrepareActions_DownloadActionFromGraph_UseCache()
{
try
{
//Arrange
Setup();
Directory.CreateDirectory(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache"));
Directory.CreateDirectory(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache", "actions_download-artifact"));
Directory.CreateDirectory(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact"));
Environment.SetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory, Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache"));

const string Content = @"
# Container action
name: '1ae80bcb-c1df-4362-bdaa-54f729c60281'
description: 'Greet the world and record the time'
author: 'GitHub'
inputs:
greeting: # id of input
description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout'
required: true
default: 'Hello'
entryPoint: # id of input
description: 'optional docker entrypoint overwrite.'
required: false
outputs:
time: # id of output
description: 'The time we did the greeting'
icon: 'hello.svg' # vector art to display in the GitHub Marketplace
color: 'green' # optional, decorates the entry in the GitHub Marketplace
runs:
using: 'node12'
main: 'task.js'
";
await File.WriteAllTextAsync(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact", "action.yml"), Content);

#if OS_WINDOWS
ZipFile.CreateFromDirectory(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact"), Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache", "actions_download-artifact", "master-sha.zip"), CompressionLevel.Fastest, true);
#else
string tar = WhichUtil.Which("tar", require: true, trace: _hc.GetTrace());

// tar -xzf
using (var processInvoker = new ProcessInvokerWrapper())
{
processInvoker.Initialize(_hc);
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
_hc.GetTrace().Info(args.Data);
}
});

processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
_hc.GetTrace().Error(args.Data);
}
});

string cwd = Path.GetDirectoryName(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact"));
string inputDirectory = Path.GetFileName(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "actions-download-artifact"));
string archiveFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "action_cache", "actions_download-artifact", "master-sha.tar.gz");
int exitCode = await processInvoker.ExecuteAsync(_hc.GetDirectory(WellKnownDirectory.Bin), tar, $"-czf \"{archiveFile}\" -C \"{cwd}\" \"{inputDirectory}\"", null, CancellationToken.None);
if (exitCode != 0)
{
throw new NotSupportedException($"Can't use 'tar -czf' to create archive file: {archiveFile}. return code: {exitCode}.");
}
}
#endif
var actionId = Guid.NewGuid();
var actions = new List<Pipelines.ActionStep>
{
new Pipelines.ActionStep()
{
Name = "action",
Id = actionId,
Reference = new Pipelines.RepositoryPathReference()
{
Name = "actions/download-artifact",
Ref = "master",
RepositoryType = "GitHub"
}
}
};

_ec.Object.Global.Variables.Set("DistributedTask.UseActionArchiveCache", bool.TrueString);

//Act
await _actionManager.PrepareActionsAsync(_ec.Object, actions);

//Assert
var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/download-artifact", "master.completed");
Assert.True(File.Exists(watermarkFile));

var actionYamlFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/download-artifact", "master", "action.yml");
Assert.True(File.Exists(actionYamlFile));

_hc.GetTrace().Info(File.ReadAllText(actionYamlFile));

Assert.Contains("1ae80bcb-c1df-4362-bdaa-54f729c60281", File.ReadAllText(actionYamlFile));
}
finally
{
Environment.SetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory, null);
Teardown();
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
@@ -2384,7 +2272,6 @@ runs:
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
_ec.Object.Global.FileTable = new List<String>();
_ec.Object.Global.Plan = new TaskOrchestrationPlanReference();
_ec.Object.Global.JobTelemetry = new List<JobTelemetry>();
_ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { _hc.GetTrace().Info($"[{tag}]{message}"); });
_ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<ExecutionContextLogOptions>())).Callback((Issue issue, ExecutionContextLogOptions logOptions) => { _hc.GetTrace().Info($"[{issue.Type}]{logOptions.LogMessageOverride ?? issue.Message}"); });
_ec.Setup(x => x.GetGitHubContext("workspace")).Returns(Path.Combine(_workFolder, "actions", "actions"));
@@ -2407,8 +2294,6 @@ runs:
{
NameWithOwner = action.NameWithOwner,
Ref = action.Ref,
ResolvedNameWithOwner = action.NameWithOwner,
ResolvedSha = $"{action.Ref}-sha",
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
};
@@ -2428,8 +2313,6 @@ runs:
{
NameWithOwner = action.NameWithOwner,
Ref = action.Ref,
ResolvedNameWithOwner = action.NameWithOwner,
ResolvedSha = $"{action.Ref}-sha",
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
};

111
src/dev.sh
@@ -14,10 +14,15 @@ DEV_TARGET_RUNTIME=$3

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
LAYOUT_DIR="$SCRIPT_DIR/../_layout"
LAYOUT_TRIMS_DIR="$SCRIPT_DIR/../_layout_trims"
LAYOUT_TRIM_EXTERNALS_DIR="$LAYOUT_TRIMS_DIR/trim_externals"
LAYOUT_TRIM_RUNTIME_DIR="$LAYOUT_TRIMS_DIR/trim_runtime"
LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR="$LAYOUT_TRIMS_DIR/trim_runtime_externals"
DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x"
PACKAGE_DIR="$SCRIPT_DIR/../_package"
PACKAGE_TRIMS_DIR="$SCRIPT_DIR/../_package_trims"
DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
DOTNETSDK_VERSION="6.0.418"
DOTNETSDK_VERSION="6.0.414"
DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"
RUNNER_VERSION=$(cat runnerversion)

@@ -143,6 +148,48 @@ function layout ()

heading "Setup externals folder for $RUNTIME_ID runner's layout"
bash ./Misc/externals.sh $RUNTIME_ID || checkRC externals.sh

heading "Create layout (Trimmed) ..."

rm -Rf "$LAYOUT_TRIMS_DIR"
mkdir -p "$LAYOUT_TRIMS_DIR"
mkdir -p "$LAYOUT_TRIMS_DIR/runtime"
cp -r "$LAYOUT_DIR/bin/." "$LAYOUT_TRIMS_DIR/runtime"
mkdir -p "$LAYOUT_TRIMS_DIR/externals"
cp -r "$LAYOUT_DIR/externals/." "$LAYOUT_TRIMS_DIR/externals"

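# Note (added comment): the block below deletes the runner's own assemblies (listed in Misc/runnercoreassets)
# from the copied bin folder; whatever remains is the .NET runtime payload, and that file list is
# recorded in runnerdotnetruntimeassets for the trimmed-package tests.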
pushd "$LAYOUT_TRIMS_DIR/runtime" > /dev/null
if [[ ("$CURRENT_PLATFORM" == "windows") ]]; then
sed -i 's/\n$/\r\n/' "$SCRIPT_DIR/Misc/runnercoreassets"
fi

cat "$SCRIPT_DIR/Misc/runnercoreassets" | xargs rm -f
find . -empty -type d -delete
find . -type f > "$LAYOUT_TRIMS_DIR/runnerdotnetruntimeassets"
popd > /dev/null

heading "Create layout with externals trimmed ..."
mkdir -p "$LAYOUT_TRIM_EXTERNALS_DIR"
cp -r "$LAYOUT_DIR/." "$LAYOUT_TRIM_EXTERNALS_DIR/"
rm -Rf "$LAYOUT_TRIM_EXTERNALS_DIR/externals"
echo "Created... $LAYOUT_TRIM_EXTERNALS_DIR"

heading "Create layout with dotnet runtime trimmed ..."
mkdir -p "$LAYOUT_TRIM_RUNTIME_DIR"
cp -r "$LAYOUT_DIR/." "$LAYOUT_TRIM_RUNTIME_DIR/"
pushd "$LAYOUT_TRIM_RUNTIME_DIR/bin" > /dev/null
cat "$LAYOUT_TRIMS_DIR/runnerdotnetruntimeassets" | xargs rm -f
echo "Created... $LAYOUT_TRIM_RUNTIME_DIR"
popd > /dev/null

heading "Create layout with externals and dotnet runtime trimmed ..."
mkdir -p "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR"
cp -r "$LAYOUT_DIR/." "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR/"
rm -Rf "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR/externals"
pushd "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR/bin" > /dev/null
cat "$LAYOUT_TRIMS_DIR/runnerdotnetruntimeassets" | xargs rm -f
echo "Created... $LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR"
popd > /dev/null
}

function runtest ()
@@ -179,7 +226,9 @@ function package ()
find "${LAYOUT_DIR}/bin" -type f -name '*.pdb' -delete

mkdir -p "$PACKAGE_DIR"
mkdir -p "$PACKAGE_TRIMS_DIR"
rm -Rf "${PACKAGE_DIR:?}"/*
rm -Rf "${PACKAGE_TRIMS_DIR:?}"/*

pushd "$PACKAGE_DIR" > /dev/null

@@ -197,6 +246,66 @@ function package ()
fi

popd > /dev/null

runner_trim_externals_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}-noexternals"
heading "Packaging ${runner_trim_externals_pkg_name} (Trimmed)"

PACKAGE_TRIM_EXTERNALS_DIR="$PACKAGE_TRIMS_DIR/trim_externals"
mkdir -p "$PACKAGE_TRIM_EXTERNALS_DIR"
pushd "$PACKAGE_TRIM_EXTERNALS_DIR" > /dev/null
if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then
tar_name="${runner_trim_externals_pkg_name}.tar.gz"
echo "Creating $tar_name in ${LAYOUT_TRIM_EXTERNALS_DIR}"
tar -czf "${tar_name}" -C "${LAYOUT_TRIM_EXTERNALS_DIR}" .
elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then
zip_name="${runner_trim_externals_pkg_name}.zip"
echo "Convert ${LAYOUT_TRIM_EXTERNALS_DIR} to Windows style path"
window_path=${LAYOUT_TRIM_EXTERNALS_DIR:1}
window_path=${window_path:0:1}:${window_path:1}
echo "Creating $zip_name in ${window_path}"
$POWERSHELL -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")"
fi
popd > /dev/null

runner_trim_runtime_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}-noruntime"
heading "Packaging ${runner_trim_runtime_pkg_name} (Trimmed)"

PACKAGE_TRIM_RUNTIME_DIR="$PACKAGE_TRIMS_DIR/trim_runtime"
mkdir -p "$PACKAGE_TRIM_RUNTIME_DIR"
pushd "$PACKAGE_TRIM_RUNTIME_DIR" > /dev/null
if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then
tar_name="${runner_trim_runtime_pkg_name}.tar.gz"
echo "Creating $tar_name in ${LAYOUT_TRIM_RUNTIME_DIR}"
tar -czf "${tar_name}" -C "${LAYOUT_TRIM_RUNTIME_DIR}" .
elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then
zip_name="${runner_trim_runtime_pkg_name}.zip"
echo "Convert ${LAYOUT_TRIM_RUNTIME_DIR} to Windows style path"
window_path=${LAYOUT_TRIM_RUNTIME_DIR:1}
window_path=${window_path:0:1}:${window_path:1}
echo "Creating $zip_name in ${window_path}"
$POWERSHELL -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")"
fi
popd > /dev/null

runner_trim_runtime_externals_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}-noruntime-noexternals"
heading "Packaging ${runner_trim_runtime_externals_pkg_name} (Trimmed)"

PACKAGE_TRIM_RUNTIME_EXTERNALS_DIR="$PACKAGE_TRIMS_DIR/trim_runtime_externals"
mkdir -p "$PACKAGE_TRIM_RUNTIME_EXTERNALS_DIR"
pushd "$PACKAGE_TRIM_RUNTIME_EXTERNALS_DIR" > /dev/null
if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then
tar_name="${runner_trim_runtime_externals_pkg_name}.tar.gz"
echo "Creating $tar_name in ${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR}"
tar -czf "${tar_name}" -C "${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR}" .
elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then
zip_name="${runner_trim_runtime_externals_pkg_name}.zip"
echo "Convert ${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR} to Windows style path"
window_path=${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR:1}
window_path=${window_path:0:1}:${window_path:1}
echo "Creating $zip_name in ${window_path}"
$POWERSHELL -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")"
fi
popd > /dev/null
}

if [[ (! -d "${DOTNETSDK_INSTALLDIR}") || (! -e "${DOTNETSDK_INSTALLDIR}/.${DOTNETSDK_VERSION}") || (! -e "${DOTNETSDK_INSTALLDIR}/dotnet") ]]; then

@@ -1,5 +1,5 @@
{
"sdk": {
"version": "6.0.418"
"version": "6.0.414"
}
}

@@ -1 +1 @@
2.312.0
2.309.0