Mirror of https://github.com/actions/runner.git (synced 2025-12-10 12:21:58 +00:00)

Compare commits: add-mask-b ... v2.315.0 (56 commits)
| Author | SHA1 | Date |
|---|---|---|
| | a52c53955c | |
| | 8ebf298bcd | |
| | 4b85145661 | |
| | bc8b6e0152 | |
| | 82e01c6173 | |
| | 93bc1cd918 | |
| | 692d910868 | |
| | 2c8c941622 | |
| | 86d6211c75 | |
| | aa90563cae | |
| | 4cb3cb2962 | |
| | d7777fd632 | |
| | d8bce88c4f | |
| | 601d3de3f3 | |
| | 034c51cd0b | |
| | d296014f99 | |
| | 3449d5fa52 | |
| | 6603bfb74c | |
| | b19b9462d8 | |
| | 3db5c90cc4 | |
| | 927b26a364 | |
| | 72559572f6 | |
| | 31318d81ba | |
| | 1d47bfa6c7 | |
| | 651ea42e00 | |
| | bcc665a7a1 | |
| | cd812f0395 | |
| | fa874cf314 | |
| | bf0e76631b | |
| | 1d82031a2c | |
| | d1a619ff09 | |
| | 11680fc78f | |
| | 3e5433ec86 | |
| | b647b890c5 | |
| | 894c50073a | |
| | 5268d74ade | |
| | 7414e08fbd | |
| | dcb790f780 | |
| | b7ab810945 | |
| | 7310ba0a08 | |
| | e842959e3e | |
| | 9f19310b5b | |
| | 84220a21d1 | |
| | 8e0cd36cd8 | |
| | f1f18f67e1 | |
| | ac39c4bd0a | |
| | 3f3d9b0d99 | |
| | af485fb660 | |
| | 9e3e57ff90 | |
| | ac89b31d2f | |
| | 65201ff6be | |
| | 661b261959 | |
| | 8a25302ba3 | |
| | c7d65c42d6 | |
| | a9bae6f37a | |
| | 3136ce3a71 | |
@@ -4,10 +4,13 @@
  "features": {
    "ghcr.io/devcontainers/features/docker-in-docker:1": {},
    "ghcr.io/devcontainers/features/dotnet": {
      "version": "6.0.415"
      "version": "6.0.420"
    },
    "ghcr.io/devcontainers/features/node:1": {
      "version": "16"
    },
    "ghcr.io/devcontainers/features/sshd:1": {
      "version": "latest"
    }
  },
  "customizations": {
.github/workflows/build.yml (vendored): 26 changed lines
@@ -58,29 +58,6 @@ jobs:
          ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
        working-directory: src

      # Check runtime/externals hash
      - name: Compute/Compare runtime and externals Hash
        shell: bash
        run: |
          echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
          echo "Current Externals hash result: $EXTERNALS_HASH"

          NeedUpdate=0
          if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
            NeedUpdate=1
          fi

          if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
            NeedUpdate=1
          fi

          exit $NeedUpdate
        env:
          DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
          EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}

      # Run tests
      - name: L0
        run: |
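The step shown above compares the `hashFiles()` results that GitHub computes for `_layout_trims` against hash files committed under `src/Misc/contentHash/`. A minimal local sketch of the same compare-and-flag pattern (the script name is made up, and the computed hash is assumed to be supplied by the caller, since `hashFiles()` only exists inside workflow expressions):

```bash
#!/usr/bin/env bash
# Hypothetical helper: compare a supplied externals hash against the committed one.
# Usage: ./check-externals-hash.sh <runtime> <computed-hash>
set -euo pipefail

runtime="$1"      # e.g. linux-x64
computed="$2"     # value reported by the workflow's hashFiles() expression
committed="$(cat "./src/Misc/contentHash/externals/${runtime}")"

if [ "$computed" != "$committed" ]; then
  echo "Hash mismatch: update ./src/Misc/contentHash/externals/${runtime} to ${computed}"
  exit 1            # non-zero exit flags the mismatch, mirroring `exit $NeedUpdate` above
fi
echo "Externals hash for ${runtime} is up to date."
```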
@@ -103,6 +80,3 @@ jobs:
          name: runner-package-${{ matrix.runtime }}
          path: |
            _package
            _package_trims/trim_externals
            _package_trims/trim_runtime
            _package_trims/trim_runtime_externals
.github/workflows/close-bugs-bot.yml (vendored): 1 changed line

@@ -15,4 +15,3 @@ jobs:
          only-labels: "actions-bug"
          days-before-stale: 0
          days-before-close: 1
          close-issue-reason: "completed"
.github/workflows/close-features-bot.yml (vendored): 1 changed line

@@ -15,4 +15,3 @@ jobs:
          only-labels: "actions-feature"
          days-before-stale: 0
          days-before-close: 1
          close-issue-reason: "completed"
.github/workflows/dotnet-upgrade.yml (vendored): 213 changed lines

@@ -84,221 +84,20 @@ jobs:
          git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
          git push --set-upstream origin $branch_name

  build-hashes:
    if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
  create-pr:
    needs: [dotnet-update]
    outputs:
      # pass outputs from this job to create-pr for use
      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
      NEEDS_HASH_UPDATE: ${{ steps.compute-hash.outputs.NEED_UPDATE }}
    strategy:
      fail-fast: false
      matrix:
        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
        include:
          - runtime: linux-x64
            os: ubuntu-latest
            devScript: ./dev.sh

          - runtime: linux-arm64
            os: ubuntu-latest
            devScript: ./dev.sh

          - runtime: linux-arm
            os: ubuntu-latest
            devScript: ./dev.sh

          - runtime: osx-x64
            os: macOS-latest
            devScript: ./dev.sh

          - runtime: osx-arm64
            os: macOS-latest
            devScript: ./dev.sh

          - runtime: win-x64
            os: windows-2019
            devScript: ./dev

          - runtime: win-arm64
            os: windows-latest
            devScript: ./dev

    runs-on: ${{ matrix.os }}
    if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v3
        with:
          ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}

      # Build runner layout
      - name: Build & Layout Release
        run: |
          ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
        working-directory: src

      # Check runtime/externals hash
      - name: Compute/Compare runtime and externals Hash
        id: compute-hash
        continue-on-error: true
        shell: bash
        run: |
          echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
          echo "Current Externals hash result: $EXTERNALS_HASH"

          NeedUpdate=0
          if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH

            echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT
            NeedUpdate=1
          fi

          if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH

            echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT
            NeedUpdate=1
          fi

          echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT
        env:
          DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
          EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
      - name: update hash
        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
        shell: bash
        run: |
          ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
          DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}

          if [ -n "$ExternalHash" ]; then
            echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }}
          fi

          if [ -n "$DotNetRuntimeHash" ]; then
            echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
          fi
      - name: cache updated hashes
        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
        uses: actions/cache/save@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/${{ matrix.runtime }}
            ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
          key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}

  hash-update:
    needs: [build-hashes]
    if: ${{ needs.build-hashes.outputs.NEEDS_HASH_UPDATE == 1 }}
    outputs:
      # pass outputs from this job to create-pr for use
      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.build-hashes.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      - name: Restore cached hashes - linux-x64
        id: cache-restore-linux-x64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/linux-x64
            ./src/Misc/contentHash/dotnetRuntime/linux-x64
          key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - linux-arm64
        id: cache-restore-linux-arm64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/linux-arm64
            ./src/Misc/contentHash/dotnetRuntime/linux-arm64
          key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - linux-arm
        id: cache-restore-linux-arm
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/linux-arm
            ./src/Misc/contentHash/dotnetRuntime/linux-arm
          key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - osx-x64
        id: cache-restore-osx-x64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/osx-x64
            ./src/Misc/contentHash/dotnetRuntime/osx-x64
          key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - osx-arm64
        id: cache-restore-osx-arm64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/osx-arm64
            ./src/Misc/contentHash/dotnetRuntime/osx-arm64
          key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - win-x64
        id: cache-restore-win-x64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/win-x64
            ./src/Misc/contentHash/dotnetRuntime/win-x64
          key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Restore cached hashes - win-arm64
        id: cache-restore-win-arm64
        uses: actions/cache/restore@v3
        with:
          enableCrossOsArchive: true
          path: |
            ./src/Misc/contentHash/externals/win-arm64
            ./src/Misc/contentHash/dotnetRuntime/win-arm64
          key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
      - name: Fetch cached computed hashes
        if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' ||
            steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' ||
            steps.cache-restore-linux-arm.outputs.cache-hit == 'true' ||
            steps.cache-restore-win-x64.outputs.cache-hit == 'true' ||
            steps.cache-restore-win-arm64.outputs.cache-hit == 'true' ||
            steps.cache-restore-osx-x64.outputs.cache-hit == 'true' ||
            steps.cache-restore-osx-arm64.outputs.cache-hit == 'true'
        shell: bash
        run: |
          Environments=( "linux-x64" "linux-arm64" "linux-arm" "win-x64" "win-arm64" "osx-x64" "osx-arm64" )

          git config --global user.name "github-actions[bot]"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git commit -a -m "Update computed hashes"
          git push --set-upstream origin feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}

  create-pr:
    needs: [hash-update]
    outputs:
      # pass outputs from this job to run-tests for use
      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
      - name: Create Pull Request
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
          https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
          gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
          https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
.github/workflows/release.yml (vendored): 419 changed lines

@@ -53,27 +53,6 @@ jobs:
      win-arm64-sha: ${{ steps.sha.outputs.win-arm64-sha256 }}
      osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
      osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }}
      linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
      linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
      linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
      win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
      win-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-arm64-sha256 }}
      osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
      osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }}
      linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
      linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
      linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
      win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
      win-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-arm64-sha256 }}
      osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
      osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }}
      linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
      linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
      linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
      win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
      win-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-arm64-sha256 }}
      osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
      osx-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }}
    strategy:
      matrix:
        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64, win-arm64 ]

@@ -136,76 +115,6 @@ jobs:
        id: sha
        name: Compute SHA256
        working-directory: _package
      - run: |
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
          echo "sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha_noexternals
        name: Compute SHA256
        working-directory: _package_trims/trim_externals
      - run: |
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
          echo "sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha_noruntime
        name: Compute SHA256
        working-directory: _package_trims/trim_runtime
      - run: |
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
          echo "sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha_noruntime_noexternals
        name: Compute SHA256
        working-directory: _package_trims/trim_runtime_externals

      - name: Create trimmedpackages.json for ${{ matrix.runtime }}
        if: matrix.runtime == 'win-x64' || matrix.runtime == 'win-arm64'
        uses: actions/github-script@0.3.0
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
            const core = require('@actions/core')
            const fs = require('fs');
            const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
            var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
            trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
            trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')

            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
            trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')

            console.log(trimmedPackages)
            fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)

      - name: Create trimmedpackages.json for ${{ matrix.runtime }}
        if: matrix.runtime != 'win-x64' && matrix.runtime != 'win-arm64'
        uses: actions/github-script@0.3.0
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
            const core = require('@actions/core')
            const fs = require('fs');
            const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
            var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
            trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
            trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')

            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
            trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
            trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')

            console.log(trimmedPackages)
            fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)

      # Upload runner package tar.gz/zip as artifact.
      # Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
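Both `actions/github-script` steps above perform the same placeholder substitution and differ only in the template file (`trimmedpackages_zip.json` for Windows, `trimmedpackages_targz.json` otherwise). A rough shell equivalent of that substitution, assuming the runtime name and the hash values are supplied as environment variables by the caller, could look like:

```bash
#!/usr/bin/env bash
# Sketch of the placeholder substitution done by the github-script steps.
# RUNTIME, RUNTIME_HASH, EXTERNALS_HASH, NO_RUNTIME_HASH, NO_EXTERNALS_HASH and
# NO_RUNTIME_EXTERNALS_HASH are assumed to be set by the caller.
set -euo pipefail

runner_version="$(tr -d '\n' < src/runnerversion)"

sed -e "s/<RUNNER_VERSION>/${runner_version}/g" \
    -e "s/<RUNNER_PLATFORM>/${RUNTIME}/g" \
    -e "s/<RUNTIME_HASH>/${RUNTIME_HASH}/g" \
    -e "s/<EXTERNALS_HASH>/${EXTERNALS_HASH}/g" \
    -e "s/<NO_RUNTIME_EXTERNALS_HASH>/${NO_RUNTIME_EXTERNALS_HASH}/g" \
    -e "s/<NO_RUNTIME_HASH>/${NO_RUNTIME_HASH}/g" \
    -e "s/<NO_EXTERNALS_HASH>/${NO_EXTERNALS_HASH}/g" \
    src/Misc/trimmedpackages_targz.json > "${RUNTIME}-trimmedpackages.json"
```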
@@ -216,10 +125,6 @@ jobs:
          name: runner-packages
          path: |
            _package
            _package_trims/trim_externals
            _package_trims/trim_runtime
            _package_trims/trim_runtime_externals
            ${{ matrix.runtime }}-trimmedpackages.json

  release:
    needs: build

@@ -253,33 +158,11 @@ jobs:
            releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
            releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
            releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-arm64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
            releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
            console.log(releaseNote)
            core.setOutput('version', runnerVersion);
            core.setOutput('note', releaseNote);

      - name: Validate Packages HASH
        working-directory: _package
        run: |
          ls -l
          echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
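The `Validate Packages HASH` step above pipes an expected `<sha> <file>` pair into `shasum -a 256 -c`. The same check can be run by hand on a downloaded runner package; the version and hash below are placeholders to be copied from the release notes:

```bash
# Verify a downloaded runner package against its published SHA-256 checksum.
# Replace the hash and version with the values from the release page.
expected_sha="<LINUX_X64_SHA>"                                  # placeholder
package="actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz"      # placeholder

echo "${expected_sha}  ${package}" | shasum -a 256 -c
```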
@@ -309,7 +192,7 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
          asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
          asset_content_type: application/octet-stream

@@ -319,7 +202,7 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
          asset_path: ${{ github.workspace }}/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
          asset_content_type: application/octet-stream

@@ -329,7 +212,7 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream

@@ -339,7 +222,7 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream

@@ -349,7 +232,7 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream

@@ -359,7 +242,7 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream

@@ -369,298 +252,10 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream

      # Upload release assets (trim externals)
      - name: Upload Release Asset (win-x64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
          asset_content_type: application/octet-stream

      # Upload release assets (trim externals)
      - name: Upload Release Asset (win-arm64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-x64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-x64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-arm64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm64-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
          asset_content_type: application/octet-stream

      # Upload release assets (trim runtime)
      - name: Upload Release Asset (win-x64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
          asset_content_type: application/octet-stream

      # Upload release assets (trim runtime)
      - name: Upload Release Asset (win-arm64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-x64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-x64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-arm64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm64-noruntime)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
          asset_content_type: application/octet-stream

      # Upload release assets (trim runtime and externals)
      - name: Upload Release Asset (win-x64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
          asset_content_type: application/octet-stream

      # Upload release assets (trim runtime and externals)
      - name: Upload Release Asset (win-arm64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-x64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-x64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-arm64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm64-noruntime-noexternals)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
          asset_content_type: application/octet-stream

      # Upload release assets (trimmedpackages.json)
      - name: Upload Release Asset (win-x64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
          asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      # Upload release assets (trimmedpackages.json)
      - name: Upload Release Asset (win-arm64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/win-arm64-trimmedpackages.json
          asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-x64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
          asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-x64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
          asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (osx-arm64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json
          asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
          asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

      - name: Upload Release Asset (linux-arm64-trimmedpackages.json)
        uses: actions/upload-release-asset@v1.0.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
          asset_content_type: application/octet-stream

  publish-image:
    needs: release
    runs-on: ubuntu-latest
@@ -7,8 +7,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

- For GitHub.com
  - The runner needs to access `https://api.github.com` for downloading actions.
  - The runner needs to access `https://codeload.github.com` for downloading actions tar.gz/zip.
  - The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
  - The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
  - The runner needs to access `https://results-receiver.actions.githubusercontent.com/.../` for reporting progress and uploading logs during a workflow job execution.
---
**NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).

@@ -16,12 +18,15 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

```
curl -v https://api.github.com/zen
curl -v https://codeload.github.com/_ping
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
curl -v https://results-receiver.actions.githubusercontent.com/health
```

- For GitHub Enterprise Server
  - The runner needs to access `https://[hostname]/api/v3` for downloading actions.
  - The runner needs to access `https://codeload.[hostname]/_ping` for downloading actions tar.gz/zip.
  - The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
  - The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.

@@ -29,6 +34,7 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

```
curl -v https://[hostname]/api/v3/zen
curl -v https://codeload.[hostname]/_ping
curl -v https://[hostname]/_services/vstoken/_apis/health
curl -v https://[hostname]/_services/pipelines/_apis/health
```

@@ -44,6 +50,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
- Ping api.github.com or myGHES.com using dotnet
- Make HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, check response headers contains `X-GitHub-Request-Id`
---
- DNS lookup for codeload.github.com or codeload.myGHES.com using dotnet
- Ping codeload.github.com or codeload.myGHES.com using dotnet
- Make HTTP GET to https://codeload.github.com/_ping or https://codeload.myGHES.com/_ping using dotnet, check response headers contains `X-GitHub-Request-Id`
---
- DNS lookup for vstoken.actions.githubusercontent.com using dotnet
- Ping vstoken.actions.githubusercontent.com using dotnet
- Make HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, check response headers contains `x-vss-e2eid`

@@ -52,6 +62,10 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
- Ping pipelines.actions.githubusercontent.com using dotnet
- Make HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
- Make HTTP POST to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
---
- DNS lookup for results-receiver.actions.githubusercontent.com using dotnet
- Ping results-receiver.actions.githubusercontent.com using dotnet
- Make HTTP GET to https://results-receiver.actions.githubusercontent.com/health using dotnet, check response headers contains `X-GitHub-Request-Id`
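The dotnet-based probes listed above amount to making a request and looking for the `X-GitHub-Request-Id` or `x-vss-e2eid` response header. A rough approximation of those header checks from a shell, assuming plain curl is available, is:

```bash
# GET each endpoint, discard the body, dump the received headers, and grep for
# the header the runner's diagnostic expects (case-insensitive).
curl -sS -o /dev/null -D - https://api.github.com | grep -i '^x-github-request-id'
curl -sS -o /dev/null -D - https://codeload.github.com/_ping | grep -i '^x-github-request-id'
curl -sS -o /dev/null -D - https://vstoken.actions.githubusercontent.com/_apis/health | grep -i '^x-vss-e2eid'
curl -sS -o /dev/null -D - https://results-receiver.actions.githubusercontent.com/health | grep -i '^x-github-request-id'
```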
## How to fix the issue?
@@ -42,6 +42,7 @@ If you are having trouble connecting, try these steps:
  - https://api.github.com/
  - https://vstoken.actions.githubusercontent.com/_apis/health
  - https://pipelines.actions.githubusercontent.com/_apis/health
  - https://results-receiver.actions.githubusercontent.com/health
- For GHES/GHAE
  - https://myGHES.com/_services/vstoken/_apis/health
  - https://myGHES.com/_services/pipelines/_apis/health
@@ -5,9 +5,9 @@

## Supported Distributions and Versions

x64
- Red Hat Enterprise Linux 7
- CentOS 7
- Oracle Linux 7
- Red Hat Enterprise Linux 7+
- CentOS 7+
- Oracle Linux 7+
- Fedora 29+
- Debian 9+
- Ubuntu 16.04+
@@ -4,9 +4,9 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build
ARG TARGETOS
ARG TARGETARCH
ARG RUNNER_VERSION
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.4.0
ARG DOCKER_VERSION=24.0.6
ARG BUILDX_VERSION=0.11.2
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.6.0
ARG DOCKER_VERSION=25.0.4
ARG BUILDX_VERSION=0.13.1

RUN apt update -y && apt install curl unzip -y

@@ -37,6 +37,7 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
ENV ImageOS=ubuntu22

RUN apt-get update -y \
    && apt-get install -y --no-install-recommends \
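For reference, a plausible way to exercise this Dockerfile locally is a plain `docker build` run from the directory that contains it; the image tag below is made up, `RUNNER_VERSION` has no default and must be passed (2.315.0 is the release this compare targets), and the other build args simply override the pinned defaults shown above:

```bash
# Build the runner image from the Dockerfile's directory (build context ".").
# TARGETOS/TARGETARCH are filled in automatically when building with BuildKit/buildx.
docker build \
  --build-arg RUNNER_VERSION=2.315.0 \
  --build-arg RUNNER_CONTAINER_HOOKS_VERSION=0.6.0 \
  --build-arg DOCKER_VERSION=25.0.4 \
  --build-arg BUILDX_VERSION=0.13.1 \
  -t actions-runner:local .
```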
@@ -1,23 +1,26 @@
|
||||
## What's Changed
|
||||
* Trim whitespace in `./Misc/contentHash/dotnetRuntime/*` by @TingluoHuang in https://github.com/actions/runner/pull/2915
|
||||
* Send os and arch during long poll by @luketomlinson in https://github.com/actions/runner/pull/2913
|
||||
* Revert "Update default version to node20 (#2844)" by @takost in https://github.com/actions/runner/pull/2918
|
||||
* Fix telemetry publish from JobServerQueue. by @TingluoHuang in https://github.com/actions/runner/pull/2919
|
||||
* Use block blob instead of append blob by @yacaovsnc in https://github.com/actions/runner/pull/2924
|
||||
* Provide detail info on untar failures. by @TingluoHuang in https://github.com/actions/runner/pull/2939
|
||||
* Bump node.js to 20.8.1 by @TingluoHuang in https://github.com/actions/runner/pull/2945
|
||||
* Update dotnet sdk to latest version @6.0.415 by @github-actions in https://github.com/actions/runner/pull/2929
|
||||
* Fix typo in log strings by @rajbos in https://github.com/actions/runner/pull/2695
|
||||
* feat: add support of arm64 arch runners in service creation script by @tuxity in https://github.com/actions/runner/pull/2606
|
||||
* Add `buildx` to images by @ajschmidt8 in https://github.com/actions/runner/pull/2901
|
||||
* fix summaries for actions results by @SrRyan in https://github.com/actions/runner/pull/3174
|
||||
* Bump runner version to match the latest patch release by @TingluoHuang in https://github.com/actions/runner/pull/3175
|
||||
* don't crash listener on getting job exceptions for run-service by @yaananth in https://github.com/actions/runner/pull/3177
|
||||
* Remove -f flag in wait when manually trap signal by @nikola-jokic in https://github.com/actions/runner/pull/3182
|
||||
* consume new pipelines service url in handlers by @patrickcarnahan in https://github.com/actions/runner/pull/3185
|
||||
* Add ability to enforce actions to run on node20 by @takost in https://github.com/actions/runner/pull/3192
|
||||
* Bump hook version to 0.6.0 by @nikola-jokic in https://github.com/actions/runner/pull/3203
|
||||
* Update dotnet sdk to latest version @6.0.420 by @github-actions in https://github.com/actions/runner/pull/3211
|
||||
* Bump docker version and docker buildx version by @nikola-jokic in https://github.com/actions/runner/pull/3208
|
||||
* Handle new non-retryable exception type by @thyeggman in https://github.com/actions/runner/pull/3191
|
||||
* Always Delete Actions Service Session by @luketomlinson in https://github.com/actions/runner/pull/3214
|
||||
|
||||
## New Contributors
|
||||
* @tuxity made their first contribution in https://github.com/actions/runner/pull/2606
|
||||
* @SrRyan made their first contribution in https://github.com/actions/runner/pull/3174
|
||||
* @patrickcarnahan made their first contribution in https://github.com/actions/runner/pull/3185
|
||||
|
||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.310.2...v2.311.0
|
||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.314.1...v2.315.0
|
||||
|
||||
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
||||
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.313.0...v2.314.0
|
||||
|
||||
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
||||
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
||||
See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_
|
||||
|
||||
## Windows x64
|
||||
@@ -119,27 +122,3 @@ The SHA-256 checksums for the packages included in this build are shown below:

- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->

- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-arm64_noexternals --><WIN_ARM64_SHA_NOEXTERNALS><!-- END SHA win-arm64_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noexternals --><OSX_ARM64_SHA_NOEXTERNALS><!-- END SHA osx-arm64_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->

- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-arm64_noruntime --><WIN_ARM64_SHA_NORUNTIME><!-- END SHA win-arm64_noruntime -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-arm64_noruntime --><OSX_ARM64_SHA_NORUNTIME><!-- END SHA osx-arm64_noruntime -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->

- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-arm64_noruntime_noexternals --><WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-arm64_noruntime_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noruntime_noexternals --><OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-arm64_noruntime_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->
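This hunk removes the trimmed-package checksum entries from the release note template; the packages that remain are still published with SHA-256 values so a downloaded archive can be verified before use. Below is a minimal C# sketch of that verification step. It is not runner code: the package path and the published hex digest are supplied by the caller, and the file name in the comment is only an example.

```csharp
using System;
using System.IO;
using System.Security.Cryptography;

class ChecksumCheck
{
    // Compare a downloaded runner package against the SHA-256 value published
    // in the release notes. Both arguments come from the caller.
    static bool Matches(string packagePath, string publishedHex)
    {
        using var sha256 = SHA256.Create();
        using var stream = File.OpenRead(packagePath);
        string actualHex = Convert.ToHexString(sha256.ComputeHash(stream));
        return actualHex.Equals(publishedHex, StringComparison.OrdinalIgnoreCase);
    }

    static void Main(string[] args)
    {
        // e.g. Matches("actions-runner-linux-x64-2.314.0.tar.gz", "<LINUX_X64_SHA>")
        Console.WriteLine(Matches(args[0], args[1]) ? "checksum OK" : "checksum MISMATCH");
    }
}
```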
@@ -1 +1 @@
|
||||
531b31914e525ecb12cc5526415bc70a112ebc818f877347af1a231011f539c5
|
||||
54d95a44d118dba852395991224a6b9c1abe916858c87138656f80c619e85331
|
||||
@@ -1 +1 @@
|
||||
722dd5fa5ecc207fcccf67f6e502d689f2119d8117beff2041618fba17dc66a4
|
||||
68015af17f06a824fa478e62ae7393766ce627fd5599ab916432a14656a19a52
|
||||
@@ -1 +1 @@
|
||||
8ca75c76e15ab9dc7fe49a66c5c74e171e7fabd5d26546fda8931bd11bff30f9
|
||||
a2628119ca419cb54e279103ffae7986cdbd0814d57c73ff0dc74c38be08b9ae
|
||||
@@ -1 +1 @@
|
||||
70496eb1c99b39b3373b5088c95a35ebbaac1098e6c47c8aab94771f3ffbf501
|
||||
de71ca09ead807e1a2ce9df0a5b23eb7690cb71fff51169a77e4c3992be53dda
|
||||
@@ -1 +1 @@
|
||||
4f8d48727d535daabcaec814e0dafb271c10625366c78e7e022ca7477a73023f
|
||||
d009e05e6b26d614d65be736a15d1bd151932121c16a9ff1b986deadecc982b9
|
||||
@@ -1 +1 @@
|
||||
d54d7428f2b9200a0030365a6a4e174e30a1b29b922f8254dffb2924bd09549d
|
||||
f730db39c2305800b4653795360ba9c10c68f384a46b85d808f1f9f0ed3c42e4
|
||||
@@ -1 +1 @@
|
||||
eaa939c45307f46b7003902255b3a2a09287215d710984107667e03ac493eb26
|
||||
a35b5722375490e9473cdcccb5e18b41eba3dbf4344fe31abc9821e21f18ea5a
|
||||
@@ -63,17 +63,16 @@ function acquireExternalTool() {
|
||||
echo "Curl version: $CURL_VERSION"
|
||||
|
||||
# curl -f Fail silently (no output at all) on HTTP errors (H)
|
||||
# -k Allow connections to SSL sites without certs (H)
|
||||
# -S Show error. With -s, make curl show errors when they occur
|
||||
# -L Follow redirects (H)
|
||||
# -o FILE Write to FILE instead of stdout
|
||||
# --retry 3 Retries transient errors 3 times (timeouts, 5xx)
|
||||
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
|
||||
# Curl version is less than 7.71.0, skipping retry-all-errors flag
|
||||
curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
curl -fSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
else
|
||||
# Curl version is 7.71.0 or newer, running curl with --retry-all-errors flag
|
||||
curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
curl -fSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
fi
|
||||
|
||||
# Move the partial file to the download target.
|
||||
|
||||
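Two things change in the download hunk above: curl's `-k` flag (skip TLS certificate verification) is dropped from the externals fetch, and the existing version gate is kept so that `--retry-all-errors` is only passed when the installed curl supports it (the flag first shipped in curl 7.71.0; `sort -V | head -n1` picks the smaller version string). The C# fragment below is only an illustration of that version gate in the language used for the examples in this document; the runner itself does this in the shell script shown above.

```csharp
using System;

class CurlFlagGate
{
    // --retry-all-errors is only understood by curl >= 7.71.0,
    // so older curls fall back to plain --retry behaviour.
    static string RetryFlags(string curlVersion)
    {
        var minimum = new Version(7, 71, 0);
        return Version.Parse(curlVersion) >= minimum
            ? "--retry 3 --retry-all-errors"
            : "--retry 3";
    }

    static void Main()
    {
        Console.WriteLine(RetryFlags("7.68.0")); // --retry 3
        Console.WriteLine(RetryFlags("8.5.0"));  // --retry 3 --retry-all-errors
    }
}
```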
@@ -38,7 +38,7 @@ runWithManualTrap() {
|
||||
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
|
||||
"$DIR"/run-helper.sh $* &
|
||||
PID=$!
|
||||
wait -f $PID
|
||||
wait $PID
|
||||
returnCode=$?
|
||||
if [[ $returnCode -eq 2 ]]; then
|
||||
echo "Restarting runner..."
|
||||
@@ -84,4 +84,4 @@ if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
|
||||
run $*
|
||||
else
|
||||
runWithManualTrap $*
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
actions.runner.plist.template
|
||||
actions.runner.service.template
|
||||
checkScripts/downloadCert.js
|
||||
checkScripts/makeWebRequest.js
|
||||
darwin.svc.sh.template
|
||||
hashFiles/index.js
|
||||
installdependencies.sh
|
||||
macos-run-invoker.js
|
||||
Microsoft.IdentityModel.Logging.dll
|
||||
Microsoft.IdentityModel.Tokens.dll
|
||||
Minimatch.dll
|
||||
Newtonsoft.Json.Bson.dll
|
||||
Newtonsoft.Json.dll
|
||||
Runner.Common.deps.json
|
||||
Runner.Common.dll
|
||||
Runner.Common.pdb
|
||||
Runner.Listener
|
||||
Runner.Listener.deps.json
|
||||
Runner.Listener.dll
|
||||
Runner.Listener.exe
|
||||
Runner.Listener.pdb
|
||||
Runner.Listener.runtimeconfig.json
|
||||
Runner.PluginHost
|
||||
Runner.PluginHost.deps.json
|
||||
Runner.PluginHost.dll
|
||||
Runner.PluginHost.exe
|
||||
Runner.PluginHost.pdb
|
||||
Runner.PluginHost.runtimeconfig.json
|
||||
Runner.Plugins.deps.json
|
||||
Runner.Plugins.dll
|
||||
Runner.Plugins.pdb
|
||||
Runner.Sdk.deps.json
|
||||
Runner.Sdk.dll
|
||||
Runner.Sdk.pdb
|
||||
Runner.Worker
|
||||
Runner.Worker.deps.json
|
||||
Runner.Worker.dll
|
||||
Runner.Worker.exe
|
||||
Runner.Worker.pdb
|
||||
Runner.Worker.runtimeconfig.json
|
||||
RunnerService.exe
|
||||
RunnerService.exe.config
|
||||
RunnerService.js
|
||||
RunnerService.pdb
|
||||
runsvc.sh
|
||||
Sdk.deps.json
|
||||
Sdk.dll
|
||||
Sdk.pdb
|
||||
System.IdentityModel.Tokens.Jwt.dll
|
||||
System.Net.Http.Formatting.dll
|
||||
System.Security.Cryptography.Pkcs.dll
|
||||
System.Security.Cryptography.ProtectedData.dll
|
||||
System.ServiceProcess.ServiceController.dll
|
||||
systemd.svc.sh.template
|
||||
update.cmd.template
|
||||
update.sh.template
|
||||
YamlDotNet.dll
|
||||
@@ -1,270 +0,0 @@
|
||||
api-ms-win-core-console-l1-1-0.dll
|
||||
api-ms-win-core-console-l1-2-0.dll
|
||||
api-ms-win-core-datetime-l1-1-0.dll
|
||||
api-ms-win-core-debug-l1-1-0.dll
|
||||
api-ms-win-core-errorhandling-l1-1-0.dll
|
||||
api-ms-win-core-fibers-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-2-0.dll
|
||||
api-ms-win-core-file-l2-1-0.dll
|
||||
api-ms-win-core-handle-l1-1-0.dll
|
||||
api-ms-win-core-heap-l1-1-0.dll
|
||||
api-ms-win-core-interlocked-l1-1-0.dll
|
||||
api-ms-win-core-libraryloader-l1-1-0.dll
|
||||
api-ms-win-core-localization-l1-2-0.dll
|
||||
api-ms-win-core-memory-l1-1-0.dll
|
||||
api-ms-win-core-namedpipe-l1-1-0.dll
|
||||
api-ms-win-core-processenvironment-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-1.dll
|
||||
api-ms-win-core-profile-l1-1-0.dll
|
||||
api-ms-win-core-rtlsupport-l1-1-0.dll
|
||||
api-ms-win-core-string-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-2-0.dll
|
||||
api-ms-win-core-sysinfo-l1-1-0.dll
|
||||
api-ms-win-core-timezone-l1-1-0.dll
|
||||
api-ms-win-core-util-l1-1-0.dll
|
||||
api-ms-win-crt-conio-l1-1-0.dll
|
||||
api-ms-win-crt-convert-l1-1-0.dll
|
||||
api-ms-win-crt-environment-l1-1-0.dll
|
||||
api-ms-win-crt-filesystem-l1-1-0.dll
|
||||
api-ms-win-crt-heap-l1-1-0.dll
|
||||
api-ms-win-crt-locale-l1-1-0.dll
|
||||
api-ms-win-crt-math-l1-1-0.dll
|
||||
api-ms-win-crt-multibyte-l1-1-0.dll
|
||||
api-ms-win-crt-private-l1-1-0.dll
|
||||
api-ms-win-crt-process-l1-1-0.dll
|
||||
api-ms-win-crt-runtime-l1-1-0.dll
|
||||
api-ms-win-crt-stdio-l1-1-0.dll
|
||||
api-ms-win-crt-string-l1-1-0.dll
|
||||
api-ms-win-crt-time-l1-1-0.dll
|
||||
api-ms-win-crt-utility-l1-1-0.dll
|
||||
clrcompression.dll
|
||||
clretwrc.dll
|
||||
clrjit.dll
|
||||
coreclr.dll
|
||||
createdump
|
||||
createdump.exe
|
||||
dbgshim.dll
|
||||
hostfxr.dll
|
||||
hostpolicy.dll
|
||||
libclrjit.dylib
|
||||
libclrjit.so
|
||||
libcoreclr.dylib
|
||||
libcoreclr.so
|
||||
libcoreclrtraceptprovider.so
|
||||
libdbgshim.dylib
|
||||
libdbgshim.so
|
||||
libhostfxr.dylib
|
||||
libhostfxr.so
|
||||
libhostpolicy.dylib
|
||||
libhostpolicy.so
|
||||
libmscordaccore.dylib
|
||||
libmscordaccore.so
|
||||
libmscordbi.dylib
|
||||
libmscordbi.so
|
||||
Microsoft.CSharp.dll
|
||||
Microsoft.DiaSymReader.Native.amd64.dll
|
||||
Microsoft.DiaSymReader.Native.arm64.dll
|
||||
Microsoft.VisualBasic.Core.dll
|
||||
Microsoft.VisualBasic.dll
|
||||
Microsoft.Win32.Primitives.dll
|
||||
Microsoft.Win32.Registry.dll
|
||||
mscordaccore.dll
|
||||
mscordaccore_amd64_amd64_6.0.522.21309.dll
|
||||
mscordaccore_arm64_arm64_6.0.522.21309.dll
|
||||
mscordaccore_amd64_amd64_6.0.1322.58009.dll
|
||||
mscordaccore_amd64_amd64_6.0.2023.32017.dll
|
||||
mscordaccore_amd64_amd64_6.0.2223.42425.dll
|
||||
mscordaccore_amd64_amd64_6.0.2323.48002.dll
|
||||
mscordbi.dll
|
||||
mscorlib.dll
|
||||
mscorrc.debug.dll
|
||||
mscorrc.dll
|
||||
msquic.dll
|
||||
netstandard.dll
|
||||
SOS_README.md
|
||||
System.AppContext.dll
|
||||
System.Buffers.dll
|
||||
System.Collections.Concurrent.dll
|
||||
System.Collections.dll
|
||||
System.Collections.Immutable.dll
|
||||
System.Collections.NonGeneric.dll
|
||||
System.Collections.Specialized.dll
|
||||
System.ComponentModel.Annotations.dll
|
||||
System.ComponentModel.DataAnnotations.dll
|
||||
System.ComponentModel.dll
|
||||
System.ComponentModel.EventBasedAsync.dll
|
||||
System.ComponentModel.Primitives.dll
|
||||
System.ComponentModel.TypeConverter.dll
|
||||
System.Configuration.dll
|
||||
System.Console.dll
|
||||
System.Core.dll
|
||||
System.Data.Common.dll
|
||||
System.Data.DataSetExtensions.dll
|
||||
System.Data.dll
|
||||
System.Diagnostics.Contracts.dll
|
||||
System.Diagnostics.Debug.dll
|
||||
System.Diagnostics.DiagnosticSource.dll
|
||||
System.Diagnostics.FileVersionInfo.dll
|
||||
System.Diagnostics.Process.dll
|
||||
System.Diagnostics.StackTrace.dll
|
||||
System.Diagnostics.TextWriterTraceListener.dll
|
||||
System.Diagnostics.Tools.dll
|
||||
System.Diagnostics.TraceSource.dll
|
||||
System.Diagnostics.Tracing.dll
|
||||
System.dll
|
||||
System.Drawing.dll
|
||||
System.Drawing.Primitives.dll
|
||||
System.Dynamic.Runtime.dll
|
||||
System.Formats.Asn1.dll
|
||||
System.Globalization.Calendars.dll
|
||||
System.Globalization.dll
|
||||
System.Globalization.Extensions.dll
|
||||
System.Globalization.Native.dylib
|
||||
System.Globalization.Native.so
|
||||
System.IO.Compression.Brotli.dll
|
||||
System.IO.Compression.dll
|
||||
System.IO.Compression.FileSystem.dll
|
||||
System.IO.Compression.Native.a
|
||||
System.IO.Compression.Native.dll
|
||||
System.IO.Compression.Native.dylib
|
||||
System.IO.Compression.Native.so
|
||||
System.IO.Compression.ZipFile.dll
|
||||
System.IO.dll
|
||||
System.IO.FileSystem.AccessControl.dll
|
||||
System.IO.FileSystem.dll
|
||||
System.IO.FileSystem.DriveInfo.dll
|
||||
System.IO.FileSystem.Primitives.dll
|
||||
System.IO.FileSystem.Watcher.dll
|
||||
System.IO.IsolatedStorage.dll
|
||||
System.IO.MemoryMappedFiles.dll
|
||||
System.IO.Pipes.AccessControl.dll
|
||||
System.IO.Pipes.dll
|
||||
System.IO.UnmanagedMemoryStream.dll
|
||||
System.Linq.dll
|
||||
System.Linq.Expressions.dll
|
||||
System.Linq.Parallel.dll
|
||||
System.Linq.Queryable.dll
|
||||
System.Memory.dll
|
||||
System.Native.a
|
||||
System.Native.dylib
|
||||
System.Native.so
|
||||
System.Net.dll
|
||||
System.Net.Http.dll
|
||||
System.Net.Http.Json.dll
|
||||
System.Net.Http.Native.a
|
||||
System.Net.Http.Native.dylib
|
||||
System.Net.Http.Native.so
|
||||
System.Net.HttpListener.dll
|
||||
System.Net.Mail.dll
|
||||
System.Net.NameResolution.dll
|
||||
System.Net.NetworkInformation.dll
|
||||
System.Net.Ping.dll
|
||||
System.Net.Primitives.dll
|
||||
System.Net.Quic.dll
|
||||
System.Net.Requests.dll
|
||||
System.Net.Security.dll
|
||||
System.Net.Security.Native.a
|
||||
System.Net.Security.Native.dylib
|
||||
System.Net.Security.Native.so
|
||||
System.Net.ServicePoint.dll
|
||||
System.Net.Sockets.dll
|
||||
System.Net.WebClient.dll
|
||||
System.Net.WebHeaderCollection.dll
|
||||
System.Net.WebProxy.dll
|
||||
System.Net.WebSockets.Client.dll
|
||||
System.Net.WebSockets.dll
|
||||
System.Numerics.dll
|
||||
System.Numerics.Vectors.dll
|
||||
System.ObjectModel.dll
|
||||
System.Private.CoreLib.dll
|
||||
System.Private.DataContractSerialization.dll
|
||||
System.Private.Uri.dll
|
||||
System.Private.Xml.dll
|
||||
System.Private.Xml.Linq.dll
|
||||
System.Reflection.DispatchProxy.dll
|
||||
System.Reflection.dll
|
||||
System.Reflection.Emit.dll
|
||||
System.Reflection.Emit.ILGeneration.dll
|
||||
System.Reflection.Emit.Lightweight.dll
|
||||
System.Reflection.Extensions.dll
|
||||
System.Reflection.Metadata.dll
|
||||
System.Reflection.Primitives.dll
|
||||
System.Reflection.TypeExtensions.dll
|
||||
System.Resources.Reader.dll
|
||||
System.Resources.ResourceManager.dll
|
||||
System.Resources.Writer.dll
|
||||
System.Runtime.CompilerServices.Unsafe.dll
|
||||
System.Runtime.CompilerServices.VisualC.dll
|
||||
System.Runtime.dll
|
||||
System.Runtime.Extensions.dll
|
||||
System.Runtime.Handles.dll
|
||||
System.Runtime.InteropServices.dll
|
||||
System.Runtime.InteropServices.RuntimeInformation.dll
|
||||
System.Runtime.InteropServices.WindowsRuntime.dll
|
||||
System.Runtime.Intrinsics.dll
|
||||
System.Runtime.Loader.dll
|
||||
System.Runtime.Numerics.dll
|
||||
System.Runtime.Serialization.dll
|
||||
System.Runtime.Serialization.Formatters.dll
|
||||
System.Runtime.Serialization.Json.dll
|
||||
System.Runtime.Serialization.Primitives.dll
|
||||
System.Runtime.Serialization.Xml.dll
|
||||
System.Runtime.WindowsRuntime.dll
|
||||
System.Runtime.WindowsRuntime.UI.Xaml.dll
|
||||
System.Security.AccessControl.dll
|
||||
System.Security.Claims.dll
|
||||
System.Security.Cryptography.Algorithms.dll
|
||||
System.Security.Cryptography.Cng.dll
|
||||
System.Security.Cryptography.Csp.dll
|
||||
System.Security.Cryptography.Encoding.dll
|
||||
System.Security.Cryptography.Native.Apple.a
|
||||
System.Security.Cryptography.Native.Apple.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.a
|
||||
System.Security.Cryptography.Native.OpenSsl.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.so
|
||||
System.Security.Cryptography.OpenSsl.dll
|
||||
System.Security.Cryptography.Primitives.dll
|
||||
System.Security.Cryptography.X509Certificates.dll
|
||||
System.Security.Cryptography.XCertificates.dll
|
||||
System.Security.dll
|
||||
System.Security.Principal.dll
|
||||
System.Security.Principal.Windows.dll
|
||||
System.Security.SecureString.dll
|
||||
System.ServiceModel.Web.dll
|
||||
System.ServiceProcess.dll
|
||||
System.Text.Encoding.CodePages.dll
|
||||
System.Text.Encoding.dll
|
||||
System.Text.Encoding.Extensions.dll
|
||||
System.Text.Encodings.Web.dll
|
||||
System.Text.Json.dll
|
||||
System.Text.RegularExpressions.dll
|
||||
System.Threading.Channels.dll
|
||||
System.Threading.dll
|
||||
System.Threading.Overlapped.dll
|
||||
System.Threading.Tasks.Dataflow.dll
|
||||
System.Threading.Tasks.dll
|
||||
System.Threading.Tasks.Extensions.dll
|
||||
System.Threading.Tasks.Parallel.dll
|
||||
System.Threading.Thread.dll
|
||||
System.Threading.ThreadPool.dll
|
||||
System.Threading.Timer.dll
|
||||
System.Transactions.dll
|
||||
System.Transactions.Local.dll
|
||||
System.ValueTuple.dll
|
||||
System.Web.dll
|
||||
System.Web.HttpUtility.dll
|
||||
System.Windows.dll
|
||||
System.Xml.dll
|
||||
System.Xml.Linq.dll
|
||||
System.Xml.ReaderWriter.dll
|
||||
System.Xml.Serialization.dll
|
||||
System.Xml.XDocument.dll
|
||||
System.Xml.XmlDocument.dll
|
||||
System.Xml.XmlSerializer.dll
|
||||
System.Xml.XPath.dll
|
||||
System.Xml.XPath.XDocument.dll
|
||||
ucrtbase.dll
|
||||
WindowsBase.dll
|
||||
@@ -1,24 +0,0 @@
|
||||
[
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>",
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
|
||||
"TrimmedContents": {
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1,24 +0,0 @@
|
||||
[
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>",
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_RUNTIME_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
|
||||
"TrimmedContents": {
|
||||
"dotnetRuntime": "<RUNTIME_HASH>"
|
||||
}
|
||||
},
|
||||
{
|
||||
"HashValue": "<NO_EXTERNALS_HASH>",
|
||||
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
|
||||
"TrimmedContents": {
|
||||
"externals": "<EXTERNALS_HASH>"
|
||||
}
|
||||
}
|
||||
]
|
||||
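The two deleted files above were JSON manifests mapping each trimmed runner package (no runtime, no externals, or neither) to its download URL and to the content hashes of the pieces that were trimmed out. As a rough sketch, such a manifest could be deserialized like this; the shape follows the JSON shown above, but the C# type and method names are illustrative and not part of the runner's API.

```csharp
using System.IO;
using System.Text.Json;

// Mirrors the JSON shape of the deleted trim manifests above.
public sealed class TrimmedPackage
{
    public string HashValue { get; set; }
    public string DownloadUrl { get; set; }
    public TrimmedContents TrimmedContents { get; set; }
}

public sealed class TrimmedContents
{
    public string DotnetRuntime { get; set; }  // "dotnetRuntime" in the JSON
    public string Externals { get; set; }      // "externals" in the JSON
}

public static class TrimManifest
{
    public static TrimmedPackage[] Load(string path)
    {
        var options = new JsonSerializerOptions { PropertyNameCaseInsensitive = true };
        return JsonSerializer.Deserialize<TrimmedPackage[]>(File.ReadAllText(path), options);
    }
}
```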
@@ -1,5 +1,4 @@
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
@@ -7,28 +6,29 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
public sealed class ActionCommand
|
||||
{
|
||||
private static readonly StringEscapingUtil.EscapeMapping[] _escapeMappings = new[]
|
||||
private static readonly EscapeMapping[] _escapeMappings = new[]
|
||||
{
|
||||
new StringEscapingUtil.EscapeMapping(token: ";", replacement: "%3B"),
|
||||
new StringEscapingUtil.EscapeMapping(token: "\r", replacement: "%0D"),
|
||||
new StringEscapingUtil.EscapeMapping(token: "\n", replacement: "%0A"),
|
||||
new StringEscapingUtil.EscapeMapping(token: "]", replacement: "%5D"),
|
||||
new StringEscapingUtil.EscapeMapping(token: "%", replacement: "%25"),
|
||||
new EscapeMapping(token: ";", replacement: "%3B"),
|
||||
new EscapeMapping(token: "\r", replacement: "%0D"),
|
||||
new EscapeMapping(token: "\n", replacement: "%0A"),
|
||||
new EscapeMapping(token: "]", replacement: "%5D"),
|
||||
new EscapeMapping(token: "%", replacement: "%25"),
|
||||
};
|
||||
|
||||
private static readonly StringEscapingUtil.EscapeMapping[] _escapeDataMappings = new[]
|
||||
private static readonly EscapeMapping[] _escapeDataMappings = new[]
|
||||
{
|
||||
new StringEscapingUtil.EscapeMapping(token: "\r", replacement: "%0D"),
|
||||
new StringEscapingUtil.EscapeMapping(token: "\n", replacement: "%0A"),
|
||||
new EscapeMapping(token: "\r", replacement: "%0D"),
|
||||
new EscapeMapping(token: "\n", replacement: "%0A"),
|
||||
new EscapeMapping(token: "%", replacement: "%25"),
|
||||
};
|
||||
|
||||
private static readonly StringEscapingUtil.EscapeMapping[] _escapePropertyMappings = new[]
|
||||
private static readonly EscapeMapping[] _escapePropertyMappings = new[]
|
||||
{
|
||||
new StringEscapingUtil.EscapeMapping(token: "\r", replacement: "%0D"),
|
||||
new StringEscapingUtil.EscapeMapping(token: "\n", replacement: "%0A"),
|
||||
new StringEscapingUtil.EscapeMapping(token: ":", replacement: "%3A"),
|
||||
new StringEscapingUtil.EscapeMapping(token: ",", replacement: "%2C"),
|
||||
new StringEscapingUtil.EscapeMapping(token: "%", replacement: "%25"),
|
||||
new EscapeMapping(token: "\r", replacement: "%0D"),
|
||||
new EscapeMapping(token: "\n", replacement: "%0A"),
|
||||
new EscapeMapping(token: ":", replacement: "%3A"),
|
||||
new EscapeMapping(token: ",", replacement: "%2C"),
|
||||
new EscapeMapping(token: "%", replacement: "%25"),
|
||||
};
|
||||
|
||||
private readonly Dictionary<string, string> _properties = new(StringComparer.OrdinalIgnoreCase);
|
||||
@@ -103,12 +103,12 @@ namespace GitHub.Runner.Common
|
||||
string[] pair = propertyStr.Split(new[] { '=' }, count: 2, options: StringSplitOptions.RemoveEmptyEntries);
|
||||
if (pair.Length == 2)
|
||||
{
|
||||
command.Properties[pair[0]] = StringEscapingUtil.UnescapeString(pair[1], _escapePropertyMappings);
|
||||
command.Properties[pair[0]] = UnescapeProperty(pair[1]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
command.Data = StringEscapingUtil.UnescapeString(message.Substring(endIndex + _commandKey.Length), _escapeDataMappings);
|
||||
command.Data = UnescapeData(message.Substring(endIndex + _commandKey.Length));
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
@@ -173,12 +173,12 @@ namespace GitHub.Runner.Common
|
||||
string[] pair = propertyStr.Split(new[] { '=' }, count: 2, options: StringSplitOptions.RemoveEmptyEntries);
|
||||
if (pair.Length == 2)
|
||||
{
|
||||
command.Properties[pair[0]] = StringEscapingUtil.UnescapeString(pair[1], _escapeMappings);
|
||||
command.Properties[pair[0]] = Unescape(pair[1]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
command.Data = StringEscapingUtil.UnescapeString(message.Substring(rbIndex + 1), _escapeMappings);
|
||||
command.Data = Unescape(message.Substring(rbIndex + 1));
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
@@ -187,5 +187,67 @@ namespace GitHub.Runner.Common
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static string Unescape(string escaped)
|
||||
{
|
||||
if (string.IsNullOrEmpty(escaped))
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
string unescaped = escaped;
|
||||
foreach (EscapeMapping mapping in _escapeMappings)
|
||||
{
|
||||
unescaped = unescaped.Replace(mapping.Replacement, mapping.Token);
|
||||
}
|
||||
|
||||
return unescaped;
|
||||
}
|
||||
|
||||
private static string UnescapeProperty(string escaped)
|
||||
{
|
||||
if (string.IsNullOrEmpty(escaped))
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
string unescaped = escaped;
|
||||
foreach (EscapeMapping mapping in _escapePropertyMappings)
|
||||
{
|
||||
unescaped = unescaped.Replace(mapping.Replacement, mapping.Token);
|
||||
}
|
||||
|
||||
return unescaped;
|
||||
}
|
||||
|
||||
private static string UnescapeData(string escaped)
|
||||
{
|
||||
if (string.IsNullOrEmpty(escaped))
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
string unescaped = escaped;
|
||||
foreach (EscapeMapping mapping in _escapeDataMappings)
|
||||
{
|
||||
unescaped = unescaped.Replace(mapping.Replacement, mapping.Token);
|
||||
}
|
||||
|
||||
return unescaped;
|
||||
}
|
||||
|
||||
private sealed class EscapeMapping
|
||||
{
|
||||
public string Replacement { get; }
|
||||
public string Token { get; }
|
||||
|
||||
public EscapeMapping(string token, string replacement)
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(token, nameof(token));
|
||||
ArgUtil.NotNullOrEmpty(replacement, nameof(replacement));
|
||||
Token = token;
|
||||
Replacement = replacement;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
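The refactor above inlines the escape tables into ActionCommand instead of the shared StringEscapingUtil (which is deleted later in this compare), so workflow-command parsing keeps the same percent-style encodings: `%0D`, `%0A`, `%25`, plus `%3A`/`%2C` for properties and `%3B`/`%5D` for legacy commands. The ordering matters: `%` is always the last mapping, so an escaped literal such as `%250A` decodes back to the text `%0A` rather than to a newline. A compact sketch of the same replacement loop, with the table copied from the data mappings above:

```csharp
static class WorkflowCommandUnescape
{
    // Same ordering as the _escapeDataMappings table above: "%25" stays last,
    // so "%250A" decodes back to the literal "%0A" rather than to "\n".
    private static readonly (string Token, string Replacement)[] DataMappings =
    {
        ("\r", "%0D"),
        ("\n", "%0A"),
        ("%",  "%25"),
    };

    public static string UnescapeData(string escaped)
    {
        if (string.IsNullOrEmpty(escaped))
        {
            return string.Empty;
        }

        string unescaped = escaped;
        foreach (var (token, replacement) in DataMappings)
        {
            unescaped = unescaped.Replace(replacement, token);
        }

        return unescaped;
    }
}

// Example: WorkflowCommandUnescape.UnescapeData("line1%0Aline2") returns "line1\nline2",
//          WorkflowCommandUnescape.UnescapeData("a%250Ab")       returns "a%0Ab".
```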
@@ -17,7 +17,14 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
|
||||
|
||||
Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version, string os, string architecture);
|
||||
Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken);
|
||||
Task DeleteSessionAsync(CancellationToken cancellationToken);
|
||||
|
||||
Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);
|
||||
|
||||
Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials);
|
||||
|
||||
Task ForceRefreshConnection(VssCredentials credentials);
|
||||
}
|
||||
|
||||
public sealed class BrokerServer : RunnerService, IBrokerServer
|
||||
@@ -44,13 +51,53 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version, string os, string architecture)
|
||||
public async Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
var jobMessage = RetryRequest<TaskAgentMessage>(
|
||||
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, os, architecture, cancellationToken), cancellationToken);
|
||||
var jobMessage = await _brokerHttpClient.CreateSessionAsync(session, cancellationToken);
|
||||
|
||||
return jobMessage;
|
||||
}
|
||||
|
||||
public Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
var brokerSession = RetryRequest<TaskAgentMessage>(
|
||||
async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException);
|
||||
|
||||
|
||||
return brokerSession;
|
||||
}
|
||||
|
||||
public async Task DeleteSessionAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
await _brokerHttpClient.DeleteSessionAsync(cancellationToken);
|
||||
}
|
||||
|
||||
public Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials)
|
||||
{
|
||||
if (_brokerUri != serverUri || !_hasConnection)
|
||||
{
|
||||
return ConnectAsync(serverUri, credentials);
|
||||
}
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task ForceRefreshConnection(VssCredentials credentials)
|
||||
{
|
||||
return ConnectAsync(_brokerUri, credentials);
|
||||
}
|
||||
|
||||
public bool ShouldRetryException(Exception ex)
|
||||
{
|
||||
if (ex is AccessDeniedException ade && ade.ErrorCode == 1)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
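Taken together, the new IBrokerServer members form the session lifecycle the listener drives against the broker: connect, create a session, long-poll for messages with that session's id, and delete the session on shutdown. The fragment below is an illustrative consumer of that surface, not runner code: the agent id/name, version strings and broker URL are placeholders, and the using directives follow the namespaces seen elsewhere in this compare. In the runner itself the service is obtained through the host context's service locator rather than passed in directly.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Services.Common;

static class BrokerSessionSketch
{
    public static async Task ListenOnceAsync(IBrokerServer server, Uri brokerUrl, VssCredentials creds, CancellationToken token)
    {
        await server.ConnectAsync(brokerUrl, creds);

        // Session identity mirrors the listener code shown later in this compare.
        var agent = new TaskAgentReference { Id = 1, Name = "my-runner", Version = "2.314.0" };
        TaskAgentSession session = await server.CreateSessionAsync(new TaskAgentSession("my-runner", agent), token);
        try
        {
            // Long-poll the broker for the next message addressed to this session.
            TaskAgentMessage message = await server.GetRunnerMessageAsync(
                session.SessionId, TaskAgentStatus.Online, "2.314.0", "Linux", "X64",
                disableUpdate: false, token);

            // ... hand the message off to the dispatcher here ...
        }
        finally
        {
            await server.DeleteSessionAsync(token);
        }
    }
}
```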
@@ -180,6 +180,9 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string DeprecatedNodeVersion = "node16";
|
||||
public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/";
|
||||
public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings";
|
||||
public static readonly string EnforcedNode16DetectedAfterEndOfLife = "The following actions uses Node.js version which is deprecated and will be forced to run on node20: {0}. For more info: https://github.blog/changelog/2024-03-07-github-actions-all-actions-will-run-on-node20-instead-of-node16-by-default/";
|
||||
public static readonly string EnforcedNode16DetectedAfterEndOfLifeEnvVariable = "Node20ForceActionsWarnings";
|
||||
|
||||
}
|
||||
|
||||
public static class RunnerEvent
|
||||
@@ -251,6 +254,7 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
|
||||
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
|
||||
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
|
||||
public static readonly string ManualForceActionsToNode20 = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE20";
|
||||
}
|
||||
|
||||
public static class Agent
|
||||
@@ -262,6 +266,7 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
|
||||
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
|
||||
public static readonly string ActionArchiveCacheDirectory = "ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE";
|
||||
public static readonly string ManualForceActionsToNode20 = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE20";
|
||||
}
|
||||
|
||||
public static class System
|
||||
|
||||
@@ -200,6 +200,10 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
|
||||
}
|
||||
else
|
||||
{
|
||||
_userAgents.Add(new ProductInfoHeaderValue("HttpProxyConfigured", bool.TrueString));
|
||||
}
|
||||
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||
{
|
||||
|
||||
@@ -134,8 +134,8 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
liveConsoleFeedUrl = feedStreamUrl;
|
||||
}
|
||||
|
||||
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
|
||||
jobRequest.Variables.TryGetValue("system.github.results_upload_with_sdk", out VariableValue resultsUseSdkVariable);
|
||||
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken, StringUtil.ConvertToBoolean(resultsUseSdkVariable?.Value));
|
||||
_resultsClientInitiated = true;
|
||||
}
|
||||
|
||||
@@ -551,6 +551,10 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
await UploadSummaryFile(file);
|
||||
}
|
||||
if (string.Equals(file.Type, CoreAttachmentType.ResultsDiagnosticLog, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
await UploadResultsDiagnosticLogsFile(file);
|
||||
}
|
||||
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (file.RecordId != _jobTimelineRecordId)
|
||||
@@ -922,6 +926,17 @@ namespace GitHub.Runner.Common
|
||||
await UploadResultsFile(file, summaryHandler);
|
||||
}
|
||||
|
||||
private async Task UploadResultsDiagnosticLogsFile(ResultsUploadFileInfo file)
|
||||
{
|
||||
Trace.Info($"Starting to upload diagnostic logs file to results service {file.Name}, {file.Path}");
|
||||
ResultsFileUploadHandler diagnosticLogsHandler = async (file) =>
|
||||
{
|
||||
await _resultsServer.CreateResultsDiagnosticLogsAsync(file.PlanId, file.JobId, file.Path, CancellationToken.None);
|
||||
};
|
||||
|
||||
await UploadResultsFile(file, diagnosticLogsHandler);
|
||||
}
|
||||
|
||||
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
|
||||
{
|
||||
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
|
||||
|
||||
@@ -19,7 +19,7 @@ namespace GitHub.Runner.Common
|
||||
[ServiceLocator(Default = typeof(ResultServer))]
|
||||
public interface IResultsServer : IRunnerService, IAsyncDisposable
|
||||
{
|
||||
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);
|
||||
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk);
|
||||
|
||||
Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
|
||||
|
||||
@@ -35,6 +35,8 @@ namespace GitHub.Runner.Common
|
||||
|
||||
Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
|
||||
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
|
||||
|
||||
Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
public sealed class ResultServer : RunnerService, IResultsServer
|
||||
@@ -51,9 +53,9 @@ namespace GitHub.Runner.Common
|
||||
private String _liveConsoleFeedUrl;
|
||||
private string _token;
|
||||
|
||||
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
|
||||
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk)
|
||||
{
|
||||
this._resultsClient = CreateHttpClient(uri, token);
|
||||
this._resultsClient = CreateHttpClient(uri, token, useSdk);
|
||||
|
||||
_token = token;
|
||||
if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
|
||||
@@ -63,7 +65,7 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public ResultsHttpClient CreateHttpClient(Uri uri, string token)
|
||||
public ResultsHttpClient CreateHttpClient(Uri uri, string token, bool useSdk)
|
||||
{
|
||||
// Using default 100 timeout
|
||||
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
|
||||
@@ -80,7 +82,7 @@ namespace GitHub.Runner.Common
|
||||
|
||||
var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
|
||||
|
||||
return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true);
|
||||
return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true, useSdk: useSdk);
|
||||
}
|
||||
|
||||
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
|
||||
@@ -141,6 +143,18 @@ namespace GitHub.Runner.Common
|
||||
throw new InvalidOperationException("Results client is not initialized.");
|
||||
}
|
||||
|
||||
public Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (_resultsClient != null)
|
||||
{
|
||||
return _resultsClient.UploadResultsDiagnosticLogsAsync(planId, jobId, file,
|
||||
cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
throw new InvalidOperationException("Results client is not initialized.");
|
||||
}
|
||||
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
|
||||
|
||||
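With the extra `useSdk` flag (fed by the `system.github.results_upload_with_sdk` job variable, as the JobServerQueue hunk above shows) and the new CreateResultsDiagnosticLogsAsync member, a caller first initializes the results client and can then push a diagnostic log for a plan/job. The sketch below only shows that call order against the IResultsServer members above; the instance would normally come from the host context's service locator, and the endpoint, token, ids and file path are placeholders.

```csharp
// Sketch only; IResultsServer lives in GitHub.Runner.Common. All values below are placeholders.
async Task UploadDiagnosticLogAsync(IResultsServer resultsServer, CancellationToken ct)
{
    resultsServer.InitializeResultsClient(
        new Uri("https://results.example.invalid"),
        liveConsoleFeedUrl: null,
        token: "<results token>",
        useSdk: false);

    await resultsServer.CreateResultsDiagnosticLogsAsync(
        planId: "<plan id>",
        jobId: "<job id>",
        file: "/tmp/runner-diag.log",
        cancellationToken: ct);
}
```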
@@ -5,6 +5,7 @@ using System.Threading.Tasks;
|
||||
using GitHub.Actions.RunService.WebApi;
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using Sdk.RSWebApi.Contracts;
|
||||
@@ -60,7 +61,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
CheckConnection();
|
||||
return RetryRequest<AgentJobRequestMessage>(
|
||||
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken,
|
||||
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, VarUtil.OS, cancellationToken), cancellationToken,
|
||||
shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException);
|
||||
}
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ namespace GitHub.Runner.Common
|
||||
Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken);
|
||||
Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken);
|
||||
Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken);
|
||||
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, CancellationToken cancellationToken);
|
||||
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken);
|
||||
|
||||
// job request
|
||||
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
|
||||
@@ -272,10 +272,10 @@ namespace GitHub.Runner.Common
|
||||
return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, CancellationToken cancellationToken)
|
||||
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.MessageQueue);
|
||||
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, os, architecture, cancellationToken: cancellationToken);
|
||||
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, os, architecture, disableUpdate, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Common;
|
||||
|
||||
namespace GitHub.Runner.Common.Util
|
||||
{
|
||||
public static class StringEscapingUtil
|
||||
{
|
||||
|
||||
public static string UnescapeString(string escaped, EscapeMapping[] _escapeDataMappings)
|
||||
{
|
||||
if (string.IsNullOrEmpty(escaped))
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
string unescaped = escaped;
|
||||
foreach (EscapeMapping mapping in _escapeDataMappings)
|
||||
{
|
||||
unescaped = unescaped.Replace(mapping.Replacement, mapping.Token);
|
||||
}
|
||||
|
||||
return unescaped;
|
||||
}
|
||||
public class EscapeMapping
|
||||
{
|
||||
public string Replacement { get; }
|
||||
public string Token { get; }
|
||||
|
||||
public EscapeMapping(string token, string replacement)
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(token, nameof(token));
|
||||
ArgUtil.NotNullOrEmpty(replacement, nameof(replacement));
|
||||
Token = token;
|
||||
Replacement = replacement;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -24,7 +24,15 @@ namespace GitHub.Runner.Listener
|
||||
private TimeSpan _getNextMessageRetryInterval;
|
||||
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
|
||||
private CancellationTokenSource _getMessagesTokenSource;
|
||||
private VssCredentials _creds;
|
||||
private TaskAgentSession _session;
|
||||
private IBrokerServer _brokerServer;
|
||||
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
|
||||
private bool _accessTokenRevoked = false;
|
||||
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
|
||||
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
||||
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
||||
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
@@ -36,13 +44,134 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
|
||||
{
|
||||
await RefreshBrokerConnection();
|
||||
return await Task.FromResult(true);
|
||||
Trace.Entering();
|
||||
|
||||
// Settings
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
_settings = configManager.LoadSettings();
|
||||
var serverUrl = _settings.ServerUrlV2;
|
||||
Trace.Info(_settings);
|
||||
|
||||
if (string.IsNullOrEmpty(_settings.ServerUrlV2))
|
||||
{
|
||||
throw new InvalidOperationException("ServerUrlV2 is not set");
|
||||
}
|
||||
|
||||
// Create connection.
|
||||
Trace.Info("Loading Credentials");
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
_creds = credMgr.LoadCredentials();
|
||||
|
||||
var agent = new TaskAgentReference
|
||||
{
|
||||
Id = _settings.AgentId,
|
||||
Name = _settings.AgentName,
|
||||
Version = BuildConstants.RunnerPackage.Version,
|
||||
OSDescription = RuntimeInformation.OSDescription,
|
||||
};
|
||||
string sessionName = $"{Environment.MachineName ?? "RUNNER"}";
|
||||
var taskAgentSession = new TaskAgentSession(sessionName, agent);
|
||||
|
||||
string errorMessage = string.Empty;
|
||||
bool encounteringError = false;
|
||||
|
||||
while (true)
|
||||
{
|
||||
token.ThrowIfCancellationRequested();
|
||||
Trace.Info($"Attempt to create session.");
|
||||
try
|
||||
{
|
||||
Trace.Info("Connecting to the Broker Server...");
|
||||
await _brokerServer.ConnectAsync(new Uri(serverUrl), _creds);
|
||||
Trace.Info("VssConnection created");
|
||||
|
||||
_term.WriteLine();
|
||||
_term.WriteSuccessMessage("Connected to GitHub");
|
||||
_term.WriteLine();
|
||||
|
||||
_session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);
|
||||
|
||||
Trace.Info($"Session created.");
|
||||
if (encounteringError)
|
||||
{
|
||||
_term.WriteLine($"{DateTime.UtcNow:u}: Runner reconnected.");
|
||||
_sessionCreationExceptionTracker.Clear();
|
||||
encounteringError = false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info("Session creation has been cancelled.");
|
||||
throw;
|
||||
}
|
||||
catch (TaskAgentAccessTokenExpiredException)
|
||||
{
|
||||
Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
|
||||
_accessTokenRevoked = true;
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Catch exception during create session.");
|
||||
Trace.Error(ex);
|
||||
|
||||
if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
|
||||
{
|
||||
// "invalid_client" means the runner registration has been deleted from the server.
|
||||
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check whether we get 401 because the runner registration already removed by the service.
|
||||
// If the runner registration get deleted, we can't exchange oauth token.
|
||||
Trace.Error("Test oauth app registration.");
|
||||
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
|
||||
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
|
||||
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!IsSessionCreationExceptionRetriable(ex))
|
||||
{
|
||||
_term.WriteError($"Failed to create session. {ex.Message}");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!encounteringError) //print the message only on the first error
|
||||
{
|
||||
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
|
||||
encounteringError = true;
|
||||
}
|
||||
|
||||
Trace.Info("Sleeping for {0} seconds before retrying.", _sessionCreationRetryInterval.TotalSeconds);
|
||||
await HostContext.Delay(_sessionCreationRetryInterval, token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async Task DeleteSessionAsync()
|
||||
{
|
||||
await Task.CompletedTask;
|
||||
if (_session != null && _session.SessionId != Guid.Empty)
|
||||
{
|
||||
if (!_accessTokenRevoked)
|
||||
{
|
||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
await _brokerServer.DeleteSessionAsync(ts.Token);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void OnJobStatus(object sender, JobStatusEventArgs e)
|
||||
@@ -73,7 +202,13 @@ namespace GitHub.Runner.Listener
|
||||
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
||||
try
|
||||
{
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version, VarUtil.OS, VarUtil.OSArchitecture);
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
|
||||
runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
_settings.DisableUpdate,
|
||||
_getMessagesTokenSource.Token);
|
||||
|
||||
if (message == null)
|
||||
{
|
||||
@@ -138,7 +273,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
|
||||
// re-create VssConnection before next retry
|
||||
await RefreshBrokerConnection();
|
||||
await RefreshBrokerConnectionAsync();
|
||||
|
||||
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
|
||||
await HostContext.Delay(_getNextMessageRetryInterval, token);
|
||||
@@ -168,6 +303,11 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
await RefreshBrokerConnectionAsync();
|
||||
}
|
||||
|
||||
public async Task DeleteMessageAsync(TaskAgentMessage message)
|
||||
{
|
||||
await Task.CompletedTask;
|
||||
@@ -191,12 +331,84 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
private async Task RefreshBrokerConnection()
|
||||
private bool IsSessionCreationExceptionRetriable(Exception ex)
|
||||
{
|
||||
if (ex is TaskAgentNotFoundException)
|
||||
{
|
||||
Trace.Info("The runner no longer exists on the server. Stopping the runner.");
|
||||
_term.WriteError("The runner no longer exists on the server. Please reconfigure the runner.");
|
||||
return false;
|
||||
}
|
||||
else if (ex is TaskAgentSessionConflictException)
|
||||
{
|
||||
Trace.Info("The session for this runner already exists.");
|
||||
_term.WriteError("A session for this runner already exists.");
|
||||
if (_sessionCreationExceptionTracker.ContainsKey(nameof(TaskAgentSessionConflictException)))
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)]++;
|
||||
if (_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] * _sessionCreationRetryInterval.TotalSeconds >= _sessionConflictRetryLimit.TotalSeconds)
|
||||
{
|
||||
Trace.Info("The session conflict exception have reached retry limit.");
|
||||
_term.WriteError($"Stop retry on SessionConflictException after retried for {_sessionConflictRetryLimit.TotalSeconds} seconds.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] = 1;
|
||||
}
|
||||
|
||||
Trace.Info("The session conflict exception haven't reached retry limit.");
|
||||
return true;
|
||||
}
|
||||
else if (ex is VssOAuthTokenRequestException && ex.Message.Contains("Current server time is"))
|
||||
{
|
||||
Trace.Info("Local clock might be skewed.");
|
||||
_term.WriteError("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
|
||||
if (_sessionCreationExceptionTracker.ContainsKey(nameof(VssOAuthTokenRequestException)))
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)]++;
|
||||
if (_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] * _sessionCreationRetryInterval.TotalSeconds >= _clockSkewRetryLimit.TotalSeconds)
|
||||
{
|
||||
Trace.Info("The OAuth token request exception have reached retry limit.");
|
||||
_term.WriteError($"Stopped retrying OAuth token request exception after {_clockSkewRetryLimit.TotalSeconds} seconds.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] = 1;
|
||||
}
|
||||
|
||||
Trace.Info("The OAuth token request exception haven't reached retry limit.");
|
||||
return true;
|
||||
}
|
||||
else if (ex is TaskAgentPoolNotFoundException ||
|
||||
ex is AccessDeniedException ||
|
||||
ex is VssUnauthorizedException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
return false;
|
||||
}
|
||||
|
||||
else if (ex is InvalidOperationException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Retriable exception: {ex.Message}");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task RefreshBrokerConnectionAsync()
|
||||
{
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
_settings = configManager.LoadSettings();
|
||||
|
||||
if (_settings.ServerUrlV2 == null)
|
||||
if (string.IsNullOrEmpty(_settings.ServerUrlV2))
|
||||
{
|
||||
throw new InvalidOperationException("ServerUrlV2 is not set");
|
||||
}
|
||||
|
||||
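The listener caps how long it keeps retrying session creation by multiplying the tracked attempt count by the fixed 30-second retry interval and comparing against a per-error budget: 4 minutes for session conflicts and 30 minutes for clock-skew/OAuth failures, i.e. roughly 8 and 60 tracked attempts respectively. A tiny sketch of that arithmetic, using the same TimeSpan values as the fields shown above:

```csharp
using System;

class RetryBudget
{
    static void Main()
    {
        TimeSpan retryInterval = TimeSpan.FromSeconds(30);       // _sessionCreationRetryInterval
        TimeSpan sessionConflictLimit = TimeSpan.FromMinutes(4); // _sessionConflictRetryLimit
        TimeSpan clockSkewLimit = TimeSpan.FromMinutes(30);      // _clockSkewRetryLimit

        // "attempts * interval >= limit" is the stop condition used above.
        Console.WriteLine(sessionConflictLimit.TotalSeconds / retryInterval.TotalSeconds); // 8
        Console.WriteLine(clockSkewLimit.TotalSeconds / retryInterval.TotalSeconds);       // 60
    }
}
```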
@@ -39,6 +39,7 @@ namespace GitHub.Runner.Listener.Check
|
||||
string githubApiUrl = null;
|
||||
string actionsTokenServiceUrl = null;
|
||||
string actionsPipelinesServiceUrl = null;
|
||||
string resultsReceiverServiceUrl = null;
|
||||
var urlBuilder = new UriBuilder(url);
|
||||
if (UrlUtil.IsHostedServer(urlBuilder))
|
||||
{
|
||||
@@ -47,6 +48,7 @@ namespace GitHub.Runner.Listener.Check
|
||||
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
|
||||
actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
|
||||
resultsReceiverServiceUrl = "https://results-receiver.actions.githubusercontent.com/health";
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -56,13 +58,31 @@ namespace GitHub.Runner.Listener.Check
|
||||
actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
urlBuilder.Path = "_services/pipelines/_apis/health";
|
||||
actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
resultsReceiverServiceUrl = string.Empty; // we don't have Results service in GHES yet.
|
||||
}
|
||||
|
||||
var codeLoadUrlBuilder = new UriBuilder(url);
|
||||
codeLoadUrlBuilder.Host = $"codeload.{codeLoadUrlBuilder.Host}";
|
||||
codeLoadUrlBuilder.Path = "_ping";
|
||||
|
||||
// check github api
|
||||
checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
// check github codeload
|
||||
checkTasks.Add(CheckUtil.CheckDns(codeLoadUrlBuilder.Uri.AbsoluteUri));
|
||||
checkTasks.Add(CheckUtil.CheckPing(codeLoadUrlBuilder.Uri.AbsoluteUri));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(codeLoadUrlBuilder.Uri.AbsoluteUri, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
// check results-receiver service
|
||||
if (!string.IsNullOrEmpty(resultsReceiverServiceUrl))
|
||||
{
|
||||
checkTasks.Add(CheckUtil.CheckDns(resultsReceiverServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(resultsReceiverServiceUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(resultsReceiverServiceUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
}
|
||||
|
||||
// check actions token service
|
||||
checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));
|
||||
|
||||
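For github.com ("hosted server") the diagnostics check now probes the Results receiver alongside the GitHub API, codeload, token and pipelines endpoints; on GHES the results URL is left empty because the service is not available there yet. The fragment below is a sketch that gathers the hosted health URLs from the hunk above and schedules the same DNS and ping probes. It assumes `CheckUtil` behaves as in the surrounding code (its helpers are treated as async, as their use in `checkTasks` suggests), and the API URL is a placeholder since its construction is not shown in this hunk.

```csharp
// Sketch: hosted-service health endpoints probed by the updated check.
var endpoints = new[]
{
    "https://api.github.com",                                            // placeholder for the api URL built above
    "https://codeload.github.com/_ping",                                 // codeload host + "_ping" path, as above
    "https://vstoken.actions.githubusercontent.com/_apis/health",
    "https://pipelines.actions.githubusercontent.com/_apis/health",
    "https://results-receiver.actions.githubusercontent.com/health",
};

var checkTasks = new List<Task>();
foreach (var endpoint in endpoints)
{
    checkTasks.Add(CheckUtil.CheckDns(endpoint));
    checkTasks.Add(CheckUtil.CheckPing(endpoint));
}
await Task.WhenAll(checkTasks);
```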
@@ -35,7 +35,7 @@ namespace GitHub.Runner.Listener
|
||||
// This implementation of IJobDispatcher is not thread safe.
|
||||
// It is based on the fact that the current design of the runner is a dequeue
|
||||
// and processes one message from the message queue at a time.
|
||||
// In addition, it only executes one job every time,
|
||||
// In addition, it only executes one job every time,
|
||||
// and the server will not send another job while this one is still running.
|
||||
public sealed class JobDispatcher : RunnerService, IJobDispatcher
|
||||
{
|
||||
@@ -546,13 +546,27 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
|
||||
|
||||
var jobServer = await InitializeJobServerAsync(systemConnection);
|
||||
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
|
||||
|
||||
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
|
||||
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
|
||||
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
|
||||
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
|
||||
switch (jobServer)
|
||||
{
|
||||
Trace.Info($"Finish job with result 'Failed' due to IOException.");
|
||||
await ForceFailJob(jobServer, message, detailInfo);
|
||||
case IJobServer js:
|
||||
{
|
||||
await LogWorkerProcessUnhandledException(js, message, unhandledExceptionIssue);
|
||||
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
|
||||
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Info($"Finish job with result 'Failed' due to IOException.");
|
||||
await ForceFailJob(js, message);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
case IRunServer rs:
|
||||
await ForceFailJob(rs, message, unhandledExceptionIssue);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException($"JobServer type '{jobServer.GetType().Name}' is not supported.");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -629,8 +643,22 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info("worker process has been killed.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// message send failed, this might indicate worker process is already exited or stuck.
|
||||
Trace.Info($"Job cancel message sending for job {message.JobId} failed, kill running worker. {ex}");
|
||||
workerProcessCancelTokenSource.Cancel();
|
||||
try
|
||||
{
|
||||
await workerProcessTask;
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Trace.Info("worker process has been killed.");
|
||||
}
|
||||
}
|
||||
|
||||
// wait worker to exit
|
||||
// wait worker to exit
|
||||
// if worker doesn't exit within timeout, then kill worker.
|
||||
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
|
||||
|
||||
@@ -1117,77 +1145,70 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
|
||||
// log an error issue to job level timeline record
|
||||
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
|
||||
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, Issue issue)
|
||||
{
|
||||
if (server is IJobServer jobServer)
|
||||
try
|
||||
{
|
||||
try
|
||||
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
|
||||
ArgUtil.NotNull(timeline, nameof(timeline));
|
||||
|
||||
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
|
||||
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
|
||||
|
||||
|
||||
jobRecord.ErrorCount++;
|
||||
jobRecord.Issues.Add(issue);
|
||||
|
||||
if (message.Variables.TryGetValue("DistributedTask.MarkJobAsFailedOnWorkerCrash", out var markJobAsFailedOnWorkerCrash) &&
|
||||
StringUtil.ConvertToBoolean(markJobAsFailedOnWorkerCrash?.Value))
|
||||
{
|
||||
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
|
||||
ArgUtil.NotNull(timeline, nameof(timeline));
|
||||
|
||||
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
|
||||
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
|
||||
|
||||
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
|
||||
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
|
||||
jobRecord.ErrorCount++;
|
||||
jobRecord.Issues.Add(unhandledExceptionIssue);
|
||||
|
||||
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
|
||||
Trace.Error(ex);
|
||||
Trace.Info("Mark the job as failed since the worker crashed");
|
||||
jobRecord.Result = TaskResult.Failed;
|
||||
// mark the job as completed so service will pickup the result
|
||||
jobRecord.State = TimelineRecordState.Completed;
|
||||
}
|
||||
|
||||
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
|
||||
}
|
||||
else
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", detailInfo);
|
||||
return;
|
||||
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
// raise job completed event to fail the job.
|
||||
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
|
||||
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
|
||||
{
|
||||
if (server is IJobServer jobServer)
|
||||
try
|
||||
{
|
||||
try
|
||||
{
|
||||
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
|
||||
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to raise JobCompletedEvent back to service.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
|
||||
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
|
||||
}
|
||||
else if (server is IRunServer runServer)
|
||||
catch (Exception ex)
|
||||
{
|
||||
try
|
||||
{
|
||||
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
|
||||
var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
|
||||
var jobAnnotations = new List<Annotation>();
|
||||
if (unhandledAnnotation.HasValue)
|
||||
{
|
||||
jobAnnotations.Add(unhandledAnnotation.Value);
|
||||
}
|
||||
Trace.Error("Fail to raise JobCompletedEvent back to service.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to raise job completion back to service.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
else
|
||||
private async Task ForceFailJob(IRunServer runServer, Pipelines.AgentJobRequestMessage message, Issue issue)
|
||||
{
|
||||
try
|
||||
{
|
||||
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
|
||||
var annotation = issue.ToAnnotation();
|
||||
var jobAnnotations = new List<Annotation>();
|
||||
if (annotation.HasValue)
|
||||
{
|
||||
jobAnnotations.Add(annotation.Value);
|
||||
}
|
||||
|
||||
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to raise job completion back to service.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
using GitHub.Services.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -24,6 +25,8 @@ namespace GitHub.Runner.Listener
|
||||
Task DeleteSessionAsync();
|
||||
Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token);
|
||||
Task DeleteMessageAsync(TaskAgentMessage message);
|
||||
|
||||
Task RefreshListenerTokenAsync(CancellationToken token);
|
||||
void OnJobStatus(object sender, JobStatusEventArgs e);
|
||||
}
|
||||
|
||||
@@ -33,6 +36,7 @@ namespace GitHub.Runner.Listener
|
||||
private RunnerSettings _settings;
|
||||
private ITerminal _term;
|
||||
private IRunnerServer _runnerServer;
|
||||
private IBrokerServer _brokerServer;
|
||||
private TaskAgentSession _session;
|
||||
private TimeSpan _getNextMessageRetryInterval;
|
||||
private bool _accessTokenRevoked = false;
|
||||
@@ -42,6 +46,9 @@ namespace GitHub.Runner.Listener
|
||||
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
|
||||
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
|
||||
private CancellationTokenSource _getMessagesTokenSource;
|
||||
private VssCredentials _creds;
|
||||
|
||||
private bool _isBrokerSession = false;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
@@ -49,6 +56,7 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
_term = HostContext.GetService<ITerminal>();
|
||||
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
_brokerServer = hostContext.GetService<IBrokerServer>();
|
||||
}
|
||||
|
||||
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
|
||||
@@ -64,7 +72,7 @@ namespace GitHub.Runner.Listener
|
||||
// Create connection.
|
||||
Trace.Info("Loading Credentials");
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
VssCredentials creds = credMgr.LoadCredentials();
|
||||
_creds = credMgr.LoadCredentials();
|
||||
|
||||
var agent = new TaskAgentReference
|
||||
{
|
||||
@@ -86,7 +94,7 @@ namespace GitHub.Runner.Listener
|
||||
try
|
||||
{
|
||||
Trace.Info("Connecting to the Runner Server...");
|
||||
await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
|
||||
await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds);
|
||||
Trace.Info("VssConnection created");
|
||||
|
||||
_term.WriteLine();
|
||||
@@ -98,6 +106,15 @@ namespace GitHub.Runner.Listener
|
||||
taskAgentSession,
|
||||
token);
|
||||
|
||||
if (_session.BrokerMigrationMessage != null)
|
||||
{
|
||||
Trace.Info("Runner session is in migration mode: Creating Broker session with BrokerBaseUrl: {0}", _session.BrokerMigrationMessage.BrokerBaseUrl);
|
||||
|
||||
await _brokerServer.UpdateConnectionIfNeeded(_session.BrokerMigrationMessage.BrokerBaseUrl, _creds);
|
||||
_session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);
|
||||
_isBrokerSession = true;
|
||||
}
|
||||
|
||||
Trace.Info($"Session created.");
|
||||
if (encounteringError)
|
||||
{
|
||||
@@ -124,7 +141,7 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Error("Catch exception during create session.");
|
||||
Trace.Error(ex);
|
||||
|
||||
if (ex is VssOAuthTokenRequestException vssOAuthEx && creds.Federated is VssOAuthCredential vssOAuthCred)
|
||||
if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
|
||||
{
|
||||
// "invalid_client" means the runner registration has been deleted from the server.
|
||||
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||
@@ -172,6 +189,11 @@ namespace GitHub.Runner.Listener
|
||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
|
||||
|
||||
if (_isBrokerSession)
|
||||
{
|
||||
await _brokerServer.DeleteSessionAsync(ts.Token);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -183,19 +205,17 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
public void OnJobStatus(object sender, JobStatusEventArgs e)
|
||||
{
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW")))
|
||||
Trace.Info("Received job status event. JobState: {0}", e.Status);
|
||||
runnerStatus = e.Status;
|
||||
try
|
||||
{
|
||||
Trace.Info("Received job status event. JobState: {0}", e.Status);
|
||||
runnerStatus = e.Status;
|
||||
try
|
||||
{
|
||||
_getMessagesTokenSource?.Cancel();
|
||||
}
|
||||
catch (ObjectDisposedException)
|
||||
{
|
||||
Trace.Info("_getMessagesTokenSource is already disposed.");
|
||||
}
|
||||
_getMessagesTokenSource?.Cancel();
|
||||
}
|
||||
catch (ObjectDisposedException)
|
||||
{
|
||||
Trace.Info("_getMessagesTokenSource is already disposed.");
|
||||
}
|
||||
|
||||
}
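The rewritten OnJobStatus body keeps the Cancel call guarded because the token source can be disposed by the message loop between the null check and the cancel. A minimal standalone sketch of that guarded-cancel pattern, assuming nothing beyond System.Threading:

using System;
using System.Threading;

static class CancelHelper
{
    // Cancel a token source that another code path may already have disposed.
    public static void TryCancel(CancellationTokenSource cts)
    {
        try
        {
            cts?.Cancel();
        }
        catch (ObjectDisposedException)
        {
            // Already disposed elsewhere; nothing left to signal.
        }
    }
}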
|
||||
|
||||
public async Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token)
|
||||
@@ -222,11 +242,29 @@ namespace GitHub.Runner.Listener
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
_settings.DisableUpdate,
|
||||
_getMessagesTokenSource.Token);
|
||||
|
||||
// Decrypt the message body if the session is using encryption
|
||||
message = DecryptMessage(message);
|
||||
|
||||
|
||||
if (message != null && message.MessageType == BrokerMigrationMessage.MessageType)
|
||||
{
|
||||
Trace.Info("BrokerMigration message received. Polling Broker for messages...");
|
||||
|
||||
var migrationMessage = JsonUtility.FromString<BrokerMigrationMessage>(message.Body);
|
||||
|
||||
await _brokerServer.UpdateConnectionIfNeeded(migrationMessage.BrokerBaseUrl, _creds);
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
|
||||
runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
_settings.DisableUpdate,
|
||||
token);
|
||||
}
|
||||
|
||||
if (message != null)
|
||||
{
|
||||
_lastMessageId = message.MessageId;
|
||||
@@ -342,6 +380,12 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
|
||||
await _brokerServer.ForceRefreshConnection(_creds);
|
||||
}
|
||||
|
||||
private TaskAgentMessage DecryptMessage(TaskAgentMessage message)
|
||||
{
|
||||
if (_session.EncryptionKey == null ||
|
||||
|
||||
@@ -25,12 +25,6 @@
|
||||
<PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<EmbeddedResource Include="..\Misc\runnercoreassets">
|
||||
<LogicalName>GitHub.Runner.Listener.runnercoreassets</LogicalName>
|
||||
</EmbeddedResource>
|
||||
</ItemGroup>
|
||||
|
||||
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
|
||||
<DebugType>portable</DebugType>
|
||||
</PropertyGroup>
|
||||
|
||||
@@ -457,22 +457,13 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
message = await getNextMessage; //get next message
|
||||
HostContext.WritePerfCounter($"MessageReceived_{message.MessageType}");
|
||||
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(message.MessageType, RunnerRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
|
||||
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (autoUpdateInProgress == false)
|
||||
{
|
||||
autoUpdateInProgress = true;
|
||||
AgentRefreshMessage runnerUpdateMessage = null;
|
||||
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
|
||||
}
|
||||
else
|
||||
{
|
||||
var brokerRunnerUpdateMessage = JsonUtility.FromString<RunnerRefreshMessage>(message.Body);
|
||||
runnerUpdateMessage = new AgentRefreshMessage(brokerRunnerUpdateMessage.RunnerId, brokerRunnerUpdateMessage.TargetVersion, TimeSpan.FromSeconds(brokerRunnerUpdateMessage.TimeoutInSeconds));
|
||||
}
|
||||
AgentRefreshMessage runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
|
||||
|
||||
#if DEBUG
|
||||
// Can mock the update for testing
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
|
||||
@@ -503,6 +494,22 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info("Refresh message received, skip autoupdate since a previous autoupdate is already running.");
|
||||
}
|
||||
}
|
||||
else if (string.Equals(message.MessageType, RunnerRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (autoUpdateInProgress == false)
|
||||
{
|
||||
autoUpdateInProgress = true;
|
||||
RunnerRefreshMessage brokerRunnerUpdateMessage = JsonUtility.FromString<RunnerRefreshMessage>(message.Body);
|
||||
|
||||
var selfUpdater = HostContext.GetService<ISelfUpdaterV2>();
|
||||
selfUpdateTask = selfUpdater.SelfUpdate(brokerRunnerUpdateMessage, jobDispatcher, false, HostContext.RunnerShutdownToken);
|
||||
Trace.Info("Refresh message received, kick-off selfupdate background process.");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info("Refresh message received, skip autoupdate since a previous autoupdate is already running.");
|
||||
}
|
||||
}
|
||||
else if (string.Equals(message.MessageType, JobRequestMessageTypes.PipelineAgentJobRequest, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (autoUpdateInProgress || runOnceJobReceived)
|
||||
@@ -560,6 +567,11 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info("Job is already acquired, skip this message.");
|
||||
continue;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Caught exception from acquiring job message: {ex}");
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
jobDispatcher.Run(jobRequestMessage, runOnce);
|
||||
@@ -589,6 +601,11 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Service requests the hosted runner to shutdown. Reason: '{HostedRunnerShutdownMessage.Reason}'.");
|
||||
return Constants.Runner.ReturnCode.Success;
|
||||
}
|
||||
else if (string.Equals(message.MessageType, TaskAgentMessageTypes.ForceTokenRefresh))
|
||||
{
|
||||
Trace.Info("Received ForceTokenRefreshMessage");
|
||||
await _listener.RefreshListenerTokenAsync(messageQueueLoopTokenSource.Token);
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Received message {message.MessageId} with unsupported message type {message.MessageType}.");
|
||||
@@ -627,6 +644,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
try
|
||||
{
|
||||
Trace.Info("Deleting Runner Session...");
|
||||
await _listener.DeleteSessionAsync();
|
||||
}
|
||||
catch (Exception ex) when (runOnce)
|
||||
|
||||
@@ -6,13 +6,11 @@ using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Reflection;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
@@ -30,20 +28,14 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
private static string _packageType = "agent";
|
||||
private static string _platform = BuildConstants.RunnerPackage.PackageName;
|
||||
private static string _dotnetRuntime = "dotnetRuntime";
|
||||
private static string _externals = "externals";
|
||||
private readonly Dictionary<string, string> _contentHashes = new();
|
||||
|
||||
private PackageMetadata _targetPackage;
|
||||
private ITerminal _terminal;
|
||||
private IRunnerServer _runnerServer;
|
||||
private int _poolId;
|
||||
private ulong _agentId;
|
||||
private const int _numberOfOldVersionsToKeep = 1;
|
||||
private readonly ConcurrentQueue<string> _updateTrace = new();
|
||||
private Task _cloneAndCalculateContentHashTask;
|
||||
private string _dotnetRuntimeCloneDirectory;
|
||||
private string _externalsCloneDirectory;
|
||||
|
||||
public bool Busy { get; private set; }
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
@@ -56,8 +48,6 @@ namespace GitHub.Runner.Listener
|
||||
var settings = configStore.GetSettings();
|
||||
_poolId = settings.PoolId;
|
||||
_agentId = settings.AgentId;
|
||||
_dotnetRuntimeCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__dotnet_runtime__");
|
||||
_externalsCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals__");
|
||||
}
|
||||
|
||||
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
|
||||
@@ -67,13 +57,6 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
var totalUpdateTime = Stopwatch.StartNew();
|
||||
|
||||
// Copy dotnet runtime and externals of current runner to a temp folder
|
||||
// So we can re-use them with trimmed runner package, if possible.
|
||||
// This process is best effort, if we can't use trimmed runner package,
|
||||
// we will just go with the full package.
|
||||
var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
||||
_cloneAndCalculateContentHashTask = CloneAndCalculateAssetsHash(_dotnetRuntimeCloneDirectory, _externalsCloneDirectory, linkedTokenSource.Token);
|
||||
|
||||
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
|
||||
{
|
||||
Trace.Info($"Can't find available update package.");
|
||||
@@ -87,24 +70,6 @@ namespace GitHub.Runner.Listener
|
||||
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
|
||||
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
|
||||
|
||||
if (_targetPackage.TrimmedPackages?.Count > 0)
|
||||
{
|
||||
// wait for cloning assets task to finish only if we have trimmed packages
|
||||
await _cloneAndCalculateContentHashTask;
|
||||
}
|
||||
else
|
||||
{
|
||||
linkedTokenSource.Cancel();
|
||||
try
|
||||
{
|
||||
await _cloneAndCalculateContentHashTask;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Info($"Ingore errors after cancelling cloning assets task: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
await DownloadLatestRunner(token, updateMessage.TargetVersion);
|
||||
Trace.Info($"Download latest runner and unzip into runner root.");
|
||||
|
||||
@@ -218,54 +183,8 @@ namespace GitHub.Runner.Listener
|
||||
string archiveFile = null;
|
||||
var packageDownloadUrl = _targetPackage.DownloadUrl;
|
||||
var packageHashValue = _targetPackage.HashValue;
|
||||
var runtimeTrimmed = false;
|
||||
var externalsTrimmed = false;
|
||||
var fallbackToFullPackage = false;
|
||||
|
||||
// Only try a trimmed package if the server sends them and we have calculated hash values for the current runtime/externals.
|
||||
if (_contentHashes.Count == 2 &&
|
||||
_contentHashes.ContainsKey(_dotnetRuntime) &&
|
||||
_contentHashes.ContainsKey(_externals) &&
|
||||
_targetPackage.TrimmedPackages?.Count > 0)
|
||||
{
|
||||
Trace.Info($"Current runner content hash: {StringUtil.ConvertToJson(_contentHashes)}");
|
||||
Trace.Info($"Trimmed packages info from service: {StringUtil.ConvertToJson(_targetPackage.TrimmedPackages)}");
|
||||
// Try to see whether we can use any size trimmed down package to speed up runner updates.
|
||||
foreach (var trimmedPackage in _targetPackage.TrimmedPackages)
|
||||
{
|
||||
if (trimmedPackage.TrimmedContents.Count == 2 &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_dotnetRuntime, out var trimmedRuntimeHash) &&
|
||||
trimmedRuntimeHash == _contentHashes[_dotnetRuntime] &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_externals, out var trimmedExternalsHash) &&
|
||||
trimmedExternalsHash == _contentHashes[_externals])
|
||||
{
|
||||
Trace.Info($"Use trimmed (runtime+externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
|
||||
packageDownloadUrl = trimmedPackage.DownloadUrl;
|
||||
packageHashValue = trimmedPackage.HashValue;
|
||||
runtimeTrimmed = true;
|
||||
externalsTrimmed = true;
|
||||
break;
|
||||
}
|
||||
else if (trimmedPackage.TrimmedContents.Count == 1 &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_externals, out trimmedExternalsHash) &&
|
||||
trimmedExternalsHash == _contentHashes[_externals])
|
||||
{
|
||||
Trace.Info($"Use trimmed (externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
|
||||
packageDownloadUrl = trimmedPackage.DownloadUrl;
|
||||
packageHashValue = trimmedPackage.HashValue;
|
||||
externalsTrimmed = true;
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Can't use trimmed package from '{trimmedPackage.DownloadUrl}' since the current runner does not carry those trimmed content (Hash mismatch).");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
_updateTrace.Enqueue($"RuntimeTrimmed: {runtimeTrimmed}");
|
||||
_updateTrace.Enqueue($"ExternalsTrimmed: {externalsTrimmed}");
|
||||
|
||||
try
|
||||
{
|
||||
@@ -323,12 +242,6 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
catch (Exception ex) when (runtimeTrimmed || externalsTrimmed)
|
||||
{
|
||||
// if anything failed while using the trimmed package (download/validate hash/extract), try again with the full runner package.
|
||||
Trace.Error($"Fail to download latest runner using trimmed package: {ex}");
|
||||
fallbackToFullPackage = true;
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
@@ -347,74 +260,6 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
var trimmedPackageRestoreTasks = new List<Task<bool>>();
|
||||
if (!fallbackToFullPackage)
|
||||
{
|
||||
// Skip restoring externals and runtime if we are going to fall back to the full package.
|
||||
if (externalsTrimmed)
|
||||
{
|
||||
trimmedPackageRestoreTasks.Add(RestoreTrimmedExternals(latestRunnerDirectory, token));
|
||||
}
|
||||
if (runtimeTrimmed)
|
||||
{
|
||||
trimmedPackageRestoreTasks.Add(RestoreTrimmedDotnetRuntime(latestRunnerDirectory, token));
|
||||
}
|
||||
}
|
||||
|
||||
if (trimmedPackageRestoreTasks.Count > 0)
|
||||
{
|
||||
var restoreResults = await Task.WhenAll(trimmedPackageRestoreTasks);
|
||||
if (restoreResults.Any(x => x == false))
|
||||
{
|
||||
// if any of the restore failed, fallback to full package.
|
||||
fallbackToFullPackage = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (fallbackToFullPackage)
|
||||
{
|
||||
Trace.Error("Something wrong with the trimmed runner package, failback to use the full package for runner updates.");
|
||||
_updateTrace.Enqueue($"FallbackToFullPackage: {fallbackToFullPackage}");
|
||||
|
||||
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
|
||||
Directory.CreateDirectory(latestRunnerDirectory);
|
||||
|
||||
packageDownloadUrl = _targetPackage.DownloadUrl;
|
||||
packageHashValue = _targetPackage.HashValue;
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
|
||||
try
|
||||
{
|
||||
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
|
||||
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
|
||||
}
|
||||
|
||||
await ValidateRunnerHash(archiveFile, packageHashValue);
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
// delete .zip file
|
||||
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
|
||||
{
|
||||
Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile);
|
||||
IOUtil.DeleteFile(archiveFile);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
//it is not critical if we fail to delete the .zip file
|
||||
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await CopyLatestRunnerToRoot(latestRunnerDirectory, token);
|
||||
}
|
||||
|
||||
@@ -665,9 +510,9 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
// delete old bin.2.99.0 folder, only leave the current version and the latest download version
|
||||
var allBinDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "bin.*");
|
||||
if (allBinDirs.Length > 2)
|
||||
if (allBinDirs.Length > _numberOfOldVersionsToKeep)
|
||||
{
|
||||
// there are more than 2 bin.version folder.
|
||||
// there are more than one bin.version folder.
|
||||
// delete older bin.version folders.
|
||||
foreach (var oldBinDir in allBinDirs)
|
||||
{
|
||||
@@ -694,9 +539,9 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
// delete old externals.2.99.0 folder, only leave the current version and the latest download version
|
||||
var allExternalsDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "externals.*");
|
||||
if (allExternalsDirs.Length > 2)
|
||||
if (allExternalsDirs.Length > _numberOfOldVersionsToKeep)
|
||||
{
|
||||
// there are more than 2 externals.version folder.
|
||||
// there are more than one externals.version folder.
|
||||
// delete older externals.version folders.
|
||||
foreach (var oldExternalDir in allExternalsDirs)
|
||||
{
|
||||
@@ -795,330 +640,5 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Catch exception during report update state, ignore this error and continue auto-update.");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> RestoreTrimmedExternals(string downloadDirectory, CancellationToken token)
|
||||
{
|
||||
// Copy the current runner's externals if we are using a externals trimmed package
|
||||
// Execute the node.js to make sure the copied externals is working.
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Copy {_externalsCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory)}.");
|
||||
IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token);
|
||||
|
||||
// try run node.js to see if current node.js works fine after copy over to new location.
|
||||
var nodeVersions = NodeUtil.BuiltInNodeVersions;
|
||||
foreach (var nodeVersion in nodeVersions)
|
||||
{
|
||||
var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}");
|
||||
if (File.Exists(newNodeBinary))
|
||||
{
|
||||
using (var p = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
var outputs = "";
|
||||
p.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Error(data.Data);
|
||||
}
|
||||
};
|
||||
p.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
outputs = data.Data;
|
||||
}
|
||||
};
|
||||
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newNodeBinary, $"-e \"console.log('{nameof(RestoreTrimmedExternals)}')\"", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"{newNodeBinary} -e \"console.log()\" failed with exit code {exitCode}");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(outputs, nameof(RestoreTrimmedExternals), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Error($"{newNodeBinary} -e \"console.log()\" did not output expected content.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to restore externals for trimmed package: {ex}");
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(RestoreTrimmedExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> RestoreTrimmedDotnetRuntime(string downloadDirectory, CancellationToken token)
|
||||
{
|
||||
// Copy the current runner's dotnet runtime if we are using a dotnet runtime trimmed package
|
||||
// Execute the runner.listener to make sure the copied runtime is working.
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Copy {_dotnetRuntimeCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.BinDirectory)}.");
|
||||
IOUtil.CopyDirectory(_dotnetRuntimeCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.BinDirectory), token);
|
||||
|
||||
// try run the runner executable to see if current dotnet runtime + future runner binary works fine.
|
||||
var newRunnerBinary = Path.Combine(downloadDirectory, Constants.Path.BinDirectory, "Runner.Listener");
|
||||
using (var p = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
p.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Error(data.Data);
|
||||
}
|
||||
};
|
||||
p.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
}
|
||||
};
|
||||
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newRunnerBinary, "--version", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"{newRunnerBinary} --version failed with exit code {exitCode}");
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to restore dotnet runtime for trimmed package: {ex}");
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(RestoreTrimmedDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task CloneAndCalculateAssetsHash(string dotnetRuntimeCloneDirectory, string externalsCloneDirectory, CancellationToken token)
|
||||
{
|
||||
var runtimeCloneTask = CloneDotnetRuntime(dotnetRuntimeCloneDirectory, token);
|
||||
var externalsCloneTask = CloneExternals(externalsCloneDirectory, token);
|
||||
|
||||
var waitingTasks = new Dictionary<string, Task>()
|
||||
{
|
||||
{nameof(CloneDotnetRuntime), runtimeCloneTask},
|
||||
{nameof(CloneExternals),externalsCloneTask}
|
||||
};
|
||||
|
||||
while (waitingTasks.Count > 0)
|
||||
{
|
||||
Trace.Info($"Waiting for {waitingTasks.Count} tasks to complete.");
|
||||
var completedTask = await Task.WhenAny(waitingTasks.Values);
|
||||
if (waitingTasks.ContainsKey(nameof(CloneExternals)) &&
|
||||
completedTask == waitingTasks[nameof(CloneExternals)])
|
||||
{
|
||||
Trace.Info($"Externals clone finished.");
|
||||
waitingTasks.Remove(nameof(CloneExternals));
|
||||
try
|
||||
{
|
||||
if (await externalsCloneTask && !token.IsCancellationRequested)
|
||||
{
|
||||
var externalsHash = await HashFiles(externalsCloneDirectory, token);
|
||||
Trace.Info($"Externals content hash: {externalsHash}");
|
||||
_contentHashes[_externals] = externalsHash;
|
||||
_updateTrace.Enqueue($"ExternalsHash: {_contentHashes[_externals]}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Skip compute hash since clone externals failed/cancelled.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to hash externals content: {ex}");
|
||||
}
|
||||
}
|
||||
else if (waitingTasks.ContainsKey(nameof(CloneDotnetRuntime)) &&
|
||||
completedTask == waitingTasks[nameof(CloneDotnetRuntime)])
|
||||
{
|
||||
Trace.Info($"Dotnet runtime clone finished.");
|
||||
waitingTasks.Remove(nameof(CloneDotnetRuntime));
|
||||
try
|
||||
{
|
||||
if (await runtimeCloneTask && !token.IsCancellationRequested)
|
||||
{
|
||||
var runtimeHash = await HashFiles(dotnetRuntimeCloneDirectory, token);
|
||||
Trace.Info($"Runtime content hash: {runtimeHash}");
|
||||
_contentHashes[_dotnetRuntime] = runtimeHash;
|
||||
_updateTrace.Enqueue($"DotnetRuntimeHash: {_contentHashes[_dotnetRuntime]}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Skip compute hash since clone dotnet runtime failed/cancelled.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to hash runtime content: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Still waiting for {waitingTasks.Count} tasks to complete.");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> CloneDotnetRuntime(string runtimeDir, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Cloning dotnet runtime to {runtimeDir}");
|
||||
IOUtil.DeleteDirectory(runtimeDir, CancellationToken.None);
|
||||
Directory.CreateDirectory(runtimeDir);
|
||||
|
||||
var assembly = Assembly.GetExecutingAssembly();
|
||||
var assetsContent = default(string);
|
||||
using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Listener.runnercoreassets"))
|
||||
using (var streamReader = new StreamReader(stream))
|
||||
{
|
||||
assetsContent = await streamReader.ReadToEndAsync();
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(assetsContent))
|
||||
{
|
||||
var runnerCoreAssets = assetsContent.Split(new[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
|
||||
if (runnerCoreAssets.Length > 0)
|
||||
{
|
||||
var binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
|
||||
IOUtil.CopyDirectory(binDir, runtimeDir, token);
|
||||
|
||||
var clonedFile = 0;
|
||||
foreach (var file in Directory.EnumerateFiles(runtimeDir, "*", SearchOption.AllDirectories))
|
||||
{
|
||||
token.ThrowIfCancellationRequested();
|
||||
if (runnerCoreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x.Trim())))
|
||||
{
|
||||
Trace.Verbose($"{file} is part of the runner core, delete from cloned runtime directory.");
|
||||
IOUtil.DeleteFile(file);
|
||||
}
|
||||
else
|
||||
{
|
||||
clonedFile++;
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Successfully cloned dotnet runtime to {runtimeDir}. Total files: {clonedFile}");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to clone dotnet runtime to {runtimeDir}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(CloneDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private Task<bool> CloneExternals(string externalsDir, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Cloning externals to {externalsDir}");
|
||||
IOUtil.DeleteDirectory(externalsDir, CancellationToken.None);
|
||||
Directory.CreateDirectory(externalsDir);
|
||||
IOUtil.CopyDirectory(HostContext.GetDirectory(WellKnownDirectory.Externals), externalsDir, token);
|
||||
Trace.Info($"Successfully cloned externals to {externalsDir}.");
|
||||
return Task.FromResult(true);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to clone externals to {externalsDir}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(CloneExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
|
||||
return Task.FromResult(false);
|
||||
}
|
||||
|
||||
private async Task<string> HashFiles(string fileFolder, CancellationToken token)
|
||||
{
|
||||
Trace.Info($"Calculating hash for {fileFolder}");
|
||||
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
|
||||
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
string hashFilesScript = Path.Combine(binDir, "hashFiles");
|
||||
var hashResult = string.Empty;
|
||||
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
|
||||
{
|
||||
hashResult = data.Data.Substring(10, data.Data.Length - 20);
|
||||
Trace.Info($"Hash result: '{hashResult}'");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
}
|
||||
};
|
||||
|
||||
processInvoker.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
Trace.Verbose(data.Data);
|
||||
};
|
||||
|
||||
var env = new Dictionary<string, string>
|
||||
{
|
||||
["patterns"] = "**"
|
||||
};
|
||||
|
||||
int exitCode = await processInvoker.ExecuteAsync(workingDirectory: fileFolder,
|
||||
fileName: node,
|
||||
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
|
||||
environment: env,
|
||||
requireExitCodeZero: false,
|
||||
outputEncoding: null,
|
||||
killProcessOnCancel: true,
|
||||
cancellationToken: token);
|
||||
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"hashFiles returns '{exitCode}' failed. Fail to hash files under directory '{fileFolder}'");
|
||||
}
|
||||
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(HashFiles)}{Path.GetFileName(fileFolder)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
return hashResult;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
src/Runner.Listener/SelfUpdaterV2.cs (new file, 568 lines)
@@ -0,0 +1,568 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Reflection;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
// This class is a fork of SelfUpdater.cs and is intended to only be used for the
|
||||
// new self-update flow where the PackageMetadata is sent in the message directly.
|
||||
// Forking the class prevents us from accidentally breaking the old flow while it's still in production
|
||||
|
||||
[ServiceLocator(Default = typeof(SelfUpdaterV2))]
|
||||
public interface ISelfUpdaterV2 : IRunnerService
|
||||
{
|
||||
bool Busy { get; }
|
||||
Task<bool> SelfUpdate(RunnerRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token);
|
||||
}
|
||||
public class SelfUpdaterV2 : RunnerService, ISelfUpdaterV2
|
||||
{
|
||||
private static string _platform = BuildConstants.RunnerPackage.PackageName;
|
||||
private ITerminal _terminal;
|
||||
private IRunnerServer _runnerServer;
|
||||
private int _poolId;
|
||||
private ulong _agentId;
|
||||
|
||||
private const int _numberOfOldVersionsToKeep = 1;
|
||||
|
||||
private readonly ConcurrentQueue<string> _updateTrace = new();
|
||||
public bool Busy { get; private set; }
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
|
||||
_terminal = hostContext.GetService<ITerminal>();
|
||||
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
var configStore = HostContext.GetService<IConfigurationStore>();
|
||||
var settings = configStore.GetSettings();
|
||||
_poolId = settings.PoolId;
|
||||
_agentId = settings.AgentId;
|
||||
}
|
||||
|
||||
public async Task<bool> SelfUpdate(RunnerRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
|
||||
{
|
||||
Busy = true;
|
||||
try
|
||||
{
|
||||
var totalUpdateTime = Stopwatch.StartNew();
|
||||
|
||||
Trace.Info($"An update is available.");
|
||||
_updateTrace.Enqueue($"RunnerPlatform: {updateMessage.OS}");
|
||||
|
||||
// Print a console line warning the user not to shut down the runner.
|
||||
_terminal.WriteLine("Runner update in progress, do not shutdown runner.");
|
||||
_terminal.WriteLine($"Downloading {updateMessage.TargetVersion} runner");
|
||||
|
||||
await DownloadLatestRunner(token, updateMessage.TargetVersion, updateMessage.DownloadUrl, updateMessage.SHA256Checksum, updateMessage.OS);
|
||||
Trace.Info($"Download latest runner and unzip into runner root.");
|
||||
|
||||
// wait till all running job finish
|
||||
_terminal.WriteLine("Waiting for current job finish running.");
|
||||
|
||||
await jobDispatcher.WaitAsync(token);
|
||||
Trace.Info($"All running job has exited.");
|
||||
|
||||
// We need to keep runner backup around for macOS until we fixed https://github.com/actions/runner/issues/743
|
||||
// delete runner backup
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
DeletePreviousVersionRunnerBackup(token, updateMessage.TargetVersion);
|
||||
Trace.Info($"Delete old version runner backup.");
|
||||
stopWatch.Stop();
|
||||
// generate update script from template
|
||||
_updateTrace.Enqueue($"DeleteRunnerBackupTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
_terminal.WriteLine("Generate and execute update script.");
|
||||
|
||||
string updateScript = GenerateUpdateScript(restartInteractiveRunner, updateMessage.TargetVersion);
|
||||
Trace.Info($"Generate update script into: {updateScript}");
|
||||
|
||||
|
||||
#if DEBUG
|
||||
// For L0, we will skip execute update script.
|
||||
if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_EXECUTE_UPDATE_SCRIPT")))
|
||||
#endif
|
||||
{
|
||||
string flagFile = "update.finished";
|
||||
IOUtil.DeleteFile(flagFile);
|
||||
// kick off update script
|
||||
Process invokeScript = new();
|
||||
#if OS_WINDOWS
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
|
||||
#elif (OS_OSX || OS_LINUX)
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
|
||||
#endif
|
||||
invokeScript.Start();
|
||||
Trace.Info($"Update script start running");
|
||||
}
|
||||
|
||||
totalUpdateTime.Stop();
|
||||
|
||||
_updateTrace.Enqueue($"TotalUpdateTime: {totalUpdateTime.ElapsedMilliseconds}ms");
|
||||
_terminal.WriteLine("Runner will exit shortly for update, should be back online within 10 seconds.");
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_updateTrace.Enqueue(ex.ToString());
|
||||
throw;
|
||||
}
|
||||
finally
|
||||
{
|
||||
_terminal.WriteLine("Runner update process finished.");
|
||||
Busy = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// _work
|
||||
/// \_update
|
||||
/// \bin
|
||||
/// \externals
|
||||
/// \run.sh
|
||||
/// \run.cmd
|
||||
/// \package.zip //temp download .zip/.tar.gz
|
||||
/// </summary>
|
||||
/// <param name="token"></param>
|
||||
/// <returns></returns>
|
||||
private async Task DownloadLatestRunner(CancellationToken token, string targetVersion, string packageDownloadUrl, string packageHashValue, string targetPlatform)
|
||||
{
|
||||
string latestRunnerDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), Constants.Path.UpdateDirectory);
|
||||
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
|
||||
Directory.CreateDirectory(latestRunnerDirectory);
|
||||
|
||||
string archiveFile = null;
|
||||
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
|
||||
try
|
||||
{
|
||||
#if DEBUG
|
||||
// Much of the update process (targetVersion, archive) is server-side, this is a way to control it from here for testing specific update scenarios
|
||||
// Add files like 'runner2.281.2.tar.gz' or 'runner2.283.0.zip' (depending on your platform) to your runner root folder
|
||||
// Note that runners still need to be older than the server's runner version in order to receive an 'AgentRefreshMessage' and trigger this update
|
||||
// Wrapped in #if DEBUG as this should not be in the RELEASE build
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
|
||||
{
|
||||
var waitForDebugger = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE_WAIT_FOR_DEBUGGER"));
|
||||
if (waitForDebugger)
|
||||
{
|
||||
int waitInSeconds = 20;
|
||||
while (!Debugger.IsAttached && waitInSeconds-- > 0)
|
||||
{
|
||||
await Task.Delay(1000);
|
||||
}
|
||||
Debugger.Break();
|
||||
}
|
||||
|
||||
if (targetPlatform.StartsWith("win"))
|
||||
{
|
||||
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.zip");
|
||||
}
|
||||
else
|
||||
{
|
||||
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.tar.gz");
|
||||
}
|
||||
|
||||
if (File.Exists(archiveFile))
|
||||
{
|
||||
_updateTrace.Enqueue($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
|
||||
_terminal.WriteLine($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
|
||||
}
|
||||
else
|
||||
{
|
||||
_terminal.WriteLine($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
_updateTrace.Enqueue($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
archiveFile = null;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
// archiveFile is not null only if we mocked it above
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, targetPlatform, token);
|
||||
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
|
||||
}
|
||||
await ValidateRunnerHash(archiveFile, packageHashValue);
|
||||
}
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
// delete .zip file
|
||||
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
|
||||
{
|
||||
Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile);
|
||||
IOUtil.DeleteFile(archiveFile);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
//it is not critical if we fail to delete the .zip file
|
||||
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
|
||||
}
|
||||
}
|
||||
|
||||
await CopyLatestRunnerToRoot(latestRunnerDirectory, targetVersion, token);
|
||||
}
|
||||
|
||||
private async Task<string> DownLoadRunner(string downloadDirectory, string packageDownloadUrl, string packageHashValue, string packagePlatform, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
int runnerSuffix = 1;
|
||||
string archiveFile = null;
|
||||
bool downloadSucceeded = false;
|
||||
|
||||
// Download the runner, using multiple attempts in order to be resilient against any networking/CDN issues
|
||||
for (int attempt = 1; attempt <= Constants.RunnerDownloadRetryMaxAttempts; attempt++)
|
||||
{
|
||||
// Generate an available package name, and do our best effort to clean up stale local zip files
|
||||
while (true)
|
||||
{
|
||||
if (packagePlatform.StartsWith("win"))
|
||||
{
|
||||
archiveFile = Path.Combine(downloadDirectory, $"runner{runnerSuffix}.zip");
|
||||
}
|
||||
else
|
||||
{
|
||||
archiveFile = Path.Combine(downloadDirectory, $"runner{runnerSuffix}.tar.gz");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
// delete .zip file
|
||||
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
|
||||
{
|
||||
Trace.Verbose("Deleting latest runner package zip '{0}'", archiveFile);
|
||||
IOUtil.DeleteFile(archiveFile);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// couldn't delete the file for whatever reason, so generate another name
|
||||
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
|
||||
runnerSuffix++;
|
||||
}
|
||||
}
|
||||
|
||||
// Allow a 15-minute package download timeout, which is good enough to update the runner from a 1 Mbit/s ADSL connection.
|
||||
if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_DOWNLOAD_TIMEOUT") ?? string.Empty, out int timeoutSeconds))
|
||||
{
|
||||
timeoutSeconds = 15 * 60;
|
||||
}
|
||||
|
||||
Trace.Info($"Attempt {attempt}: save latest runner into {archiveFile}.");
|
||||
|
||||
using (var downloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
|
||||
using (var downloadCts = CancellationTokenSource.CreateLinkedTokenSource(downloadTimeout.Token, token))
|
||||
{
|
||||
try
|
||||
{
|
||||
Trace.Info($"Download runner: begin download");
|
||||
long downloadSize = 0;
|
||||
|
||||
//open zip stream in async mode
|
||||
using (HttpClient httpClient = new(HostContext.CreateHttpClientHandler()))
|
||||
{
|
||||
Trace.Info($"Downloading {packageDownloadUrl}");
|
||||
|
||||
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
|
||||
using (Stream result = await httpClient.GetStreamAsync(packageDownloadUrl))
|
||||
{
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
await result.CopyToAsync(fs, 81920, downloadCts.Token);
|
||||
await fs.FlushAsync(downloadCts.Token);
|
||||
downloadSize = fs.Length;
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Download runner: finished download");
|
||||
downloadSucceeded = true;
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"PackageDownloadTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
_updateTrace.Enqueue($"Attempts: {attempt}");
|
||||
_updateTrace.Enqueue($"PackageSize: {downloadSize / 1024 / 1024}MB");
|
||||
break;
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info($"Runner download has been cancelled.");
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
if (downloadCts.Token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Warning($"Runner download has timed out after {timeoutSeconds} seconds");
|
||||
}
|
||||
|
||||
Trace.Warning($"Failed to get package '{archiveFile}' from '{packageDownloadUrl}'. Exception {ex}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (downloadSucceeded)
|
||||
{
|
||||
return archiveFile;
|
||||
}
|
||||
else
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
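DownLoadRunner combines a per-attempt timeout (GITHUB_ACTIONS_RUNNER_DOWNLOAD_TIMEOUT, defaulting to 15 minutes) with the caller's cancellation token through a linked CancellationTokenSource, so a timeout only fails one attempt while a caller cancellation aborts the whole update. A minimal sketch of that pattern, not the runner's implementation:

using System;
using System.Threading;
using System.Threading.Tasks;

static class DownloadHelper
{
    // Runs one attempt of 'work' under a per-attempt timeout linked to the caller's token.
    // Returns false on timeout (caller may retry); rethrows if the caller itself cancelled.
    public static async Task<bool> TryOnceAsync(Func<CancellationToken, Task> work, TimeSpan timeout, CancellationToken callerToken)
    {
        using var timeoutCts = new CancellationTokenSource(timeout);
        using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(timeoutCts.Token, callerToken);
        try
        {
            await work(linkedCts.Token);
            return true;
        }
        catch (OperationCanceledException) when (callerToken.IsCancellationRequested)
        {
            throw; // caller asked to stop the update entirely
        }
        catch (OperationCanceledException)
        {
            return false; // this attempt timed out; a retry loop can try again
        }
    }
}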
|
||||
|
||||
private async Task ValidateRunnerHash(string archiveFile, string packageHashValue)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
// Validate Hash Matches if it is provided
|
||||
using (FileStream stream = File.OpenRead(archiveFile))
|
||||
{
|
||||
if (!string.IsNullOrEmpty(packageHashValue))
|
||||
{
|
||||
using (SHA256 sha256 = SHA256.Create())
|
||||
{
|
||||
byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
|
||||
var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
|
||||
if (hash != packageHashValue)
|
||||
{
|
||||
// Hash did not match, we can't recover from this, just throw
|
||||
throw new Exception($"Computed runner hash {hash} did not match expected Runner Hash {packageHashValue} for {archiveFile}");
|
||||
}
|
||||
|
||||
stopWatch.Stop();
|
||||
Trace.Info($"Validated Runner Hash matches {archiveFile} : {packageHashValue}");
|
||||
_updateTrace.Enqueue($"ValidateHashTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
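ValidateRunnerHash compares a SHA-256 hex digest of the downloaded archive against the checksum carried in the message. A minimal sketch of producing such a digest locally (for example, to publish alongside a mirrored package); note the string comparison above is case-sensitive, so the casing would need to match whatever the service sends:

using System;
using System.IO;
using System.Security.Cryptography;

static class ChecksumHelper
{
    // Computes the SHA-256 digest of a file as a hex string (uppercase via Convert.ToHexString, .NET 5+).
    public static string ComputeSha256Hex(string path)
    {
        using FileStream stream = File.OpenRead(path);
        using SHA256 sha256 = SHA256.Create();
        byte[] hashBytes = sha256.ComputeHash(stream);
        return Convert.ToHexString(hashBytes);
    }
}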
|
||||
|
||||
private async Task ExtractRunnerPackage(string archiveFile, string extractDirectory, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
|
||||
if (archiveFile.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
ZipFile.ExtractToDirectory(archiveFile, extractDirectory);
|
||||
}
|
||||
else if (archiveFile.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
string tar = WhichUtil.Which("tar", trace: Trace);
|
||||
|
||||
if (string.IsNullOrEmpty(tar))
|
||||
{
|
||||
throw new NotSupportedException($"tar -xzf");
|
||||
}
|
||||
|
||||
// tar -xzf
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
Trace.Info(args.Data);
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
Trace.Error(args.Data);
|
||||
}
|
||||
});
|
||||
|
||||
int exitCode = await processInvoker.ExecuteAsync(extractDirectory, tar, $"-xzf \"{archiveFile}\"", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
throw new NotSupportedException($"Can't use 'tar -xzf' to extract archive file: {archiveFile}. return code: {exitCode}.");
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new NotSupportedException($"{archiveFile}");
|
||||
}
|
||||
|
||||
stopWatch.Stop();
|
||||
Trace.Info($"Finished getting latest runner package at: {extractDirectory}.");
|
||||
_updateTrace.Enqueue($"PackageExtractTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}

private Task CopyLatestRunnerToRoot(string latestRunnerDirectory, string targetVersion, CancellationToken token)
{
    var stopWatch = Stopwatch.StartNew();
    // copy latest runner into runner root folder
    // copy bin from _work/_update -> bin.version under root
    string binVersionDir = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"{Constants.Path.BinDirectory}.{targetVersion}");
    Directory.CreateDirectory(binVersionDir);
    Trace.Info($"Copy {Path.Combine(latestRunnerDirectory, Constants.Path.BinDirectory)} to {binVersionDir}.");
    IOUtil.CopyDirectory(Path.Combine(latestRunnerDirectory, Constants.Path.BinDirectory), binVersionDir, token);

    // copy externals from _work/_update -> externals.version under root
    string externalsVersionDir = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"{Constants.Path.ExternalsDirectory}.{targetVersion}");
    Directory.CreateDirectory(externalsVersionDir);
    Trace.Info($"Copy {Path.Combine(latestRunnerDirectory, Constants.Path.ExternalsDirectory)} to {externalsVersionDir}.");
    IOUtil.CopyDirectory(Path.Combine(latestRunnerDirectory, Constants.Path.ExternalsDirectory), externalsVersionDir, token);

    // copy and replace all .sh/.cmd files
    Trace.Info($"Copy any remaining .sh/.cmd files into runner root.");
    foreach (FileInfo file in new DirectoryInfo(latestRunnerDirectory).GetFiles() ?? new FileInfo[0])
    {
        string destination = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), file.Name);

        // Removing the file instead of just trying to overwrite it works around permissions issues on linux.
        // https://github.com/actions/runner/issues/981
        Trace.Info($"Copy {file.FullName} to {destination}");
        IOUtil.DeleteFile(destination);
        file.CopyTo(destination, true);
    }

    stopWatch.Stop();
    _updateTrace.Enqueue($"CopyRunnerToRootTime: {stopWatch.ElapsedMilliseconds}ms");
    return Task.CompletedTask;
}

private void DeletePreviousVersionRunnerBackup(CancellationToken token, string targetVersion)
{
    // delete previous backup runner (back compat, can be removed after several sprints)
    // bin.bak.2.99.0
    // externals.bak.2.99.0
    foreach (string existBackUp in Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "*.bak.*"))
    {
        Trace.Info($"Delete existing runner backup at {existBackUp}.");
        try
        {
            IOUtil.DeleteDirectory(existBackUp, token);
        }
        catch (Exception ex) when (!(ex is OperationCanceledException))
        {
            Trace.Error(ex);
            Trace.Info($"Catch exception during delete backup folder {existBackUp}, ignore this error try delete the backup folder on next auto-update.");
        }
    }

    // delete old bin.2.99.0 folders; only keep the current version and the latest downloaded version
    var allBinDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "bin.*");
    if (allBinDirs.Length > _numberOfOldVersionsToKeep)
    {
        // there are more than {_numberOfOldVersionsToKeep} bin.version folders.
        // delete older bin.version folders.
        foreach (var oldBinDir in allBinDirs)
        {
            if (string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin"), StringComparison.OrdinalIgnoreCase) ||
                string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin.{BuildConstants.RunnerPackage.Version}"), StringComparison.OrdinalIgnoreCase) ||
                string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin.{targetVersion}"), StringComparison.OrdinalIgnoreCase))
            {
                // skip for current runner version
                continue;
            }

            Trace.Info($"Delete runner bin folder's backup at {oldBinDir}.");
            try
            {
                IOUtil.DeleteDirectory(oldBinDir, token);
            }
            catch (Exception ex) when (!(ex is OperationCanceledException))
            {
                Trace.Error(ex);
                Trace.Info($"Catch exception during delete backup folder {oldBinDir}, ignore this error try delete the backup folder on next auto-update.");
            }
        }
    }

    // delete old externals.2.99.0 folders; only keep the current version and the latest downloaded version
    var allExternalsDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "externals.*");
    if (allExternalsDirs.Length > _numberOfOldVersionsToKeep)
    {
        // there are more than {_numberOfOldVersionsToKeep} externals.version folders.
        // delete older externals.version folders.
        foreach (var oldExternalDir in allExternalsDirs)
        {
            if (string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals"), StringComparison.OrdinalIgnoreCase) ||
                string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals.{BuildConstants.RunnerPackage.Version}"), StringComparison.OrdinalIgnoreCase) ||
                string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals.{targetVersion}"), StringComparison.OrdinalIgnoreCase))
            {
                // skip for current runner version
                continue;
            }

            Trace.Info($"Delete runner externals folder's backup at {oldExternalDir}.");
            try
            {
                IOUtil.DeleteDirectory(oldExternalDir, token);
            }
            catch (Exception ex) when (!(ex is OperationCanceledException))
            {
                Trace.Error(ex);
                Trace.Info($"Catch exception during delete backup folder {oldExternalDir}, ignore this error try delete the backup folder on next auto-update.");
            }
        }
    }
}
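
Taken together, the three helpers above form the tail of the self-update sequence: unpack the downloaded package, lay the new version down next to the current one, then prune old copies. The sketch below is illustrative only; the download step, the method name, and the `_work/_update` staging path (`Constants.Path.UpdateDirectory`) are assumptions, not shown in this diff.

// Illustrative sequencing of the helpers above; not the runner's actual driver method.
// `ApplyDownloadedPackageAsync` and Constants.Path.UpdateDirectory are assumed names.
private async Task ApplyDownloadedPackageAsync(string archiveFile, string targetVersion, CancellationToken token)
{
    string latestRunnerDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), Constants.Path.UpdateDirectory);
    IOUtil.DeleteDirectory(latestRunnerDirectory, token);
    Directory.CreateDirectory(latestRunnerDirectory);

    await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);      // unpack the .zip / .tar.gz
    await CopyLatestRunnerToRoot(latestRunnerDirectory, targetVersion, token);  // lay down bin.{version} and externals.{version}
    DeletePreviousVersionRunnerBackup(token, targetVersion);                    // prune older bin.* / externals.* folders
}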

private string GenerateUpdateScript(bool restartInteractiveRunner, string targetVersion)
{
    int processId = Process.GetCurrentProcess().Id;
    string updateLog = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), $"SelfUpdate-{DateTime.UtcNow.ToString("yyyyMMdd-HHmmss")}.log");
    string runnerRoot = HostContext.GetDirectory(WellKnownDirectory.Root);

#if OS_WINDOWS
    string templateName = "update.cmd.template";
#else
    string templateName = "update.sh.template";
#endif

    string templatePath = Path.Combine(runnerRoot, $"bin.{targetVersion}", templateName);
    string template = File.ReadAllText(templatePath);

    template = template.Replace("_PROCESS_ID_", processId.ToString());
    template = template.Replace("_RUNNER_PROCESS_NAME_", $"Runner.Listener{IOUtil.ExeExtension}");
    template = template.Replace("_ROOT_FOLDER_", runnerRoot);
    template = template.Replace("_EXIST_RUNNER_VERSION_", BuildConstants.RunnerPackage.Version);
    template = template.Replace("_DOWNLOAD_RUNNER_VERSION_", targetVersion);
    template = template.Replace("_UPDATE_LOG_", updateLog);
    template = template.Replace("_RESTART_INTERACTIVE_RUNNER_", restartInteractiveRunner ? "1" : "0");

#if OS_WINDOWS
    string scriptName = "_update.cmd";
#else
    string scriptName = "_update.sh";
#endif

    string updateScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), scriptName);
    if (File.Exists(updateScript))
    {
        IOUtil.DeleteFile(updateScript);
    }

    File.WriteAllText(updateScript, template);
    return updateScript;
}
}
}
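
The chain of Replace calls above writes a fixed set of placeholders into the platform-specific update script template. As a purely illustrative refactor (not part of this diff), the same substitution can be expressed as a placeholder table:

// Illustrative equivalent of the substitution above; not how the runner structures it.
var placeholders = new Dictionary<string, string>
{
    ["_PROCESS_ID_"] = processId.ToString(),
    ["_RUNNER_PROCESS_NAME_"] = $"Runner.Listener{IOUtil.ExeExtension}",
    ["_ROOT_FOLDER_"] = runnerRoot,
    ["_EXIST_RUNNER_VERSION_"] = BuildConstants.RunnerPackage.Version,
    ["_DOWNLOAD_RUNNER_VERSION_"] = targetVersion,
    ["_UPDATE_LOG_"] = updateLog,
    ["_RESTART_INTERACTIVE_RUNNER_"] = restartInteractiveRunner ? "1" : "0",
};
foreach (var kv in placeholders)
{
    template = template.Replace(kv.Key, kv.Value);
}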
||||
@@ -23,7 +23,13 @@ namespace GitHub.Runner.Sdk
|
||||
|
||||
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
|
||||
{
|
||||
headerValues.AddRange(VssClientHttpRequestSettings.Default.UserAgent);
|
||||
foreach (var headerVal in VssClientHttpRequestSettings.Default.UserAgent)
|
||||
{
|
||||
if (!headerValues.Contains(headerVal))
|
||||
{
|
||||
headerValues.Add(headerVal);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
VssClientHttpRequestSettings.Default.UserAgent = headerValues;
|
||||
@@ -33,6 +39,23 @@ namespace GitHub.Runner.Sdk
|
||||
{
|
||||
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
||||
}
|
||||
|
||||
var rawHeaderValues = new List<ProductInfoHeaderValue>();
|
||||
rawHeaderValues.AddRange(additionalUserAgents);
|
||||
rawHeaderValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})"));
|
||||
|
||||
if (RawClientHttpRequestSettings.Default.UserAgent != null && RawClientHttpRequestSettings.Default.UserAgent.Count > 0)
|
||||
{
|
||||
foreach (var headerVal in RawClientHttpRequestSettings.Default.UserAgent)
|
||||
{
|
||||
if (!rawHeaderValues.Contains(headerVal))
|
||||
{
|
||||
rawHeaderValues.Add(headerVal);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
RawClientHttpRequestSettings.Default.UserAgent = rawHeaderValues;
|
||||
}
|
||||
|
||||
public static VssConnection CreateConnection(
|
||||
@@ -62,11 +85,6 @@ namespace GitHub.Runner.Sdk
|
||||
settings.SendTimeout = TimeSpan.FromSeconds(Math.Min(Math.Max(httpRequestTimeoutSeconds, 100), 1200));
|
||||
}
|
||||
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW")))
|
||||
{
|
||||
settings.AllowAutoRedirectForBroker = true;
|
||||
}
|
||||
|
||||
// Remove Invariant from the list of accepted languages.
|
||||
//
|
||||
// The constructor of VssHttpRequestSettings (base class of VssClientHttpRequestSettings) adds the current
|
||||
|
||||
@@ -703,11 +703,12 @@ namespace GitHub.Runner.Worker
    catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled.
    {
        // UnresolvableActionDownloadInfoException is a 422 client error, don't retry
        // NonRetryableActionDownloadInfoException is a non-retryable exception from Actions
        // Some possible cases are:
        // * Repo is rate limited
        // * Repo or tag doesn't exist, or isn't public
        // * Policy validation failed
        if (attempt < 3 && !(ex is WebApi.UnresolvableActionDownloadInfoException))
        if (attempt < 3 && !(ex is WebApi.UnresolvableActionDownloadInfoException) && !(ex is WebApi.NonRetryableActionDownloadInfoException))
        {
            executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
            executionContext.Debug(ex.ToString());

@@ -144,7 +144,7 @@ namespace GitHub.Runner.Worker
    executionContext.Error(error.Message);
}

throw new ArgumentException($"Fail to load {fileRelativePath}");
throw new ArgumentException($"Failed to load {fileRelativePath}");
}

if (actionDefinition.Execution == null)
@@ -466,17 +466,39 @@ namespace GitHub.Runner.Worker
    {
        throw new InvalidOperationException($"Failed to create directory to store registry client credentials: {e.Message}");
    }
    var loginExitCode = await _dockerManager.DockerLogin(
        executionContext,
        configLocation,
        container.RegistryServer,
        container.RegistryAuthUsername,
        container.RegistryAuthPassword);

    if (loginExitCode != 0)
    // Login docker with retry up to 3 times
    int retryCount = 0;
    int loginExitCode = 0;
    while (retryCount < 3)
    {
        loginExitCode = await _dockerManager.DockerLogin(
            executionContext,
            configLocation,
            container.RegistryServer,
            container.RegistryAuthUsername,
            container.RegistryAuthPassword);
        if (loginExitCode == 0)
        {
            break;
        }
        else
        {
            retryCount++;
            if (retryCount < 3)
            {
                var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
                executionContext.Warning($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}, back off {backOff.TotalSeconds} seconds before retry.");
                await Task.Delay(backOff);
            }
        }
    }

    if (retryCount == 3 && loginExitCode != 0)
    {
        throw new InvalidOperationException($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}");
    }

    return configLocation;
}

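The docker login change above follows a fixed retry shape: up to three attempts with a random one-to-ten-second backoff between failures. A generic sketch of that pattern, assuming only the BackoffTimerHelper call already used above (the wrapper itself and its name are illustrative, not part of the diff):

// Illustrative retry wrapper mirroring the docker login loop above.
private static async Task<int> RetryOnNonZeroExitAsync(Func<Task<int>> operation, Action<int, TimeSpan> onRetry, int maxAttempts = 3)
{
    int exitCode = 0;
    for (int attempt = 1; attempt <= maxAttempts; attempt++)
    {
        exitCode = await operation();
        if (exitCode == 0 || attempt == maxAttempts)
        {
            break;
        }

        // Same backoff window as the inline loop: random delay between 1 and 10 seconds.
        var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
        onRetry(exitCode, backOff);
        await Task.Delay(backOff);
    }

    return exitCode;
}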
@@ -108,6 +108,8 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
parentContext.QueueAttachFile(type: CoreAttachmentType.DiagnosticLog, name: diagnosticsZipFileName, filePath: diagnosticsZipFilePath);
|
||||
|
||||
parentContext.QueueDiagnosticLogFile(name: diagnosticsZipFileName, filePath: diagnosticsZipFilePath);
|
||||
|
||||
executionContext.Debug("Diagnostic file upload complete.");
|
||||
}
|
||||
|
||||
|
||||
@@ -90,6 +90,7 @@ namespace GitHub.Runner.Worker
|
||||
long Write(string tag, string message);
|
||||
void QueueAttachFile(string type, string name, string filePath);
|
||||
void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
|
||||
void QueueDiagnosticLogFile(string name, string filePath);
|
||||
|
||||
// timeline record update methods
|
||||
void Start(string currentOperation = null);
|
||||
@@ -397,11 +398,11 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
if (recordOrder != null)
|
||||
{
|
||||
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, recordOrder);
|
||||
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, recordOrder, embedded: isEmbedded);
|
||||
}
|
||||
else
|
||||
{
|
||||
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder);
|
||||
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder, embedded: isEmbedded);
|
||||
}
|
||||
if (logger != null)
|
||||
{
|
||||
@@ -432,7 +433,7 @@ namespace GitHub.Runner.Worker
|
||||
Dictionary<string, string> intraActionState = null,
|
||||
string siblingScopeName = null)
|
||||
{
|
||||
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: null, intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName, timeout: GetRemainingTimeout());
|
||||
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: null, intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName, timeout: GetRemainingTimeout(), recordOrder: _record.Order);
|
||||
}
|
||||
|
||||
public void Start(string currentOperation = null)
|
||||
@@ -982,6 +983,18 @@ namespace GitHub.Runner.Worker
|
||||
_jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
|
||||
}
|
||||
|
||||
public void QueueDiagnosticLogFile(string name, string filePath)
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(name, nameof(name));
|
||||
ArgUtil.NotNullOrEmpty(filePath, nameof(filePath));
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
throw new FileNotFoundException($"Can't upload diagnostic log file: {filePath}. File does not exist.");
|
||||
}
|
||||
_jobServerQueue.QueueResultsUpload(_record.Id, name, filePath, CoreAttachmentType.ResultsDiagnosticLog, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
|
||||
}
|
||||
|
||||
// Add OnMatcherChanged
|
||||
public void Add(OnMatcherChanged handler)
|
||||
{
|
||||
@@ -1160,7 +1173,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
|
||||
private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order, bool embedded = false)
|
||||
{
|
||||
_mainTimelineId = timelineId;
|
||||
_record.Id = timelineRecordId;
|
||||
@@ -1186,7 +1199,11 @@ namespace GitHub.Runner.Worker
|
||||
var configuration = HostContext.GetService<IConfigurationStore>();
|
||||
_record.WorkerName = configuration.GetSettings().AgentName;
|
||||
|
||||
_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
|
||||
// We don't want to update the timeline record for embedded steps since they are not really represented in the UI.
|
||||
if (!embedded)
|
||||
{
|
||||
_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
|
||||
}
|
||||
}
|
||||
|
||||
private void JobServerQueueThrottling_EventReceived(object sender, ThrottlingEventArgs data)
|
||||
|
||||
@@ -244,7 +244,7 @@ namespace GitHub.Runner.Worker
|
||||
if (resultsReceiverEndpoint != null)
|
||||
{
|
||||
Trace.Info($"Queueing results file ({filePath}) for attachment upload ({attachmentName})");
|
||||
var stepId = context.Id;
|
||||
var stepId = context.IsEmbedded ? context.EmbeddedId : context.Id;
|
||||
// Attachments must be added to the parent context (job), not the current context (step)
|
||||
context.Root.QueueSummaryFile(attachmentName, scrubbedFilePath, stepId);
|
||||
}
|
||||
|
||||
@@ -223,6 +223,10 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
Environment["ACTIONS_CACHE_URL"] = cacheUrl;
|
||||
}
|
||||
if (systemConnection.Data.TryGetValue("PipelinesServiceUrl", out var pipelinesServiceUrl) && !string.IsNullOrEmpty(pipelinesServiceUrl))
|
||||
{
|
||||
Environment["ACTIONS_RUNTIME_URL"] = pipelinesServiceUrl;
|
||||
}
|
||||
if (systemConnection.Data.TryGetValue("GenerateIdTokenUrl", out var generateIdTokenUrl) && !string.IsNullOrEmpty(generateIdTokenUrl))
|
||||
{
|
||||
Environment["ACTIONS_ID_TOKEN_REQUEST_URL"] = generateIdTokenUrl;
|
||||
|
||||
@@ -84,6 +84,45 @@ namespace GitHub.Runner.Worker.Handlers
        }
        nodeData.NodeVersion = "node16";
    }

    var localForceActionsToNode20 = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Agent.ManualForceActionsToNode20));
    executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.ManualForceActionsToNode20, out var workflowForceActionsToNode20);
    var enforceNode20Locally = !string.IsNullOrWhiteSpace(workflowForceActionsToNode20) ? StringUtil.ConvertToBoolean(workflowForceActionsToNode20) : localForceActionsToNode20;
    if (string.Equals(nodeData.NodeVersion, "node16")
        && ((executionContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode20") ?? false) || enforceNode20Locally))
    {
        executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, out var workflowOptOut);
        var isWorkflowOptOutSet = !string.IsNullOrWhiteSpace(workflowOptOut);
        var isLocalOptOut = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion));
        bool isOptOut = isWorkflowOptOutSet ? StringUtil.ConvertToBoolean(workflowOptOut) : isLocalOptOut;

        if (!isOptOut)
        {
            var repoAction = action as Pipelines.RepositoryPathReference;
            if (repoAction != null)
            {
                var warningActions = new HashSet<string>();
                if (executionContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode16DetectedAfterEndOfLifeEnvVariable, out var node20ForceWarnings))
                {
                    warningActions = StringUtil.ConvertFromJson<HashSet<string>>(node20ForceWarnings);
                }

                string repoActionFullName;
                if (string.IsNullOrEmpty(repoAction.Name))
                {
                    repoActionFullName = repoAction.Path; // local actions don't have a 'Name'
                }
                else
                {
                    repoActionFullName = $"{repoAction.Name}/{repoAction.Path ?? string.Empty}".TrimEnd('/') + $"@{repoAction.Ref}";
                }

                warningActions.Add(repoActionFullName);
                executionContext.Global.Variables.Set(Constants.Runner.EnforcedNode16DetectedAfterEndOfLifeEnvVariable, StringUtil.ConvertToJson(warningActions));
            }
            nodeData.NodeVersion = "node20";
        }
    }
    (handler as INodeScriptActionHandler).Data = nodeData;
}
else if (data.ExecutionType == ActionExecutionType.Script)

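The opt-in and opt-out resolution in this hunk gives the workflow-level environment variable precedence over the machine-level one, for both forcing node20 and allowing the older node version. A compact illustrative helper (not present in the diff) expressing that precedence:

// Illustrative helper: the workflow-level value wins when set, otherwise fall back to the machine-level env value.
private static bool ResolveFlag(string workflowValue, string machineValue)
{
    return !string.IsNullOrWhiteSpace(workflowValue)
        ? StringUtil.ConvertToBoolean(workflowValue)
        : StringUtil.ConvertToBoolean(machineValue);
}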
@@ -58,6 +58,10 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
Environment["ACTIONS_CACHE_URL"] = cacheUrl;
|
||||
}
|
||||
if (systemConnection.Data.TryGetValue("PipelinesServiceUrl", out var pipelinesServiceUrl) && !string.IsNullOrEmpty(pipelinesServiceUrl))
|
||||
{
|
||||
Environment["ACTIONS_RUNTIME_URL"] = pipelinesServiceUrl;
|
||||
}
|
||||
if (systemConnection.Data.TryGetValue("GenerateIdTokenUrl", out var generateIdTokenUrl) && !string.IsNullOrEmpty(generateIdTokenUrl))
|
||||
{
|
||||
Environment["ACTIONS_ID_TOKEN_REQUEST_URL"] = generateIdTokenUrl;
|
||||
@@ -114,6 +118,11 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
Data.NodeVersion = "node16";
|
||||
}
|
||||
|
||||
if (forcedNodeVersion == "node20" && Data.NodeVersion != "node20")
|
||||
{
|
||||
Data.NodeVersion = "node20";
|
||||
}
|
||||
var nodeRuntimeVersion = await StepHost.DetermineNodeRuntimeVersion(ExecutionContext, Data.NodeVersion);
|
||||
string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}");
|
||||
|
||||
|
||||
@@ -392,6 +392,18 @@ namespace GitHub.Runner.Worker
        }
    }

    // Register custom image creation post-job step if the "snapshot" token is present in the message.
    var snapshotRequest = templateEvaluator.EvaluateJobSnapshotRequest(message.Snapshot, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
    if (snapshotRequest != null)
    {
        var snapshotOperationProvider = HostContext.GetService<ISnapshotOperationProvider>();
        jobContext.RegisterPostJobStep(new JobExtensionRunner(
            runAsync: (executionContext, _) => snapshotOperationProvider.CreateSnapshotRequestAsync(executionContext, snapshotRequest),
            condition: $"{PipelineTemplateConstants.Success}()",
            displayName: $"Create custom image",
            data: null));
    }

    // Register Job Completed hook if the variable is set
    var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED");
    if (!string.IsNullOrEmpty(completedHookPath))

@@ -49,6 +49,9 @@ namespace GitHub.Runner.Worker
|
||||
!string.IsNullOrEmpty(orchestrationId.Value))
|
||||
{
|
||||
HostContext.UserAgents.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value));
|
||||
|
||||
// make sure orchestration id is in the user-agent header.
|
||||
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
|
||||
}
|
||||
|
||||
var jobServerQueueTelemetry = false;
|
||||
@@ -295,6 +298,14 @@ namespace GitHub.Runner.Worker
|
||||
jobContext.Warning(string.Format(Constants.Runner.EnforcedNode12DetectedAfterEndOfLife, actions));
|
||||
}
|
||||
|
||||
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode16DetectedAfterEndOfLifeEnvVariable, out var node20ForceWarnings) && (jobContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode20") ?? false))
|
||||
{
|
||||
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node20ForceWarnings));
|
||||
jobContext.Warning(string.Format(Constants.Runner.EnforcedNode16DetectedAfterEndOfLife, actions));
|
||||
}
|
||||
|
||||
await ShutdownQueue(throwOnFailure: false);
|
||||
|
||||
// Make sure to clean temp after file upload since they may be pending fileupload still use the TEMP dir.
|
||||
_tempDirectoryManager?.CleanupTempDirectory();
|
||||
|
||||
@@ -400,6 +411,12 @@ namespace GitHub.Runner.Worker
|
||||
jobContext.Warning(string.Format(Constants.Runner.EnforcedNode12DetectedAfterEndOfLife, actions));
|
||||
}
|
||||
|
||||
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode16DetectedAfterEndOfLifeEnvVariable, out var node20ForceWarnings))
|
||||
{
|
||||
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node20ForceWarnings));
|
||||
jobContext.Warning(string.Format(Constants.Runner.EnforcedNode16DetectedAfterEndOfLife, actions));
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var jobQueueTelemetry = await ShutdownQueue(throwOnFailure: true);
|
||||
|
||||
src/Runner.Worker/SnapshotOperationProvider.cs (new file, 32 lines)
@@ -0,0 +1,32 @@
#nullable enable
using System.IO;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Worker;

[ServiceLocator(Default = typeof(SnapshotOperationProvider))]
public interface ISnapshotOperationProvider : IRunnerService
{
    Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest);
}

public class SnapshotOperationProvider : RunnerService, ISnapshotOperationProvider
{
    public Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest)
    {
        var snapshotRequestFilePath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), ".snapshot", "request.json");
        var snapshotRequestDirectoryPath = Path.GetDirectoryName(snapshotRequestFilePath);
        if (snapshotRequestDirectoryPath != null)
        {
            Directory.CreateDirectory(snapshotRequestDirectoryPath);
        }

        IOUtil.SaveObject(snapshotRequest, snapshotRequestFilePath);
        executionContext.Output($"Request written to: {snapshotRequestFilePath}");
        executionContext.Output("This request will be processed after the job completes. You will not receive any feedback on the snapshot process within the workflow logs of this job.");
        executionContext.Output("If the snapshot process is successful, you should see a new image with the requested name in the list of available custom images when creating a new GitHub-hosted Runner.");
        return Task.CompletedTask;
    }
}

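For reference, a rough sketch of what ends up in `.snapshot/request.json`. The exact JSON casing depends on how IOUtil.SaveObject serializes the Snapshot contract, which this diff does not show, so treat the serialized shape as an assumption:

// Illustrative only. For a job that requested `snapshot` with image name "my-custom-image",
// the post-job step persists a Snapshot whose serialized form is roughly:
//   { "ImageName": "my-custom-image" }
var request = new Snapshot("my-custom-image");
IOUtil.SaveObject(request, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), ".snapshot", "request.json"));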
@@ -295,7 +295,7 @@ namespace GitHub.Runner.Worker
|
||||
!jobCancellationToken.IsCancellationRequested)
|
||||
{
|
||||
Trace.Error($"Caught timeout exception from step: {ex.Message}");
|
||||
step.ExecutionContext.Error("The action has timed out.");
|
||||
step.ExecutionContext.Error($"The action '{step.DisplayName}' has timed out after {timeoutMinutes} minutes.");
|
||||
step.ExecutionContext.Result = TaskResult.Failed;
|
||||
}
|
||||
else
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
using System;
|
||||
using System.ComponentModel;
|
||||
using System.Diagnostics.CodeAnalysis;
|
||||
|
||||
namespace GitHub.Services.Common.Internal
|
||||
{
|
||||
[EditorBrowsable(EditorBrowsableState.Never)]
|
||||
public static class RawHttpHeaders
|
||||
{
|
||||
public const String SessionHeader = "X-Runner-Session";
|
||||
public const String SessionHeader = "X-Actions-Session";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -138,6 +138,8 @@ namespace GitHub.Services.Common
|
||||
response.Dispose();
|
||||
}
|
||||
|
||||
this.Settings.ApplyTo(request);
|
||||
|
||||
// Let's start with sending a token
|
||||
IssuedToken token = null;
|
||||
if (m_tokenProvider != null)
|
||||
|
||||
@@ -214,25 +214,7 @@ namespace GitHub.Services.Common
|
||||
// ConfigureAwait(false) enables the continuation to be run outside any captured
|
||||
// SyncronizationContext (such as ASP.NET's) which keeps things from deadlocking...
|
||||
|
||||
var tmpResponse = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false);
|
||||
if (Settings.AllowAutoRedirectForBroker && tmpResponse.StatusCode == HttpStatusCode.Redirect)
|
||||
{
|
||||
//Dispose of the previous response
|
||||
tmpResponse?.Dispose();
|
||||
|
||||
var location = tmpResponse.Headers.Location;
|
||||
request = new HttpRequestMessage(HttpMethod.Get, location);
|
||||
|
||||
// Reapply the token to new redirected request
|
||||
ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
|
||||
|
||||
// Resend the request
|
||||
response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
response = tmpResponse;
|
||||
}
|
||||
response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false);
|
||||
|
||||
traceInfo?.TraceRequestSendTime();
|
||||
|
||||
|
||||
@@ -110,16 +110,6 @@ namespace GitHub.Services.Common
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets a value indicating whether or not HttpClientHandler should follow redirect on outgoing broker requests
|
||||
/// This is special since this also sends token in the request, where as default AllowAutoRedirect does not
|
||||
/// </summary>
|
||||
public Boolean AllowAutoRedirectForBroker
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets a value indicating whether or not compression should be used on outgoing requests.
|
||||
/// The default value is true.
|
||||
|
||||
@@ -463,6 +463,7 @@ namespace GitHub.DistributedTask.WebApi
|
||||
string runnerVersion = null,
|
||||
string os = null,
|
||||
string architecture = null,
|
||||
bool? disableUpdate = null,
|
||||
object userState = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
@@ -495,6 +496,11 @@ namespace GitHub.DistributedTask.WebApi
|
||||
queryParams.Add("architecture", architecture);
|
||||
}
|
||||
|
||||
if (disableUpdate != null)
|
||||
{
|
||||
queryParams.Add("disableUpdate", disableUpdate.Value.ToString().ToLower());
|
||||
}
|
||||
|
||||
return SendAsync<TaskAgentMessage>(
|
||||
httpMethod,
|
||||
locationId,
|
||||
|
||||
@@ -43,6 +43,7 @@ namespace GitHub.DistributedTask.Pipelines
|
||||
TemplateToken jobOutputs,
|
||||
IList<TemplateToken> defaults,
|
||||
ActionsEnvironmentReference actionsEnvironment,
|
||||
TemplateToken snapshot,
|
||||
String messageType = JobRequestMessageTypes.PipelineAgentJobRequest)
|
||||
{
|
||||
this.MessageType = messageType;
|
||||
@@ -57,6 +58,7 @@ namespace GitHub.DistributedTask.Pipelines
|
||||
this.Workspace = workspaceOptions;
|
||||
this.JobOutputs = jobOutputs;
|
||||
this.ActionsEnvironment = actionsEnvironment;
|
||||
this.Snapshot = snapshot;
|
||||
m_variables = new Dictionary<String, VariableValue>(variables, StringComparer.OrdinalIgnoreCase);
|
||||
m_maskHints = new List<MaskHint>(maskHints);
|
||||
m_steps = new List<JobStep>(steps);
|
||||
@@ -237,6 +239,13 @@ namespace GitHub.DistributedTask.Pipelines
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public TemplateToken Snapshot
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the collection of variables associated with the current context.
|
||||
/// </summary>
|
||||
|
||||
@@ -29,6 +29,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
    public const String Id = "id";
    public const String If = "if";
    public const String Image = "image";
    public const String ImageName = "image-name";
    public const String Include = "include";
    public const String Inputs = "inputs";
    public const String Job = "job";
@@ -60,6 +61,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
    public const String Services = "services";
    public const String Shell = "shell";
    public const String Skipped = "skipped";
    public const String Snapshot = "snapshot";
    public const String StepEnv = "step-env";
    public const String StepIfResult = "step-if-result";
    public const String StepWith = "step-with";

@@ -346,6 +346,39 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
    return result;
}

internal static Snapshot ConvertToJobSnapshotRequest(TemplateContext context, TemplateToken token)
{
    string imageName = null;
    if (token is StringToken snapshotStringLiteral)
    {
        imageName = snapshotStringLiteral.Value;
    }
    else
    {
        var snapshotMapping = token.AssertMapping($"{PipelineTemplateConstants.Snapshot}");
        foreach (var snapshotPropertyPair in snapshotMapping)
        {
            var propertyName = snapshotPropertyPair.Key.AssertString($"{PipelineTemplateConstants.Snapshot} key");
            switch (propertyName.Value)
            {
                case PipelineTemplateConstants.ImageName:
                    imageName = snapshotPropertyPair.Value.AssertString($"{PipelineTemplateConstants.Snapshot} {propertyName}").Value;
                    break;
                default:
                    propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Snapshot} key");
                    break;
            }
        }
    }

    if (String.IsNullOrEmpty(imageName))
    {
        return null;
    }

    return new Snapshot(imageName);
}

private static ActionStep ConvertToStep(
    TemplateContext context,
    TemplateToken stepsItem,

@@ -370,6 +370,32 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
    return result;
}

public Snapshot EvaluateJobSnapshotRequest(TemplateToken token,
    DictionaryContextData contextData,
    IList<IFunctionInfo> expressionFunctions)
{
    var result = default(Snapshot);

    if (token != null && token.Type != TokenType.Null)
    {
        var context = CreateContext(contextData, expressionFunctions);
        try
        {
            token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Snapshot, token, 0, null, omitHeader: true);
            context.Errors.Check();
            result = PipelineTemplateConverter.ConvertToJobSnapshotRequest(context, token);
        }
        catch (Exception ex) when (!(ex is TemplateValidationException))
        {
            context.Errors.Add(ex);
        }

        context.Errors.Check();
    }

    return result;
}

private TemplateContext CreateContext(
    DictionaryContextData contextData,
    IList<IFunctionInfo> expressionFunctions,

src/Sdk/DTPipelines/Pipelines/Snapshot.cs (new file, 17 lines)
@@ -0,0 +1,17 @@
using System;
using System.Runtime.Serialization;

namespace GitHub.DistributedTask.Pipelines
{
    [DataContract]
    public class Snapshot
    {
        public Snapshot(string imageName)
        {
            ImageName = imageName;
        }

        [DataMember(EmitDefaultValue = false)]
        public String ImageName { get; set; }
    }
}
@@ -71,7 +71,8 @@
      "env": "job-env",
      "outputs": "job-outputs",
      "defaults": "job-defaults",
      "steps": "steps"
      "steps": "steps",
      "snapshot": "snapshot"
    }
  }
},
@@ -155,6 +156,24 @@
    }
  },

  "snapshot": {
    "one-of": [
      "non-empty-string",
      "snapshot-mapping"
    ]
  },

  "snapshot-mapping": {
    "mapping": {
      "properties": {
        "image-name": {
          "type": "non-empty-string",
          "required": true
        }
      }
    }
  },

  "runs-on": {
    "context": [
      "github",

src/Sdk/DTWebApi/WebApi/BrokerMigrationMessage.cs (new file, 38 lines)
@@ -0,0 +1,38 @@
using System;
using System.Runtime.Serialization;

namespace GitHub.DistributedTask.WebApi
{
    /// <summary>
    /// Message that tells the runner to redirect itself to BrokerListener for messages.
    /// (Note that we use a special Message instead of a simple 302. This is because
    /// the runner will need to apply the runner's token to the request, and it is
    /// a security best practice to *not* blindly add sensitive data to redirects
    /// 302s.)
    /// </summary>
    [DataContract]
    public class BrokerMigrationMessage
    {
        public static readonly string MessageType = "BrokerMigration";

        public BrokerMigrationMessage()
        {
        }

        public BrokerMigrationMessage(
            Uri brokerUrl)
        {
            this.BrokerBaseUrl = brokerUrl;
        }

        /// <summary>
        /// The base url for the broker listener
        /// </summary>
        [DataMember]
        public Uri BrokerBaseUrl
        {
            get;
            internal set;
        }
    }
}
@@ -2498,6 +2498,25 @@ namespace GitHub.DistributedTask.WebApi
    }
}

[Serializable]
public class NonRetryableActionDownloadInfoException : DistributedTaskException
{
    public NonRetryableActionDownloadInfoException(String message)
        : base(message)
    {
    }

    public NonRetryableActionDownloadInfoException(String message, Exception innerException)
        : base(message, innerException)
    {
    }

    protected NonRetryableActionDownloadInfoException(SerializationInfo info, StreamingContext context)
        : base(info, context)
    {
    }
}

[Serializable]
public sealed class FailedToResolveActionDownloadInfoException : DistributedTaskException
{

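The action download retry hunk earlier in this comparison treats this new exception type, like UnresolvableActionDownloadInfoException, as terminal. Expressed as a standalone predicate, purely for illustration (the runner inlines this check in the catch filter shown earlier):

// Illustrative predicate only; names other than the two exception types are not from the diff.
private static bool ShouldRetryActionDownload(Exception ex, int attempt)
{
    return attempt < 3
        && !(ex is WebApi.UnresolvableActionDownloadInfoException)    // 422-style client error
        && !(ex is WebApi.NonRetryableActionDownloadInfoException);   // service marked it non-retryable
}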
@@ -1,5 +1,6 @@
|
||||
using Newtonsoft.Json;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.Serialization;
|
||||
|
||||
|
||||
@@ -15,35 +16,32 @@ namespace GitHub.DistributedTask.WebApi
|
||||
{
|
||||
}
|
||||
|
||||
public RunnerRefreshMessage(
|
||||
ulong runnerId,
|
||||
String targetVersion,
|
||||
int? timeoutInSeconds = null)
|
||||
{
|
||||
this.RunnerId = runnerId;
|
||||
this.TimeoutInSeconds = timeoutInSeconds ?? TimeSpan.FromMinutes(60).Seconds;
|
||||
this.TargetVersion = targetVersion;
|
||||
}
|
||||
|
||||
[DataMember]
|
||||
public ulong RunnerId
|
||||
{
|
||||
get;
|
||||
private set;
|
||||
}
|
||||
|
||||
[DataMember]
|
||||
public int TimeoutInSeconds
|
||||
{
|
||||
get;
|
||||
private set;
|
||||
}
|
||||
|
||||
[DataMember]
|
||||
[DataMember(Name = "target_version")]
|
||||
public String TargetVersion
|
||||
{
|
||||
get;
|
||||
private set;
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(Name = "download_url")]
|
||||
public string DownloadUrl
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(Name = "sha256_checksum")]
|
||||
public string SHA256Checksum
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(Name = "os")]
|
||||
public string OS
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
src/Sdk/DTWebApi/WebApi/TaskAgentMessageTypes.cs (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
using System;
|
||||
using System.Runtime.Serialization;
|
||||
|
||||
namespace GitHub.DistributedTask.WebApi
|
||||
{
|
||||
public sealed class TaskAgentMessageTypes
|
||||
{
|
||||
public static readonly string ForceTokenRefresh = "ForceTokenRefresh";
|
||||
}
|
||||
}
|
||||
@@ -75,5 +75,12 @@ namespace GitHub.DistributedTask.WebApi
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(EmitDefaultValue = false, IsRequired = false)]
|
||||
public BrokerMigrationMessage BrokerMigrationMessage
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -101,6 +101,7 @@ namespace GitHub.DistributedTask.WebApi
|
||||
public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment";
|
||||
public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog";
|
||||
public static readonly String ResultsLog = "Results.Core.Log";
|
||||
public static readonly String ResultsDiagnosticLog = "Results.Core.DiagnosticLog";
|
||||
}
|
||||
|
||||
[GenerateAllConstants]
|
||||
|
||||
@@ -7,5 +7,8 @@ namespace GitHub.Actions.RunService.WebApi
|
||||
{
|
||||
[DataMember(Name = "jobMessageId", EmitDefaultValue = false)]
|
||||
public string JobMessageId { get; set; }
|
||||
|
||||
[DataMember(Name = "runnerOS", EmitDefaultValue = false)]
|
||||
public string RunnerOS { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,12 +59,14 @@ namespace GitHub.Actions.RunService.WebApi
|
||||
public async Task<AgentJobRequestMessage> GetJobMessageAsync(
|
||||
Uri requestUri,
|
||||
string messageId,
|
||||
string runnerOS,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
HttpMethod httpMethod = new HttpMethod("POST");
|
||||
var payload = new AcquireJobRequest
|
||||
{
|
||||
JobMessageId = messageId,
|
||||
RunnerOS = runnerOS
|
||||
};
|
||||
|
||||
requestUri = new Uri(requestUri, "acquirejob");
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Azure.Storage.Blobs" Version="12.19.1" />
|
||||
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
|
||||
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
|
||||
<PackageReference Include="Microsoft.AspNet.WebApi.Client" Version="5.2.9" />
|
||||
|
||||
@@ -57,10 +57,12 @@ namespace GitHub.Actions.RunService.WebApi
|
||||
}
|
||||
|
||||
public async Task<TaskAgentMessage> GetRunnerMessageAsync(
|
||||
Guid? sessionId,
|
||||
string runnerVersion,
|
||||
TaskAgentStatus? status,
|
||||
string os = null,
|
||||
string architecture = null,
|
||||
bool? disableUpdate = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
@@ -68,6 +70,11 @@ namespace GitHub.Actions.RunService.WebApi
|
||||
|
||||
List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
|
||||
|
||||
if (sessionId != null)
|
||||
{
|
||||
queryParams.Add("sessionId", sessionId.Value.ToString());
|
||||
}
|
||||
|
||||
if (status != null)
|
||||
{
|
||||
queryParams.Add("status", status.Value.ToString());
|
||||
@@ -87,6 +94,11 @@ namespace GitHub.Actions.RunService.WebApi
|
||||
queryParams.Add("architecture", architecture);
|
||||
}
|
||||
|
||||
if (disableUpdate != null)
|
||||
{
|
||||
queryParams.Add("disableUpdate", disableUpdate.Value.ToString().ToLower());
|
||||
}
|
||||
|
||||
var result = await SendAsync<TaskAgentMessage>(
|
||||
new HttpMethod("GET"),
|
||||
requestUri: requestUri,
|
||||
@@ -98,12 +110,67 @@ namespace GitHub.Actions.RunService.WebApi
|
||||
return result.Value;
|
||||
}
|
||||
|
||||
// the only time we throw a `Forbidden` exception from Listener /messages is when the runner is
|
||||
// disable_update and is too old to poll
|
||||
if (result.StatusCode == HttpStatusCode.Forbidden)
|
||||
{
|
||||
throw new AccessDeniedException($"{result.Error} Runner version v{runnerVersion} is deprecated and cannot receive messages.")
|
||||
{
|
||||
ErrorCode = 1
|
||||
};
|
||||
}
|
||||
|
||||
throw new Exception($"Failed to get job message: {result.Error}");
|
||||
}
|
||||
|
||||
public async Task<TaskAgentSession> CreateSessionAsync(
|
||||
|
||||
TaskAgentSession session,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var requestUri = new Uri(Client.BaseAddress, "session");
|
||||
var requestContent = new ObjectContent<TaskAgentSession>(session, new VssJsonMediaTypeFormatter(true));
|
||||
|
||||
var result = await SendAsync<TaskAgentSession>(
|
||||
new HttpMethod("POST"),
|
||||
requestUri: requestUri,
|
||||
content: requestContent,
|
||||
cancellationToken: cancellationToken);
|
||||
|
||||
if (result.IsSuccess)
|
||||
{
|
||||
return result.Value;
|
||||
}
|
||||
|
||||
if (result.StatusCode == HttpStatusCode.Forbidden)
|
||||
{
|
||||
throw new AccessDeniedException(result.Error);
|
||||
}
|
||||
|
||||
throw new Exception($"Failed to get job message: {result.Error}");
|
||||
if (result.StatusCode == HttpStatusCode.Conflict)
|
||||
{
|
||||
throw new TaskAgentSessionConflictException(result.Error);
|
||||
}
|
||||
|
||||
throw new Exception($"Failed to create broker session: {result.Error}");
|
||||
}
|
||||
|
||||
public async Task DeleteSessionAsync(
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var requestUri = new Uri(Client.BaseAddress, $"session");
|
||||
|
||||
var result = await SendAsync<object>(
|
||||
new HttpMethod("DELETE"),
|
||||
requestUri: requestUri,
|
||||
cancellationToken: cancellationToken);
|
||||
|
||||
if (result.IsSuccess)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
throw new Exception($"Failed to delete broker session: {result.Error}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -89,6 +89,26 @@ namespace GitHub.Services.Results.Contracts
|
||||
public long SoftSizeLimit;
|
||||
}
|
||||
|
||||
[DataContract]
|
||||
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
|
||||
public class GetSignedDiagnosticLogsURLRequest
|
||||
{
|
||||
[DataMember]
|
||||
public string WorkflowJobRunBackendId;
|
||||
[DataMember]
|
||||
public string WorkflowRunBackendId;
|
||||
}
|
||||
|
||||
[DataContract]
|
||||
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
|
||||
public class GetSignedDiagnosticLogsURLResponse
|
||||
{
|
||||
[DataMember]
|
||||
public string DiagLogsURL;
|
||||
[DataMember]
|
||||
public string BlobStorageType;
|
||||
}
|
||||
|
||||
[DataContract]
|
||||
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
|
||||
public class JobLogsMetadataCreate
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
@@ -8,8 +7,11 @@ using System.Net.Http.Headers;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Net.Http.Formatting;
|
||||
using Azure;
|
||||
using Azure.Storage.Blobs;
|
||||
using Azure.Storage.Blobs.Models;
|
||||
using Azure.Storage.Blobs.Specialized;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.Results.Contracts;
|
||||
using Sdk.WebApi.WebApi;
|
||||
|
||||
@@ -21,13 +23,15 @@ namespace GitHub.Services.Results.Client
|
||||
Uri baseUrl,
|
||||
HttpMessageHandler pipeline,
|
||||
string token,
|
||||
bool disposeHandler)
|
||||
bool disposeHandler,
|
||||
bool useSdk)
|
||||
: base(baseUrl, pipeline, disposeHandler)
|
||||
{
|
||||
m_token = token;
|
||||
m_resultsServiceUrl = baseUrl;
|
||||
m_formatter = new JsonMediaTypeFormatter();
|
||||
m_changeIdCounter = 1;
|
||||
m_useSdk = useSdk;
|
||||
}
|
||||
|
||||
// Get Sas URL calls
|
||||
@@ -77,6 +81,19 @@ namespace GitHub.Services.Results.Client
|
||||
return await GetResultsSignedURLResponse<GetSignedStepLogsURLRequest, GetSignedStepLogsURLResponse>(getStepLogsSignedBlobURLEndpoint, cancellationToken, request);
|
||||
}
|
||||
|
||||
private async Task<GetSignedDiagnosticLogsURLResponse> GetDiagnosticLogsUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken)
|
||||
{
|
||||
var request = new GetSignedDiagnosticLogsURLRequest()
|
||||
{
|
||||
WorkflowJobRunBackendId = jobId,
|
||||
WorkflowRunBackendId = planId,
|
||||
};
|
||||
|
||||
var getDiagnosticLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetJobDiagLogsSignedBlobURL);
|
||||
|
||||
return await GetResultsSignedURLResponse<GetSignedDiagnosticLogsURLRequest, GetSignedDiagnosticLogsURLResponse>(getDiagnosticLogsSignedBlobURLEndpoint, cancellationToken, request);
|
||||
}
|
||||
|
||||
private async Task<GetSignedJobLogsURLResponse> GetJobLogUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken)
|
||||
{
|
||||
var request = new GetSignedJobLogsURLRequest()
|
||||
@@ -91,7 +108,6 @@ namespace GitHub.Services.Results.Client
|
||||
}
|
||||
|
||||
// Create metadata calls
|
||||
|
||||
private async Task SendRequest<R>(Uri uri, CancellationToken cancellationToken, R request, string timestamp)
|
||||
{
|
||||
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
|
||||
@@ -161,73 +177,219 @@ namespace GitHub.Services.Results.Client
|
||||
await SendRequest<JobLogsMetadataCreate>(createJobLogsMetadataEndpoint, cancellationToken, request, timestamp);
|
||||
}
|
||||
|
||||
private async Task<HttpResponseMessage> UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
|
||||
private (Uri path, string sas) ParseSasToken(string url)
|
||||
{
|
||||
// Upload the file to the url
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url)
|
||||
if (String.IsNullOrEmpty(url))
|
||||
{
|
||||
Content = new StreamContent(file)
|
||||
};
|
||||
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
|
||||
throw new Exception($"SAS url is empty");
|
||||
}
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
var blobUri = new UriBuilder(url);
|
||||
var sasUrl = blobUri.Query.Substring(1); //remove starting "?"
|
||||
blobUri.Query = null; // remove query params
|
||||
return (blobUri.Uri, sasUrl);
|
||||
}
|
||||
|
||||
private BlobClient GetBlobClient(string url)
|
||||
{
|
||||
var blobUri = ParseSasToken(url);
|
||||
|
||||
var opts = new BlobClientOptions
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
Retry =
|
||||
{
|
||||
throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
|
||||
MaxRetries = Constants.DefaultBlobUploadRetries,
|
||||
NetworkTimeout = TimeSpan.FromSeconds(Constants.DefaultNetworkTimeoutInSeconds)
|
||||
}
|
||||
};
|
||||
|
||||
return new BlobClient(blobUri.path, new AzureSasCredential(blobUri.sas), opts);
|
||||
}
|
||||
|
||||
private AppendBlobClient GetAppendBlobClient(string url)
|
||||
{
|
||||
var blobUri = ParseSasToken(url);
|
||||
|
||||
var opts = new BlobClientOptions
|
||||
{
|
||||
Retry =
|
||||
{
|
||||
MaxRetries = Constants.DefaultBlobUploadRetries,
|
||||
NetworkTimeout = TimeSpan.FromSeconds(Constants.DefaultNetworkTimeoutInSeconds)
|
||||
}
|
||||
};
|
||||
|
||||
return new AppendBlobClient(blobUri.path, new AzureSasCredential(blobUri.sas), opts);
|
||||
}
|
||||
|
||||
private async Task UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken, Dictionary<string, string> customHeaders = null)
|
||||
{
|
||||
if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
var blobClient = GetBlobClient(url);
|
||||
var httpHeaders = new BlobHttpHeaders();
|
||||
if (customHeaders != null)
|
||||
{
|
||||
foreach (var header in customHeaders)
|
||||
{
|
||||
switch (header.Key)
|
||||
{
|
||||
case Constants.ContentTypeHeader:
|
||||
httpHeaders.ContentType = header.Value;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
try
|
||||
{
|
||||
await blobClient.UploadAsync(file, new BlobUploadOptions()
|
||||
{
|
||||
HttpHeaders = httpHeaders,
|
||||
Conditions = new BlobRequestConditions
|
||||
{
|
||||
IfNoneMatch = new ETag("*")
|
||||
}
|
||||
}, cancellationToken);
|
||||
}
|
||||
catch (RequestFailedException e)
|
||||
{
|
||||
throw new Exception($"Failed to upload block to Azure blob: {e.Message}");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Upload the file to the url
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url)
|
||||
{
|
||||
Content = new StreamContent(file)
|
||||
};
|
||||
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
|
||||
}
|
||||
|
||||
if (customHeaders != null)
|
||||
{
|
||||
foreach (var header in customHeaders)
|
||||
{
|
||||
request.Content.Headers.Add(header.Key, header.Value);
|
||||
}
|
||||
};
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
|
||||
}
|
||||
}
|
||||
return response;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<HttpResponseMessage> CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken)
|
||||
private async Task CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken, Dictionary<string, string> customHeaders = null)
|
||||
{
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url)
|
||||
if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
Content = new StringContent("")
|
||||
};
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
|
||||
request.Content.Headers.Add("Content-Length", "0");
|
||||
}
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
var appendBlobClient = GetAppendBlobClient(url);
|
||||
var httpHeaders = new BlobHttpHeaders();
|
||||
if (customHeaders != null)
|
||||
{
|
||||
throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
|
||||
foreach (var header in customHeaders)
|
||||
{
|
||||
switch (header.Key)
|
||||
{
|
||||
case Constants.ContentTypeHeader:
|
||||
httpHeaders.ContentType = header.Value;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
try
|
||||
{
|
||||
await appendBlobClient.CreateAsync(new AppendBlobCreateOptions()
|
||||
{
|
||||
HttpHeaders = httpHeaders,
|
||||
Conditions = new AppendBlobRequestConditions
|
||||
{
|
||||
IfNoneMatch = new ETag("*")
|
||||
}
|
||||
}, cancellationToken: cancellationToken);
|
||||
}
|
||||
catch (RequestFailedException e)
|
||||
{
|
||||
throw new Exception($"Failed to create append blob in Azure blob: {e.Message}");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url)
|
||||
{
|
||||
Content = new StringContent("")
|
||||
};
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
|
||||
request.Content.Headers.Add("Content-Length", "0");
|
||||
}
|
||||
if (customHeaders != null)
|
||||
{
|
||||
foreach (var header in customHeaders)
|
||||
{
|
||||
request.Content.Headers.Add(header.Key, header.Value);
|
||||
}
|
||||
};
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
|
||||
}
|
||||
}
|
||||
return response;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<HttpResponseMessage> UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
|
||||
private async Task UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
|
||||
{
|
||||
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
|
||||
// Upload the file to the url
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
|
||||
if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
Content = new StreamContent(file)
|
||||
};
|
||||
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add("Content-Length", fileSize.ToString());
|
||||
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
|
||||
}
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
var appendBlobClient = GetAppendBlobClient(url);
|
||||
try
|
||||
{
|
||||
throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
|
||||
await appendBlobClient.AppendBlockAsync(file, cancellationToken: cancellationToken);
|
||||
if (finalize)
|
||||
{
|
||||
await appendBlobClient.SealAsync(cancellationToken: cancellationToken);
|
||||
}
|
||||
}
|
||||
catch (RequestFailedException e)
|
||||
{
|
||||
throw new Exception($"Failed to upload append block in Azure blob: {e.Message}");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
|
||||
// Upload the file to the url
|
||||
var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
|
||||
{
|
||||
Content = new StreamContent(file)
|
||||
};
|
||||
|
||||
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
|
||||
{
|
||||
request.Content.Headers.Add("Content-Length", fileSize.ToString());
|
||||
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
|
||||
}
|
||||
|
||||
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
|
||||
{
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
|
||||
}
|
||||
}
|
||||
return response;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -251,23 +413,22 @@ namespace GitHub.Services.Results.Client
|
||||
// Upload the file
|
||||
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
|
||||
{
|
||||
var response = await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
|
||||
await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
|
||||
}
|
||||
|
||||
// Send step summary upload complete message
|
||||
await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken);
|
||||
}
|
||||
|
||||
private async Task<HttpResponseMessage> UploadLogFile(string file, bool finalize, bool firstBlock, string sasUrl, string blobStorageType,
|
||||
CancellationToken cancellationToken)
|
||||
private async Task UploadLogFile(string file, bool finalize, bool firstBlock, string sasUrl, string blobStorageType,
|
||||
CancellationToken cancellationToken, Dictionary<string, string> customHeaders = null)
|
||||
{
|
||||
HttpResponseMessage response;
|
||||
if (firstBlock && finalize)
|
||||
{
|
||||
// This is the one and only block, just use a block blob
|
||||
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
|
||||
{
|
||||
response = await UploadBlockFileAsync(sasUrl, blobStorageType, fileStream, cancellationToken);
|
||||
await UploadBlockFileAsync(sasUrl, blobStorageType, fileStream, cancellationToken, customHeaders);
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -276,18 +437,16 @@ namespace GitHub.Services.Results.Client
|
||||
// Create the Append blob
|
||||
if (firstBlock)
|
||||
{
|
||||
await CreateAppendFileAsync(sasUrl, blobStorageType, cancellationToken);
|
||||
await CreateAppendFileAsync(sasUrl, blobStorageType, cancellationToken, customHeaders);
|
||||
}
|
||||
|
||||
// Upload content
|
||||
var fileSize = new FileInfo(file).Length;
|
||||
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
|
||||
{
|
||||
response = await UploadAppendFileAsync(sasUrl, blobStorageType, fileStream, finalize, fileSize, cancellationToken);
|
||||
await UploadAppendFileAsync(sasUrl, blobStorageType, fileStream, finalize, fileSize, cancellationToken);
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
// Handle file upload for step log
|
||||
@@ -300,7 +459,12 @@ namespace GitHub.Services.Results.Client
|
||||
throw new Exception("Failed to get step log upload url");
|
||||
}
|
||||
|
||||
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
|
||||
var customHeaders = new Dictionary<string, string>
|
||||
{
|
||||
{ Constants.ContentTypeHeader, Constants.TextPlainContentType }
|
||||
};
|
||||
|
||||
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken, customHeaders);
|
||||
|
||||
// Update metadata
|
||||
if (finalize)
|
||||
@@ -320,7 +484,12 @@ namespace GitHub.Services.Results.Client
|
||||
throw new Exception("Failed to get job log upload url");
|
||||
}
|
||||
|
||||
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
|
||||
var customHeaders = new Dictionary<string, string>
|
||||
{
|
||||
{ Constants.ContentTypeHeader, Constants.TextPlainContentType }
|
||||
};
|
||||
|
||||
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken, customHeaders);
|
||||
|
||||
// Update metadata
|
||||
if (finalize)
|
||||
@@ -330,6 +499,18 @@ namespace GitHub.Services.Results.Client
|
||||
}
|
||||
}
|
||||
|
||||
public async Task UploadResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken)
|
||||
{
|
||||
// Get the upload url
|
||||
var uploadUrlResponse = await GetDiagnosticLogsUploadUrlAsync(planId, jobId, cancellationToken);
|
||||
if (uploadUrlResponse == null || uploadUrlResponse.DiagLogsURL == null)
|
||||
{
|
||||
throw new Exception("Failed to get diagnostic logs upload url");
|
||||
}
|
||||
|
||||
await UploadLogFile(file, true, true, uploadUrlResponse.DiagLogsURL, uploadUrlResponse.BlobStorageType, cancellationToken);
|
||||
}
|
||||
|
||||
private Step ConvertTimelineRecordToStep(TimelineRecord r)
|
||||
{
|
||||
return new Step()
|
||||
@@ -405,6 +586,7 @@ namespace GitHub.Services.Results.Client
|
||||
private Uri m_resultsServiceUrl;
|
||||
private string m_token;
|
||||
private int m_changeIdCounter;
|
||||
private bool m_useSdk;
|
||||
}
|
||||
|
||||
// Constants specific to results
|
||||
@@ -419,13 +601,20 @@ namespace GitHub.Services.Results.Client
|
||||
public static readonly string CreateStepLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateStepLogsMetadata";
|
||||
public static readonly string GetJobLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetJobLogsSignedBlobURL";
|
||||
public static readonly string CreateJobLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateJobLogsMetadata";
|
||||
public static readonly string GetJobDiagLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetJobDiagLogsSignedBlobURL";
|
||||
public static readonly string ResultsProtoApiV1Endpoint = "twirp/github.actions.results.api.v1.WorkflowStepUpdateService/";
|
||||
public static readonly string WorkflowStepsUpdate = ResultsProtoApiV1Endpoint + "WorkflowStepsUpdate";
|
||||
|
||||
public static readonly int DefaultNetworkTimeoutInSeconds = 30;
|
||||
public static readonly int DefaultBlobUploadRetries = 3;
|
||||
|
||||
public static readonly string AzureBlobSealedHeader = "x-ms-blob-sealed";
|
||||
public static readonly string AzureBlobTypeHeader = "x-ms-blob-type";
|
||||
public static readonly string AzureBlockBlob = "BlockBlob";
|
||||
public static readonly string AzureAppendBlob = "AppendBlob";
|
||||
|
||||
public const string ContentTypeHeader = "Content-Type";
|
||||
public const string TextPlainContentType = "text/plain";
|
||||
}
|
||||
|
||||
}
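// Note (illustrative addition, not part of this change): the SDK branch introduced above boils down
// to a create/append/seal sequence against an append blob. Below is a minimal stand-alone sketch of
// that sequence, assuming the Azure.Storage.Blobs package; the class name, SAS URL and file path are
// placeholders, not names from this diff.
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Azure.Storage.Blobs.Specialized;

static class AppendBlobUploadSketch
{
    public static async Task UploadAsync(string sasUrl, string filePath, bool finalize, CancellationToken ct)
    {
        var client = new AppendBlobClient(new Uri(sasUrl));

        // First block: make sure the append blob exists (the role of CreateAppendFileAsync above).
        await client.CreateIfNotExistsAsync(cancellationToken: ct);

        // Append the file content as one block (the role of UploadAppendFileAsync above).
        using (var stream = File.OpenRead(filePath))
        {
            await client.AppendBlockAsync(stream, cancellationToken: ct);
        }

        // Last block: seal the blob so no further appends are accepted.
        if (finalize)
        {
            await client.SealAsync(cancellationToken: ct);
        }
    }
}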
|
||||
|
||||
81
src/Test/L0/Listener/BrokerMessageListenerL0.cs
Normal file
@@ -0,0 +1,81 @@
|
||||
using System;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Listener;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Services.Common;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
public sealed class BrokerMessageListenerL0
|
||||
{
|
||||
private readonly RunnerSettings _settings;
|
||||
private readonly Mock<IConfigurationManager> _config;
|
||||
private readonly Mock<IBrokerServer> _brokerServer;
|
||||
private readonly Mock<ICredentialManager> _credMgr;
|
||||
private Mock<IConfigurationStore> _store;
|
||||
|
||||
|
||||
public BrokerMessageListenerL0()
|
||||
{
|
||||
_settings = new RunnerSettings { AgentId = 1, AgentName = "myagent", PoolId = 123, PoolName = "default", ServerUrl = "http://myserver", WorkFolder = "_work", ServerUrlV2 = "http://myserverv2" };
|
||||
_config = new Mock<IConfigurationManager>();
|
||||
_config.Setup(x => x.LoadSettings()).Returns(_settings);
|
||||
_credMgr = new Mock<ICredentialManager>();
|
||||
_store = new Mock<IConfigurationStore>();
|
||||
_brokerServer = new Mock<IBrokerServer>();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void CreatesSession()
|
||||
{
|
||||
using (TestHostContext tc = CreateTestContext())
|
||||
using (var tokenSource = new CancellationTokenSource())
|
||||
{
|
||||
Tracing trace = tc.GetTrace();
|
||||
|
||||
// Arrange.
|
||||
var expectedSession = new TaskAgentSession();
|
||||
_brokerServer
|
||||
.Setup(x => x.CreateSessionAsync(
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token))
|
||||
.Returns(Task.FromResult(expectedSession));
|
||||
|
||||
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
|
||||
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
|
||||
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
|
||||
|
||||
// Act.
|
||||
BrokerMessageListener listener = new();
|
||||
listener.Initialize(tc);
|
||||
|
||||
bool result = await listener.CreateSessionAsync(tokenSource.Token);
|
||||
trace.Info("result: {0}", result);
|
||||
|
||||
// Assert.
|
||||
Assert.True(result);
|
||||
_brokerServer
|
||||
.Verify(x => x.CreateSessionAsync(
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token), Times.Once());
|
||||
}
|
||||
}
|
||||
|
||||
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
|
||||
{
|
||||
TestHostContext tc = new(this, testName);
|
||||
tc.SetSingleton<IConfigurationManager>(_config.Object);
|
||||
tc.SetSingleton<ICredentialManager>(_credMgr.Object);
|
||||
tc.SetSingleton<IConfigurationStore>(_store.Object);
|
||||
tc.SetSingleton<IBrokerServer>(_brokerServer.Object);
|
||||
return tc;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -41,7 +41,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
TaskOrchestrationPlanReference plan = new();
|
||||
TimelineReference timeline = null;
|
||||
Guid jobId = Guid.NewGuid();
|
||||
var result = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "someJob", "someJob", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
|
||||
var result = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "someJob", "someJob", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
|
||||
result.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
|
||||
return result;
|
||||
}
|
||||
@@ -806,7 +806,8 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
},
|
||||
null,
|
||||
new List<TemplateToken>(),
|
||||
new ActionsEnvironmentReference("env")
|
||||
new ActionsEnvironmentReference("env"),
|
||||
null
|
||||
);
|
||||
return message;
|
||||
}
|
||||
|
||||
@@ -24,6 +24,8 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
private Mock<ICredentialManager> _credMgr;
|
||||
private Mock<IConfigurationStore> _store;
|
||||
|
||||
private Mock<IBrokerServer> _brokerServer;
|
||||
|
||||
public MessageListenerL0()
|
||||
{
|
||||
_settings = new RunnerSettings { AgentId = 1, AgentName = "myagent", PoolId = 123, PoolName = "default", ServerUrl = "http://myserver", WorkFolder = "_work" };
|
||||
@@ -32,6 +34,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
_runnerServer = new Mock<IRunnerServer>();
|
||||
_credMgr = new Mock<ICredentialManager>();
|
||||
_store = new Mock<IConfigurationStore>();
|
||||
_brokerServer = new Mock<IBrokerServer>();
|
||||
}
|
||||
|
||||
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
|
||||
@@ -41,6 +44,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
tc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
tc.SetSingleton<ICredentialManager>(_credMgr.Object);
|
||||
tc.SetSingleton<IConfigurationStore>(_store.Object);
|
||||
tc.SetSingleton<IBrokerServer>(_brokerServer.Object);
|
||||
return tc;
|
||||
}
|
||||
|
||||
@@ -81,6 +85,72 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
_settings.PoolId,
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token), Times.Once());
|
||||
_brokerServer
|
||||
.Verify(x => x.CreateSessionAsync(
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token), Times.Never());
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void CreatesSessionWithBrokerMigration()
|
||||
{
|
||||
using (TestHostContext tc = CreateTestContext())
|
||||
using (var tokenSource = new CancellationTokenSource())
|
||||
{
|
||||
Tracing trace = tc.GetTrace();
|
||||
|
||||
// Arrange.
|
||||
var expectedSession = new TaskAgentSession()
|
||||
{
|
||||
OwnerName = "legacy",
|
||||
BrokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://broker.actions.github.com"))
|
||||
};
|
||||
|
||||
var expectedBrokerSession = new TaskAgentSession()
|
||||
{
|
||||
OwnerName = "broker"
|
||||
};
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.CreateAgentSessionAsync(
|
||||
_settings.PoolId,
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token))
|
||||
.Returns(Task.FromResult(expectedSession));
|
||||
|
||||
_brokerServer
|
||||
.Setup(x => x.CreateSessionAsync(
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token))
|
||||
.Returns(Task.FromResult(expectedBrokerSession));
|
||||
|
||||
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
|
||||
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
|
||||
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
|
||||
|
||||
// Act.
|
||||
MessageListener listener = new();
|
||||
listener.Initialize(tc);
|
||||
|
||||
bool result = await listener.CreateSessionAsync(tokenSource.Token);
|
||||
trace.Info("result: {0}", result);
|
||||
|
||||
// Assert.
|
||||
Assert.True(result);
|
||||
|
||||
_runnerServer
|
||||
.Verify(x => x.CreateAgentSessionAsync(
|
||||
_settings.PoolId,
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token), Times.Once());
|
||||
|
||||
_brokerServer
|
||||
.Verify(x => x.CreateSessionAsync(
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token), Times.Once());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -131,6 +201,83 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void DeleteSessionWithBrokerMigration()
|
||||
{
|
||||
using (TestHostContext tc = CreateTestContext())
|
||||
using (var tokenSource = new CancellationTokenSource())
|
||||
{
|
||||
Tracing trace = tc.GetTrace();
|
||||
|
||||
// Arrange.
|
||||
var expectedSession = new TaskAgentSession()
|
||||
{
|
||||
OwnerName = "legacy",
|
||||
BrokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://broker.actions.github.com"))
|
||||
};
|
||||
|
||||
var expectedBrokerSession = new TaskAgentSession()
|
||||
{
|
||||
SessionId = Guid.NewGuid(),
|
||||
OwnerName = "broker"
|
||||
};
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.CreateAgentSessionAsync(
|
||||
_settings.PoolId,
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token))
|
||||
.Returns(Task.FromResult(expectedSession));
|
||||
|
||||
_brokerServer
|
||||
.Setup(x => x.CreateSessionAsync(
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token))
|
||||
.Returns(Task.FromResult(expectedBrokerSession));
|
||||
|
||||
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
|
||||
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
|
||||
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
|
||||
|
||||
// Act.
|
||||
MessageListener listener = new();
|
||||
listener.Initialize(tc);
|
||||
|
||||
bool result = await listener.CreateSessionAsync(tokenSource.Token);
|
||||
trace.Info("result: {0}", result);
|
||||
|
||||
Assert.True(result);
|
||||
|
||||
_runnerServer
|
||||
.Verify(x => x.CreateAgentSessionAsync(
|
||||
_settings.PoolId,
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token), Times.Once());
|
||||
|
||||
_brokerServer
|
||||
.Verify(x => x.CreateSessionAsync(
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token), Times.Once());
|
||||
|
||||
_brokerServer
|
||||
.Setup(x => x.DeleteSessionAsync(It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.CompletedTask);
|
||||
|
||||
// Act.
|
||||
await listener.DeleteSessionAsync();
|
||||
|
||||
|
||||
//Assert
|
||||
_runnerServer
|
||||
.Verify(x => x.DeleteAgentSessionAsync(
|
||||
_settings.PoolId, expectedBrokerSession.SessionId, It.IsAny<CancellationToken>()), Times.Once());
|
||||
_brokerServer
|
||||
.Verify(x => x.DeleteSessionAsync(It.IsAny<CancellationToken>()), Times.Once());
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
@@ -192,8 +339,8 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()))
|
||||
.Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, CancellationToken cancellationToken) =>
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
|
||||
.Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) =>
|
||||
{
|
||||
await Task.Yield();
|
||||
return messages.Dequeue();
|
||||
@@ -208,7 +355,113 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
//Assert
|
||||
_runnerServer
|
||||
.Verify(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(arMessages.Length));
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()), Times.Exactly(arMessages.Length));
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void GetNextMessageWithBrokerMigration()
|
||||
{
|
||||
using (TestHostContext tc = CreateTestContext())
|
||||
using (var tokenSource = new CancellationTokenSource())
|
||||
{
|
||||
Tracing trace = tc.GetTrace();
|
||||
|
||||
// Arrange.
|
||||
var expectedSession = new TaskAgentSession();
|
||||
PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
|
||||
Assert.NotNull(sessionIdProperty);
|
||||
sessionIdProperty.SetValue(expectedSession, Guid.NewGuid());
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.CreateAgentSessionAsync(
|
||||
_settings.PoolId,
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token))
|
||||
.Returns(Task.FromResult(expectedSession));
|
||||
|
||||
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
|
||||
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
|
||||
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
|
||||
|
||||
// Act.
|
||||
MessageListener listener = new();
|
||||
listener.Initialize(tc);
|
||||
|
||||
bool result = await listener.CreateSessionAsync(tokenSource.Token);
|
||||
Assert.True(result);
|
||||
|
||||
var brokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://actions.broker.com"));
|
||||
|
||||
var arMessages = new TaskAgentMessage[]
|
||||
{
|
||||
new TaskAgentMessage
|
||||
{
|
||||
Body = JsonUtility.ToString(brokerMigrationMessage),
|
||||
MessageType = BrokerMigrationMessage.MessageType
|
||||
},
|
||||
};
|
||||
|
||||
var brokerMessages = new TaskAgentMessage[]
|
||||
{
|
||||
new TaskAgentMessage
|
||||
{
|
||||
Body = "somebody1",
|
||||
MessageId = 4234,
|
||||
MessageType = JobRequestMessageTypes.PipelineAgentJobRequest
|
||||
},
|
||||
new TaskAgentMessage
|
||||
{
|
||||
Body = "somebody2",
|
||||
MessageId = 4235,
|
||||
MessageType = JobCancelMessage.MessageType
|
||||
},
|
||||
null, //should be skipped by GetNextMessageAsync implementation
|
||||
null,
|
||||
new TaskAgentMessage
|
||||
{
|
||||
Body = "somebody3",
|
||||
MessageId = 4236,
|
||||
MessageType = JobRequestMessageTypes.PipelineAgentJobRequest
|
||||
}
|
||||
};
|
||||
var brokerMessageQueue = new Queue<TaskAgentMessage>(brokerMessages);
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
|
||||
.Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) =>
|
||||
{
|
||||
await Task.Yield();
|
||||
return arMessages[0]; // always send migration message
|
||||
});
|
||||
|
||||
_brokerServer
|
||||
.Setup(x => x.GetRunnerMessageAsync(
|
||||
expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
|
||||
.Returns(async (Guid sessionId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) =>
|
||||
{
|
||||
await Task.Yield();
|
||||
return brokerMessageQueue.Dequeue();
|
||||
});
|
||||
|
||||
TaskAgentMessage message1 = await listener.GetNextMessageAsync(tokenSource.Token);
|
||||
TaskAgentMessage message2 = await listener.GetNextMessageAsync(tokenSource.Token);
|
||||
TaskAgentMessage message3 = await listener.GetNextMessageAsync(tokenSource.Token);
|
||||
Assert.Equal(brokerMessages[0], message1);
|
||||
Assert.Equal(brokerMessages[1], message2);
|
||||
Assert.Equal(brokerMessages[4], message3);
|
||||
|
||||
//Assert
|
||||
_runnerServer
|
||||
.Verify(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()), Times.Exactly(brokerMessages.Length));
|
||||
|
||||
_brokerServer
|
||||
.Verify(x => x.GetRunnerMessageAsync(
|
||||
expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()), Times.Exactly(brokerMessages.Length));
|
||||
}
|
||||
}
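// Note (illustrative addition, not part of this diff): the behavior this test pins down can be
// summarized by the sketch below. It is not the runner's actual MessageListener; the interface
// shapes are taken from the mocks above, usings are assumed to match this test file, and
// re-pointing the broker client at the URL carried in the migration message is elided.
internal sealed class MigratingMessagePollerSketch
{
    private readonly IRunnerServer _runnerServer;
    private readonly IBrokerServer _brokerServer;
    private readonly RunnerSettings _settings;
    private readonly TaskAgentSession _session;

    public MigratingMessagePollerSketch(IRunnerServer runnerServer, IBrokerServer brokerServer, RunnerSettings settings, TaskAgentSession session)
    {
        _runnerServer = runnerServer;
        _brokerServer = brokerServer;
        _settings = settings;
        _session = session;
    }

    public async Task<TaskAgentMessage> GetNextMessageAsync(string runnerVersion, string os, string arch, bool disableUpdate, CancellationToken token)
    {
        while (true)
        {
            var message = await _runnerServer.GetAgentMessageAsync(
                _settings.PoolId, _session.SessionId, (long?)null, TaskAgentStatus.Online,
                runnerVersion, os, arch, disableUpdate, token);

            if (message?.MessageType == BrokerMigrationMessage.MessageType)
            {
                // The legacy service is redirecting us: message.Body carries a BrokerMigrationMessage
                // with the broker URL (re-targeting elided), so ask the broker for the real message.
                message = await _brokerServer.GetRunnerMessageAsync(
                    _session.SessionId, TaskAgentStatus.Online,
                    runnerVersion, os, arch, disableUpdate, token);
            }

            if (message != null)
            {
                return message; // null responses are simply skipped, as the test expects
            }
        }
    }
}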
|
||||
|
||||
@@ -293,7 +546,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()))
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
|
||||
.Throws(new TaskAgentAccessTokenExpiredException("test"));
|
||||
try
|
||||
{
|
||||
@@ -311,7 +564,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
//Assert
|
||||
_runnerServer
|
||||
.Verify(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Once);
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()), Times.Once);
|
||||
|
||||
_runnerServer
|
||||
.Verify(x => x.DeleteAgentSessionAsync(
|
||||
|
||||
@@ -42,7 +42,7 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
TaskOrchestrationPlanReference plan = new();
|
||||
TimelineReference timeline = null;
|
||||
Guid jobId = Guid.NewGuid();
|
||||
return new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
|
||||
return new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
|
||||
}
|
||||
|
||||
private JobCancelMessage CreateJobCancelMessage()
|
||||
|
||||
@@ -23,7 +23,6 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
private Mock<IConfigurationStore> _configStore;
|
||||
private Mock<IJobDispatcher> _jobDispatcher;
|
||||
private AgentRefreshMessage _refreshMessage = new(1, "2.999.0");
|
||||
private List<TrimmedPackageMetadata> _trimmedPackages = new();
|
||||
|
||||
#if !OS_WINDOWS
|
||||
private string _packageUrl = null;
|
||||
@@ -71,12 +70,6 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
}
|
||||
}
|
||||
|
||||
using (var client = new HttpClient())
|
||||
{
|
||||
var json = await client.GetStringAsync($"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}-trimmedpackages.json");
|
||||
_trimmedPackages = StringUtil.ConvertFromJson<List<TrimmedPackageMetadata>>(json);
|
||||
}
|
||||
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl }));
|
||||
|
||||
@@ -91,12 +84,10 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
@@ -152,12 +143,10 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
@@ -205,12 +194,10 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
@@ -260,12 +247,10 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
@@ -305,495 +290,6 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_CloneHash_RuntimeAndExternals()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
|
||||
|
||||
var p1 = new ProcessInvokerWrapper();
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper();
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper();
|
||||
p3.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
updater.Initialize(hc);
|
||||
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = new List<TrimmedPackageMetadata>() { new TrimmedPackageMetadata() } }));
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
Assert.True(result);
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
|
||||
|
||||
FieldInfo contentHashesProperty = updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
|
||||
Assert.NotNull(contentHashesProperty);
|
||||
Dictionary<string, string> contentHashes = (Dictionary<string, string>)contentHashesProperty.GetValue(updater);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes));
|
||||
|
||||
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
|
||||
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
|
||||
|
||||
Assert.Equal(File.ReadAllText(dotnetRuntimeHashFile).Trim(), contentHashes["dotnetRuntime"]);
|
||||
Assert.Equal(File.ReadAllText(externalsHashFile).Trim(), contentHashes["externals"]);
|
||||
}
|
||||
finally
|
||||
{
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
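// Note (illustrative addition, not part of this diff): the values compared above are plain-text
// digests checked in under src/Misc/contentHash. The real updater computes them by shelling out to
// a hashFiles helper; this stand-alone sketch only illustrates the idea of a deterministic
// directory digest plus comparison, with hypothetical names.
using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

static class ContentHashSketch
{
    public static string ComputeDirectoryHash(string root)
    {
        using var sha256 = SHA256.Create();
        var manifest = new StringBuilder();

        // Hash files in a deterministic order so the digest is reproducible across machines.
        foreach (var file in Directory.EnumerateFiles(root, "*", SearchOption.AllDirectories)
                                      .OrderBy(f => f, StringComparer.Ordinal))
        {
            var relative = Path.GetRelativePath(root, file).Replace('\\', '/');
            var fileHash = Convert.ToHexString(sha256.ComputeHash(File.ReadAllBytes(file)));
            manifest.Append(relative).Append(':').Append(fileHash).Append('\n');
        }

        // Hash the per-file manifest to get a single value for the whole tree.
        return Convert.ToHexString(sha256.ComputeHash(Encoding.UTF8.GetBytes(manifest.ToString()))).ToLowerInvariant();
    }

    public static bool MatchesCheckedInHash(string root, string hashFilePath)
    {
        var expected = File.ReadAllText(hashFilePath).Trim();
        return string.Equals(ComputeDirectoryHash(root), expected, StringComparison.OrdinalIgnoreCase);
    }
}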
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_Cancel_CloneHashTask_WhenNotNeeded()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new Mock<IHttpClientHandlerFactory>().Object);
|
||||
|
||||
var p1 = new ProcessInvokerWrapper();
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper();
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper();
|
||||
p3.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
updater.Initialize(hc);
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
|
||||
FieldInfo contentHashesProperty = updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
|
||||
Assert.NotNull(contentHashesProperty);
|
||||
Dictionary<string, string> contentHashes = (Dictionary<string, string>)contentHashesProperty.GetValue(updater);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes));
|
||||
|
||||
Assert.NotEqual(2, contentHashes.Count);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
hc.GetTrace().Error(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_UseExternalsTrimmedPackage()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
|
||||
|
||||
var p1 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper(); // un-tar
|
||||
p3.Initialize(hc);
|
||||
var p4 = new ProcessInvokerWrapper(); // node -v
|
||||
p4.Initialize(hc);
|
||||
var p5 = new ProcessInvokerWrapper(); // node -v
|
||||
p5.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p4);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p5);
|
||||
updater.Initialize(hc);
|
||||
|
||||
var trim = _trimmedPackages.Where(x => !x.TrimmedContents.ContainsKey("dotnetRuntime")).ToList();
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
Assert.True(result);
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
|
||||
}
|
||||
finally
|
||||
{
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
|
||||
}
|
||||
|
||||
var traceFile = Path.GetTempFileName();
|
||||
File.Copy(hc.TraceFileName, traceFile, true);
|
||||
|
||||
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
|
||||
var externalsHash = await File.ReadAllTextAsync(externalsHashFile);
|
||||
|
||||
if (externalsHash == trim[0].TrimmedContents["externals"])
|
||||
{
|
||||
Assert.Contains("Use trimmed (externals) package", File.ReadAllText(traceFile));
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_UseExternalsRuntimeTrimmedPackage()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
|
||||
|
||||
var p1 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper(); // un-tar
|
||||
p3.Initialize(hc);
|
||||
var p4 = new ProcessInvokerWrapper(); // node -v
|
||||
p4.Initialize(hc);
|
||||
var p5 = new ProcessInvokerWrapper(); // node -v
|
||||
p5.Initialize(hc);
|
||||
var p6 = new ProcessInvokerWrapper(); // runner -v
|
||||
p6.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p4);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p5);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p6);
|
||||
updater.Initialize(hc);
|
||||
|
||||
var trim = _trimmedPackages.Where(x => x.TrimmedContents.ContainsKey("dotnetRuntime") && x.TrimmedContents.ContainsKey("externals")).ToList();
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
Assert.True(result);
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
|
||||
}
|
||||
finally
|
||||
{
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
|
||||
}
|
||||
|
||||
var traceFile = Path.GetTempFileName();
|
||||
File.Copy(hc.TraceFileName, traceFile, true);
|
||||
|
||||
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
|
||||
var externalsHash = await File.ReadAllTextAsync(externalsHashFile);
|
||||
|
||||
var runtimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
|
||||
var runtimeHash = await File.ReadAllTextAsync(runtimeHashFile);
|
||||
|
||||
if (externalsHash == trim[0].TrimmedContents["externals"] &&
|
||||
runtimeHash == trim[0].TrimmedContents["dotnetRuntime"])
|
||||
{
|
||||
Assert.Contains("Use trimmed (runtime+externals) package", File.ReadAllText(traceFile));
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_NotUseExternalsRuntimeTrimmedPackageOnHashMismatch()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
|
||||
|
||||
var p1 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper(); // un-tar
|
||||
p3.Initialize(hc);
|
||||
var p4 = new ProcessInvokerWrapper(); // node -v
|
||||
p4.Initialize(hc);
|
||||
var p5 = new ProcessInvokerWrapper(); // node -v
|
||||
p5.Initialize(hc);
|
||||
var p6 = new ProcessInvokerWrapper(); // runner -v
|
||||
p6.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p4);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p5);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p6);
|
||||
updater.Initialize(hc);
|
||||
|
||||
var trim = _trimmedPackages.ToList();
|
||||
foreach (var package in trim)
|
||||
{
|
||||
foreach (var hash in package.TrimmedContents.Keys)
|
||||
{
|
||||
package.TrimmedContents[hash] = "mismatch";
|
||||
}
|
||||
}
|
||||
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
|
||||
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
Assert.True(result);
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
|
||||
}
|
||||
finally
|
||||
{
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
|
||||
}
|
||||
|
||||
var traceFile = Path.GetTempFileName();
|
||||
File.Copy(hc.TraceFileName, traceFile, true);
|
||||
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void TestSelfUpdateAsync_FallbackToFullPackage()
|
||||
{
|
||||
try
|
||||
{
|
||||
await FetchLatestRunner();
|
||||
Assert.NotNull(_packageUrl);
|
||||
Assert.NotNull(_trimmedPackages);
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
|
||||
using (var hc = new TestHostContext(this))
|
||||
{
|
||||
hc.GetTrace().Info(_packageUrl);
|
||||
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
|
||||
|
||||
//Arrange
|
||||
var updater = new Runner.Listener.SelfUpdater();
|
||||
hc.SetSingleton<ITerminal>(_term.Object);
|
||||
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
|
||||
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
|
||||
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
|
||||
|
||||
var p1 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p1.Initialize(hc);
|
||||
var p2 = new ProcessInvokerWrapper(); // hashfiles
|
||||
p2.Initialize(hc);
|
||||
var p3 = new ProcessInvokerWrapper(); // un-tar trim
|
||||
p3.Initialize(hc);
|
||||
var p4 = new ProcessInvokerWrapper(); // un-tar full
|
||||
p4.Initialize(hc);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p1);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p2);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p3);
|
||||
hc.EnqueueInstance<IProcessInvoker>(p4);
|
||||
updater.Initialize(hc);
|
||||
|
||||
var trim = _trimmedPackages.ToList();
|
||||
foreach (var package in trim)
|
||||
{
|
||||
package.HashValue = "mismatch";
|
||||
}
|
||||
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
|
||||
|
||||
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Callback((int p, ulong a, string s, string t) =>
|
||||
{
|
||||
hc.GetTrace().Info(t);
|
||||
})
|
||||
.Returns(Task.FromResult(new TaskAgent()));
|
||||
|
||||
try
|
||||
{
|
||||
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
|
||||
Assert.True(result);
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
|
||||
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
|
||||
}
|
||||
finally
|
||||
{
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
|
||||
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
|
||||
}
|
||||
|
||||
var traceFile = Path.GetTempFileName();
|
||||
File.Copy(hc.TraceFileName, traceFile, true);
|
||||
if (File.ReadAllText(traceFile).Contains("Use trimmed (runtime+externals) package"))
|
||||
{
|
||||
Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
|
||||
}
|
||||
else
|
||||
{
|
||||
hc.GetTrace().Warning("Skipping the 'TestSelfUpdateAsync_FallbackToFullPackage' test, as the `externals` or `runtime` hashes have been updated");
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
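// Note (illustrative addition, not part of this diff): taken together, the trimmed-package tests
// above check one decision: use a trimmed download only when every hash in TrimmedContents matches
// what the current runner already carries, otherwise fall back to the full package. Below is a
// condensed sketch of that decision, not SelfUpdater's actual code; usings are assumed to match the
// test files above.
static class TrimmedPackageSelectionSketch
{
    public static TrimmedPackageMetadata SelectTrimmedPackage(
        IEnumerable<TrimmedPackageMetadata> trimmedPackages,
        IReadOnlyDictionary<string, string> localContentHashes)
    {
        if (trimmedPackages == null)
        {
            return null;
        }

        foreach (var package in trimmedPackages)
        {
            var allMatch = true;
            foreach (var trimmed in package.TrimmedContents)
            {
                // Keys are content groups such as "dotnetRuntime" or "externals"; values are the
                // hashes the trimmed package was built against.
                if (!localContentHashes.TryGetValue(trimmed.Key, out var localHash) ||
                    !string.Equals(localHash, trimmed.Value, StringComparison.OrdinalIgnoreCase))
                {
                    allMatch = false; // the current runner does not carry that trimmed content
                    break;
                }
            }

            if (allMatch)
            {
                return package; // safe to use the smaller trimmed download
            }
        }

        return null; // hash mismatch everywhere: download the full package instead
    }
}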
|
||||
|
||||
234
src/Test/L0/Listener/SelfUpdaterV2L0.cs
Normal file
@@ -0,0 +1,234 @@
|
||||
#if !(OS_WINDOWS && ARM64)
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Reflection;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Listener;
|
||||
using GitHub.Runner.Sdk;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
public sealed class SelfUpdaterV2L0
|
||||
{
|
||||
private Mock<IRunnerServer> _runnerServer;
|
||||
private Mock<ITerminal> _term;
|
||||
private Mock<IConfigurationStore> _configStore;
|
||||
private Mock<IJobDispatcher> _jobDispatcher;
|
||||
private AgentRefreshMessage _refreshMessage = new(1, "2.999.0");
|
||||
|
||||
#if !OS_WINDOWS
|
||||
private string _packageUrl = null;
|
||||
#else
|
||||
private string _packageUrl = null;
|
||||
#endif
|
||||
public SelfUpdaterV2L0()
|
||||
{
|
||||
_runnerServer = new Mock<IRunnerServer>();
|
||||
_term = new Mock<ITerminal>();
|
||||
_configStore = new Mock<IConfigurationStore>();
|
||||
_jobDispatcher = new Mock<IJobDispatcher>();
|
||||
_configStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1, AgentId = 1 });
|
||||
|
||||
Environment.SetEnvironmentVariable("_GITHUB_ACTION_EXECUTE_UPDATE_SCRIPT", "1");
|
||||
}
|
||||
|
||||
private async Task FetchLatestRunner()
|
||||
{
|
||||
var latestVersion = "";
|
||||
var httpClientHandler = new HttpClientHandler();
httpClientHandler.AllowAutoRedirect = false;
using (var client = new HttpClient(httpClientHandler))
{
var response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Get, "https://github.com/actions/runner/releases/latest"));
if (response.StatusCode == System.Net.HttpStatusCode.Redirect)
{
var redirectUrl = response.Headers.Location.ToString();
Regex regex = new(@"/runner/releases/tag/v(?<version>\d+\.\d+\.\d+)");
var match = regex.Match(redirectUrl);
if (match.Success)
{
latestVersion = match.Groups["version"].Value;

#if !OS_WINDOWS
_packageUrl = $"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}.tar.gz";
#else
_packageUrl = $"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}.zip";
#endif
}
else
{
throw new Exception("The latest runner version could not be determined so a download URL could not be generated for it. Please check the location header of the redirect response of 'https://github.com/actions/runner/releases/latest'");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);

//Arrange
var updater = new Runner.Listener.SelfUpdaterV2();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

try
{
var message = new RunnerRefreshMessage()
{
TargetVersion = "2.999.0",
OS = BuildConstants.RunnerPackage.PackageName,
DownloadUrl = _packageUrl
};

var result = await updater.SelfUpdate(message, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_DownloadRetry()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);

//Arrange
var updater = new Runner.Listener.SelfUpdaterV2();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

var message = new RunnerRefreshMessage()
{
TargetVersion = "2.999.0",
OS = BuildConstants.RunnerPackage.PackageName,
DownloadUrl = "https://github.com/actions/runner/notexists"
};

var ex = await Assert.ThrowsAsync<TaskCanceledException>(() => updater.SelfUpdate(message, _jobDispatcher.Object, true, hc.RunnerShutdownToken));
Assert.Contains($"failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts", ex.Message);
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_ValidateHash()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);

//Arrange
var updater = new Runner.Listener.SelfUpdaterV2();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

var message = new RunnerRefreshMessage()
{
TargetVersion = "2.999.0",
OS = BuildConstants.RunnerPackage.PackageName,
DownloadUrl = _packageUrl,
SHA256Checksum = "badhash"
};

var ex = await Assert.ThrowsAsync<Exception>(() => updater.SelfUpdate(message, _jobDispatcher.Object, true, hc.RunnerShutdownToken));
Assert.Contains("did not match expected Runner Hash", ex.Message);
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
}
}
#endif
@@ -1,301 +0,0 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using Xunit;

namespace GitHub.Runner.Common.Tests
{
public sealed class PackagesTrimL0
{

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_NewFilesCrossAll()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets");
var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets");
string layoutBin = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
var newFiles = new List<string>();
if (Directory.Exists(layoutBin))
{
var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile);
var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile);
foreach (var file in Directory.GetFiles(layoutBin, "*", SearchOption.AllDirectories))
{
if (!coreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x)) &&
!runtimeAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x)))
{
newFiles.Add(file);
}
}

if (newFiles.Count > 0)
{
Assert.True(false, $"Found new files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages.");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_OverlapFiles()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets");
var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets");

var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile);
var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile);

foreach (var line in coreAssets)
{
if (runtimeAssets.Contains(line, StringComparer.OrdinalIgnoreCase))
{
Assert.True(false, $"'Misc/runnercoreassets' and 'Misc/runnerdotnetruntimeassets' should not overlap.");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_NewRunnerCoreAssets()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets");
var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile);

string layoutBin = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
var newFiles = new List<string>();
if (Directory.Exists(layoutBin))
{
var binDirs = Directory.GetDirectories(TestUtil.GetSrcPath(), "net6.0", SearchOption.AllDirectories);
foreach (var binDir in binDirs)
{
if (binDir.Contains("Test") || binDir.Contains("obj"))
{
continue;
}

Directory.GetFiles(binDir, "*", SearchOption.TopDirectoryOnly).ToList().ForEach(x =>
{
if (!x.Contains("runtimeconfig.dev.json"))
{
if (!coreAssets.Any(y => x.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(y)))
{
newFiles.Add(x);
}
}
});
}

if (newFiles.Count > 0)
{
Assert.True(false, $"Found new files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages. You might need to update `Misc/runnercoreassets`.");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_NewDotnetRuntimeAssets()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets");
var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile);

string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runnerdotnetruntimeassets");
var newFiles = new List<string>();
if (File.Exists(layoutTrimsRuntimeAssets))
{
var runtimeAssetsCurrent = await File.ReadAllLinesAsync(layoutTrimsRuntimeAssets);
foreach (var runtimeFile in runtimeAssetsCurrent)
{
if (runtimeAssets.Any(x => runtimeFile.EndsWith(x, StringComparison.OrdinalIgnoreCase)))
{
continue;
}
else
{
newFiles.Add(runtimeFile);
}
}

if (newFiles.Count > 0)
{
Assert.True(false, $"Found new dotnet runtime files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages. You might need to update `Misc/runnerdotnetruntimeassets`.");
}
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckDotnetRuntimeHash()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(dotnetRuntimeHashFile)}");
string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runtime");

string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");

#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node16\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node16/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);

p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};

p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};

var env = new Dictionary<string, string>
{
["patterns"] = "**"
};

int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsRuntimeAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);

Assert.True(string.Equals(hashResult, File.ReadAllText(dotnetRuntimeHashFile).Trim()), $"Hash mismatch for dotnet runtime. You might need to update `Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently.");
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckExternalsHash()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(externalsHashFile)}");

string layoutTrimsExternalsAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/externals");

string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");

#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node16\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node16/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);

p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};

p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};

var env = new Dictionary<string, string>
{
["patterns"] = "**"
};

int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsExternalsAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);

Assert.True(string.Equals(hashResult, File.ReadAllText(externalsHashFile).Trim()), $"Hash mismatch for externals. You might need to update `Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently.");
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public Task RunnerLayoutParts_ContentHashFilesNoNewline()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();

var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
var dotnetRuntimeHash = File.ReadAllText(dotnetRuntimeHashFile);
trace.Info($"Current hash: {dotnetRuntimeHash}");

var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
var externalsHash = File.ReadAllText(externalsHashFile);
trace.Info($"Current hash: {externalsHash}");

Assert.False(externalsHash.Any(x => char.IsWhiteSpace(x)), $"Found whitespace in externals hash file.");
Assert.False(dotnetRuntimeHash.Any(x => char.IsWhiteSpace(x)), $"Found whitespace in dotnet runtime hash file.");

return Task.CompletedTask;
}
}
}
}
@@ -139,7 +139,7 @@ namespace GitHub.Runner.Common.Tests.Worker
message = "::do-something k1=;=%252C=%250D=%250A=]=%253A,::;-%250D-%250A-]-:-,";
test = new ActionCommand("do-something")
{
Data = ";-%250D-%250A-]-:-,",
Data = ";-%0D-%0A-]-:-,",
};
test.Properties.Add("k1", ";=%2C=%0D=%0A=]=%3A");
Assert.True(ActionCommand.TryParseV2(message, commands, out verify));

@@ -232,7 +232,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -443,21 +443,6 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void AddMaskWithPercentEncodedString()
{
using (TestHostContext hc = CreateTestContext())
{
// Act
_commandManager.TryProcessCommand(_ec.Object, $"::add-mask::%252F%2F", null);

// Assert
Assert.Equal("***", hc.SecretMasker.MaskSecrets("%252F%2F"));
}
}

private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{
var hostContext = new TestHostContext(this, testName);

@@ -757,7 +757,7 @@ namespace GitHub.Runner.Common.Tests.Worker

//Assert
var err = Assert.Throws<ArgumentException>(() => actionManifest.Load(_ec.Object, action_path));
Assert.Contains($"Fail to load {action_path}", err.Message);
Assert.Contains($"Failed to load {action_path}", err.Message);
_ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16' or 'node20'.")), It.IsAny<ExecutionContextLogOptions>()), Times.Once);
}
finally

@@ -193,7 +193,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "Summary Job";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -29,7 +29,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -106,7 +106,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -162,7 +162,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -216,7 +216,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -271,7 +271,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -322,7 +322,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -373,7 +373,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -471,7 +471,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -555,7 +555,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -610,7 +610,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -653,7 +653,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -717,7 +717,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -781,7 +781,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -969,7 +969,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -1014,7 +1014,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

@@ -1057,7 +1057,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
@@ -4,6 +4,8 @@ using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker;
using Moq;

@@ -25,6 +27,9 @@ namespace GitHub.Runner.Common.Tests.Worker
private Mock<IContainerOperationProvider> _containerProvider;
private Mock<IDiagnosticLogManager> _diagnosticLogManager;
private Mock<IJobHookProvider> _jobHookProvider;
private Mock<ISnapshotOperationProvider> _snapshotOperationProvider;

private Pipelines.Snapshot _requestedSnapshot;

private CancellationTokenSource _tokenSource;
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")

@@ -41,7 +46,16 @@ namespace GitHub.Runner.Common.Tests.Worker
_directoryManager.Setup(x => x.PrepareDirectory(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.WorkspaceOptions>()))
.Returns(new TrackingConfig() { PipelineDirectory = "runner", WorkspaceDirectory = "runner/runner" });
_jobHookProvider = new Mock<IJobHookProvider>();
_snapshotOperationProvider = new Mock<ISnapshotOperationProvider>();

_requestedSnapshot = null;
_snapshotOperationProvider
.Setup(p => p.CreateSnapshotRequestAsync(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.Snapshot>()))
.Returns((IExecutionContext _, object data) =>
{
_requestedSnapshot = data as Pipelines.Snapshot;
return Task.CompletedTask;
});
IActionRunner step1 = new ActionRunner();
IActionRunner step2 = new ActionRunner();
IActionRunner step3 = new ActionRunner();

@@ -100,7 +114,7 @@ namespace GitHub.Runner.Common.Tests.Worker
};

Guid jobId = Guid.NewGuid();
_message = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), steps, null, null, null, null);
_message = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), steps, null, null, null, null, null);
GitHubContext github = new();
github["repository"] = new Pipelines.ContextData.StringContextData("actions/runner");
github["secret_source"] = new Pipelines.ContextData.StringContextData("Actions");

@@ -125,6 +139,7 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.SetSingleton(_directoryManager.Object);
hc.SetSingleton(_diagnosticLogManager.Object);
hc.SetSingleton(_jobHookProvider.Object);
hc.SetSingleton(_snapshotOperationProvider.Object);
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // JobExecutionContext
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job start hook
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // Initial Job

@@ -443,5 +458,80 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(0, _jobEc.PostJobSteps.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task EnsureNoSnapshotPostJobStep()
{
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);

_actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>(), It.IsAny<Guid>()))
.Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>(), new Dictionary<Guid, IActionRunner>())));

_message.Snapshot = null;
await jobExtension.InitializeJob(_jobEc, _message);

var postJobSteps = _jobEc.PostJobSteps;
Assert.Equal(0, postJobSteps.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public Task EnsureSnapshotPostJobStepForStringToken()
{
var snapshot = new Pipelines.Snapshot("TestImageNameFromStringToken");
var imageNameValueStringToken = new StringToken(null, null, null, snapshot.ImageName);
return EnsureSnapshotPostJobStepForToken(imageNameValueStringToken, snapshot);
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public Task EnsureSnapshotPostJobStepForMappingToken()
{
var snapshot = new Pipelines.Snapshot("TestImageNameFromMappingToken");
var imageNameValueStringToken = new StringToken(null, null, null, snapshot.ImageName);
var mappingToken = new MappingToken(null, null, null)
{
{ new StringToken(null,null,null, PipelineTemplateConstants.ImageName), imageNameValueStringToken }
};

return EnsureSnapshotPostJobStepForToken(mappingToken, snapshot);
}

private async Task EnsureSnapshotPostJobStepForToken(TemplateToken snapshotToken, Pipelines.Snapshot expectedSnapshot)
{
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);

_actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>(), It.IsAny<Guid>()))
.Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>(), new Dictionary<Guid, IActionRunner>())));

_message.Snapshot = snapshotToken;

await jobExtension.InitializeJob(_jobEc, _message);

var postJobSteps = _jobEc.PostJobSteps;

Assert.Equal(1, postJobSteps.Count);
var snapshotStep = postJobSteps.First();
Assert.Equal("Create custom image", snapshotStep.DisplayName);
Assert.Equal($"{PipelineTemplateConstants.Success}()", snapshotStep.Condition);

// Run the mock snapshot step, so we can verify it was executed with the expected snapshot object.
await snapshotStep.RunAsync();

Assert.NotNull(_requestedSnapshot);
Assert.Equal(expectedSnapshot.ImageName, _requestedSnapshot.ImageName);
}
}
}
}

@@ -101,6 +101,7 @@ namespace GitHub.Runner.Common.Tests.Worker
testName,
testName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null,
new ActionsEnvironmentReference("staging"),
null,
messageType: messageType);
message.Variables[Constants.Variables.System.Culture] = "en-US";
message.Resources.Endpoints.Add(new ServiceEndpoint()
78
src/Test/L0/Worker/SnapshotOperationProviderL0.cs
Normal file
@@ -0,0 +1,78 @@
#nullable enable
using System;
using System.IO;
using System.Runtime.CompilerServices;
using GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using Moq;
using Xunit;

namespace GitHub.Runner.Common.Tests.Worker;

public class SnapshotOperationProviderL0
{
private Mock<IExecutionContext>? _ec;
private SnapshotOperationProvider? _snapshotOperationProvider;
private string? _snapshotRequestFilePath;
private string? _snapshotRequestDirectoryPath;

[Theory]
[InlineData(true)]
[InlineData(false)]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async void CreateSnapshotRequestAsync(bool shouldSnapshotDirectoryAlreadyExist)
{
using (TestHostContext testHostContext = CreateTestHostContext())
{
//Arrange
Setup(testHostContext, shouldSnapshotDirectoryAlreadyExist);
var expectedSnapshot = new Snapshot(Guid.NewGuid().ToString());

//Act
await _snapshotOperationProvider!.CreateSnapshotRequestAsync(_ec!.Object, expectedSnapshot);

//Assert
var actualSnapshot = IOUtil.LoadObject<Snapshot>(_snapshotRequestFilePath);
Assert.NotNull(actualSnapshot);
Assert.Equal(expectedSnapshot.ImageName, actualSnapshot!.ImageName);
_ec.Verify(ec => ec.Write(null, $"Request written to: {_snapshotRequestFilePath}"), Times.Once);
_ec.Verify(ec => ec.Write(null, "This request will be processed after the job completes. You will not receive any feedback on the snapshot process within the workflow logs of this job."), Times.Once);
_ec.Verify(ec => ec.Write(null, "If the snapshot process is successful, you should see a new image with the requested name in the list of available custom images when creating a new GitHub-hosted Runner."), Times.Once);
_ec.VerifyNoOtherCalls();
}
}

private void Setup(IHostContext hostContext, bool shouldSnapshotDirectoryAlreadyExist)
{
_ec = new Mock<IExecutionContext>();
_snapshotOperationProvider = new SnapshotOperationProvider();
_snapshotOperationProvider.Initialize(hostContext);
_snapshotRequestFilePath = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Root), ".snapshot", "request.json");
_snapshotRequestDirectoryPath = Path.GetDirectoryName(_snapshotRequestFilePath);

if (_snapshotRequestDirectoryPath != null)
{
// Clean up any existing snapshot directory and its contents before starting the test.
if (Directory.Exists(_snapshotRequestDirectoryPath))
{
Directory.Delete(_snapshotRequestDirectoryPath, true);
}

if (shouldSnapshotDirectoryAlreadyExist)
{
// Create a fresh snapshot directory if it's required for the test.
Directory.CreateDirectory(_snapshotRequestDirectoryPath);
}
}
}

private TestHostContext CreateTestHostContext([CallerMemberName] string testName = "")
{
var testHostContext = new TestHostContext(this, testName);
_ec = new Mock<IExecutionContext>();
_ec.Object.Initialize(testHostContext);
return testHostContext;
}
}
@@ -67,7 +67,7 @@ namespace GitHub.Runner.Common.Tests.Worker
new Pipelines.ContextData.DictionaryContextData()
},
};
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, new StringToken(null, null, null, "ubuntu"), sidecarContainers, null, variables, new List<MaskHint>(), resources, context, null, actions, null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, new StringToken(null, null, null, "ubuntu"), sidecarContainers, null, variables, new List<MaskHint>(), resources, context, null, actions, null, null, null, null, null);
return jobRequest;
}
111
src/dev.sh
@@ -14,15 +14,10 @@ DEV_TARGET_RUNTIME=$3

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
LAYOUT_DIR="$SCRIPT_DIR/../_layout"
LAYOUT_TRIMS_DIR="$SCRIPT_DIR/../_layout_trims"
LAYOUT_TRIM_EXTERNALS_DIR="$LAYOUT_TRIMS_DIR/trim_externals"
LAYOUT_TRIM_RUNTIME_DIR="$LAYOUT_TRIMS_DIR/trim_runtime"
LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR="$LAYOUT_TRIMS_DIR/trim_runtime_externals"
DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x"
PACKAGE_DIR="$SCRIPT_DIR/../_package"
PACKAGE_TRIMS_DIR="$SCRIPT_DIR/../_package_trims"
DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
DOTNETSDK_VERSION="6.0.415"
DOTNETSDK_VERSION="6.0.420"
DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"
RUNNER_VERSION=$(cat runnerversion)

@@ -148,48 +143,6 @@ function layout ()

heading "Setup externals folder for $RUNTIME_ID runner's layout"
bash ./Misc/externals.sh $RUNTIME_ID || checkRC externals.sh

heading "Create layout (Trimmed) ..."

rm -Rf "$LAYOUT_TRIMS_DIR"
mkdir -p "$LAYOUT_TRIMS_DIR"
mkdir -p "$LAYOUT_TRIMS_DIR/runtime"
cp -r "$LAYOUT_DIR/bin/." "$LAYOUT_TRIMS_DIR/runtime"
mkdir -p "$LAYOUT_TRIMS_DIR/externals"
cp -r "$LAYOUT_DIR/externals/." "$LAYOUT_TRIMS_DIR/externals"

pushd "$LAYOUT_TRIMS_DIR/runtime" > /dev/null
if [[ ("$CURRENT_PLATFORM" == "windows") ]]; then
sed -i 's/\n$/\r\n/' "$SCRIPT_DIR/Misc/runnercoreassets"
fi

cat "$SCRIPT_DIR/Misc/runnercoreassets" | xargs rm -f
find . -empty -type d -delete
find . -type f > "$LAYOUT_TRIMS_DIR/runnerdotnetruntimeassets"
popd > /dev/null

heading "Create layout with externals trimmed ..."
mkdir -p "$LAYOUT_TRIM_EXTERNALS_DIR"
cp -r "$LAYOUT_DIR/." "$LAYOUT_TRIM_EXTERNALS_DIR/"
rm -Rf "$LAYOUT_TRIM_EXTERNALS_DIR/externals"
echo "Created... $LAYOUT_TRIM_EXTERNALS_DIR"

heading "Create layout with dotnet runtime trimmed ..."
mkdir -p "$LAYOUT_TRIM_RUNTIME_DIR"
cp -r "$LAYOUT_DIR/." "$LAYOUT_TRIM_RUNTIME_DIR/"
pushd "$LAYOUT_TRIM_RUNTIME_DIR/bin" > /dev/null
cat "$LAYOUT_TRIMS_DIR/runnerdotnetruntimeassets" | xargs rm -f
echo "Created... $LAYOUT_TRIM_RUNTIME_DIR"
popd > /dev/null

heading "Create layout with externals and dotnet runtime trimmed ..."
mkdir -p "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR"
cp -r "$LAYOUT_DIR/." "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR/"
rm -Rf "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR/externals"
pushd "$LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR/bin" > /dev/null
cat "$LAYOUT_TRIMS_DIR/runnerdotnetruntimeassets" | xargs rm -f
echo "Created... $LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR"
popd > /dev/null
}

function runtest ()

@@ -226,9 +179,7 @@ function package ()
find "${LAYOUT_DIR}/bin" -type f -name '*.pdb' -delete

mkdir -p "$PACKAGE_DIR"
mkdir -p "$PACKAGE_TRIMS_DIR"
rm -Rf "${PACKAGE_DIR:?}"/*
rm -Rf "${PACKAGE_TRIMS_DIR:?}"/*

pushd "$PACKAGE_DIR" > /dev/null

@@ -246,66 +197,6 @@ function package ()
fi

popd > /dev/null

runner_trim_externals_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}-noexternals"
heading "Packaging ${runner_trim_externals_pkg_name} (Trimmed)"

PACKAGE_TRIM_EXTERNALS_DIR="$PACKAGE_TRIMS_DIR/trim_externals"
mkdir -p "$PACKAGE_TRIM_EXTERNALS_DIR"
pushd "$PACKAGE_TRIM_EXTERNALS_DIR" > /dev/null
if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then
tar_name="${runner_trim_externals_pkg_name}.tar.gz"
echo "Creating $tar_name in ${LAYOUT_TRIM_EXTERNALS_DIR}"
tar -czf "${tar_name}" -C "${LAYOUT_TRIM_EXTERNALS_DIR}" .
elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then
zip_name="${runner_trim_externals_pkg_name}.zip"
echo "Convert ${LAYOUT_TRIM_EXTERNALS_DIR} to Windows style path"
window_path=${LAYOUT_TRIM_EXTERNALS_DIR:1}
window_path=${window_path:0:1}:${window_path:1}
echo "Creating $zip_name in ${window_path}"
$POWERSHELL -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")"
fi
popd > /dev/null

runner_trim_runtime_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}-noruntime"
heading "Packaging ${runner_trim_runtime_pkg_name} (Trimmed)"

PACKAGE_TRIM_RUNTIME_DIR="$PACKAGE_TRIMS_DIR/trim_runtime"
mkdir -p "$PACKAGE_TRIM_RUNTIME_DIR"
pushd "$PACKAGE_TRIM_RUNTIME_DIR" > /dev/null
if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then
tar_name="${runner_trim_runtime_pkg_name}.tar.gz"
echo "Creating $tar_name in ${LAYOUT_TRIM_RUNTIME_DIR}"
tar -czf "${tar_name}" -C "${LAYOUT_TRIM_RUNTIME_DIR}" .
elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then
zip_name="${runner_trim_runtime_pkg_name}.zip"
echo "Convert ${LAYOUT_TRIM_RUNTIME_DIR} to Windows style path"
window_path=${LAYOUT_TRIM_RUNTIME_DIR:1}
window_path=${window_path:0:1}:${window_path:1}
echo "Creating $zip_name in ${window_path}"
$POWERSHELL -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")"
fi
popd > /dev/null

runner_trim_runtime_externals_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}-noruntime-noexternals"
heading "Packaging ${runner_trim_runtime_externals_pkg_name} (Trimmed)"

PACKAGE_TRIM_RUNTIME_EXTERNALS_DIR="$PACKAGE_TRIMS_DIR/trim_runtime_externals"
mkdir -p "$PACKAGE_TRIM_RUNTIME_EXTERNALS_DIR"
pushd "$PACKAGE_TRIM_RUNTIME_EXTERNALS_DIR" > /dev/null
if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then
tar_name="${runner_trim_runtime_externals_pkg_name}.tar.gz"
echo "Creating $tar_name in ${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR}"
tar -czf "${tar_name}" -C "${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR}" .
elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then
zip_name="${runner_trim_runtime_externals_pkg_name}.zip"
echo "Convert ${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR} to Windows style path"
window_path=${LAYOUT_TRIM_RUNTIME_EXTERNALS_DIR:1}
window_path=${window_path:0:1}:${window_path:1}
echo "Creating $zip_name in ${window_path}"
$POWERSHELL -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")"
fi
popd > /dev/null
}

if [[ (! -d "${DOTNETSDK_INSTALLDIR}") || (! -e "${DOTNETSDK_INSTALLDIR}/.${DOTNETSDK_VERSION}") || (! -e "${DOTNETSDK_INSTALLDIR}/dotnet") ]]; then

@@ -1,5 +1,5 @@
{
"sdk": {
"version": "6.0.415"
"version": "6.0.420"
}
}

@@ -1 +1 @@
2.311.0
2.315.0