Compare commits

2 Commits

Author          SHA1        Message                          Date
Ferenc Hammerl  693e594e43  Linting                          2023-11-15 16:50:23 +00:00
Ferenc Hammerl  b7995ff076  Read ACTIONS_RUNNER_TRACE_LEVEL  2023-11-15 14:58:29 +00:00
                            (GITHUB_ACTIONS_RUNNER_TRACE still takes precedence for back compat.)
109 changed files with 3317 additions and 2111 deletions
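The second commit's message describes an environment-variable precedence rule: the new ACTIONS_RUNNER_TRACE_LEVEL is read, but GITHUB_ACTIONS_RUNNER_TRACE still wins when both are set. A minimal shell sketch of that precedence, for illustration only (this is not the runner's actual C# implementation):

```bash
# Illustration of the precedence described in the commit message:
# GITHUB_ACTIONS_RUNNER_TRACE takes priority for back compat; otherwise
# fall back to ACTIONS_RUNNER_TRACE_LEVEL; otherwise tracing stays unset.
trace_level=${GITHUB_ACTIONS_RUNNER_TRACE:-${ACTIONS_RUNNER_TRACE_LEVEL:-}}
echo "effective trace level: ${trace_level:-<unset>}"
```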

@@ -4,13 +4,10 @@
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
"ghcr.io/devcontainers/features/dotnet": {
"version": "6.0.421"
"version": "6.0.415"
},
"ghcr.io/devcontainers/features/node:1": {
"version": "16"
},
"ghcr.io/devcontainers/features/sshd:1": {
"version": "latest"
}
},
"customizations": {

@@ -58,6 +58,29 @@ jobs:
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src
# Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash
shell: bash
run: |
echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
echo "Current Externals hash result: $EXTERNALS_HASH"
NeedUpdate=0
if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
NeedUpdate=1
fi
if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
NeedUpdate=1
fi
exit $NeedUpdate
env:
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
# Run tests
- name: L0
run: |
@@ -80,3 +103,6 @@ jobs:
name: runner-package-${{ matrix.runtime }}
path: |
_package
_package_trims/trim_externals
_package_trims/trim_runtime
_package_trims/trim_runtime_externals

@@ -15,3 +15,4 @@ jobs:
only-labels: "actions-bug"
days-before-stale: 0
days-before-close: 1
close-issue-reason: "completed"

@@ -15,3 +15,4 @@ jobs:
only-labels: "actions-feature"
days-before-stale: 0
days-before-close: 1
close-issue-reason: "completed"

@@ -84,20 +84,221 @@ jobs:
git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
git push --set-upstream origin $branch_name
create-pr:
needs: [dotnet-update]
build-hashes:
if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
runs-on: ubuntu-latest
needs: [dotnet-update]
outputs:
# pass outputs from this job to create-pr for use
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
NEEDS_HASH_UPDATE: ${{ steps.compute-hash.outputs.NEED_UPDATE }}
strategy:
fail-fast: false
matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
include:
- runtime: linux-x64
os: ubuntu-latest
devScript: ./dev.sh
- runtime: linux-arm64
os: ubuntu-latest
devScript: ./dev.sh
- runtime: linux-arm
os: ubuntu-latest
devScript: ./dev.sh
- runtime: osx-x64
os: macOS-latest
devScript: ./dev.sh
- runtime: osx-arm64
os: macOS-latest
devScript: ./dev.sh
- runtime: win-x64
os: windows-2019
devScript: ./dev
- runtime: win-arm64
os: windows-latest
devScript: ./dev
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
with:
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
# Build runner layout
- name: Build & Layout Release
run: |
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src
# Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash
id: compute-hash
continue-on-error: true
shell: bash
run: |
echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
echo "Current Externals hash result: $EXTERNALS_HASH"
NeedUpdate=0
if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT
NeedUpdate=1
fi
if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT
NeedUpdate=1
fi
echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT
env:
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
- name: update hash
if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
shell: bash
run: |
ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}
if [ -n "$ExternalHash" ]; then
echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }}
fi
if [ -n "$DotNetRuntimeHash" ]; then
echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
fi
- name: cache updated hashes
if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
uses: actions/cache/save@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/${{ matrix.runtime }}
./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
hash-update:
needs: [build-hashes]
if: ${{ needs.build-hashes.outputs.NEEDS_HASH_UPDATE == 1 }}
outputs:
# pass outputs from this job to create-pr for use
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.build-hashes.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
- name: Restore cached hashes - linux-x64
id: cache-restore-linux-x64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/linux-x64
./src/Misc/contentHash/dotnetRuntime/linux-x64
key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - linux-arm64
id: cache-restore-linux-arm64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/linux-arm64
./src/Misc/contentHash/dotnetRuntime/linux-arm64
key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - linux-arm
id: cache-restore-linux-arm
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/linux-arm
./src/Misc/contentHash/dotnetRuntime/linux-arm
key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - osx-x64
id: cache-restore-osx-x64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/osx-x64
./src/Misc/contentHash/dotnetRuntime/osx-x64
key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - osx-arm64
id: cache-restore-osx-arm64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/osx-arm64
./src/Misc/contentHash/dotnetRuntime/osx-arm64
key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - win-x64
id: cache-restore-win-x64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/win-x64
./src/Misc/contentHash/dotnetRuntime/win-x64
key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Restore cached hashes - win-arm64
id: cache-restore-win-arm64
uses: actions/cache/restore@v3
with:
enableCrossOsArchive: true
path: |
./src/Misc/contentHash/externals/win-arm64
./src/Misc/contentHash/dotnetRuntime/win-arm64
key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
- name: Fetch cached computed hashes
if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' ||
steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' ||
steps.cache-restore-linux-arm.outputs.cache-hit == 'true' ||
steps.cache-restore-win-x64.outputs.cache-hit == 'true' ||
steps.cache-restore-win-arm64.outputs.cache-hit == 'true' ||
steps.cache-restore-osx-x64.outputs.cache-hit == 'true' ||
steps.cache-restore-osx-arm64.outputs.cache-hit == 'true'
shell: bash
run: |
Environments=( "linux-x64" "linux-arm64" "linux-arm" "win-x64" "win-arm64" "osx-x64" "osx-arm64" )
git config --global user.name "github-actions[bot]"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git commit -a -m "Update computed hashes"
git push --set-upstream origin feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
create-pr:
needs: [hash-update]
outputs:
# pass outputs from this job to run-tests for use
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
- name: Create Pull Request
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
---

@@ -53,6 +53,27 @@ jobs:
win-arm64-sha: ${{ steps.sha.outputs.win-arm64-sha256 }}
osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }}
linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
win-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-arm64-sha256 }}
osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }}
linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
win-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-arm64-sha256 }}
osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }}
linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
win-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-arm64-sha256 }}
osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
osx-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }}
strategy:
matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64, win-arm64 ]
@@ -115,6 +136,76 @@ jobs:
id: sha
name: Compute SHA256
working-directory: _package
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noexternals
name: Compute SHA256
working-directory: _package_trims/trim_externals
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noruntime
name: Compute SHA256
working-directory: _package_trims/trim_runtime
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noruntime_noexternals
name: Compute SHA256
working-directory: _package_trims/trim_runtime_externals
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
if: matrix.runtime == 'win-x64' || matrix.runtime == 'win-arm64'
uses: actions/github-script@0.3.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
console.log(trimmedPackages)
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
if: matrix.runtime != 'win-x64' && matrix.runtime != 'win-arm64'
uses: actions/github-script@0.3.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
console.log(trimmedPackages)
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
# Upload runner package tar.gz/zip as artifact.
# Since each package name is unique, we don't need to put ${{matrix}} info into the artifact name
@@ -125,6 +216,10 @@ jobs:
name: runner-packages
path: |
_package
_package_trims/trim_externals
_package_trims/trim_runtime
_package_trims/trim_runtime_externals
${{ matrix.runtime }}-trimmedpackages.json
release:
needs: build
@@ -158,11 +253,33 @@ jobs:
releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-arm64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
console.log(releaseNote)
core.setOutput('version', runnerVersion);
core.setOutput('note', releaseNote);
- name: Validate Packages HASH
working-directory: _package
run: |
ls -l
echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
@@ -192,7 +309,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_content_type: application/octet-stream
@@ -202,7 +319,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
asset_path: ${{ github.workspace }}/_package/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
asset_content_type: application/octet-stream
@@ -212,7 +329,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -222,7 +339,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -232,7 +349,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -242,7 +359,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -252,10 +369,298 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim externals)
- name: Upload Release Asset (win-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_content_type: application/octet-stream
# Upload release assets (trim externals)
- name: Upload Release Asset (win-arm64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim runtime)
- name: Upload Release Asset (win-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_content_type: application/octet-stream
# Upload release assets (trim runtime)
- name: Upload Release Asset (win-arm64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim runtime and externals)
- name: Upload Release Asset (win-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_content_type: application/octet-stream
# Upload release assets (trim runtime and externals)
- name: Upload Release Asset (win-arm64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trimmedpackages.json)
- name: Upload Release Asset (win-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
# Upload release assets (trimmedpackages.json)
- name: Upload Release Asset (win-arm64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/win-arm64-trimmedpackages.json
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
publish-image:
needs: release
runs-on: ubuntu-latest

@@ -7,10 +7,8 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
- For GitHub.com
- The runner needs to access `https://api.github.com` for downloading actions.
- The runner needs to access `https://codeload.github.com` for downloading actions tar.gz/zip.
- The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
- The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
- The runner needs to access `https://results-receiver.actions.githubusercontent.com/.../` for reporting progress and uploading logs during a workflow job execution.
---
**NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).
@@ -18,15 +16,12 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
```
curl -v https://api.github.com/zen
curl -v https://codeload.github.com/_ping
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
curl -v https://results-receiver.actions.githubusercontent.com/health
```
- For GitHub Enterprise Server
- The runner needs to access `https://[hostname]/api/v3` for downloading actions.
- The runner needs to access `https://codeload.[hostname]/_ping` for downloading actions tar.gz/zip.
- The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
- The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.
@@ -34,7 +29,6 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
```
curl -v https://[hostname]/api/v3/zen
curl -v https://codeload.[hostname]/_ping
curl -v https://[hostname]/_services/vstoken/_apis/health
curl -v https://[hostname]/_services/pipelines/_apis/health
```
@@ -50,10 +44,6 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
- Ping api.github.com or myGHES.com using dotnet
- Make HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, check response headers contains `X-GitHub-Request-Id`
---
- DNS lookup for codeload.github.com or codeload.myGHES.com using dotnet
- Ping codeload.github.com or codeload.myGHES.com using dotnet
- Make HTTP GET to https://codeload.github.com/_ping or https://codeload.myGHES.com/_ping using dotnet, check response headers contains `X-GitHub-Request-Id`
---
- DNS lookup for vstoken.actions.githubusercontent.com using dotnet
- Ping vstoken.actions.githubusercontent.com using dotnet
- Make HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
@@ -62,10 +52,6 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
- Ping pipelines.actions.githubusercontent.com using dotnet
- Make HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
- Make HTTP POST to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
---
- DNS lookup for results-receiver.actions.githubusercontent.com using dotnet
- Ping results-receiver.actions.githubusercontent.com using dotnet
- Make HTTP GET to https://results-receiver.actions.githubusercontent.com/health using dotnet, check response headers contains `X-GitHub-Request-Id`
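The checks listed above can be approximated from a shell when the runner's built-in diagnostics are unavailable. A minimal sketch using dig, ping, and curl as stand-ins for the dotnet-based checks (assumes those tools are installed; swap in the GHES hostname and the documented endpoint where applicable):

```bash
# Approximate the documented checks: DNS lookup, ping, HTTP GET, header inspection.
host=results-receiver.actions.githubusercontent.com
dig +short "$host"                      # DNS lookup
ping -c 1 "$host" || true               # ICMP may be filtered; a failure here is not conclusive
# HTTP GET; the response headers should contain X-GitHub-Request-Id
curl -fsS -o /dev/null -D - "https://$host/health" | grep -i 'x-github-request-id'
```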
## How to fix the issue?

@@ -42,7 +42,6 @@ If you are having trouble connecting, try these steps:
- https://api.github.com/
- https://vstoken.actions.githubusercontent.com/_apis/health
- https://pipelines.actions.githubusercontent.com/_apis/health
- https://results-receiver.actions.githubusercontent.com/health
- For GHES/GHAE
- https://myGHES.com/_services/vstoken/_apis/health
- https://myGHES.com/_services/pipelines/_apis/health

@@ -4,9 +4,9 @@
Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.
The runner carries its own copy of node.js executable under `<runner_root>/externals/node20/`.
The runner carries its own copy of node.js executable under `<runner_root>/externals/node16/`.
All javascript-based Actions will get executed by the built-in `node` at `<runner_root>/externals/node20/`.
All javascript-based Actions will get executed by the built-in `node` at `<runner_root>/externals/node16/`.
> Not the `node` from `$PATH`
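A quick way to confirm which node the runner will actually use is to look inside its externals folder. A minimal sketch, assuming a default install path in RUNNER_ROOT and adjusting the node folder (node16 or node20) to the installed runner version:

```bash
# Show the runner's bundled node versus whatever `node` is on $PATH.
RUNNER_ROOT=${RUNNER_ROOT:-$HOME/actions-runner}    # assumption: default install location
ls "$RUNNER_ROOT/externals/"                        # expect node16/ or node20/ here
"$RUNNER_ROOT/externals/node20/bin/node" --version  # the node that executes javascript actions
command -v node && node --version                   # the $PATH node, which the runner does not use
```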

@@ -5,9 +5,9 @@
## Supported Distributions and Versions
x64
- Red Hat Enterprise Linux 7+
- CentOS 7+
- Oracle Linux 7+
- Red Hat Enterprise Linux 7
- CentOS 7
- Oracle Linux 7
- Fedora 29+
- Debian 9+
- Ubuntu 16.04+

@@ -4,9 +4,9 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build
ARG TARGETOS
ARG TARGETARCH
ARG RUNNER_VERSION
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.6.0
ARG DOCKER_VERSION=25.0.4
ARG BUILDX_VERSION=0.13.1
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.4.0
ARG DOCKER_VERSION=24.0.6
ARG BUILDX_VERSION=0.11.2
RUN apt update -y && apt install curl unzip -y
@@ -37,7 +37,6 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
ENV ImageOS=ubuntu22
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \

@@ -1,20 +1,30 @@
## What's Changed
* Trim whitespace in `./Misc/contentHash/dotnetRuntime/*` by @TingluoHuang in https://github.com/actions/runner/pull/2915
* Send os and arch during long poll by @luketomlinson in https://github.com/actions/runner/pull/2913
* Revert "Update default version to node20 (#2844)" by @takost in https://github.com/actions/runner/pull/2918
* Fix telemetry publish from JobServerQueue. by @TingluoHuang in https://github.com/actions/runner/pull/2919
* Use block blob instead of append blob by @yacaovsnc in https://github.com/actions/runner/pull/2924
* Provide detail info on untar failures. by @TingluoHuang in https://github.com/actions/runner/pull/2939
* Bump node.js to 20.8.1 by @TingluoHuang in https://github.com/actions/runner/pull/2945
* Update dotnet sdk to latest version @6.0.415 by @github-actions in https://github.com/actions/runner/pull/2929
* Fix typo in log strings by @rajbos in https://github.com/actions/runner/pull/2695
* feat: add support of arm64 arch runners in service creation script by @tuxity in https://github.com/actions/runner/pull/2606
* Add `buildx` to images by @ajschmidt8 in https://github.com/actions/runner/pull/2901
- Preserve dates when deserializing job message from Run Service by @ericsciple in https://github.com/actions/runner/pull/3269
## New Contributors
* @tuxity made their first contribution in https://github.com/actions/runner/pull/2606
**Full Changelog**: https://github.com/actions/runner/compare/v2.316.0...v2.316.1
**Full Changelog**: https://github.com/actions/runner/compare/v2.310.2...v2.311.0
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
The following snippet needs to be run in `powershell`:
```powershell
``` powershell
# Create a folder under the drive root
mkdir \actions-runner ; cd \actions-runner
# Download the latest runner package
@@ -25,14 +35,12 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
```
## [Pre-release] Windows arm64
**Warning:** Windows arm64 runners are currently in preview status and use [unofficial versions of nodejs](https://unofficial-builds.nodejs.org/). They are not intended for production workflows.
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
The following snippet needs to be run in `powershell`:
```powershell
``` powershell
# Create a folder under the drive root
mkdir \actions-runner ; cd \actions-runner
# Download the latest runner package
@@ -44,7 +52,7 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
## OSX x64
```bash
``` bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -55,7 +63,7 @@ tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
## OSX arm64 (Apple silicon)
```bash
``` bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -66,7 +74,7 @@ tar xzf ./actions-runner-osx-arm64-<RUNNER_VERSION>.tar.gz
## Linux x64
```bash
``` bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -77,7 +85,7 @@ tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
## Linux arm64
```bash
``` bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -88,7 +96,7 @@ tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
## Linux arm
```bash
``` bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -98,7 +106,6 @@ tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
```
## Using your self hosted runner
For additional details about configuring, running, or shutting down the runner please check out our [product docs.](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners)
## SHA-256 Checksums
@@ -112,3 +119,27 @@ The SHA-256 checksums for the packages included in this build are shown below:
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-arm64_noexternals --><WIN_ARM64_SHA_NOEXTERNALS><!-- END SHA win-arm64_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noexternals --><OSX_ARM64_SHA_NOEXTERNALS><!-- END SHA osx-arm64_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-arm64_noruntime --><WIN_ARM64_SHA_NORUNTIME><!-- END SHA win-arm64_noruntime -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-arm64_noruntime --><OSX_ARM64_SHA_NORUNTIME><!-- END SHA osx-arm64_noruntime -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-arm64_noruntime_noexternals --><WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-arm64_noruntime_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noruntime_noexternals --><OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-arm64_noruntime_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->
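A downloaded package can be verified against its published checksum locally with the same shasum invocation the release workflow uses. A minimal sketch, with the placeholders standing in for the values published in the release notes:

```bash
# Replace <LINUX_X64_SHA> and <RUNNER_VERSION> with the values from the release.
echo "<LINUX_X64_SHA>  actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz" | shasum -a 256 -c
```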

@@ -1 +1 @@
54d95a44d118dba852395991224a6b9c1abe916858c87138656f80c619e85331
531b31914e525ecb12cc5526415bc70a112ebc818f877347af1a231011f539c5

@@ -1 +1 @@
68015af17f06a824fa478e62ae7393766ce627fd5599ab916432a14656a19a52
722dd5fa5ecc207fcccf67f6e502d689f2119d8117beff2041618fba17dc66a4

@@ -1 +1 @@
a2628119ca419cb54e279103ffae7986cdbd0814d57c73ff0dc74c38be08b9ae
8ca75c76e15ab9dc7fe49a66c5c74e171e7fabd5d26546fda8931bd11bff30f9

@@ -1 +1 @@
de71ca09ead807e1a2ce9df0a5b23eb7690cb71fff51169a77e4c3992be53dda
70496eb1c99b39b3373b5088c95a35ebbaac1098e6c47c8aab94771f3ffbf501

@@ -1 +1 @@
d009e05e6b26d614d65be736a15d1bd151932121c16a9ff1b986deadecc982b9
4f8d48727d535daabcaec814e0dafb271c10625366c78e7e022ca7477a73023f

@@ -1 +1 @@
f730db39c2305800b4653795360ba9c10c68f384a46b85d808f1f9f0ed3c42e4
d54d7428f2b9200a0030365a6a4e174e30a1b29b922f8254dffb2924bd09549d

@@ -1 +1 @@
a35b5722375490e9473cdcccb5e18b41eba3dbf4344fe31abc9821e21f18ea5a
eaa939c45307f46b7003902255b3a2a09287215d710984107667e03ac493eb26

@@ -63,16 +63,17 @@ function acquireExternalTool() {
echo "Curl version: $CURL_VERSION"
# curl -f Fail silently (no output at all) on HTTP errors (H)
# -k Allow connections to SSL sites without certs (H)
# -S Show error. With -s, make curl show errors when they occur
# -L Follow redirects (H)
# -o FILE Write to FILE instead of stdout
# --retry 3 Retries transient errors 3 times (timeouts, 5xx)
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
# Curl version is less than 7.71.0, skipping the --retry-all-errors flag
curl -fSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
else
# Curl version is 7.71.0 or newer, running curl with the --retry-all-errors flag
curl -fSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
fi
# Move the partial file to the download target.
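The version gate above relies on `sort -V` picking the smaller of two version strings. A standalone sketch of the same comparison, assuming a sort implementation that supports -V:

```bash
# version_lt A B succeeds when version A is strictly lower than version B.
version_lt() {
  [ "$1" != "$2" ] && [ "$(printf '%s\n' "$1" "$2" | sort -V | head -n1)" = "$1" ]
}

CURL_VERSION=$(curl --version | head -n1 | awk '{print $2}')
if version_lt "$CURL_VERSION" "7.71.0"; then
  echo "curl $CURL_VERSION predates --retry-all-errors (added in curl 7.71.0)"
else
  echo "curl $CURL_VERSION supports --retry-all-errors"
fi
```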

@@ -114,11 +114,6 @@ var runService = function () {
);
stopping = true;
}
} else if (code === 5) {
console.log(
"Runner listener exit with Session Conflict error, stop the service, no retry needed."
);
stopping = true;
} else {
var messagePrefix = "Runner listener exit with undefined return code";
unknownFailureRetryCount++;

@@ -10,7 +10,7 @@ if [ -f ".path" ]; then
echo ".path=${PATH}"
fi
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node20}
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}
# insert anything to setup env when running as a service
# run the host process which keep the listener alive

@@ -135,17 +135,12 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
then
# inspect the open file handles to find the node process
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
nodever="node20"
nodever="node16"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node16
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
then
nodever="node16"
nodever="node12"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
then
nodever="node12"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
fi
fi
if [[ $? -eq 0 && -n "$path" ]]
then

@@ -49,10 +49,5 @@ if %ERRORLEVEL% EQU 4 (
exit /b 1
)
if %ERRORLEVEL% EQU 5 (
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
exit /b 0
)
echo "Exiting after unknown error code: %ERRORLEVEL%"
exit /b 0

@@ -70,9 +70,6 @@ elif [[ $returnCode == 4 ]]; then
"$DIR"/safe_sleep.sh 1
done
exit 2
elif [[ $returnCode == 5 ]]; then
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
exit 0
else
echo "Exiting with unknown error code: ${returnCode}"
exit 0

@@ -38,7 +38,7 @@ runWithManualTrap() {
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
"$DIR"/run-helper.sh $* &
PID=$!
wait $PID
wait -f $PID
returnCode=$?
if [[ $returnCode -eq 2 ]]; then
echo "Restarting runner..."

src/Misc/runnercoreassets (new file)

@@ -0,0 +1,57 @@
actions.runner.plist.template
actions.runner.service.template
checkScripts/downloadCert.js
checkScripts/makeWebRequest.js
darwin.svc.sh.template
hashFiles/index.js
installdependencies.sh
macos-run-invoker.js
Microsoft.IdentityModel.Logging.dll
Microsoft.IdentityModel.Tokens.dll
Minimatch.dll
Newtonsoft.Json.Bson.dll
Newtonsoft.Json.dll
Runner.Common.deps.json
Runner.Common.dll
Runner.Common.pdb
Runner.Listener
Runner.Listener.deps.json
Runner.Listener.dll
Runner.Listener.exe
Runner.Listener.pdb
Runner.Listener.runtimeconfig.json
Runner.PluginHost
Runner.PluginHost.deps.json
Runner.PluginHost.dll
Runner.PluginHost.exe
Runner.PluginHost.pdb
Runner.PluginHost.runtimeconfig.json
Runner.Plugins.deps.json
Runner.Plugins.dll
Runner.Plugins.pdb
Runner.Sdk.deps.json
Runner.Sdk.dll
Runner.Sdk.pdb
Runner.Worker
Runner.Worker.deps.json
Runner.Worker.dll
Runner.Worker.exe
Runner.Worker.pdb
Runner.Worker.runtimeconfig.json
RunnerService.exe
RunnerService.exe.config
RunnerService.js
RunnerService.pdb
runsvc.sh
Sdk.deps.json
Sdk.dll
Sdk.pdb
System.IdentityModel.Tokens.Jwt.dll
System.Net.Http.Formatting.dll
System.Security.Cryptography.Pkcs.dll
System.Security.Cryptography.ProtectedData.dll
System.ServiceProcess.ServiceController.dll
systemd.svc.sh.template
update.cmd.template
update.sh.template
YamlDotNet.dll

View File

@@ -0,0 +1,270 @@
api-ms-win-core-console-l1-1-0.dll
api-ms-win-core-console-l1-2-0.dll
api-ms-win-core-datetime-l1-1-0.dll
api-ms-win-core-debug-l1-1-0.dll
api-ms-win-core-errorhandling-l1-1-0.dll
api-ms-win-core-fibers-l1-1-0.dll
api-ms-win-core-file-l1-1-0.dll
api-ms-win-core-file-l1-2-0.dll
api-ms-win-core-file-l2-1-0.dll
api-ms-win-core-handle-l1-1-0.dll
api-ms-win-core-heap-l1-1-0.dll
api-ms-win-core-interlocked-l1-1-0.dll
api-ms-win-core-libraryloader-l1-1-0.dll
api-ms-win-core-localization-l1-2-0.dll
api-ms-win-core-memory-l1-1-0.dll
api-ms-win-core-namedpipe-l1-1-0.dll
api-ms-win-core-processenvironment-l1-1-0.dll
api-ms-win-core-processthreads-l1-1-0.dll
api-ms-win-core-processthreads-l1-1-1.dll
api-ms-win-core-profile-l1-1-0.dll
api-ms-win-core-rtlsupport-l1-1-0.dll
api-ms-win-core-string-l1-1-0.dll
api-ms-win-core-synch-l1-1-0.dll
api-ms-win-core-synch-l1-2-0.dll
api-ms-win-core-sysinfo-l1-1-0.dll
api-ms-win-core-timezone-l1-1-0.dll
api-ms-win-core-util-l1-1-0.dll
api-ms-win-crt-conio-l1-1-0.dll
api-ms-win-crt-convert-l1-1-0.dll
api-ms-win-crt-environment-l1-1-0.dll
api-ms-win-crt-filesystem-l1-1-0.dll
api-ms-win-crt-heap-l1-1-0.dll
api-ms-win-crt-locale-l1-1-0.dll
api-ms-win-crt-math-l1-1-0.dll
api-ms-win-crt-multibyte-l1-1-0.dll
api-ms-win-crt-private-l1-1-0.dll
api-ms-win-crt-process-l1-1-0.dll
api-ms-win-crt-runtime-l1-1-0.dll
api-ms-win-crt-stdio-l1-1-0.dll
api-ms-win-crt-string-l1-1-0.dll
api-ms-win-crt-time-l1-1-0.dll
api-ms-win-crt-utility-l1-1-0.dll
clrcompression.dll
clretwrc.dll
clrjit.dll
coreclr.dll
createdump
createdump.exe
dbgshim.dll
hostfxr.dll
hostpolicy.dll
libclrjit.dylib
libclrjit.so
libcoreclr.dylib
libcoreclr.so
libcoreclrtraceptprovider.so
libdbgshim.dylib
libdbgshim.so
libhostfxr.dylib
libhostfxr.so
libhostpolicy.dylib
libhostpolicy.so
libmscordaccore.dylib
libmscordaccore.so
libmscordbi.dylib
libmscordbi.so
Microsoft.CSharp.dll
Microsoft.DiaSymReader.Native.amd64.dll
Microsoft.DiaSymReader.Native.arm64.dll
Microsoft.VisualBasic.Core.dll
Microsoft.VisualBasic.dll
Microsoft.Win32.Primitives.dll
Microsoft.Win32.Registry.dll
mscordaccore.dll
mscordaccore_amd64_amd64_6.0.522.21309.dll
mscordaccore_arm64_arm64_6.0.522.21309.dll
mscordaccore_amd64_amd64_6.0.1322.58009.dll
mscordaccore_amd64_amd64_6.0.2023.32017.dll
mscordaccore_amd64_amd64_6.0.2223.42425.dll
mscordaccore_amd64_amd64_6.0.2323.48002.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll
mscorrc.dll
msquic.dll
netstandard.dll
SOS_README.md
System.AppContext.dll
System.Buffers.dll
System.Collections.Concurrent.dll
System.Collections.dll
System.Collections.Immutable.dll
System.Collections.NonGeneric.dll
System.Collections.Specialized.dll
System.ComponentModel.Annotations.dll
System.ComponentModel.DataAnnotations.dll
System.ComponentModel.dll
System.ComponentModel.EventBasedAsync.dll
System.ComponentModel.Primitives.dll
System.ComponentModel.TypeConverter.dll
System.Configuration.dll
System.Console.dll
System.Core.dll
System.Data.Common.dll
System.Data.DataSetExtensions.dll
System.Data.dll
System.Diagnostics.Contracts.dll
System.Diagnostics.Debug.dll
System.Diagnostics.DiagnosticSource.dll
System.Diagnostics.FileVersionInfo.dll
System.Diagnostics.Process.dll
System.Diagnostics.StackTrace.dll
System.Diagnostics.TextWriterTraceListener.dll
System.Diagnostics.Tools.dll
System.Diagnostics.TraceSource.dll
System.Diagnostics.Tracing.dll
System.dll
System.Drawing.dll
System.Drawing.Primitives.dll
System.Dynamic.Runtime.dll
System.Formats.Asn1.dll
System.Globalization.Calendars.dll
System.Globalization.dll
System.Globalization.Extensions.dll
System.Globalization.Native.dylib
System.Globalization.Native.so
System.IO.Compression.Brotli.dll
System.IO.Compression.dll
System.IO.Compression.FileSystem.dll
System.IO.Compression.Native.a
System.IO.Compression.Native.dll
System.IO.Compression.Native.dylib
System.IO.Compression.Native.so
System.IO.Compression.ZipFile.dll
System.IO.dll
System.IO.FileSystem.AccessControl.dll
System.IO.FileSystem.dll
System.IO.FileSystem.DriveInfo.dll
System.IO.FileSystem.Primitives.dll
System.IO.FileSystem.Watcher.dll
System.IO.IsolatedStorage.dll
System.IO.MemoryMappedFiles.dll
System.IO.Pipes.AccessControl.dll
System.IO.Pipes.dll
System.IO.UnmanagedMemoryStream.dll
System.Linq.dll
System.Linq.Expressions.dll
System.Linq.Parallel.dll
System.Linq.Queryable.dll
System.Memory.dll
System.Native.a
System.Native.dylib
System.Native.so
System.Net.dll
System.Net.Http.dll
System.Net.Http.Json.dll
System.Net.Http.Native.a
System.Net.Http.Native.dylib
System.Net.Http.Native.so
System.Net.HttpListener.dll
System.Net.Mail.dll
System.Net.NameResolution.dll
System.Net.NetworkInformation.dll
System.Net.Ping.dll
System.Net.Primitives.dll
System.Net.Quic.dll
System.Net.Requests.dll
System.Net.Security.dll
System.Net.Security.Native.a
System.Net.Security.Native.dylib
System.Net.Security.Native.so
System.Net.ServicePoint.dll
System.Net.Sockets.dll
System.Net.WebClient.dll
System.Net.WebHeaderCollection.dll
System.Net.WebProxy.dll
System.Net.WebSockets.Client.dll
System.Net.WebSockets.dll
System.Numerics.dll
System.Numerics.Vectors.dll
System.ObjectModel.dll
System.Private.CoreLib.dll
System.Private.DataContractSerialization.dll
System.Private.Uri.dll
System.Private.Xml.dll
System.Private.Xml.Linq.dll
System.Reflection.DispatchProxy.dll
System.Reflection.dll
System.Reflection.Emit.dll
System.Reflection.Emit.ILGeneration.dll
System.Reflection.Emit.Lightweight.dll
System.Reflection.Extensions.dll
System.Reflection.Metadata.dll
System.Reflection.Primitives.dll
System.Reflection.TypeExtensions.dll
System.Resources.Reader.dll
System.Resources.ResourceManager.dll
System.Resources.Writer.dll
System.Runtime.CompilerServices.Unsafe.dll
System.Runtime.CompilerServices.VisualC.dll
System.Runtime.dll
System.Runtime.Extensions.dll
System.Runtime.Handles.dll
System.Runtime.InteropServices.dll
System.Runtime.InteropServices.RuntimeInformation.dll
System.Runtime.InteropServices.WindowsRuntime.dll
System.Runtime.Intrinsics.dll
System.Runtime.Loader.dll
System.Runtime.Numerics.dll
System.Runtime.Serialization.dll
System.Runtime.Serialization.Formatters.dll
System.Runtime.Serialization.Json.dll
System.Runtime.Serialization.Primitives.dll
System.Runtime.Serialization.Xml.dll
System.Runtime.WindowsRuntime.dll
System.Runtime.WindowsRuntime.UI.Xaml.dll
System.Security.AccessControl.dll
System.Security.Claims.dll
System.Security.Cryptography.Algorithms.dll
System.Security.Cryptography.Cng.dll
System.Security.Cryptography.Csp.dll
System.Security.Cryptography.Encoding.dll
System.Security.Cryptography.Native.Apple.a
System.Security.Cryptography.Native.Apple.dylib
System.Security.Cryptography.Native.OpenSsl.a
System.Security.Cryptography.Native.OpenSsl.dylib
System.Security.Cryptography.Native.OpenSsl.so
System.Security.Cryptography.OpenSsl.dll
System.Security.Cryptography.Primitives.dll
System.Security.Cryptography.X509Certificates.dll
System.Security.Cryptography.XCertificates.dll
System.Security.dll
System.Security.Principal.dll
System.Security.Principal.Windows.dll
System.Security.SecureString.dll
System.ServiceModel.Web.dll
System.ServiceProcess.dll
System.Text.Encoding.CodePages.dll
System.Text.Encoding.dll
System.Text.Encoding.Extensions.dll
System.Text.Encodings.Web.dll
System.Text.Json.dll
System.Text.RegularExpressions.dll
System.Threading.Channels.dll
System.Threading.dll
System.Threading.Overlapped.dll
System.Threading.Tasks.Dataflow.dll
System.Threading.Tasks.dll
System.Threading.Tasks.Extensions.dll
System.Threading.Tasks.Parallel.dll
System.Threading.Thread.dll
System.Threading.ThreadPool.dll
System.Threading.Timer.dll
System.Transactions.dll
System.Transactions.Local.dll
System.ValueTuple.dll
System.Web.dll
System.Web.HttpUtility.dll
System.Windows.dll
System.Xml.dll
System.Xml.Linq.dll
System.Xml.ReaderWriter.dll
System.Xml.Serialization.dll
System.Xml.XDocument.dll
System.Xml.XmlDocument.dll
System.Xml.XmlSerializer.dll
System.Xml.XPath.dll
System.Xml.XPath.XDocument.dll
ucrtbase.dll
WindowsBase.dll

View File

@@ -0,0 +1,24 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]

View File

@@ -0,0 +1,24 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]

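The two manifests above describe size-trimmed runner packages keyed by which contents ("dotnetRuntime", "externals") were stripped out of the archive. A minimal sketch of that shape, assuming System.Text.Json and hypothetical class names rather than the runner's actual contract types:

using System.Collections.Generic;
using System.Text.Json;

// Hypothetical POCO mirroring the manifest fields above; the runner's real contract classes may differ.
public sealed class TrimmedPackageManifest
{
    public string HashValue { get; set; }
    public string DownloadUrl { get; set; }
    // Keys are the trimmed content names ("dotnetRuntime", "externals"); values are their hashes.
    public Dictionary<string, string> TrimmedContents { get; set; }
}

public static class TrimmedPackageManifestReader
{
    // Parses a manifest after the <...> placeholders have been substituted at release time.
    public static List<TrimmedPackageManifest> Parse(string json) =>
        JsonSerializer.Deserialize<List<TrimmedPackageManifest>>(json,
            new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
}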
View File

@@ -20,12 +20,12 @@ namespace GitHub.Runner.Common
{
private bool _hasConnection;
private VssConnection _connection;
private ActionsRunServerHttpClient _actionsRunServerClient;
private TaskAgentHttpClient _taskAgentClient;
public async Task ConnectAsync(Uri serverUrl, VssCredentials credentials)
{
_connection = await EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(100));
_actionsRunServerClient = _connection.GetClient<ActionsRunServerHttpClient>();
_taskAgentClient = _connection.GetClient<TaskAgentHttpClient>();
_hasConnection = true;
}
@@ -42,7 +42,7 @@ namespace GitHub.Runner.Common
CheckConnection();
var jobMessage = RetryRequest<AgentJobRequestMessage>(async () =>
{
return await _actionsRunServerClient.GetJobMessageAsync(id, cancellationToken);
return await _taskAgentClient.GetJobMessageAsync(id, cancellationToken);
}, cancellationToken);
return jobMessage;

View File

@@ -17,14 +17,7 @@ namespace GitHub.Runner.Common
{
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken);
Task DeleteSessionAsync(CancellationToken cancellationToken);
Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);
Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials);
Task ForceRefreshConnection(VssCredentials credentials);
Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate);
}
public sealed class BrokerServer : RunnerService, IBrokerServer
@@ -51,53 +44,13 @@ namespace GitHub.Runner.Common
}
}
public async Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken)
public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate)
{
CheckConnection();
var jobMessage = await _brokerHttpClient.CreateSessionAsync(session, cancellationToken);
var jobMessage = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken);
return jobMessage;
}
public Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
{
CheckConnection();
var brokerSession = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException);
return brokerSession;
}
public async Task DeleteSessionAsync(CancellationToken cancellationToken)
{
CheckConnection();
await _brokerHttpClient.DeleteSessionAsync(cancellationToken);
}
public Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials)
{
if (_brokerUri != serverUri || !_hasConnection)
{
return ConnectAsync(serverUri, credentials);
}
return Task.CompletedTask;
}
public Task ForceRefreshConnection(VssCredentials credentials)
{
return ConnectAsync(_brokerUri, credentials);
}
public bool ShouldRetryException(Exception ex)
{
if (ex is AccessDeniedException ade && ade.ErrorCode == 1)
{
return false;
}
return true;
}
}
}

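The GetRunnerMessageAsync calls above funnel through a RetryRequest helper that takes an optional shouldRetry predicate; ShouldRetryException stops retrying when the broker returns AccessDeniedException with ErrorCode 1. A minimal sketch of that pattern, assuming a fixed attempt count and linear backoff rather than the runner's actual policy:

using System;
using System.Threading;
using System.Threading.Tasks;

static class RetryHelper
{
    // Retries an async action until it succeeds, the attempts run out, or the predicate
    // declares the exception non-retryable. Attempt count and delay are illustrative assumptions.
    public static async Task<T> RetryRequestAsync<T>(
        Func<Task<T>> action,
        CancellationToken token,
        int maxAttempts = 3,
        Func<Exception, bool> shouldRetry = null)
    {
        shouldRetry ??= _ => true;
        for (var attempt = 1; ; attempt++)
        {
            token.ThrowIfCancellationRequested();
            try
            {
                return await action();
            }
            catch (Exception ex) when (attempt < maxAttempts && shouldRetry(ex))
            {
                // Back off briefly before the next attempt; the real client may add jitter.
                await Task.Delay(TimeSpan.FromSeconds(attempt), token);
            }
        }
    }
}

A caller in the spirit of the hunk above would pass a predicate that returns false for access-denied responses, mirroring the ShouldRetryException check, so those failures surface immediately instead of being retried.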
View File

@@ -153,7 +153,6 @@ namespace GitHub.Runner.Common
public const int RetryableError = 2;
public const int RunnerUpdating = 3;
public const int RunOnceRunnerUpdating = 4;
public const int SessionConflict = 5;
}
public static class Features
@@ -181,9 +180,6 @@ namespace GitHub.Runner.Common
public static readonly string DeprecatedNodeVersion = "node16";
public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/";
public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings";
public static readonly string EnforcedNode16DetectedAfterEndOfLife = "The following actions uses Node.js version which is deprecated and will be forced to run on node20: {0}. For more info: https://github.blog/changelog/2024-03-07-github-actions-all-actions-will-run-on-node20-instead-of-node16-by-default/";
public static readonly string EnforcedNode16DetectedAfterEndOfLifeEnvVariable = "Node20ForceActionsWarnings";
}
public static class RunnerEvent
@@ -255,19 +251,17 @@ namespace GitHub.Runner.Common
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
public static readonly string ManualForceActionsToNode20 = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE20";
}
public static class Agent
{
public static readonly string ToolsDirectory = "agent.ToolsDirectory";
// Set this env var to "node16" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
public static readonly string ActionArchiveCacheDirectory = "ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE";
public static readonly string ManualForceActionsToNode20 = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE20";
}
public static class System

View File

@@ -200,10 +200,6 @@ namespace GitHub.Runner.Common
{
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
}
else
{
_userAgents.Add(new ProductInfoHeaderValue("HttpProxyConfigured", bool.TrueString));
}
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{

View File

@@ -19,7 +19,7 @@ namespace GitHub.Runner.Common
TaskCompletionSource<int> JobRecordUpdated { get; }
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
Task ShutdownAsync();
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false);
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
@@ -74,7 +74,6 @@ namespace GitHub.Runner.Common
private readonly List<JobTelemetry> _jobTelemetries = new();
private bool _queueInProcess = false;
private bool _resultsServiceOnly = false;
private int _resultsServiceExceptionsCount = 0;
private Stopwatch _resultsUploadTimer = new();
private Stopwatch _actionsUploadTimer = new();
@@ -105,10 +104,11 @@ namespace GitHub.Runner.Common
_resultsServer = hostContext.GetService<IResultsServer>();
}
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false)
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false)
{
Trace.Entering();
_resultsServiceOnly = resultsServiceOnly;
_enableTelemetry = enableTelemetry;
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
@@ -134,15 +134,9 @@ namespace GitHub.Runner.Common
{
liveConsoleFeedUrl = feedStreamUrl;
}
jobRequest.Variables.TryGetValue("system.github.results_upload_with_sdk", out VariableValue resultsUseSdkVariable);
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken, StringUtil.ConvertToBoolean(resultsUseSdkVariable?.Value));
_resultsClientInitiated = true;
}
// Enable telemetry if we have both results service and actions service
if (_resultsClientInitiated && !_resultsServiceOnly)
{
_enableTelemetry = true;
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
_resultsClientInitiated = true;
}
if (_queueInProcess)
@@ -557,10 +551,6 @@ namespace GitHub.Runner.Common
{
await UploadSummaryFile(file);
}
if (string.Equals(file.Type, CoreAttachmentType.ResultsDiagnosticLog, StringComparison.OrdinalIgnoreCase))
{
await UploadResultsDiagnosticLogsFile(file);
}
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
{
if (file.RecordId != _jobTimelineRecordId)
@@ -580,9 +570,9 @@ namespace GitHub.Runner.Common
Trace.Info("Catch exception during file upload to results, keep going since the process is best effort.");
Trace.Error(ex);
errorCount++;
_resultsServiceExceptionsCount++;
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs
if (!_resultsServiceOnly && _resultsServiceExceptionsCount > 3)
if (!_resultsServiceOnly)
{
_resultsClientInitiated = false;
SendResultsTelemetry(ex);
@@ -932,17 +922,6 @@ namespace GitHub.Runner.Common
await UploadResultsFile(file, summaryHandler);
}
private async Task UploadResultsDiagnosticLogsFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting to upload diagnostic logs file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler diagnosticLogsHandler = async (file) =>
{
await _resultsServer.CreateResultsDiagnosticLogsAsync(file.PlanId, file.JobId, file.Path, CancellationToken.None);
};
await UploadResultsFile(file, diagnosticLogsHandler);
}
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");

View File

@@ -19,7 +19,7 @@ namespace GitHub.Runner.Common
[ServiceLocator(Default = typeof(ResultServer))]
public interface IResultsServer : IRunnerService, IAsyncDisposable
{
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk);
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);
Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
@@ -35,8 +35,6 @@ namespace GitHub.Runner.Common
Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken);
}
public sealed class ResultServer : RunnerService, IResultsServer
@@ -53,9 +51,9 @@ namespace GitHub.Runner.Common
private String _liveConsoleFeedUrl;
private string _token;
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk)
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
{
this._resultsClient = CreateHttpClient(uri, token, useSdk);
this._resultsClient = CreateHttpClient(uri, token);
_token = token;
if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
@@ -65,7 +63,7 @@ namespace GitHub.Runner.Common
}
}
public ResultsHttpClient CreateHttpClient(Uri uri, string token, bool useSdk)
public ResultsHttpClient CreateHttpClient(Uri uri, string token)
{
// Using default 100 timeout
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
@@ -82,7 +80,7 @@ namespace GitHub.Runner.Common
var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true, useSdk: useSdk);
return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true);
}
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
@@ -143,18 +141,6 @@ namespace GitHub.Runner.Common
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file,
CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsDiagnosticLogsAsync(planId, jobId, file,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);

View File

@@ -5,7 +5,6 @@ using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
@@ -61,7 +60,7 @@ namespace GitHub.Runner.Common
{
CheckConnection();
return RetryRequest<AgentJobRequestMessage>(
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, VarUtil.OS, cancellationToken), cancellationToken,
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken,
shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException);
}

View File

@@ -19,6 +19,13 @@ namespace GitHub.Runner.Common
{
DefaultTraceLevel = TraceLevel.Verbose;
}
else if (int.TryParse(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_TRACE_LEVEL"), out var traceLevel))
{
// force user's TraceLevel to comply with runner TraceLevel enums
traceLevel = Math.Clamp(traceLevel, 0, 5);
DefaultTraceLevel = (TraceLevel)traceLevel;
}
}
[DataMember(EmitDefaultValue = false)]

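The hunk above parses ACTIONS_RUNNER_TRACE_LEVEL and clamps the value before casting it to TraceLevel. A standalone sketch of that parse-and-clamp step, assuming System.Diagnostics.TraceLevel (Off=0 through Verbose=4) and a default of Info when the variable is unset or malformed; the 0..5 clamp bounds in the hunk are the runner's own and are narrowed here to the standard enum range:

using System;
using System.Diagnostics;

static class TraceLevelFromEnvironment
{
    // Reads an integer trace level from the environment and coerces it into a usable TraceLevel.
    public static TraceLevel Resolve(string variableName = "ACTIONS_RUNNER_TRACE_LEVEL")
    {
        if (int.TryParse(Environment.GetEnvironmentVariable(variableName), out var level))
        {
            // Clamp into the enum's defined range so out-of-range user input still maps to a valid level.
            return (TraceLevel)Math.Clamp(level, (int)TraceLevel.Off, (int)TraceLevel.Verbose);
        }

        // Unset or non-numeric values fall back to a moderate default (an assumption, not the runner's).
        return TraceLevel.Info;
    }
}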
View File

@@ -5,7 +5,7 @@ namespace GitHub.Runner.Common.Util
{
public static class NodeUtil
{
private const string _defaultNodeVersion = "node20";
private const string _defaultNodeVersion = "node16";
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16", "node20" });
public static string GetInternalNodeVersion()
{

View File

@@ -24,15 +24,7 @@ namespace GitHub.Runner.Listener
private TimeSpan _getNextMessageRetryInterval;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private VssCredentials _creds;
private TaskAgentSession _session;
private IBrokerServer _brokerServer;
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
private bool _accessTokenRevoked = false;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
public override void Initialize(IHostContext hostContext)
{
@@ -42,140 +34,15 @@ namespace GitHub.Runner.Listener
_brokerServer = HostContext.GetService<IBrokerServer>();
}
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
{
Trace.Entering();
// Settings
var configManager = HostContext.GetService<IConfigurationManager>();
_settings = configManager.LoadSettings();
var serverUrl = _settings.ServerUrlV2;
Trace.Info(_settings);
if (string.IsNullOrEmpty(_settings.ServerUrlV2))
{
throw new InvalidOperationException("ServerUrlV2 is not set");
}
// Create connection.
Trace.Info("Loading Credentials");
var credMgr = HostContext.GetService<ICredentialManager>();
_creds = credMgr.LoadCredentials();
var agent = new TaskAgentReference
{
Id = _settings.AgentId,
Name = _settings.AgentName,
Version = BuildConstants.RunnerPackage.Version,
OSDescription = RuntimeInformation.OSDescription,
};
string sessionName = $"{Environment.MachineName ?? "RUNNER"}";
var taskAgentSession = new TaskAgentSession(sessionName, agent);
string errorMessage = string.Empty;
bool encounteringError = false;
while (true)
{
token.ThrowIfCancellationRequested();
Trace.Info($"Attempt to create session.");
try
{
Trace.Info("Connecting to the Broker Server...");
await _brokerServer.ConnectAsync(new Uri(serverUrl), _creds);
Trace.Info("VssConnection created");
_term.WriteLine();
_term.WriteSuccessMessage("Connected to GitHub");
_term.WriteLine();
_session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);
Trace.Info($"Session created.");
if (encounteringError)
{
_term.WriteLine($"{DateTime.UtcNow:u}: Runner reconnected.");
_sessionCreationExceptionTracker.Clear();
encounteringError = false;
}
return CreateSessionResult.Success;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info("Session creation has been cancelled.");
throw;
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
_accessTokenRevoked = true;
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during create session.");
Trace.Error(ex);
if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
{
// "invalid_client" means the runner registration has been deleted from the server.
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
}
// Check whether we get 401 because the runner registration already removed by the service.
// If the runner registration get deleted, we can't exchange oauth token.
Trace.Error("Test oauth app registration.");
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
}
}
if (!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");
if (ex is TaskAgentSessionConflictException)
{
return CreateSessionResult.SessionConflict;
}
return CreateSessionResult.Failure;
}
if (!encounteringError) //print the message only on the first error
{
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
encounteringError = true;
}
Trace.Info("Sleeping for {0} seconds before retrying.", _sessionCreationRetryInterval.TotalSeconds);
await HostContext.Delay(_sessionCreationRetryInterval, token);
}
}
await RefreshBrokerConnection();
return await Task.FromResult(true);
}
public async Task DeleteSessionAsync()
{
if (_session != null && _session.SessionId != Guid.Empty)
{
if (!_accessTokenRevoked)
{
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
{
await _brokerServer.DeleteSessionAsync(ts.Token);
}
}
else
{
Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
}
}
await Task.CompletedTask;
}
public void OnJobStatus(object sender, JobStatusEventArgs e)
@@ -206,13 +73,12 @@ namespace GitHub.Runner.Listener
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
try
{
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token,
runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
_settings.DisableUpdate,
_getMessagesTokenSource.Token);
_settings.DisableUpdate);
if (message == null)
{
@@ -277,7 +143,7 @@ namespace GitHub.Runner.Listener
}
// re-create VssConnection before next retry
await RefreshBrokerConnectionAsync();
await RefreshBrokerConnection();
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
await HostContext.Delay(_getNextMessageRetryInterval, token);
@@ -307,11 +173,6 @@ namespace GitHub.Runner.Listener
}
}
public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken)
{
await RefreshBrokerConnectionAsync();
}
public async Task DeleteMessageAsync(TaskAgentMessage message)
{
await Task.CompletedTask;
@@ -335,84 +196,12 @@ namespace GitHub.Runner.Listener
}
}
private bool IsSessionCreationExceptionRetriable(Exception ex)
{
if (ex is TaskAgentNotFoundException)
{
Trace.Info("The runner no longer exists on the server. Stopping the runner.");
_term.WriteError("The runner no longer exists on the server. Please reconfigure the runner.");
return false;
}
else if (ex is TaskAgentSessionConflictException)
{
Trace.Info("The session for this runner already exists.");
_term.WriteError("A session for this runner already exists.");
if (_sessionCreationExceptionTracker.ContainsKey(nameof(TaskAgentSessionConflictException)))
{
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)]++;
if (_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] * _sessionCreationRetryInterval.TotalSeconds >= _sessionConflictRetryLimit.TotalSeconds)
{
Trace.Info("The session conflict exception have reached retry limit.");
_term.WriteError($"Stop retry on SessionConflictException after retried for {_sessionConflictRetryLimit.TotalSeconds} seconds.");
return false;
}
}
else
{
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] = 1;
}
Trace.Info("The session conflict exception haven't reached retry limit.");
return true;
}
else if (ex is VssOAuthTokenRequestException && ex.Message.Contains("Current server time is"))
{
Trace.Info("Local clock might be skewed.");
_term.WriteError("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
if (_sessionCreationExceptionTracker.ContainsKey(nameof(VssOAuthTokenRequestException)))
{
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)]++;
if (_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] * _sessionCreationRetryInterval.TotalSeconds >= _clockSkewRetryLimit.TotalSeconds)
{
Trace.Info("The OAuth token request exception have reached retry limit.");
_term.WriteError($"Stopped retrying OAuth token request exception after {_clockSkewRetryLimit.TotalSeconds} seconds.");
return false;
}
}
else
{
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] = 1;
}
Trace.Info("The OAuth token request exception haven't reached retry limit.");
return true;
}
else if (ex is TaskAgentPoolNotFoundException ||
ex is AccessDeniedException ||
ex is VssUnauthorizedException)
{
Trace.Info($"Non-retriable exception: {ex.Message}");
return false;
}
else if (ex is InvalidOperationException)
{
Trace.Info($"Non-retriable exception: {ex.Message}");
return false;
}
else
{
Trace.Info($"Retriable exception: {ex.Message}");
return true;
}
}
private async Task RefreshBrokerConnectionAsync()
private async Task RefreshBrokerConnection()
{
var configManager = HostContext.GetService<IConfigurationManager>();
_settings = configManager.LoadSettings();
if (string.IsNullOrEmpty(_settings.ServerUrlV2))
if (_settings.ServerUrlV2 == null)
{
throw new InvalidOperationException("ServerUrlV2 is not set");
}

View File

@@ -39,7 +39,6 @@ namespace GitHub.Runner.Listener.Check
string githubApiUrl = null;
string actionsTokenServiceUrl = null;
string actionsPipelinesServiceUrl = null;
string resultsReceiverServiceUrl = null;
var urlBuilder = new UriBuilder(url);
if (UrlUtil.IsHostedServer(urlBuilder))
{
@@ -48,7 +47,6 @@ namespace GitHub.Runner.Listener.Check
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
resultsReceiverServiceUrl = "https://results-receiver.actions.githubusercontent.com/health";
}
else
{
@@ -58,31 +56,13 @@ namespace GitHub.Runner.Listener.Check
actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
urlBuilder.Path = "_services/pipelines/_apis/health";
actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
resultsReceiverServiceUrl = string.Empty; // we don't have Results service in GHES yet.
}
var codeLoadUrlBuilder = new UriBuilder(url);
codeLoadUrlBuilder.Host = $"codeload.{codeLoadUrlBuilder.Host}";
codeLoadUrlBuilder.Path = "_ping";
// check github api
checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
// check github codeload
checkTasks.Add(CheckUtil.CheckDns(codeLoadUrlBuilder.Uri.AbsoluteUri));
checkTasks.Add(CheckUtil.CheckPing(codeLoadUrlBuilder.Uri.AbsoluteUri));
checkTasks.Add(HostContext.CheckHttpsGetRequests(codeLoadUrlBuilder.Uri.AbsoluteUri, pat, expectedHeader: "X-GitHub-Request-Id"));
// check results-receiver service
if (!string.IsNullOrEmpty(resultsReceiverServiceUrl))
{
checkTasks.Add(CheckUtil.CheckDns(resultsReceiverServiceUrl));
checkTasks.Add(CheckUtil.CheckPing(resultsReceiverServiceUrl));
checkTasks.Add(HostContext.CheckHttpsGetRequests(resultsReceiverServiceUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
}
// check actions token service
checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));

View File

@@ -546,27 +546,13 @@ namespace GitHub.Runner.Listener
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
var jobServer = await InitializeJobServerAsync(systemConnection);
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
switch (jobServer)
{
case IJobServer js:
{
await LogWorkerProcessUnhandledException(js, message, unhandledExceptionIssue);
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
{
Trace.Info($"Finish job with result 'Failed' due to IOException.");
await ForceFailJob(js, message);
}
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
break;
}
case IRunServer rs:
await ForceFailJob(rs, message, unhandledExceptionIssue);
break;
default:
throw new NotSupportedException($"JobServer type '{jobServer.GetType().Name}' is not supported.");
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
{
Trace.Info($"Finish job with result 'Failed' due to IOException.");
await ForceFailJob(jobServer, message, detailInfo);
}
}
@@ -643,20 +629,6 @@ namespace GitHub.Runner.Listener
Trace.Info("worker process has been killed.");
}
}
catch (Exception ex)
{
// message send failed, this might indicate worker process is already exited or stuck.
Trace.Info($"Job cancel message sending for job {message.JobId} failed, kill running worker. {ex}");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
}
// wait worker to exit
// if worker doesn't exit within timeout, then kill worker.
@@ -1145,65 +1117,77 @@ namespace GitHub.Runner.Listener
}
// log an error issue to job level timeline record
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, Issue issue)
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
{
try
if (server is IJobServer jobServer)
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
try
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
jobRecord.ErrorCount++;
jobRecord.Issues.Add(issue);
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
Trace.Info("Mark the job as failed since the worker crashed");
jobRecord.Result = TaskResult.Failed;
// mark the job as completed so service will pickup the result
jobRecord.State = TimelineRecordState.Completed;
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
}
}
catch (Exception ex)
else
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", detailInfo);
return;
}
}
// raise job completed event to fail the job.
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
{
try
if (server is IJobServer jobServer)
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
}
private async Task ForceFailJob(IRunServer runServer, Pipelines.AgentJobRequestMessage message, Issue issue)
{
try
{
var annotation = issue.ToAnnotation();
var jobAnnotations = new List<Annotation>();
if (annotation.HasValue)
try
{
jobAnnotations.Add(annotation.Value);
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None);
}
catch (Exception ex)
else if (server is IRunServer runServer)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
try
{
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
var jobAnnotations = new List<Annotation>();
if (unhandledAnnotation.HasValue)
{
jobAnnotations.Add(unhandledAnnotation.Value);
}
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
}
else
{
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
}
}

View File

@@ -14,26 +14,16 @@ using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
namespace GitHub.Runner.Listener
{
public enum CreateSessionResult
{
Success,
Failure,
SessionConflict
}
[ServiceLocator(Default = typeof(MessageListener))]
public interface IMessageListener : IRunnerService
{
Task<CreateSessionResult> CreateSessionAsync(CancellationToken token);
Task<Boolean> CreateSessionAsync(CancellationToken token);
Task DeleteSessionAsync();
Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token);
Task DeleteMessageAsync(TaskAgentMessage message);
Task RefreshListenerTokenAsync(CancellationToken token);
void OnJobStatus(object sender, JobStatusEventArgs e);
}
@@ -43,7 +33,6 @@ namespace GitHub.Runner.Listener
private RunnerSettings _settings;
private ITerminal _term;
private IRunnerServer _runnerServer;
private IBrokerServer _brokerServer;
private TaskAgentSession _session;
private TimeSpan _getNextMessageRetryInterval;
private bool _accessTokenRevoked = false;
@@ -53,9 +42,6 @@ namespace GitHub.Runner.Listener
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private VssCredentials _creds;
private bool _isBrokerSession = false;
public override void Initialize(IHostContext hostContext)
{
@@ -63,10 +49,9 @@ namespace GitHub.Runner.Listener
_term = HostContext.GetService<ITerminal>();
_runnerServer = HostContext.GetService<IRunnerServer>();
_brokerServer = hostContext.GetService<IBrokerServer>();
}
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
{
Trace.Entering();
@@ -79,7 +64,7 @@ namespace GitHub.Runner.Listener
// Create connection.
Trace.Info("Loading Credentials");
var credMgr = HostContext.GetService<ICredentialManager>();
_creds = credMgr.LoadCredentials();
VssCredentials creds = credMgr.LoadCredentials();
var agent = new TaskAgentReference
{
@@ -101,7 +86,7 @@ namespace GitHub.Runner.Listener
try
{
Trace.Info("Connecting to the Runner Server...");
await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds);
await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
Trace.Info("VssConnection created");
_term.WriteLine();
@@ -113,15 +98,6 @@ namespace GitHub.Runner.Listener
taskAgentSession,
token);
if (_session.BrokerMigrationMessage != null)
{
Trace.Info("Runner session is in migration mode: Creating Broker session with BrokerBaseUrl: {0}", _session.BrokerMigrationMessage.BrokerBaseUrl);
await _brokerServer.UpdateConnectionIfNeeded(_session.BrokerMigrationMessage.BrokerBaseUrl, _creds);
_session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);
_isBrokerSession = true;
}
Trace.Info($"Session created.");
if (encounteringError)
{
@@ -130,7 +106,7 @@ namespace GitHub.Runner.Listener
encounteringError = false;
}
return CreateSessionResult.Success;
return true;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
@@ -148,13 +124,13 @@ namespace GitHub.Runner.Listener
Trace.Error("Catch exception during create session.");
Trace.Error(ex);
if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
if (ex is VssOAuthTokenRequestException vssOAuthEx && creds.Federated is VssOAuthCredential vssOAuthCred)
{
// "invalid_client" means the runner registration has been deleted from the server.
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
return false;
}
// Check whether we get 401 because the runner registration already removed by the service.
@@ -165,18 +141,14 @@ namespace GitHub.Runner.Listener
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
return false;
}
}
if (!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");
if (ex is TaskAgentSessionConflictException)
{
return CreateSessionResult.SessionConflict;
}
return CreateSessionResult.Failure;
return false;
}
if (!encounteringError) //print the message only on the first error
@@ -200,11 +172,6 @@ namespace GitHub.Runner.Listener
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
{
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
if (_isBrokerSession)
{
await _brokerServer.DeleteSessionAsync(ts.Token);
}
}
}
else
@@ -216,17 +183,19 @@ namespace GitHub.Runner.Listener
public void OnJobStatus(object sender, JobStatusEventArgs e)
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
runnerStatus = e.Status;
try
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW")))
{
_getMessagesTokenSource?.Cancel();
Trace.Info("Received job status event. JobState: {0}", e.Status);
runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
}
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
}
}
public async Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token)
@@ -236,7 +205,6 @@ namespace GitHub.Runner.Listener
ArgUtil.NotNull(_settings, nameof(_settings));
bool encounteringError = false;
int continuousError = 0;
int continuousEmptyMessage = 0;
string errorMessage = string.Empty;
Stopwatch heartbeat = new();
heartbeat.Restart();
@@ -260,23 +228,6 @@ namespace GitHub.Runner.Listener
// Decrypt the message body if the session is using encryption
message = DecryptMessage(message);
if (message != null && message.MessageType == BrokerMigrationMessage.MessageType)
{
Trace.Info("BrokerMigration message received. Polling Broker for messages...");
var migrationMessage = JsonUtility.FromString<BrokerMigrationMessage>(message.Body);
await _brokerServer.UpdateConnectionIfNeeded(migrationMessage.BrokerBaseUrl, _creds);
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
_settings.DisableUpdate,
token);
}
if (message != null)
{
_lastMessageId = message.MessageId;
@@ -315,7 +266,7 @@ namespace GitHub.Runner.Listener
Trace.Error(ex);
// don't retry if SkipSessionRecover = true, DT service will delete agent session to stop agent from taking more jobs.
if (ex is TaskAgentSessionExpiredException && !_settings.SkipSessionRecover && (await CreateSessionAsync(token) == CreateSessionResult.Success))
if (ex is TaskAgentSessionExpiredException && !_settings.SkipSessionRecover && await CreateSessionAsync(token))
{
Trace.Info($"{nameof(TaskAgentSessionExpiredException)} received, recovered by recreate session.");
}
@@ -360,27 +311,16 @@ namespace GitHub.Runner.Listener
if (message == null)
{
continuousEmptyMessage++;
if (heartbeat.Elapsed > TimeSpan.FromMinutes(30))
{
Trace.Info($"No message retrieved from session '{_session.SessionId}' within last 30 minutes.");
heartbeat.Restart();
continuousEmptyMessage = 0;
}
else
{
Trace.Verbose($"No message retrieved from session '{_session.SessionId}'.");
}
if (continuousEmptyMessage > 50)
{
// retried more than 50 times in less than 30mins and still getting empty message
// something is not right on the service side, backoff for 15-30s before retry
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30), _getNextMessageRetryInterval);
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
await HostContext.Delay(_getNextMessageRetryInterval, token);
}
continue;
}
@@ -403,12 +343,6 @@ namespace GitHub.Runner.Listener
}
}
public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken)
{
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
await _brokerServer.ForceRefreshConnection(_creds);
}
private TaskAgentMessage DecryptMessage(TaskAgentMessage message)
{
if (_session.EncryptionKey == null ||

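When the listener keeps receiving empty messages, the hunk above backs off with BackoffTimerHelper.GetRandomBackoff between 15 and 30 seconds. A rough sketch of such a random-backoff helper, with the previous-interval parameter dropped and a uniform distribution assumed (the runner's actual helper may differ):

using System;

static class Backoff
{
    private static readonly Random _random = new();

    // Returns a random delay in [min, max); callers re-roll it before each retry so
    // many idle runners don't poll the service in lockstep.
    public static TimeSpan GetRandomBackoff(TimeSpan min, TimeSpan max)
    {
        var windowMs = (max - min).TotalMilliseconds;
        return min + TimeSpan.FromMilliseconds(_random.NextDouble() * windowMs);
    }
}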
View File

@@ -25,6 +25,12 @@
<PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="..\Misc\runnercoreassets">
<LogicalName>GitHub.Runner.Listener.runnercoreassets</LogicalName>
</EmbeddedResource>
</ItemGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
<DebugType>portable</DebugType>
</PropertyGroup>

View File

@@ -359,12 +359,7 @@ namespace GitHub.Runner.Listener
{
Trace.Info(nameof(RunAsync));
_listener = GetMesageListener(settings);
CreateSessionResult createSessionResult = await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken);
if (createSessionResult == CreateSessionResult.SessionConflict)
{
return Constants.Runner.ReturnCode.SessionConflict;
}
else if (createSessionResult == CreateSessionResult.Failure)
if (!await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken))
{
return Constants.Runner.ReturnCode.TerminatedError;
}
@@ -572,11 +567,6 @@ namespace GitHub.Runner.Listener
Trace.Info("Job is already acquired, skip this message.");
continue;
}
catch (Exception ex)
{
Trace.Error($"Caught exception from acquiring job message: {ex}");
continue;
}
}
jobDispatcher.Run(jobRequestMessage, runOnce);
@@ -606,11 +596,6 @@ namespace GitHub.Runner.Listener
Trace.Info($"Service requests the hosted runner to shutdown. Reason: '{HostedRunnerShutdownMessage.Reason}'.");
return Constants.Runner.ReturnCode.Success;
}
else if (string.Equals(message.MessageType, TaskAgentMessageTypes.ForceTokenRefresh))
{
Trace.Info("Received ForceTokenRefreshMessage");
await _listener.RefreshListenerTokenAsync(messageQueueLoopTokenSource.Token);
}
else
{
Trace.Error($"Received message {message.MessageId} with unsupported message type {message.MessageType}.");
@@ -649,7 +634,6 @@ namespace GitHub.Runner.Listener
{
try
{
Trace.Info("Deleting Runner Session...");
await _listener.DeleteSessionAsync();
}
catch (Exception ex) when (runOnce)

View File

@@ -6,11 +6,13 @@ using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net.Http;
using System.Reflection;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
@@ -28,14 +30,20 @@ namespace GitHub.Runner.Listener
{
private static string _packageType = "agent";
private static string _platform = BuildConstants.RunnerPackage.PackageName;
private static string _dotnetRuntime = "dotnetRuntime";
private static string _externals = "externals";
private readonly Dictionary<string, string> _contentHashes = new();
private PackageMetadata _targetPackage;
private ITerminal _terminal;
private IRunnerServer _runnerServer;
private int _poolId;
private ulong _agentId;
private const int _numberOfOldVersionsToKeep = 1;
private readonly ConcurrentQueue<string> _updateTrace = new();
private Task _cloneAndCalculateContentHashTask;
private string _dotnetRuntimeCloneDirectory;
private string _externalsCloneDirectory;
public bool Busy { get; private set; }
public override void Initialize(IHostContext hostContext)
@@ -48,6 +56,8 @@ namespace GitHub.Runner.Listener
var settings = configStore.GetSettings();
_poolId = settings.PoolId;
_agentId = settings.AgentId;
_dotnetRuntimeCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__dotnet_runtime__");
_externalsCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals__");
}
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
@@ -57,6 +67,13 @@ namespace GitHub.Runner.Listener
{
var totalUpdateTime = Stopwatch.StartNew();
// Copy the dotnet runtime and externals of the current runner to a temp folder
// so we can re-use them with a trimmed runner package, if possible.
// This process is best effort: if we can't use a trimmed runner package,
// we will just go with the full package.
var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
_cloneAndCalculateContentHashTask = CloneAndCalculateAssetsHash(_dotnetRuntimeCloneDirectory, _externalsCloneDirectory, linkedTokenSource.Token);
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
{
Trace.Info($"Can't find available update package.");
@@ -70,6 +87,24 @@ namespace GitHub.Runner.Listener
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
if (_targetPackage.TrimmedPackages?.Count > 0)
{
// wait for cloning assets task to finish only if we have trimmed packages
await _cloneAndCalculateContentHashTask;
}
else
{
linkedTokenSource.Cancel();
try
{
await _cloneAndCalculateContentHashTask;
}
catch (Exception ex)
{
Trace.Info($"Ingore errors after cancelling cloning assets task: {ex}");
}
}
await DownloadLatestRunner(token, updateMessage.TargetVersion);
Trace.Info($"Download latest runner and unzip into runner root.");
@@ -183,8 +218,54 @@ namespace GitHub.Runner.Listener
string archiveFile = null;
var packageDownloadUrl = _targetPackage.DownloadUrl;
var packageHashValue = _targetPackage.HashValue;
var runtimeTrimmed = false;
var externalsTrimmed = false;
var fallbackToFullPackage = false;
// Only try a trimmed package if the server sends them and we have calculated the hash values of the current runtime/externals.
if (_contentHashes.Count == 2 &&
_contentHashes.ContainsKey(_dotnetRuntime) &&
_contentHashes.ContainsKey(_externals) &&
_targetPackage.TrimmedPackages?.Count > 0)
{
Trace.Info($"Current runner content hash: {StringUtil.ConvertToJson(_contentHashes)}");
Trace.Info($"Trimmed packages info from service: {StringUtil.ConvertToJson(_targetPackage.TrimmedPackages)}");
// Try to see whether we can use any size trimmed down package to speed up runner updates.
foreach (var trimmedPackage in _targetPackage.TrimmedPackages)
{
if (trimmedPackage.TrimmedContents.Count == 2 &&
trimmedPackage.TrimmedContents.TryGetValue(_dotnetRuntime, out var trimmedRuntimeHash) &&
trimmedRuntimeHash == _contentHashes[_dotnetRuntime] &&
trimmedPackage.TrimmedContents.TryGetValue(_externals, out var trimmedExternalsHash) &&
trimmedExternalsHash == _contentHashes[_externals])
{
Trace.Info($"Use trimmed (runtime+externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
packageDownloadUrl = trimmedPackage.DownloadUrl;
packageHashValue = trimmedPackage.HashValue;
runtimeTrimmed = true;
externalsTrimmed = true;
break;
}
else if (trimmedPackage.TrimmedContents.Count == 1 &&
trimmedPackage.TrimmedContents.TryGetValue(_externals, out trimmedExternalsHash) &&
trimmedExternalsHash == _contentHashes[_externals])
{
Trace.Info($"Use trimmed (externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
packageDownloadUrl = trimmedPackage.DownloadUrl;
packageHashValue = trimmedPackage.HashValue;
externalsTrimmed = true;
break;
}
else
{
Trace.Info($"Can't use trimmed package from '{trimmedPackage.DownloadUrl}' since the current runner does not carry those trimmed content (Hash mismatch).");
}
}
}
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
_updateTrace.Enqueue($"RuntimeTrimmed: {runtimeTrimmed}");
_updateTrace.Enqueue($"ExternalsTrimmed: {externalsTrimmed}");
try
{
@@ -242,6 +323,12 @@ namespace GitHub.Runner.Listener
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
}
catch (Exception ex) when (runtimeTrimmed || externalsTrimmed)
{
// if anything fails while using a trimmed package (download/validate hash/extract), try again with the full runner package.
Trace.Error($"Failed to download latest runner using trimmed package: {ex}");
fallbackToFullPackage = true;
}
finally
{
try
@@ -260,6 +347,74 @@ namespace GitHub.Runner.Listener
}
}
var trimmedPackageRestoreTasks = new List<Task<bool>>();
if (!fallbackToFullPackage)
{
// Skip restoring externals and runtime if we are going to fall back to the full package.
if (externalsTrimmed)
{
trimmedPackageRestoreTasks.Add(RestoreTrimmedExternals(latestRunnerDirectory, token));
}
if (runtimeTrimmed)
{
trimmedPackageRestoreTasks.Add(RestoreTrimmedDotnetRuntime(latestRunnerDirectory, token));
}
}
if (trimmedPackageRestoreTasks.Count > 0)
{
var restoreResults = await Task.WhenAll(trimmedPackageRestoreTasks);
if (restoreResults.Any(x => x == false))
{
// if any of the restore failed, fallback to full package.
fallbackToFullPackage = true;
}
}
if (fallbackToFullPackage)
{
Trace.Error("Something wrong with the trimmed runner package, failback to use the full package for runner updates.");
_updateTrace.Enqueue($"FallbackToFullPackage: {fallbackToFullPackage}");
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
Directory.CreateDirectory(latestRunnerDirectory);
packageDownloadUrl = _targetPackage.DownloadUrl;
packageHashValue = _targetPackage.HashValue;
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
try
{
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
if (string.IsNullOrEmpty(archiveFile))
{
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
}
await ValidateRunnerHash(archiveFile, packageHashValue);
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
}
finally
{
try
{
// delete .zip file
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
{
Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile);
IOUtil.DeleteFile(archiveFile);
}
}
catch (Exception ex)
{
//it is not critical if we fail to delete the .zip file
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
}
}
}
await CopyLatestRunnerToRoot(latestRunnerDirectory, token);
}
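Distilled from the selection loop above: a trimmed package qualifies only when every content hash it was trimmed against matches the hash computed from the current runner. A hedged restatement with simplified types (not the repository's API):
using System;
using System.Collections.Generic;
static class TrimmedPackageSketch
{
    // True when all trimmed contents (e.g. "dotnetRuntime", "externals") hash-match the local copies.
    public static bool CanUseTrimmedPackage(
        IReadOnlyDictionary<string, string> trimmedContents,    // hashes the package was trimmed against
        IReadOnlyDictionary<string, string> localContentHashes) // hashes computed from the current runner
    {
        if (trimmedContents.Count == 0)
        {
            return false;
        }
        foreach (var content in trimmedContents)
        {
            if (!localContentHashes.TryGetValue(content.Key, out var localHash) ||
                !string.Equals(localHash, content.Value, StringComparison.Ordinal))
            {
                return false;
            }
        }
        return true;
    }
}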
@@ -510,9 +665,9 @@ namespace GitHub.Runner.Listener
// delete old bin.2.99.0 folder, only leave the current version and the latest download version
var allBinDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "bin.*");
if (allBinDirs.Length > _numberOfOldVersionsToKeep)
if (allBinDirs.Length > 2)
{
// there are more than one bin.version folder.
// there are more than 2 bin.version folders.
// delete older bin.version folders.
foreach (var oldBinDir in allBinDirs)
{
@@ -539,9 +694,9 @@ namespace GitHub.Runner.Listener
// delete old externals.2.99.0 folder, only leave the current version and the latest download version
var allExternalsDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "externals.*");
if (allExternalsDirs.Length > _numberOfOldVersionsToKeep)
if (allExternalsDirs.Length > 2)
{
// there are more than one externals.version folder.
// there are more than 2 externals.version folders.
// delete older externals.version folders.
foreach (var oldExternalDir in allExternalsDirs)
{
@@ -640,5 +795,330 @@ namespace GitHub.Runner.Listener
Trace.Info($"Catch exception during report update state, ignore this error and continue auto-update.");
}
}
private async Task<bool> RestoreTrimmedExternals(string downloadDirectory, CancellationToken token)
{
// Copy the current runner's externals if we are using an externals-trimmed package.
// Execute node.js to make sure the copied externals are working.
var stopWatch = Stopwatch.StartNew();
try
{
Trace.Info($"Copy {_externalsCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory)}.");
IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token);
// try running node.js to see if the current node.js still works after being copied to the new location.
var nodeVersions = NodeUtil.BuiltInNodeVersions;
foreach (var nodeVersion in nodeVersions)
{
var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}");
if (File.Exists(newNodeBinary))
{
using (var p = HostContext.CreateService<IProcessInvoker>())
{
var outputs = "";
p.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data))
{
Trace.Error(data.Data);
}
};
p.OutputDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data))
{
Trace.Info(data.Data);
outputs = data.Data;
}
};
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newNodeBinary, $"-e \"console.log('{nameof(RestoreTrimmedExternals)}')\"", null, token);
if (exitCode != 0)
{
Trace.Error($"{newNodeBinary} -e \"console.log()\" failed with exit code {exitCode}");
return false;
}
if (!string.Equals(outputs, nameof(RestoreTrimmedExternals), StringComparison.OrdinalIgnoreCase))
{
Trace.Error($"{newNodeBinary} -e \"console.log()\" did not output expected content.");
return false;
}
}
}
}
return true;
}
catch (Exception ex)
{
Trace.Error($"Fail to restore externals for trimmed package: {ex}");
return false;
}
finally
{
stopWatch.Stop();
_updateTrace.Enqueue($"{nameof(RestoreTrimmedExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
}
}
private async Task<bool> RestoreTrimmedDotnetRuntime(string downloadDirectory, CancellationToken token)
{
// Copy the current runner's dotnet runtime if we are using a runtime-trimmed package.
// Execute Runner.Listener to make sure the copied runtime is working.
var stopWatch = Stopwatch.StartNew();
try
{
Trace.Info($"Copy {_dotnetRuntimeCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.BinDirectory)}.");
IOUtil.CopyDirectory(_dotnetRuntimeCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.BinDirectory), token);
// try running the runner executable to see if the current dotnet runtime + future runner binary work together.
var newRunnerBinary = Path.Combine(downloadDirectory, Constants.Path.BinDirectory, "Runner.Listener");
using (var p = HostContext.CreateService<IProcessInvoker>())
{
p.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data))
{
Trace.Error(data.Data);
}
};
p.OutputDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data))
{
Trace.Info(data.Data);
}
};
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newRunnerBinary, "--version", null, token);
if (exitCode != 0)
{
Trace.Error($"{newRunnerBinary} --version failed with exit code {exitCode}");
return false;
}
else
{
return true;
}
}
}
catch (Exception ex)
{
Trace.Error($"Fail to restore dotnet runtime for trimmed package: {ex}");
return false;
}
finally
{
stopWatch.Stop();
_updateTrace.Enqueue($"{nameof(RestoreTrimmedDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
}
}
private async Task CloneAndCalculateAssetsHash(string dotnetRuntimeCloneDirectory, string externalsCloneDirectory, CancellationToken token)
{
var runtimeCloneTask = CloneDotnetRuntime(dotnetRuntimeCloneDirectory, token);
var externalsCloneTask = CloneExternals(externalsCloneDirectory, token);
var waitingTasks = new Dictionary<string, Task>()
{
{nameof(CloneDotnetRuntime), runtimeCloneTask},
{nameof(CloneExternals),externalsCloneTask}
};
while (waitingTasks.Count > 0)
{
Trace.Info($"Waiting for {waitingTasks.Count} tasks to complete.");
var complatedTask = await Task.WhenAny(waitingTasks.Values);
if (waitingTasks.ContainsKey(nameof(CloneExternals)) &&
complatedTask == waitingTasks[nameof(CloneExternals)])
{
Trace.Info($"Externals clone finished.");
waitingTasks.Remove(nameof(CloneExternals));
try
{
if (await externalsCloneTask && !token.IsCancellationRequested)
{
var externalsHash = await HashFiles(externalsCloneDirectory, token);
Trace.Info($"Externals content hash: {externalsHash}");
_contentHashes[_externals] = externalsHash;
_updateTrace.Enqueue($"ExternalsHash: {_contentHashes[_externals]}");
}
else
{
Trace.Error($"Skip compute hash since clone externals failed/cancelled.");
}
}
catch (Exception ex)
{
Trace.Error($"Fail to hash externals content: {ex}");
}
}
else if (waitingTasks.ContainsKey(nameof(CloneDotnetRuntime)) &&
complatedTask == waitingTasks[nameof(CloneDotnetRuntime)])
{
Trace.Info($"Dotnet runtime clone finished.");
waitingTasks.Remove(nameof(CloneDotnetRuntime));
try
{
if (await runtimeCloneTask && !token.IsCancellationRequested)
{
var runtimeHash = await HashFiles(dotnetRuntimeCloneDirectory, token);
Trace.Info($"Runtime content hash: {runtimeHash}");
_contentHashes[_dotnetRuntime] = runtimeHash;
_updateTrace.Enqueue($"DotnetRuntimeHash: {_contentHashes[_dotnetRuntime]}");
}
else
{
Trace.Error($"Skip compute hash since clone dotnet runtime failed/cancelled.");
}
}
catch (Exception ex)
{
Trace.Error($"Fail to hash runtime content: {ex}");
}
}
Trace.Info($"Still waiting for {waitingTasks.Count} tasks to complete.");
}
}
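The Task.WhenAny loop above is essentially "hash whichever clone finishes first, then the other". For readers unfamiliar with the pattern, a stripped-down equivalent for two tasks (illustration only, not code from this diff):
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
static class WhenAnySketch
{
    // Handle two independent tasks in whatever order they complete.
    public static async Task ProcessAsEachCompletesAsync(Task<bool> cloneRuntime, Task<bool> cloneExternals)
    {
        var pending = new List<Task<bool>> { cloneRuntime, cloneExternals };
        while (pending.Count > 0)
        {
            Task<bool> finished = await Task.WhenAny(pending);
            pending.Remove(finished);
            bool succeeded = await finished; // already completed; rethrows if the clone task faulted
            Console.WriteLine($"A clone task finished, succeeded: {succeeded}");
        }
    }
}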
private async Task<bool> CloneDotnetRuntime(string runtimeDir, CancellationToken token)
{
var stopWatch = Stopwatch.StartNew();
try
{
Trace.Info($"Cloning dotnet runtime to {runtimeDir}");
IOUtil.DeleteDirectory(runtimeDir, CancellationToken.None);
Directory.CreateDirectory(runtimeDir);
var assembly = Assembly.GetExecutingAssembly();
var assetsContent = default(string);
using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Listener.runnercoreassets"))
using (var streamReader = new StreamReader(stream))
{
assetsContent = await streamReader.ReadToEndAsync();
}
if (!string.IsNullOrEmpty(assetsContent))
{
var runnerCoreAssets = assetsContent.Split(new[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
if (runnerCoreAssets.Length > 0)
{
var binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
IOUtil.CopyDirectory(binDir, runtimeDir, token);
var clonedFile = 0;
foreach (var file in Directory.EnumerateFiles(runtimeDir, "*", SearchOption.AllDirectories))
{
token.ThrowIfCancellationRequested();
if (runnerCoreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x.Trim())))
{
Trace.Verbose($"{file} is part of the runner core, delete from cloned runtime directory.");
IOUtil.DeleteFile(file);
}
else
{
clonedFile++;
}
}
Trace.Info($"Successfully cloned dotnet runtime to {runtimeDir}. Total files: {clonedFile}");
return true;
}
}
}
catch (Exception ex)
{
Trace.Error($"Fail to clone dotnet runtime to {runtimeDir}");
Trace.Error(ex);
}
finally
{
stopWatch.Stop();
_updateTrace.Enqueue($"{nameof(CloneDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
}
return false;
}
private Task<bool> CloneExternals(string externalsDir, CancellationToken token)
{
var stopWatch = Stopwatch.StartNew();
try
{
Trace.Info($"Cloning externals to {externalsDir}");
IOUtil.DeleteDirectory(externalsDir, CancellationToken.None);
Directory.CreateDirectory(externalsDir);
IOUtil.CopyDirectory(HostContext.GetDirectory(WellKnownDirectory.Externals), externalsDir, token);
Trace.Info($"Successfully cloned externals to {externalsDir}.");
return Task.FromResult(true);
}
catch (Exception ex)
{
Trace.Error($"Fail to clone externals to {externalsDir}");
Trace.Error(ex);
}
finally
{
stopWatch.Stop();
_updateTrace.Enqueue($"{nameof(CloneExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
}
return Task.FromResult(false);
}
private async Task<string> HashFiles(string fileFolder, CancellationToken token)
{
Trace.Info($"Calculating hash for {fileFolder}");
var stopWatch = Stopwatch.StartNew();
string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
processInvoker.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
Trace.Info($"Hash result: '{hashResult}'");
}
else
{
Trace.Info(data.Data);
}
};
processInvoker.OutputDataReceived += (_, data) =>
{
Trace.Verbose(data.Data);
};
var env = new Dictionary<string, string>
{
["patterns"] = "**"
};
int exitCode = await processInvoker.ExecuteAsync(workingDirectory: fileFolder,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: token);
if (exitCode != 0)
{
Trace.Error($"hashFiles returns '{exitCode}' failed. Fail to hash files under directory '{fileFolder}'");
}
stopWatch.Stop();
_updateTrace.Enqueue($"{nameof(HashFiles)}{Path.GetFileName(fileFolder)}Time: {stopWatch.ElapsedMilliseconds}ms");
return hashResult;
}
}
}
}
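The hashFiles helper reports its result on stderr wrapped in __OUTPUT__ markers, which the Substring(10, Length - 20) call above strips. A tiny worked example of that parsing (marker format inferred from the code above):
static class HashFilesOutputSketch
{
    private const string Marker = "__OUTPUT__"; // 10 characters, hence Substring(10, Length - 20)
    // Returns the hash between the markers, or empty if the line is not a result line.
    public static string ExtractHashResult(string line)
    {
        if (line != null &&
            line.Length >= 2 * Marker.Length &&
            line.StartsWith(Marker) &&
            line.EndsWith(Marker))
        {
            return line.Substring(Marker.Length, line.Length - 2 * Marker.Length);
        }
        return string.Empty;
    }
}
// Example: ExtractHashResult("__OUTPUT__9a0364b9e99bb480__OUTPUT__") returns "9a0364b9e99bb480".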

View File

@@ -149,6 +149,7 @@ namespace GitHub.Runner.Listener
string archiveFile = null;
// Only try a trimmed package if the server sends them and we have calculated the hash values of the current runtime/externals.
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
try

View File

@@ -459,34 +459,6 @@ namespace GitHub.Runner.Sdk
File.WriteAllText(path, null);
}
/// <summary>
/// Replaces invalid file name characters with '_'
/// </summary>
public static string ReplaceInvalidFileNameChars(string fileName)
{
var result = new StringBuilder();
var invalidChars = Path.GetInvalidFileNameChars();
var current = 0; // Current index
while (current < fileName?.Length)
{
var next = fileName.IndexOfAny(invalidChars, current);
if (next >= 0)
{
result.Append(fileName.Substring(current, next - current));
result.Append('_');
current = next + 1;
}
else
{
result.Append(fileName.Substring(current));
break;
}
}
return result.ToString();
}
/// <summary>
/// Recursively enumerates a directory without following directory reparse points.
/// </summary>

View File

@@ -23,13 +23,7 @@ namespace GitHub.Runner.Sdk
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
{
foreach (var headerVal in VssClientHttpRequestSettings.Default.UserAgent)
{
if (!headerValues.Contains(headerVal))
{
headerValues.Add(headerVal);
}
}
headerValues.AddRange(VssClientHttpRequestSettings.Default.UserAgent);
}
VssClientHttpRequestSettings.Default.UserAgent = headerValues;
@@ -39,23 +33,6 @@ namespace GitHub.Runner.Sdk
{
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
}
var rawHeaderValues = new List<ProductInfoHeaderValue>();
rawHeaderValues.AddRange(additionalUserAgents);
rawHeaderValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})"));
if (RawClientHttpRequestSettings.Default.UserAgent != null && RawClientHttpRequestSettings.Default.UserAgent.Count > 0)
{
foreach (var headerVal in RawClientHttpRequestSettings.Default.UserAgent)
{
if (!rawHeaderValues.Contains(headerVal))
{
rawHeaderValues.Add(headerVal);
}
}
}
RawClientHttpRequestSettings.Default.UserAgent = rawHeaderValues;
}
public static VssConnection CreateConnection(
@@ -85,6 +62,11 @@ namespace GitHub.Runner.Sdk
settings.SendTimeout = TimeSpan.FromSeconds(Math.Min(Math.Max(httpRequestTimeoutSeconds, 100), 1200));
}
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW")))
{
settings.AllowAutoRedirectForBroker = true;
}
// Remove Invariant from the list of accepted languages.
//
// The constructor of VssHttpRequestSettings (base class of VssClientHttpRequestSettings) adds the current

View File

@@ -7,6 +7,129 @@ namespace GitHub.Runner.Sdk
public static class WhichUtil
{
public static string Which(string command, bool require = false, ITraceWriter trace = null, string prependPath = null)
{
ArgUtil.NotNullOrEmpty(command, nameof(command));
trace?.Info($"Which: '{command}'");
if (Path.IsPathFullyQualified(command) && File.Exists(command))
{
trace?.Info($"Fully qualified path: '{command}'");
return command;
}
string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
if (string.IsNullOrEmpty(path))
{
trace?.Info("PATH environment variable not defined.");
path = path ?? string.Empty;
}
if (!string.IsNullOrEmpty(prependPath))
{
path = PathUtil.PrependPath(prependPath, path);
}
string[] pathSegments = path.Split(new Char[] { Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries);
for (int i = 0; i < pathSegments.Length; i++)
{
pathSegments[i] = Environment.ExpandEnvironmentVariables(pathSegments[i]);
}
foreach (string pathSegment in pathSegments)
{
if (!string.IsNullOrEmpty(pathSegment) && Directory.Exists(pathSegment))
{
string[] matches = null;
#if OS_WINDOWS
string pathExt = Environment.GetEnvironmentVariable("PATHEXT");
if (string.IsNullOrEmpty(pathExt))
{
// XP's system default value for PATHEXT system variable
pathExt = ".com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh";
}
string[] pathExtSegments = pathExt.Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
// if command already has an extension.
if (pathExtSegments.Any(ext => command.EndsWith(ext, StringComparison.OrdinalIgnoreCase)))
{
try
{
matches = Directory.GetFiles(pathSegment, command);
}
catch (UnauthorizedAccessException ex)
{
trace?.Info("Ignore UnauthorizedAccess exception during Which.");
trace?.Verbose(ex.ToString());
}
if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace))
{
trace?.Info($"Location: '{matches.First()}'");
return matches.First();
}
}
else
{
string searchPattern;
searchPattern = StringUtil.Format($"{command}.*");
try
{
matches = Directory.GetFiles(pathSegment, searchPattern);
}
catch (UnauthorizedAccessException ex)
{
trace?.Info("Ignore UnauthorizedAccess exception during Which.");
trace?.Verbose(ex.ToString());
}
if (matches != null && matches.Length > 0)
{
// add extension.
for (int i = 0; i < pathExtSegments.Length; i++)
{
string fullPath = Path.Combine(pathSegment, $"{command}{pathExtSegments[i]}");
if (matches.Any(p => p.Equals(fullPath, StringComparison.OrdinalIgnoreCase)) && IsPathValid(fullPath, trace))
{
trace?.Info($"Location: '{fullPath}'");
return fullPath;
}
}
}
}
#else
try
{
matches = Directory.GetFiles(pathSegment, command);
}
catch (UnauthorizedAccessException ex)
{
trace?.Info("Ignore UnauthorizedAccess exception during Which.");
trace?.Verbose(ex.ToString());
}
if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace))
{
trace?.Info($"Location: '{matches.First()}'");
return matches.First();
}
#endif
}
}
#if OS_WINDOWS
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'Path' environment variable.");
#else
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'PATH' environment variable.");
#endif
if (require)
{
throw new FileNotFoundException(
message: $"{command}: command not found",
fileName: command);
}
return null;
}
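For context, a hypothetical call site (not from this diff) showing how Which is typically used, with an optional directory prepended to the search path; the paths below are made up:
// Sketch: resolve a tool on PATH, optionally preferring an extra directory.
string dockerPath = WhichUtil.Which("docker", require: false, trace: null, prependPath: "/usr/local/bin");
if (string.IsNullOrEmpty(dockerPath))
{
    // require: true makes Which throw FileNotFoundException instead of returning null.
    dockerPath = WhichUtil.Which("docker", require: true);
}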
public static string Which2(string command, bool require = false, ITraceWriter trace = null, string prependPath = null)
{
ArgUtil.NotNullOrEmpty(command, nameof(command));
trace?.Info($"Which2: '{command}'");

View File

@@ -483,6 +483,10 @@ namespace GitHub.Runner.Worker
{
// Load stored Ids for later load actions
compositeAction.Steps[i].Id = _cachedEmbeddedStepIds[action.Id][i];
if (string.IsNullOrEmpty(executionContext.Global.Variables.Get("DistributedTask.EnableCompositeActions")) && compositeAction.Steps[i].Reference.Type != Pipelines.ActionSourceType.Script)
{
throw new Exception("`uses:` keyword is not currently supported.");
}
}
}
else
@@ -699,12 +703,11 @@ namespace GitHub.Runner.Worker
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled.
{
// UnresolvableActionDownloadInfoException is a 422 client error, don't retry
// NonRetryableActionDownloadInfoException is a non-retryable exception from Actions
// Some possible cases are:
// * Repo is rate limited
// * Repo or tag doesn't exist, or isn't public
// * Policy validation failed
if (attempt < 3 && !(ex is WebApi.UnresolvableActionDownloadInfoException) && !(ex is WebApi.NonRetryableActionDownloadInfoException))
if (attempt < 3 && !(ex is WebApi.UnresolvableActionDownloadInfoException))
{
executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
executionContext.Debug(ex.ToString());
@@ -793,39 +796,42 @@ namespace GitHub.Runner.Worker
try
{
var useActionArchiveCache = false;
var hasActionArchiveCache = false;
var actionArchiveCacheDir = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory);
if (!string.IsNullOrEmpty(actionArchiveCacheDir) &&
Directory.Exists(actionArchiveCacheDir))
if (executionContext.Global.Variables.GetBoolean("DistributedTask.UseActionArchiveCache") == true)
{
hasActionArchiveCache = true;
Trace.Info($"Check if action archive '{downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha}' already exists in cache directory '{actionArchiveCacheDir}'");
#if OS_WINDOWS
var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.zip");
#else
var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.tar.gz");
#endif
if (File.Exists(cacheArchiveFile))
var hasActionArchiveCache = false;
var actionArchiveCacheDir = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory);
if (!string.IsNullOrEmpty(actionArchiveCacheDir) &&
Directory.Exists(actionArchiveCacheDir))
{
try
hasActionArchiveCache = true;
Trace.Info($"Check if action archive '{downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha}' already exists in cache directory '{actionArchiveCacheDir}'");
#if OS_WINDOWS
var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.zip");
#else
var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.tar.gz");
#endif
if (File.Exists(cacheArchiveFile))
{
Trace.Info($"Found action archive '{cacheArchiveFile}' in cache directory '{actionArchiveCacheDir}'");
File.Copy(cacheArchiveFile, archiveFile);
useActionArchiveCache = true;
executionContext.Debug($"Copied action archive '{cacheArchiveFile}' to '{archiveFile}'");
}
catch (Exception ex)
{
Trace.Error($"Failed to copy action archive '{cacheArchiveFile}' to '{archiveFile}'. Error: {ex}");
try
{
Trace.Info($"Found action archive '{cacheArchiveFile}' in cache directory '{actionArchiveCacheDir}'");
File.Copy(cacheArchiveFile, archiveFile);
useActionArchiveCache = true;
executionContext.Debug($"Copied action archive '{cacheArchiveFile}' to '{archiveFile}'");
}
catch (Exception ex)
{
Trace.Error($"Failed to copy action archive '{cacheArchiveFile}' to '{archiveFile}'. Error: {ex}");
}
}
}
}
executionContext.Global.JobTelemetry.Add(new JobTelemetry()
{
Type = JobTelemetryType.General,
Message = $"Action archive cache usage: {downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha} use cache {useActionArchiveCache} has cache {hasActionArchiveCache}"
});
executionContext.Global.JobTelemetry.Add(new JobTelemetry()
{
Type = JobTelemetryType.General,
Message = $"Action archive cache usage: {downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha} use cache {useActionArchiveCache} has cache {hasActionArchiveCache}"
});
}
if (!useActionArchiveCache)
{
@@ -872,9 +878,16 @@ namespace GitHub.Runner.Worker
int exitCode = await processInvoker.ExecuteAsync(stagingDirectory, tar, $"-xzf \"{archiveFile}\"", null, executionContext.CancellationToken);
if (exitCode != 0)
{
var fileInfo = new FileInfo(archiveFile);
var sha256hash = await IOUtil.GetFileContentSha256HashAsync(archiveFile);
throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile} (SHA256 '{sha256hash}', size '{fileInfo.Length}' bytes, tar outputs '{string.Join(' ', tarOutputs)}'). Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
if (executionContext.Global.Variables.GetBoolean("DistributedTask.DetailUntarFailure") == true)
{
var fileInfo = new FileInfo(archiveFile);
var sha256hash = await IOUtil.GetFileContentSha256HashAsync(archiveFile);
throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile} (SHA256 '{sha256hash}', size '{fileInfo.Length}' bytes, tar outputs '{string.Join(' ', tarOutputs)}'). Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
}
else
{
throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
}
}
}
#endif
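The cache lookup above keys archives by the resolved name-with-owner (with path separators replaced by '_') and the resolved commit SHA, using .zip on Windows and .tar.gz elsewhere. A hedged sketch of that path scheme, with a made-up cache directory and SHA:
using System.IO;
static class ActionArchiveCachePathSketch
{
    // Mirrors the cache lookup above; inputs are illustrative.
    public static string GetCacheArchivePath(string cacheDir, string resolvedNameWithOwner, string resolvedSha, bool isWindows)
    {
        string safeName = resolvedNameWithOwner
            .Replace(Path.DirectorySeparatorChar, '_')
            .Replace(Path.AltDirectorySeparatorChar, '_');
        string extension = isWindows ? "zip" : "tar.gz";
        return Path.Combine(cacheDir, safeName, $"{resolvedSha}.{extension}");
    }
}
// e.g. GetCacheArchivePath("/opt/action-cache", "actions/checkout", "8f4b7f84864484a7bf31766abe9204da3cbe65b3", isWindows: false)
//      returns "/opt/action-cache/actions_checkout/8f4b7f84864484a7bf31766abe9204da3cbe65b3.tar.gz"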
@@ -1018,6 +1031,13 @@ namespace GitHub.Runner.Worker
}
}
foreach (var step in compositeAction.Steps)
{
if (string.IsNullOrEmpty(executionContext.Global.Variables.Get("DistributedTask.EnableCompositeActions")) && step.Reference.Type != Pipelines.ActionSourceType.Script)
{
throw new Exception("`uses:` keyword is not currently supported.");
}
}
return setupInfo;
}
else

View File

@@ -144,7 +144,7 @@ namespace GitHub.Runner.Worker
executionContext.Error(error.Message);
}
throw new ArgumentException($"Failed to load {fileRelativePath}");
throw new ArgumentException($"Fail to load {fileRelativePath}");
}
if (actionDefinition.Execution == null)

View File

@@ -466,39 +466,17 @@ namespace GitHub.Runner.Worker
{
throw new InvalidOperationException($"Failed to create directory to store registry client credentials: {e.Message}");
}
var loginExitCode = await _dockerManager.DockerLogin(
executionContext,
configLocation,
container.RegistryServer,
container.RegistryAuthUsername,
container.RegistryAuthPassword);
// Login docker with retry up to 3 times
int retryCount = 0;
int loginExitCode = 0;
while (retryCount < 3)
{
loginExitCode = await _dockerManager.DockerLogin(
executionContext,
configLocation,
container.RegistryServer,
container.RegistryAuthUsername,
container.RegistryAuthPassword);
if (loginExitCode == 0)
{
break;
}
else
{
retryCount++;
if (retryCount < 3)
{
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
executionContext.Warning($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}, back off {backOff.TotalSeconds} seconds before retry.");
await Task.Delay(backOff);
}
}
}
if (retryCount == 3 && loginExitCode != 0)
if (loginExitCode != 0)
{
throw new InvalidOperationException($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}");
}
return configLocation;
}
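The change above wraps docker login in up to three attempts with a randomized backoff between failures. The same retry shape in isolation (plain Random-based jitter here; the repository's BackoffTimerHelper is not used in this sketch):
using System;
using System.Threading.Tasks;
static class LoginRetrySketch
{
    // Retry an operation that returns an exit code; 0 means success.
    public static async Task<int> RetryWithBackoffAsync(Func<Task<int>> operation, int maxAttempts = 3)
    {
        var random = new Random();
        int exitCode = 0;
        for (int attempt = 1; attempt <= maxAttempts; attempt++)
        {
            exitCode = await operation();
            if (exitCode == 0 || attempt == maxAttempts)
            {
                break;
            }
            // Back off between 1 and 10 seconds before the next attempt.
            await Task.Delay(TimeSpan.FromSeconds(random.Next(1, 11)));
        }
        return exitCode;
    }
}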

View File

@@ -91,13 +91,13 @@ namespace GitHub.Runner.Worker
string phaseName = executionContext.Global.Variables.System_PhaseDisplayName ?? "UnknownPhaseName";
// zip the files
string diagnosticsZipFileName = $"{buildName}-{IOUtil.ReplaceInvalidFileNameChars(phaseName)}.zip";
string diagnosticsZipFileName = $"{buildName}-{phaseName}.zip";
string diagnosticsZipFilePath = Path.Combine(supportRootFolder, diagnosticsZipFileName);
ZipFile.CreateFromDirectory(supportFilesFolder, diagnosticsZipFilePath);
// upload the json metadata file
executionContext.Debug("Uploading diagnostic metadata file.");
string metadataFileName = $"diagnostics-{buildName}-{IOUtil.ReplaceInvalidFileNameChars(phaseName)}.json";
string metadataFileName = $"diagnostics-{buildName}-{phaseName}.json";
string metadataFilePath = Path.Combine(supportFilesFolder, metadataFileName);
string phaseResult = GetTaskResultAsString(executionContext.Result);
@@ -108,8 +108,6 @@ namespace GitHub.Runner.Worker
parentContext.QueueAttachFile(type: CoreAttachmentType.DiagnosticLog, name: diagnosticsZipFileName, filePath: diagnosticsZipFilePath);
parentContext.QueueDiagnosticLogFile(name: diagnosticsZipFileName, filePath: diagnosticsZipFilePath);
executionContext.Debug("Diagnostic file upload complete.");
}

View File

@@ -90,7 +90,6 @@ namespace GitHub.Runner.Worker
long Write(string tag, string message);
void QueueAttachFile(string type, string name, string filePath);
void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
void QueueDiagnosticLogFile(string name, string filePath);
// timeline record update methods
void Start(string currentOperation = null);
@@ -398,11 +397,11 @@ namespace GitHub.Runner.Worker
if (recordOrder != null)
{
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, recordOrder, embedded: isEmbedded);
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, recordOrder);
}
else
{
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder, embedded: isEmbedded);
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder);
}
if (logger != null)
{
@@ -433,7 +432,7 @@ namespace GitHub.Runner.Worker
Dictionary<string, string> intraActionState = null,
string siblingScopeName = null)
{
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: null, intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName, timeout: GetRemainingTimeout(), recordOrder: _record.Order);
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: null, intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName, timeout: GetRemainingTimeout());
}
public void Start(string currentOperation = null)
@@ -837,6 +836,7 @@ namespace GitHub.Runner.Worker
// Actions environment
ActionsEnvironment = message.ActionsEnvironment;
// Service container info
Global.ServiceContainers = new List<ContainerInfo>();
@@ -982,18 +982,6 @@ namespace GitHub.Runner.Worker
_jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
}
public void QueueDiagnosticLogFile(string name, string filePath)
{
ArgUtil.NotNullOrEmpty(name, nameof(name));
ArgUtil.NotNullOrEmpty(filePath, nameof(filePath));
if (!File.Exists(filePath))
{
throw new FileNotFoundException($"Can't upload diagnostic log file: {filePath}. File does not exist.");
}
_jobServerQueue.QueueResultsUpload(_record.Id, name, filePath, CoreAttachmentType.ResultsDiagnosticLog, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
}
// Add OnMatcherChanged
public void Add(OnMatcherChanged handler)
{
@@ -1172,7 +1160,7 @@ namespace GitHub.Runner.Worker
}
}
private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order, bool embedded = false)
private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
{
_mainTimelineId = timelineId;
_record.Id = timelineRecordId;
@@ -1198,11 +1186,7 @@ namespace GitHub.Runner.Worker
var configuration = HostContext.GetService<IConfigurationStore>();
_record.WorkerName = configuration.GetSettings().AgentName;
// We don't want to update the timeline record for embedded steps since they are not really represented in the UI.
if (!embedded)
{
_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
}
_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
}
private void JobServerQueueThrottling_EventReceived(object sender, ThrottlingEventArgs data)

View File

@@ -244,7 +244,7 @@ namespace GitHub.Runner.Worker
if (resultsReceiverEndpoint != null)
{
Trace.Info($"Queueing results file ({filePath}) for attachment upload ({attachmentName})");
var stepId = context.IsEmbedded ? context.EmbeddedId : context.Id;
var stepId = context.Id;
// Attachments must be added to the parent context (job), not the current context (step)
context.Root.QueueSummaryFile(attachmentName, scrubbedFilePath, stepId);
}

View File

@@ -223,10 +223,6 @@ namespace GitHub.Runner.Worker.Handlers
{
Environment["ACTIONS_CACHE_URL"] = cacheUrl;
}
if (systemConnection.Data.TryGetValue("PipelinesServiceUrl", out var pipelinesServiceUrl) && !string.IsNullOrEmpty(pipelinesServiceUrl))
{
Environment["ACTIONS_RUNTIME_URL"] = pipelinesServiceUrl;
}
if (systemConnection.Data.TryGetValue("GenerateIdTokenUrl", out var generateIdTokenUrl) && !string.IsNullOrEmpty(generateIdTokenUrl))
{
Environment["ACTIONS_ID_TOKEN_REQUEST_URL"] = generateIdTokenUrl;

View File

@@ -84,45 +84,6 @@ namespace GitHub.Runner.Worker.Handlers
}
nodeData.NodeVersion = "node16";
}
var localForceActionsToNode20 = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Agent.ManualForceActionsToNode20));
executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.ManualForceActionsToNode20, out var workflowForceActionsToNode20);
var enforceNode20Locally = !string.IsNullOrWhiteSpace(workflowForceActionsToNode20) ? StringUtil.ConvertToBoolean(workflowForceActionsToNode20) : localForceActionsToNode20;
if (string.Equals(nodeData.NodeVersion, "node16")
&& ((executionContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode20") ?? false) || enforceNode20Locally))
{
executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, out var workflowOptOut);
var isWorkflowOptOutSet = !string.IsNullOrWhiteSpace(workflowOptOut);
var isLocalOptOut = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion));
bool isOptOut = isWorkflowOptOutSet ? StringUtil.ConvertToBoolean(workflowOptOut) : isLocalOptOut;
if (!isOptOut)
{
var repoAction = action as Pipelines.RepositoryPathReference;
if (repoAction != null)
{
var warningActions = new HashSet<string>();
if (executionContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode16DetectedAfterEndOfLifeEnvVariable, out var node20ForceWarnings))
{
warningActions = StringUtil.ConvertFromJson<HashSet<string>>(node20ForceWarnings);
}
string repoActionFullName;
if (string.IsNullOrEmpty(repoAction.Name))
{
repoActionFullName = repoAction.Path; // local actions don't have a 'Name'
}
else
{
repoActionFullName = $"{repoAction.Name}/{repoAction.Path ?? string.Empty}".TrimEnd('/') + $"@{repoAction.Ref}";
}
warningActions.Add(repoActionFullName);
executionContext.Global.Variables.Set(Constants.Runner.EnforcedNode16DetectedAfterEndOfLifeEnvVariable, StringUtil.ConvertToJson(warningActions));
}
nodeData.NodeVersion = "node20";
}
}
(handler as INodeScriptActionHandler).Data = nodeData;
}
else if (data.ExecutionType == ActionExecutionType.Script)

View File

@@ -58,10 +58,6 @@ namespace GitHub.Runner.Worker.Handlers
{
Environment["ACTIONS_CACHE_URL"] = cacheUrl;
}
if (systemConnection.Data.TryGetValue("PipelinesServiceUrl", out var pipelinesServiceUrl) && !string.IsNullOrEmpty(pipelinesServiceUrl))
{
Environment["ACTIONS_RUNTIME_URL"] = pipelinesServiceUrl;
}
if (systemConnection.Data.TryGetValue("GenerateIdTokenUrl", out var generateIdTokenUrl) && !string.IsNullOrEmpty(generateIdTokenUrl))
{
Environment["ACTIONS_ID_TOKEN_REQUEST_URL"] = generateIdTokenUrl;
@@ -118,11 +114,6 @@ namespace GitHub.Runner.Worker.Handlers
{
Data.NodeVersion = "node16";
}
if (forcedNodeVersion == "node20" && Data.NodeVersion != "node20")
{
Data.NodeVersion = "node20";
}
var nodeRuntimeVersion = await StepHost.DetermineNodeRuntimeVersion(ExecutionContext, Data.NodeVersion);
string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}");

View File

@@ -83,19 +83,40 @@ namespace GitHub.Runner.Worker.Handlers
shellCommand = "pwsh";
if (validateShellOnHost)
{
shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
shellCommandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath);
}
else
{
shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
}
if (string.IsNullOrEmpty(shellCommandPath))
{
shellCommand = "powershell";
Trace.Info($"Defaulting to {shellCommand}");
shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
shellCommandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath);
}
else
{
shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
}
}
}
#else
shellCommand = "sh";
if (validateShellOnHost)
{
shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
shellCommandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath);
}
else
{
shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
}
}
#endif
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
@@ -106,7 +127,14 @@ namespace GitHub.Runner.Worker.Handlers
shellCommand = parsed.shellCommand;
if (validateShellOnHost)
{
shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath);
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
shellCommandPath = WhichUtil.Which2(parsed.shellCommand, true, Trace, prependPath);
}
else
{
shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath);
}
}
argFormat = $"{parsed.shellArgs}".TrimStart();
@@ -188,17 +216,38 @@ namespace GitHub.Runner.Worker.Handlers
{
#if OS_WINDOWS
shellCommand = "pwsh";
commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
commandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
}
if (string.IsNullOrEmpty(commandPath))
{
shellCommand = "powershell";
Trace.Info($"Defaulting to {shellCommand}");
commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
commandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath);
}
}
ArgUtil.NotNullOrEmpty(commandPath, "Default Shell");
#else
shellCommand = "sh";
commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
commandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath);
}
#endif
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
}
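Since the "DistributedTask.UseWhich2" check is repeated at every lookup in this handler, one hypothetical way to fold it into a single helper (a refactoring sketch, not code in this diff):
// Sketch: a private helper on the handler that hides the feature-flag branch shown above.
private string ResolveCommandPath(string command, bool require, string prependPath)
{
    bool useWhich2 = ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true;
    return useWhich2
        ? WhichUtil.Which2(command, require, Trace, prependPath)
        : WhichUtil.Which(command, require, Trace, prependPath);
}
// Call sites would then read, for example:
//   shellCommandPath = ResolveCommandPath("bash", false, prependPath) ?? ResolveCommandPath("sh", true, prependPath);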
@@ -209,7 +258,14 @@ namespace GitHub.Runner.Worker.Handlers
if (!IsActionStep && systemShells.Contains(shell))
{
shellCommand = shell;
commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath);
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
commandPath = WhichUtil.Which2(shell, !isContainerStepHost, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath);
}
if (shell == "bash")
{
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat("sh");
@@ -224,7 +280,14 @@ namespace GitHub.Runner.Worker.Handlers
var parsed = ScriptHandlerHelpers.ParseShellOptionString(shell);
shellCommand = parsed.shellCommand;
// For non-ContainerStepHost, the command must be located on the host by Which
commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true)
{
commandPath = WhichUtil.Which2(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
}
else
{
commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath);
}
argFormat = $"{parsed.shellArgs}".TrimStart();
if (string.IsNullOrEmpty(argFormat))
{

View File

@@ -392,18 +392,6 @@ namespace GitHub.Runner.Worker
}
}
// Register custom image creation post-job step if the "snapshot" token is present in the message.
var snapshotRequest = templateEvaluator.EvaluateJobSnapshotRequest(message.Snapshot, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
if (snapshotRequest != null)
{
var snapshotOperationProvider = HostContext.GetService<ISnapshotOperationProvider>();
jobContext.RegisterPostJobStep(new JobExtensionRunner(
runAsync: (executionContext, _) => snapshotOperationProvider.CreateSnapshotRequestAsync(executionContext, snapshotRequest),
condition: $"{PipelineTemplateConstants.Success}()",
displayName: $"Create custom image",
data: null));
}
// Register Job Completed hook if the variable is set
var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED");
if (!string.IsNullOrEmpty(completedHookPath))

View File

@@ -42,7 +42,6 @@ namespace GitHub.Runner.Worker
Trace.Info("Job ID {0}", message.JobId);
DateTime jobStartTimeUtc = DateTime.UtcNow;
_runnerSettings = HostContext.GetService<IConfigurationStore>().GetSettings();
IRunnerService server = null;
// add orchestration id to useragent for better correlation.
@@ -50,9 +49,13 @@ namespace GitHub.Runner.Worker
!string.IsNullOrEmpty(orchestrationId.Value))
{
HostContext.UserAgents.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value));
}
// make sure orchestration id is in the user-agent header.
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
var jobServerQueueTelemetry = false;
if (message.Variables.TryGetValue("DistributedTask.EnableJobServerQueueTelemetry", out VariableValue enableJobServerQueueTelemetry) &&
!string.IsNullOrEmpty(enableJobServerQueueTelemetry?.Value))
{
jobServerQueueTelemetry = StringUtil.ConvertToBoolean(enableJobServerQueueTelemetry.Value);
}
ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
@@ -76,7 +79,7 @@ namespace GitHub.Runner.Worker
launchServer.InitializeLaunchClient(new Uri(launchReceiverEndpoint), accessToken);
}
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
_jobServerQueue.Start(message, resultsServiceOnly: true);
_jobServerQueue.Start(message, resultsServiceOnly: true, enableTelemetry: jobServerQueueTelemetry);
}
else
{
@@ -98,7 +101,7 @@ namespace GitHub.Runner.Worker
VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, delegatingHandlers);
await jobServer.ConnectAsync(jobConnection);
_jobServerQueue.Start(message);
_jobServerQueue.Start(message, enableTelemetry: jobServerQueueTelemetry);
server = jobServer;
}
@@ -158,6 +161,8 @@ namespace GitHub.Runner.Worker
jobContext.SetRunnerContext("os", VarUtil.OS);
jobContext.SetRunnerContext("arch", VarUtil.OSArchitecture);
_runnerSettings = HostContext.GetService<IConfigurationStore>().GetSettings();
jobContext.SetRunnerContext("name", _runnerSettings.AgentName);
if (jobContext.Global.Variables.TryGetValue(WellKnownDistributedTaskVariables.RunnerEnvironment, out var runnerEnvironment))
@@ -290,14 +295,6 @@ namespace GitHub.Runner.Worker
jobContext.Warning(string.Format(Constants.Runner.EnforcedNode12DetectedAfterEndOfLife, actions));
}
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode16DetectedAfterEndOfLifeEnvVariable, out var node20ForceWarnings) && (jobContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode20") ?? false))
{
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node20ForceWarnings));
jobContext.Warning(string.Format(Constants.Runner.EnforcedNode16DetectedAfterEndOfLife, actions));
}
await ShutdownQueue(throwOnFailure: false);
// Make sure to clean temp after file upload since they may be pending fileupload still use the TEMP dir.
_tempDirectoryManager?.CleanupTempDirectory();
@@ -403,12 +400,6 @@ namespace GitHub.Runner.Worker
jobContext.Warning(string.Format(Constants.Runner.EnforcedNode12DetectedAfterEndOfLife, actions));
}
if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode16DetectedAfterEndOfLifeEnvVariable, out var node20ForceWarnings))
{
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node20ForceWarnings));
jobContext.Warning(string.Format(Constants.Runner.EnforcedNode16DetectedAfterEndOfLife, actions));
}
try
{
var jobQueueTelemetry = await ShutdownQueue(throwOnFailure: true);

View File

@@ -1,32 +0,0 @@
#nullable enable
using System.IO;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Worker;
[ServiceLocator(Default = typeof(SnapshotOperationProvider))]
public interface ISnapshotOperationProvider : IRunnerService
{
Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest);
}
public class SnapshotOperationProvider : RunnerService, ISnapshotOperationProvider
{
public Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest)
{
var snapshotRequestFilePath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), ".snapshot", "request.json");
var snapshotRequestDirectoryPath = Path.GetDirectoryName(snapshotRequestFilePath);
if (snapshotRequestDirectoryPath != null)
{
Directory.CreateDirectory(snapshotRequestDirectoryPath);
}
IOUtil.SaveObject(snapshotRequest, snapshotRequestFilePath);
executionContext.Output($"Request written to: {snapshotRequestFilePath}");
executionContext.Output("This request will be processed after the job completes. You will not receive any feedback on the snapshot process within the workflow logs of this job.");
executionContext.Output("If the snapshot process is successful, you should see a new image with the requested name in the list of available custom images when creating a new GitHub-hosted Runner.");
return Task.CompletedTask;
}
}

View File

@@ -295,7 +295,7 @@ namespace GitHub.Runner.Worker
!jobCancellationToken.IsCancellationRequested)
{
Trace.Error($"Caught timeout exception from step: {ex.Message}");
step.ExecutionContext.Error($"The action '{step.DisplayName}' has timed out after {timeoutMinutes} minutes.");
step.ExecutionContext.Error("The action has timed out.");
step.ExecutionContext.Result = TaskResult.Failed;
}
else

View File

@@ -1,11 +1,12 @@
using System;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
namespace GitHub.Services.Common.Internal
{
[EditorBrowsable(EditorBrowsableState.Never)]
public static class RawHttpHeaders
{
public const String SessionHeader = "X-Actions-Session";
public const String SessionHeader = "X-Runner-Session";
}
}

View File

@@ -138,8 +138,6 @@ namespace GitHub.Services.Common
response.Dispose();
}
this.Settings.ApplyTo(request);
// Let's start with sending a token
IssuedToken token = null;
if (m_tokenProvider != null)

View File

@@ -214,7 +214,25 @@ namespace GitHub.Services.Common
// ConfigureAwait(false) enables the continuation to be run outside any captured
// SyncronizationContext (such as ASP.NET's) which keeps things from deadlocking...
response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false);
var tmpResponse = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false);
if (Settings.AllowAutoRedirectForBroker && tmpResponse.StatusCode == HttpStatusCode.Redirect)
{
// Capture the redirect target, then dispose of the previous response
var location = tmpResponse.Headers.Location;
tmpResponse.Dispose();
request = new HttpRequestMessage(HttpMethod.Get, location);
// Reapply the token to new redirected request
ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
// Resend the request
response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false);
}
else
{
response = tmpResponse;
}
traceInfo?.TraceRequestSendTime();
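To make the broker redirect handling concrete: automatic redirects are left off, so on a 302 the client issues a fresh GET to the Location header and reattaches the token, which an automatic redirect would typically drop. A hedged, stand-alone illustration with plain HttpClient (names and token handling simplified; not the repository's implementation):
using System;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
static class BrokerRedirectSketch
{
    // Follow a single 302 manually so the bearer token can be reapplied to the redirected request.
    public static async Task<HttpResponseMessage> SendFollowingRedirectAsync(
        HttpClient client, HttpRequestMessage request, string accessToken, CancellationToken token)
    {
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", accessToken);
        HttpResponseMessage response = await client.SendAsync(request, token);
        if (response.StatusCode == HttpStatusCode.Redirect && response.Headers.Location != null)
        {
            Uri location = response.Headers.Location;
            response.Dispose();
            var redirected = new HttpRequestMessage(HttpMethod.Get, location);
            redirected.Headers.Authorization = new AuthenticationHeaderValue("Bearer", accessToken);
            response = await client.SendAsync(redirected, token);
        }
        return response;
    }
}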

View File

@@ -110,6 +110,16 @@ namespace GitHub.Services.Common
set;
}
/// <summary>
/// Gets or sets a value indicating whether or not HttpClientHandler should follow redirects on outgoing broker requests.
/// This is special since it also sends the token on the redirected request, whereas the default AllowAutoRedirect does not.
/// </summary>
public Boolean AllowAutoRedirectForBroker
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether or not compression should be used on outgoing requests.
/// The default value is true.

View File

@@ -43,7 +43,6 @@ namespace GitHub.DistributedTask.Pipelines
TemplateToken jobOutputs,
IList<TemplateToken> defaults,
ActionsEnvironmentReference actionsEnvironment,
TemplateToken snapshot,
String messageType = JobRequestMessageTypes.PipelineAgentJobRequest)
{
this.MessageType = messageType;
@@ -58,7 +57,6 @@ namespace GitHub.DistributedTask.Pipelines
this.Workspace = workspaceOptions;
this.JobOutputs = jobOutputs;
this.ActionsEnvironment = actionsEnvironment;
this.Snapshot = snapshot;
m_variables = new Dictionary<String, VariableValue>(variables, StringComparer.OrdinalIgnoreCase);
m_maskHints = new List<MaskHint>(maskHints);
m_steps = new List<JobStep>(steps);
@@ -239,13 +237,6 @@ namespace GitHub.DistributedTask.Pipelines
set;
}
[DataMember(EmitDefaultValue = false)]
public TemplateToken Snapshot
{
get;
set;
}
/// <summary>
/// Gets the collection of variables associated with the current context.
/// </summary>

View File

@@ -29,7 +29,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String Id = "id";
public const String If = "if";
public const String Image = "image";
public const String ImageName = "image-name";
public const String Include = "include";
public const String Inputs = "inputs";
public const String Job = "job";
@@ -61,7 +60,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String Services = "services";
public const String Shell = "shell";
public const String Skipped = "skipped";
public const String Snapshot = "snapshot";
public const String StepEnv = "step-env";
public const String StepIfResult = "step-if-result";
public const String StepWith = "step-with";

View File

@@ -346,39 +346,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
internal static Snapshot ConvertToJobSnapshotRequest(TemplateContext context, TemplateToken token)
{
string imageName = null;
if (token is StringToken snapshotStringLiteral)
{
imageName = snapshotStringLiteral.Value;
}
else
{
var snapshotMapping = token.AssertMapping($"{PipelineTemplateConstants.Snapshot}");
foreach (var snapshotPropertyPair in snapshotMapping)
{
var propertyName = snapshotPropertyPair.Key.AssertString($"{PipelineTemplateConstants.Snapshot} key");
switch (propertyName.Value)
{
case PipelineTemplateConstants.ImageName:
imageName = snapshotPropertyPair.Value.AssertString($"{PipelineTemplateConstants.Snapshot} {propertyName}").Value;
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Snapshot} key");
break;
}
}
}
if (String.IsNullOrEmpty(imageName))
{
return null;
}
return new Snapshot(imageName);
}
private static ActionStep ConvertToStep(
TemplateContext context,
TemplateToken stepsItem,

View File

@@ -370,32 +370,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
public Snapshot EvaluateJobSnapshotRequest(TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Snapshot);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Snapshot, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToJobSnapshotRequest(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
private TemplateContext CreateContext(
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,

View File

@@ -1,17 +0,0 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.Pipelines
{
[DataContract]
public class Snapshot
{
public Snapshot(string imageName)
{
ImageName = imageName;
}
[DataMember(EmitDefaultValue = false)]
public String ImageName { get; set; }
}
}

View File

@@ -71,8 +71,7 @@
"env": "job-env",
"outputs": "job-outputs",
"defaults": "job-defaults",
"steps": "steps",
"snapshot": "snapshot"
"steps": "steps"
}
}
},
@@ -156,24 +155,6 @@
}
},
"snapshot": {
"one-of": [
"non-empty-string",
"snapshot-mapping"
]
},
"snapshot-mapping": {
"mapping": {
"properties": {
"image-name": {
"type": "non-empty-string",
"required": true
}
}
}
},
"runs-on": {
"context": [
"github",

View File

@@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Common;
using GitHub.Services.Common.Diagnostics;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.WebApi
{
[ResourceArea(TaskResourceIds.AreaId)]
public class ActionsRunServerHttpClient : TaskAgentHttpClient
{
private static readonly JsonSerializerSettings s_serializerSettings;
static ActionsRunServerHttpClient()
{
s_serializerSettings = new VssJsonMediaTypeFormatter().SerializerSettings;
s_serializerSettings.DateParseHandling = DateParseHandling.None;
s_serializerSettings.FloatParseHandling = FloatParseHandling.Double;
}
public ActionsRunServerHttpClient(
Uri baseUrl,
VssCredentials credentials)
: base(baseUrl, credentials)
{
}
public ActionsRunServerHttpClient(
Uri baseUrl,
VssCredentials credentials,
VssHttpRequestSettings settings)
: base(baseUrl, credentials, settings)
{
}
public ActionsRunServerHttpClient(
Uri baseUrl,
VssCredentials credentials,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, handlers)
{
}
public ActionsRunServerHttpClient(
Uri baseUrl,
VssCredentials credentials,
VssHttpRequestSettings settings,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, settings, handlers)
{
}
public ActionsRunServerHttpClient(
Uri baseUrl,
HttpMessageHandler pipeline,
Boolean disposeHandler)
: base(baseUrl, pipeline, disposeHandler)
{
}
public Task<Pipelines.AgentJobRequestMessage> GetJobMessageAsync(
string messageId,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("GET");
Guid locationId = new Guid("25adab70-1379-4186-be8e-b643061ebe3a");
object routeValues = new { messageId = messageId };
return SendAsync<Pipelines.AgentJobRequestMessage>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken);
}
protected override async Task<T> ReadJsonContentAsync<T>(HttpResponseMessage response, CancellationToken cancellationToken = default(CancellationToken))
{
var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
return JsonConvert.DeserializeObject<T>(json, s_serializerSettings);
}
}
}
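The client above pins DateParseHandling and FloatParseHandling before deserializing job messages. A small self-contained sketch of the effect, assuming Newtonsoft.Json and an illustrative payload: with DateParseHandling.None, ISO-8601-looking strings stay strings instead of being converted to date tokens.

using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

class DateParsingDemo
{
    static void Main()
    {
        var pinned = new JsonSerializerSettings
        {
            DateParseHandling = DateParseHandling.None,    // keep date-like strings as strings
            FloatParseHandling = FloatParseHandling.Double // read floating-point numbers as double
        };

        var json = "{\"scheduled\":\"2023-11-15T14:58:29Z\"}";

        // With the pinned settings the value remains a string token...
        var kept = JsonConvert.DeserializeObject<JObject>(json, pinned);
        Console.WriteLine(kept["scheduled"].Type);      // String

        // ...while the default reader recognizes it as a date and converts it.
        var converted = JsonConvert.DeserializeObject<JObject>(json);
        Console.WriteLine(converted["scheduled"].Type); // Date
    }
}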

View File

@@ -1,38 +0,0 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
/// <summary>
/// Message that tells the runner to redirect itself to BrokerListener for messages.
/// (Note that we use a special Message instead of a simple 302. This is because
/// the runner will need to apply the runner's token to the request, and it is
/// a security best practice to *not* blindly add sensitive data to
/// redirect (302) responses.)
/// </summary>
[DataContract]
public class BrokerMigrationMessage
{
public static readonly string MessageType = "BrokerMigration";
public BrokerMigrationMessage()
{
}
public BrokerMigrationMessage(
Uri brokerUrl)
{
this.BrokerBaseUrl = brokerUrl;
}
/// <summary>
/// The base url for the broker listener
/// </summary>
[DataMember]
public Uri BrokerBaseUrl
{
get;
internal set;
}
}
}
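A sketch, not the listener's real code, of the pattern this message type enables: treat a BrokerMigration message as data rather than as a redirect, and issue a fresh, explicitly authenticated request against BrokerBaseUrl. The message shape mirrors the type above; the endpoint path, token handling, and HttpClient usage are assumptions.

using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;
using Newtonsoft.Json;

public class BrokerMigrationMessage
{
    public static readonly string MessageType = "BrokerMigration";
    public Uri BrokerBaseUrl { get; set; }
}

public static class BrokerMigrationSketch
{
    public static async Task<HttpResponseMessage> FollowAsync(
        HttpClient client, string messageType, string messageBody, string runnerToken)
    {
        if (!string.Equals(messageType, BrokerMigrationMessage.MessageType, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException("Not a broker migration message.");
        }

        var migration = JsonConvert.DeserializeObject<BrokerMigrationMessage>(messageBody);

        // Build a brand-new request and attach the token deliberately, rather than
        // letting a 302 response carry the Authorization header along implicitly.
        // The "message" path segment is a placeholder, not the broker's real route.
        var request = new HttpRequestMessage(HttpMethod.Get, new Uri(migration.BrokerBaseUrl, "message"));
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", runnerToken);
        return await client.SendAsync(request);
    }
}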

View File

@@ -2498,25 +2498,6 @@ namespace GitHub.DistributedTask.WebApi
}
}
[Serializable]
public class NonRetryableActionDownloadInfoException : DistributedTaskException
{
public NonRetryableActionDownloadInfoException(String message)
: base(message)
{
}
public NonRetryableActionDownloadInfoException(String message, Exception innerException)
: base(message, innerException)
{
}
protected NonRetryableActionDownloadInfoException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
[Serializable]
public sealed class FailedToResolveActionDownloadInfoException : DistributedTaskException
{

View File

@@ -141,6 +141,24 @@ namespace GitHub.DistributedTask.WebApi
return ReplaceAgentAsync(poolId, agent.Id, agent, userState, cancellationToken);
}
public Task<Pipelines.AgentJobRequestMessage> GetJobMessageAsync(
string messageId,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("GET");
Guid locationId = new Guid("25adab70-1379-4186-be8e-b643061ebe3a");
object routeValues = new { messageId = messageId };
return SendAsync<Pipelines.AgentJobRequestMessage>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken);
}
protected Task<T> SendAsync<T>(
HttpMethod method,
Guid locationId,

View File

@@ -1,10 +0,0 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
public sealed class TaskAgentMessageTypes
{
public static readonly string ForceTokenRefresh = "ForceTokenRefresh";
}
}

View File

@@ -75,12 +75,5 @@ namespace GitHub.DistributedTask.WebApi
get;
set;
}
[DataMember(EmitDefaultValue = false, IsRequired = false)]
public BrokerMigrationMessage BrokerMigrationMessage
{
get;
set;
}
}
}

View File

@@ -101,7 +101,6 @@ namespace GitHub.DistributedTask.WebApi
public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment";
public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog";
public static readonly String ResultsLog = "Results.Core.Log";
public static readonly String ResultsDiagnosticLog = "Results.Core.DiagnosticLog";
}
[GenerateAllConstants]

View File

@@ -7,8 +7,5 @@ namespace GitHub.Actions.RunService.WebApi
{
[DataMember(Name = "jobMessageId", EmitDefaultValue = false)]
public string JobMessageId { get; set; }
[DataMember(Name = "runnerOS", EmitDefaultValue = false)]
public string RunnerOS { get; set; }
}
}

View File

@@ -9,7 +9,6 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi;
@@ -17,15 +16,6 @@ namespace GitHub.Actions.RunService.WebApi
{
public class RunServiceHttpClient : RawHttpClientBase
{
private static readonly JsonSerializerSettings s_serializerSettings;
static RunServiceHttpClient()
{
s_serializerSettings = new VssJsonMediaTypeFormatter().SerializerSettings;
s_serializerSettings.DateParseHandling = DateParseHandling.None;
s_serializerSettings.FloatParseHandling = FloatParseHandling.Double;
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials)
@@ -69,14 +59,12 @@ namespace GitHub.Actions.RunService.WebApi
public async Task<AgentJobRequestMessage> GetJobMessageAsync(
Uri requestUri,
string messageId,
string runnerOS,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new AcquireJobRequest
{
JobMessageId = messageId,
RunnerOS = runnerOS
};
requestUri = new Uri(requestUri, "acquirejob");
@@ -184,11 +172,5 @@ namespace GitHub.Actions.RunService.WebApi
throw new Exception($"Failed to renew job: {result.Error}");
}
}
protected override async Task<T> ReadJsonContentAsync<T>(HttpResponseMessage response, CancellationToken cancellationToken = default(CancellationToken))
{
var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
return JsonConvert.DeserializeObject<T>(json, s_serializerSettings);
}
}
}
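A simplified, self-contained sketch of the acquirejob call shape shown above: POST the job message id to the run service and read the response back. The "jobMessageId" property name and the "acquirejob" route come from the hunk; authentication, the base URL, and the response handling are placeholders.

using System;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using Newtonsoft.Json;

public class AcquireJobRequest
{
    [JsonProperty("jobMessageId")]
    public string JobMessageId { get; set; }
}

public static class AcquireJobSketch
{
    public static async Task<string> AcquireAsync(HttpClient client, Uri runServiceUri, string messageId)
    {
        var payload = new AcquireJobRequest { JobMessageId = messageId };
        var content = new StringContent(JsonConvert.SerializeObject(payload), Encoding.UTF8, "application/json");

        // Mirrors `new Uri(requestUri, "acquirejob")` in the client above.
        using var response = await client.PostAsync(new Uri(runServiceUri, "acquirejob"), content);
        response.EnsureSuccessStatusCode();

        // The real client deserializes this JSON into Pipelines.AgentJobRequestMessage.
        return await response.Content.ReadAsStringAsync();
    }
}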

View File

@@ -13,7 +13,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Azure.Storage.Blobs" Version="12.19.1" />
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Microsoft.AspNet.WebApi.Client" Version="5.2.9" />

View File

@@ -57,7 +57,6 @@ namespace GitHub.Actions.RunService.WebApi
}
public async Task<TaskAgentMessage> GetRunnerMessageAsync(
Guid? sessionId,
string runnerVersion,
TaskAgentStatus? status,
string os = null,
@@ -70,11 +69,6 @@ namespace GitHub.Actions.RunService.WebApi
List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
if (sessionId != null)
{
queryParams.Add("sessionId", sessionId.Value.ToString());
}
if (status != null)
{
queryParams.Add("status", status.Value.ToString());
@@ -110,67 +104,12 @@ namespace GitHub.Actions.RunService.WebApi
return result.Value;
}
// the only time Listener /messages returns `Forbidden` is when the runner has
// disable_update set and is too old to poll
if (result.StatusCode == HttpStatusCode.Forbidden)
{
throw new AccessDeniedException($"{result.Error} Runner version v{runnerVersion} is deprecated and cannot receive messages.")
{
ErrorCode = 1
};
}
throw new Exception($"Failed to get job message: {result.Error}");
}
public async Task<TaskAgentSession> CreateSessionAsync(
TaskAgentSession session,
CancellationToken cancellationToken = default)
{
var requestUri = new Uri(Client.BaseAddress, "session");
var requestContent = new ObjectContent<TaskAgentSession>(session, new VssJsonMediaTypeFormatter(true));
var result = await SendAsync<TaskAgentSession>(
new HttpMethod("POST"),
requestUri: requestUri,
content: requestContent,
cancellationToken: cancellationToken);
if (result.IsSuccess)
{
return result.Value;
}
if (result.StatusCode == HttpStatusCode.Forbidden)
{
throw new AccessDeniedException(result.Error);
}
if (result.StatusCode == HttpStatusCode.Conflict)
{
throw new TaskAgentSessionConflictException(result.Error);
}
throw new Exception($"Failed to create broker session: {result.Error}");
}
public async Task DeleteSessionAsync(
CancellationToken cancellationToken = default)
{
var requestUri = new Uri(Client.BaseAddress, $"session");
var result = await SendAsync<object>(
new HttpMethod("DELETE"),
requestUri: requestUri,
cancellationToken: cancellationToken);
if (result.IsSuccess)
{
return;
}
throw new Exception($"Failed to delete broker session: {result.Error}");
throw new Exception($"Failed to get job message: {result.Error}");
}
}
}

View File

@@ -89,26 +89,6 @@ namespace GitHub.Services.Results.Contracts
public long SoftSizeLimit;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedDiagnosticLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedDiagnosticLogsURLResponse
{
[DataMember]
public string DiagLogsURL;
[DataMember]
public string BlobStorageType;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class JobLogsMetadataCreate

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net.Http;
@@ -7,11 +8,8 @@ using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using System.Net.Http.Formatting;
using Azure;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;
using Azure.Storage.Blobs.Specialized;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.Results.Contracts;
using Sdk.WebApi.WebApi;
@@ -23,15 +21,13 @@ namespace GitHub.Services.Results.Client
Uri baseUrl,
HttpMessageHandler pipeline,
string token,
bool disposeHandler,
bool useSdk)
bool disposeHandler)
: base(baseUrl, pipeline, disposeHandler)
{
m_token = token;
m_resultsServiceUrl = baseUrl;
m_formatter = new JsonMediaTypeFormatter();
m_changeIdCounter = 1;
m_useSdk = useSdk;
}
// Get Sas URL calls
@@ -81,19 +77,6 @@ namespace GitHub.Services.Results.Client
return await GetResultsSignedURLResponse<GetSignedStepLogsURLRequest, GetSignedStepLogsURLResponse>(getStepLogsSignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedDiagnosticLogsURLResponse> GetDiagnosticLogsUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken)
{
var request = new GetSignedDiagnosticLogsURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
};
var getDiagnosticLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetJobDiagLogsSignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedDiagnosticLogsURLRequest, GetSignedDiagnosticLogsURLResponse>(getDiagnosticLogsSignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedJobLogsURLResponse> GetJobLogUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken)
{
var request = new GetSignedJobLogsURLRequest()
@@ -108,6 +91,7 @@ namespace GitHub.Services.Results.Client
}
// Create metadata calls
private async Task SendRequest<R>(Uri uri, CancellationToken cancellationToken, R request, string timestamp)
{
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
@@ -177,219 +161,73 @@ namespace GitHub.Services.Results.Client
await SendRequest<JobLogsMetadataCreate>(createJobLogsMetadataEndpoint, cancellationToken, request, timestamp);
}
private (Uri path, string sas) ParseSasToken(string url)
private async Task<HttpResponseMessage> UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
{
if (String.IsNullOrEmpty(url))
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url)
{
throw new Exception($"SAS url is empty");
}
var blobUri = new UriBuilder(url);
var sasUrl = blobUri.Query.Substring(1); //remove starting "?"
blobUri.Query = null; // remove query params
return (blobUri.Uri, sasUrl);
}
private BlobClient GetBlobClient(string url)
{
var blobUri = ParseSasToken(url);
var opts = new BlobClientOptions
{
Retry =
{
MaxRetries = Constants.DefaultBlobUploadRetries,
NetworkTimeout = TimeSpan.FromSeconds(Constants.DefaultNetworkTimeoutInSeconds)
}
Content = new StreamContent(file)
};
return new BlobClient(blobUri.path, new AzureSasCredential(blobUri.sas), opts);
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
}
return response;
}
}
private AppendBlobClient GetAppendBlobClient(string url)
private async Task<HttpResponseMessage> CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken)
{
var blobUri = ParseSasToken(url);
var opts = new BlobClientOptions
var request = new HttpRequestMessage(HttpMethod.Put, url)
{
Retry =
Content = new StringContent("")
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
request.Content.Headers.Add("Content-Length", "0");
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
MaxRetries = Constants.DefaultBlobUploadRetries,
NetworkTimeout = TimeSpan.FromSeconds(Constants.DefaultNetworkTimeoutInSeconds)
throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
}
return response;
}
}
private async Task<HttpResponseMessage> UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
{
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
{
Content = new StreamContent(file)
};
return new AppendBlobClient(blobUri.path, new AzureSasCredential(blobUri.sas), opts);
}
private async Task UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken, Dictionary<string, string> customHeaders = null)
{
if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage)
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
var blobClient = GetBlobClient(url);
var httpHeaders = new BlobHttpHeaders();
if (customHeaders != null)
{
foreach (var header in customHeaders)
{
switch (header.Key)
{
case Constants.ContentTypeHeader:
httpHeaders.ContentType = header.Value;
break;
}
}
}
try
{
await blobClient.UploadAsync(file, new BlobUploadOptions()
{
HttpHeaders = httpHeaders,
Conditions = new BlobRequestConditions
{
IfNoneMatch = new ETag("*")
}
}, cancellationToken);
}
catch (RequestFailedException e)
{
throw new Exception($"Failed to upload block to Azure blob: {e.Message}");
}
request.Content.Headers.Add("Content-Length", fileSize.ToString());
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
}
else
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url)
if (!response.IsSuccessStatusCode)
{
Content = new StreamContent(file)
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
}
if (customHeaders != null)
{
foreach (var header in customHeaders)
{
request.Content.Headers.Add(header.Key, header.Value);
}
};
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
}
}
}
}
private async Task CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken, Dictionary<string, string> customHeaders = null)
{
if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
var appendBlobClient = GetAppendBlobClient(url);
var httpHeaders = new BlobHttpHeaders();
if (customHeaders != null)
{
foreach (var header in customHeaders)
{
switch (header.Key)
{
case Constants.ContentTypeHeader:
httpHeaders.ContentType = header.Value;
break;
}
}
}
try
{
await appendBlobClient.CreateAsync(new AppendBlobCreateOptions()
{
HttpHeaders = httpHeaders,
Conditions = new AppendBlobRequestConditions
{
IfNoneMatch = new ETag("*")
}
}, cancellationToken: cancellationToken);
}
catch (RequestFailedException e)
{
throw new Exception($"Failed to create append blob in Azure blob: {e.Message}");
}
}
else
{
var request = new HttpRequestMessage(HttpMethod.Put, url)
{
Content = new StringContent("")
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
request.Content.Headers.Add("Content-Length", "0");
}
if (customHeaders != null)
{
foreach (var header in customHeaders)
{
request.Content.Headers.Add(header.Key, header.Value);
}
};
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
}
}
}
}
private async Task UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
{
if (m_useSdk && blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
var appendBlobClient = GetAppendBlobClient(url);
try
{
await appendBlobClient.AppendBlockAsync(file, cancellationToken: cancellationToken);
if (finalize)
{
await appendBlobClient.SealAsync(cancellationToken: cancellationToken);
}
}
catch (RequestFailedException e)
{
throw new Exception($"Failed to upload append block in Azure blob: {e.Message}");
}
}
else
{
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
{
Content = new StreamContent(file)
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add("Content-Length", fileSize.ToString());
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
}
throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
}
return response;
}
}
@@ -413,22 +251,23 @@ namespace GitHub.Services.Results.Client
// Upload the file
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
var response = await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
}
// Send step summary upload complete message
await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken);
}
private async Task UploadLogFile(string file, bool finalize, bool firstBlock, string sasUrl, string blobStorageType,
CancellationToken cancellationToken, Dictionary<string, string> customHeaders = null)
private async Task<HttpResponseMessage> UploadLogFile(string file, bool finalize, bool firstBlock, string sasUrl, string blobStorageType,
CancellationToken cancellationToken)
{
HttpResponseMessage response;
if (firstBlock && finalize)
{
// This is the one and only block, just use a block blob
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
await UploadBlockFileAsync(sasUrl, blobStorageType, fileStream, cancellationToken, customHeaders);
response = await UploadBlockFileAsync(sasUrl, blobStorageType, fileStream, cancellationToken);
}
}
else
@@ -437,16 +276,18 @@ namespace GitHub.Services.Results.Client
// Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(sasUrl, blobStorageType, cancellationToken, customHeaders);
await CreateAppendFileAsync(sasUrl, blobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
await UploadAppendFileAsync(sasUrl, blobStorageType, fileStream, finalize, fileSize, cancellationToken);
response = await UploadAppendFileAsync(sasUrl, blobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
}
return response;
}
// Handle file upload for step log
@@ -459,12 +300,7 @@ namespace GitHub.Services.Results.Client
throw new Exception("Failed to get step log upload url");
}
var customHeaders = new Dictionary<string, string>
{
{ Constants.ContentTypeHeader, Constants.TextPlainContentType }
};
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken, customHeaders);
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
// Update metadata
if (finalize)
@@ -484,12 +320,7 @@ namespace GitHub.Services.Results.Client
throw new Exception("Failed to get job log upload url");
}
var customHeaders = new Dictionary<string, string>
{
{ Constants.ContentTypeHeader, Constants.TextPlainContentType }
};
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken, customHeaders);
await UploadLogFile(file, finalize, firstBlock, uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
// Update metadata
if (finalize)
@@ -499,18 +330,6 @@ namespace GitHub.Services.Results.Client
}
}
public async Task UploadResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetDiagnosticLogsUploadUrlAsync(planId, jobId, cancellationToken);
if (uploadUrlResponse == null || uploadUrlResponse.DiagLogsURL == null)
{
throw new Exception("Failed to get diagnostic logs upload url");
}
await UploadLogFile(file, true, true, uploadUrlResponse.DiagLogsURL, uploadUrlResponse.BlobStorageType, cancellationToken);
}
private Step ConvertTimelineRecordToStep(TimelineRecord r)
{
return new Step()
@@ -586,7 +405,6 @@ namespace GitHub.Services.Results.Client
private Uri m_resultsServiceUrl;
private string m_token;
private int m_changeIdCounter;
private bool m_useSdk;
}
// Constants specific to results
@@ -601,20 +419,13 @@ namespace GitHub.Services.Results.Client
public static readonly string CreateStepLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateStepLogsMetadata";
public static readonly string GetJobLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetJobLogsSignedBlobURL";
public static readonly string CreateJobLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateJobLogsMetadata";
public static readonly string GetJobDiagLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetJobDiagLogsSignedBlobURL";
public static readonly string ResultsProtoApiV1Endpoint = "twirp/github.actions.results.api.v1.WorkflowStepUpdateService/";
public static readonly string WorkflowStepsUpdate = ResultsProtoApiV1Endpoint + "WorkflowStepsUpdate";
public static readonly int DefaultNetworkTimeoutInSeconds = 30;
public static readonly int DefaultBlobUploadRetries = 3;
public static readonly string AzureBlobSealedHeader = "x-ms-blob-sealed";
public static readonly string AzureBlobTypeHeader = "x-ms-blob-type";
public static readonly string AzureBlockBlob = "BlockBlob";
public static readonly string AzureAppendBlob = "AppendBlob";
public const string ContentTypeHeader = "Content-Type";
public const string TextPlainContentType = "text/plain";
}
}
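Condensing the raw append-blob path above into a self-contained sketch: create the blob with x-ms-blob-type: AppendBlob, then PUT the content to the appendblock endpoint, sealing on the final block. Header names and the query suffix mirror the constants in the hunk; the SAS URL, single-block upload, and error handling are simplified assumptions.

using System.IO;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

public static class AppendUploadSketch
{
    // Assumes sasUrl already carries a query string, as the client above does.
    public static async Task UploadAsync(HttpClient client, string sasUrl, string filePath, CancellationToken ct)
    {
        // 1. Create an empty append blob at the signed URL.
        using (var create = new HttpRequestMessage(HttpMethod.Put, sasUrl) { Content = new StringContent("") })
        {
            create.Content.Headers.Add("x-ms-blob-type", "AppendBlob");
            using var createResponse = await client.SendAsync(create, ct);
            createResponse.EnsureSuccessStatusCode();
        }

        // 2. Append the whole file in one block and seal it, matching the
        //    "&comp=appendblock&seal=true" suffix used by the client above.
        using (var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read))
        using (var append = new HttpRequestMessage(HttpMethod.Put, sasUrl + "&comp=appendblock&seal=true")
        {
            Content = new StreamContent(stream)
        })
        {
            append.Content.Headers.Add("x-ms-blob-sealed", "true");
            using var appendResponse = await client.SendAsync(append, ct);
            appendResponse.EnsureSuccessStatusCode();
        }
    }
}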

View File

@@ -172,6 +172,43 @@ namespace GitHub.Runner.Common.Tests
}
}
[Theory]
[InlineData("randomString", "0", "VERB", "FALSELEVEL")]
[InlineData("", "-1", "", "INFO")]
[InlineData("", "6", "VERB", "FALSELEVEL")]
[InlineData("", "99", "VERB", "FALSELEVEL")]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void TraceLevel(string trace, string traceLevel, string mustContainTraceLevel, string mustNotContainTraceLevel)
{
try
{
Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TRACE", trace);
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_TRACE_LEVEL", traceLevel);
// Arrange.
Setup();
_hc.SetDefaultCulture("hu-HU"); // logs [VERB] if traceLevel allows it
// Assert.
var logFile = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), $"trace_{nameof(HostContextL0)}_{nameof(TraceLevel)}.log");
var tempFile = Path.GetTempFileName();
File.Delete(tempFile);
File.Copy(logFile, tempFile);
var content = File.ReadAllText(tempFile);
Assert.Contains($"{mustContainTraceLevel}", content);
Assert.DoesNotContain($"{mustNotContainTraceLevel}", content);
}
finally
{
Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TRACE", null);
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_TRACE_LEVEL", null);
// Cleanup.
Teardown();
}
}
private void Setup([CallerMemberName] string testName = "")
{
_tokenSource = new CancellationTokenSource();
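The test above exercises two environment variables. A rough resolver sketch consistent with its InlineData, offered as an assumption rather than the runner's actual logic: a non-empty GITHUB_ACTIONS_RUNNER_TRACE forces verbose tracing for back compat, and ACTIONS_RUNNER_TRACE_LEVEL is consulted only when the legacy variable is unset; out-of-range values are deliberately not modeled here.

using System;
using System.Diagnostics;

public static class TraceLevelSketch
{
    public static TraceLevel Resolve()
    {
        // Legacy switch wins: any non-empty value means verbose tracing.
        if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TRACE")))
        {
            return TraceLevel.Verbose;
        }

        // Otherwise honor ACTIONS_RUNNER_TRACE_LEVEL when it names a valid level.
        var raw = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_TRACE_LEVEL");
        if (Enum.TryParse<TraceLevel>(raw, ignoreCase: true, out var level)
            && Enum.IsDefined(typeof(TraceLevel), level))
        {
            return level;
        }

        return TraceLevel.Info; // default when unset or unparsable
    }
}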

View File

@@ -1,81 +0,0 @@
using System;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using GitHub.Services.Common;
using Moq;
using Xunit;
namespace GitHub.Runner.Common.Tests.Listener
{
public sealed class BrokerMessageListenerL0
{
private readonly RunnerSettings _settings;
private readonly Mock<IConfigurationManager> _config;
private readonly Mock<IBrokerServer> _brokerServer;
private readonly Mock<ICredentialManager> _credMgr;
private Mock<IConfigurationStore> _store;
public BrokerMessageListenerL0()
{
_settings = new RunnerSettings { AgentId = 1, AgentName = "myagent", PoolId = 123, PoolName = "default", ServerUrl = "http://myserver", WorkFolder = "_work", ServerUrlV2 = "http://myserverv2" };
_config = new Mock<IConfigurationManager>();
_config.Setup(x => x.LoadSettings()).Returns(_settings);
_credMgr = new Mock<ICredentialManager>();
_store = new Mock<IConfigurationStore>();
_brokerServer = new Mock<IBrokerServer>();
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void CreatesSession()
{
using (TestHostContext tc = CreateTestContext())
using (var tokenSource = new CancellationTokenSource())
{
Tracing trace = tc.GetTrace();
// Arrange.
var expectedSession = new TaskAgentSession();
_brokerServer
.Setup(x => x.CreateSessionAsync(
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token))
.Returns(Task.FromResult(expectedSession));
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
// Act.
BrokerMessageListener listener = new();
listener.Initialize(tc);
CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token);
trace.Info("result: {0}", result);
// Assert.
Assert.Equal(CreateSessionResult.Success, result);
_brokerServer
.Verify(x => x.CreateSessionAsync(
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token), Times.Once());
}
}
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
{
TestHostContext tc = new(this, testName);
tc.SetSingleton<IConfigurationManager>(_config.Object);
tc.SetSingleton<ICredentialManager>(_credMgr.Object);
tc.SetSingleton<IConfigurationStore>(_store.Object);
tc.SetSingleton<IBrokerServer>(_brokerServer.Object);
return tc;
}
}
}

View File

@@ -41,7 +41,7 @@ namespace GitHub.Runner.Common.Tests.Listener
TaskOrchestrationPlanReference plan = new();
TimelineReference timeline = null;
Guid jobId = Guid.NewGuid();
var result = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "someJob", "someJob", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
var result = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "someJob", "someJob", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
result.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
return result;
}
@@ -806,8 +806,7 @@ namespace GitHub.Runner.Common.Tests.Listener
},
null,
new List<TemplateToken>(),
new ActionsEnvironmentReference("env"),
null
new ActionsEnvironmentReference("env")
);
return message;
}

View File

@@ -24,8 +24,6 @@ namespace GitHub.Runner.Common.Tests.Listener
private Mock<ICredentialManager> _credMgr;
private Mock<IConfigurationStore> _store;
private Mock<IBrokerServer> _brokerServer;
public MessageListenerL0()
{
_settings = new RunnerSettings { AgentId = 1, AgentName = "myagent", PoolId = 123, PoolName = "default", ServerUrl = "http://myserver", WorkFolder = "_work" };
@@ -34,7 +32,6 @@ namespace GitHub.Runner.Common.Tests.Listener
_runnerServer = new Mock<IRunnerServer>();
_credMgr = new Mock<ICredentialManager>();
_store = new Mock<IConfigurationStore>();
_brokerServer = new Mock<IBrokerServer>();
}
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
@@ -44,7 +41,6 @@ namespace GitHub.Runner.Common.Tests.Listener
tc.SetSingleton<IRunnerServer>(_runnerServer.Object);
tc.SetSingleton<ICredentialManager>(_credMgr.Object);
tc.SetSingleton<IConfigurationStore>(_store.Object);
tc.SetSingleton<IBrokerServer>(_brokerServer.Object);
return tc;
}
@@ -75,82 +71,16 @@ namespace GitHub.Runner.Common.Tests.Listener
MessageListener listener = new();
listener.Initialize(tc);
CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token);
bool result = await listener.CreateSessionAsync(tokenSource.Token);
trace.Info("result: {0}", result);
// Assert.
Assert.Equal(CreateSessionResult.Success, result);
Assert.True(result);
_runnerServer
.Verify(x => x.CreateAgentSessionAsync(
_settings.PoolId,
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token), Times.Once());
_brokerServer
.Verify(x => x.CreateSessionAsync(
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token), Times.Never());
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void CreatesSessionWithBrokerMigration()
{
using (TestHostContext tc = CreateTestContext())
using (var tokenSource = new CancellationTokenSource())
{
Tracing trace = tc.GetTrace();
// Arrange.
var expectedSession = new TaskAgentSession()
{
OwnerName = "legacy",
BrokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://broker.actions.github.com"))
};
var expectedBrokerSession = new TaskAgentSession()
{
OwnerName = "broker"
};
_runnerServer
.Setup(x => x.CreateAgentSessionAsync(
_settings.PoolId,
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token))
.Returns(Task.FromResult(expectedSession));
_brokerServer
.Setup(x => x.CreateSessionAsync(
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token))
.Returns(Task.FromResult(expectedBrokerSession));
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
// Act.
MessageListener listener = new();
listener.Initialize(tc);
CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token);
trace.Info("result: {0}", result);
// Assert.
Assert.Equal(CreateSessionResult.Success, result);
_runnerServer
.Verify(x => x.CreateAgentSessionAsync(
_settings.PoolId,
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token), Times.Once());
_brokerServer
.Verify(x => x.CreateSessionAsync(
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token), Times.Once());
}
}
@@ -185,8 +115,8 @@ namespace GitHub.Runner.Common.Tests.Listener
MessageListener listener = new();
listener.Initialize(tc);
CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token);
Assert.Equal(CreateSessionResult.Success, result);
bool result = await listener.CreateSessionAsync(tokenSource.Token);
Assert.True(result);
_runnerServer
.Setup(x => x.DeleteAgentSessionAsync(
@@ -201,83 +131,6 @@ namespace GitHub.Runner.Common.Tests.Listener
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void DeleteSessionWithBrokerMigration()
{
using (TestHostContext tc = CreateTestContext())
using (var tokenSource = new CancellationTokenSource())
{
Tracing trace = tc.GetTrace();
// Arrange.
var expectedSession = new TaskAgentSession()
{
OwnerName = "legacy",
BrokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://broker.actions.github.com"))
};
var expectedBrokerSession = new TaskAgentSession()
{
SessionId = Guid.NewGuid(),
OwnerName = "broker"
};
_runnerServer
.Setup(x => x.CreateAgentSessionAsync(
_settings.PoolId,
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token))
.Returns(Task.FromResult(expectedSession));
_brokerServer
.Setup(x => x.CreateSessionAsync(
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token))
.Returns(Task.FromResult(expectedBrokerSession));
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
// Act.
MessageListener listener = new();
listener.Initialize(tc);
CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token);
trace.Info("result: {0}", result);
Assert.Equal(CreateSessionResult.Success, result);
_runnerServer
.Verify(x => x.CreateAgentSessionAsync(
_settings.PoolId,
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token), Times.Once());
_brokerServer
.Verify(x => x.CreateSessionAsync(
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token), Times.Once());
_brokerServer
.Setup(x => x.DeleteSessionAsync(It.IsAny<CancellationToken>()))
.Returns(Task.CompletedTask);
// Act.
await listener.DeleteSessionAsync();
//Assert
_runnerServer
.Verify(x => x.DeleteAgentSessionAsync(
_settings.PoolId, expectedBrokerSession.SessionId, It.IsAny<CancellationToken>()), Times.Once());
_brokerServer
.Verify(x => x.DeleteSessionAsync(It.IsAny<CancellationToken>()), Times.Once());
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
@@ -309,8 +162,8 @@ namespace GitHub.Runner.Common.Tests.Listener
MessageListener listener = new();
listener.Initialize(tc);
CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token);
Assert.Equal(CreateSessionResult.Success, result);
bool result = await listener.CreateSessionAsync(tokenSource.Token);
Assert.True(result);
var arMessages = new TaskAgentMessage[]
{
@@ -359,112 +212,6 @@ namespace GitHub.Runner.Common.Tests.Listener
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void GetNextMessageWithBrokerMigration()
{
using (TestHostContext tc = CreateTestContext())
using (var tokenSource = new CancellationTokenSource())
{
Tracing trace = tc.GetTrace();
// Arrange.
var expectedSession = new TaskAgentSession();
PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(sessionIdProperty);
sessionIdProperty.SetValue(expectedSession, Guid.NewGuid());
_runnerServer
.Setup(x => x.CreateAgentSessionAsync(
_settings.PoolId,
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token))
.Returns(Task.FromResult(expectedSession));
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
// Act.
MessageListener listener = new();
listener.Initialize(tc);
CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token);
Assert.Equal(CreateSessionResult.Success, result);
var brokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://actions.broker.com"));
var arMessages = new TaskAgentMessage[]
{
new TaskAgentMessage
{
Body = JsonUtility.ToString(brokerMigrationMessage),
MessageType = BrokerMigrationMessage.MessageType
},
};
var brokerMessages = new TaskAgentMessage[]
{
new TaskAgentMessage
{
Body = "somebody1",
MessageId = 4234,
MessageType = JobRequestMessageTypes.PipelineAgentJobRequest
},
new TaskAgentMessage
{
Body = "somebody2",
MessageId = 4235,
MessageType = JobCancelMessage.MessageType
},
null, //should be skipped by GetNextMessageAsync implementation
null,
new TaskAgentMessage
{
Body = "somebody3",
MessageId = 4236,
MessageType = JobRequestMessageTypes.PipelineAgentJobRequest
}
};
var brokerMessageQueue = new Queue<TaskAgentMessage>(brokerMessages);
_runnerServer
.Setup(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
.Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) =>
{
await Task.Yield();
return arMessages[0]; // always send migration message
});
_brokerServer
.Setup(x => x.GetRunnerMessageAsync(
expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
.Returns(async (Guid sessionId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) =>
{
await Task.Yield();
return brokerMessageQueue.Dequeue();
});
TaskAgentMessage message1 = await listener.GetNextMessageAsync(tokenSource.Token);
TaskAgentMessage message2 = await listener.GetNextMessageAsync(tokenSource.Token);
TaskAgentMessage message3 = await listener.GetNextMessageAsync(tokenSource.Token);
Assert.Equal(brokerMessages[0], message1);
Assert.Equal(brokerMessages[1], message2);
Assert.Equal(brokerMessages[4], message3);
//Assert
_runnerServer
.Verify(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()), Times.Exactly(brokerMessages.Length));
_brokerServer
.Verify(x => x.GetRunnerMessageAsync(
expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()), Times.Exactly(brokerMessages.Length));
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
@@ -497,11 +244,11 @@ namespace GitHub.Runner.Common.Tests.Listener
MessageListener listener = new();
listener.Initialize(tc);
CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token);
bool result = await listener.CreateSessionAsync(tokenSource.Token);
trace.Info("result: {0}", result);
// Assert.
Assert.Equal(CreateSessionResult.Success, result);
Assert.True(result);
_runnerServer
.Verify(x => x.CreateAgentSessionAsync(
_settings.PoolId,
@@ -541,8 +288,8 @@ namespace GitHub.Runner.Common.Tests.Listener
MessageListener listener = new();
listener.Initialize(tc);
CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token);
Assert.Equal(CreateSessionResult.Success, result);
bool result = await listener.CreateSessionAsync(tokenSource.Token);
Assert.True(result);
_runnerServer
.Setup(x => x.GetAgentMessageAsync(

View File

@@ -42,7 +42,7 @@ namespace GitHub.Runner.Common.Tests.Listener
TaskOrchestrationPlanReference plan = new();
TimelineReference timeline = null;
Guid jobId = Guid.NewGuid();
return new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
return new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
}
private JobCancelMessage CreateJobCancelMessage()
@@ -88,7 +88,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_configurationManager.Setup(x => x.IsConfigured())
.Returns(true);
_messageListener.Setup(x => x.CreateSessionAsync(It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<CreateSessionResult>(CreateSessionResult.Success));
.Returns(Task.FromResult<bool>(true));
_messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny<CancellationToken>()))
.Returns(async () =>
{
@@ -184,7 +184,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_configStore.Setup(x => x.IsServiceConfigured()).Returns(configureAsService);
_messageListener.Setup(x => x.CreateSessionAsync(It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<CreateSessionResult>(CreateSessionResult.Failure));
.Returns(Task.FromResult(false));
var runner = new Runner.Listener.Runner();
runner.Initialize(hc);
@@ -217,7 +217,7 @@ namespace GitHub.Runner.Common.Tests.Listener
.Returns(false);
_messageListener.Setup(x => x.CreateSessionAsync(It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<CreateSessionResult>(CreateSessionResult.Failure));
.Returns(Task.FromResult(false));
var runner = new Runner.Listener.Runner();
runner.Initialize(hc);
@@ -263,7 +263,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_configurationManager.Setup(x => x.IsConfigured())
.Returns(true);
_messageListener.Setup(x => x.CreateSessionAsync(It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<CreateSessionResult>(CreateSessionResult.Success));
.Returns(Task.FromResult<bool>(true));
_messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny<CancellationToken>()))
.Returns(async () =>
{
@@ -363,7 +363,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_configurationManager.Setup(x => x.IsConfigured())
.Returns(true);
_messageListener.Setup(x => x.CreateSessionAsync(It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<CreateSessionResult>(CreateSessionResult.Success));
.Returns(Task.FromResult<bool>(true));
_messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny<CancellationToken>()))
.Returns(async () =>
{
@@ -458,7 +458,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_configurationManager.Setup(x => x.IsConfigured())
.Returns(true);
_messageListener.Setup(x => x.CreateSessionAsync(It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<CreateSessionResult>(CreateSessionResult.Success));
.Returns(Task.FromResult<bool>(true));
_messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny<CancellationToken>()))
.Returns(async () =>
{

View File

@@ -23,6 +23,7 @@ namespace GitHub.Runner.Common.Tests.Listener
private Mock<IConfigurationStore> _configStore;
private Mock<IJobDispatcher> _jobDispatcher;
private AgentRefreshMessage _refreshMessage = new(1, "2.999.0");
private List<TrimmedPackageMetadata> _trimmedPackages = new();
#if !OS_WINDOWS
private string _packageUrl = null;
@@ -70,6 +71,12 @@ namespace GitHub.Runner.Common.Tests.Listener
}
}
using (var client = new HttpClient())
{
var json = await client.GetStringAsync($"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}-trimmedpackages.json");
_trimmedPackages = StringUtil.ConvertFromJson<List<TrimmedPackageMetadata>>(json);
}
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl }));
@@ -84,10 +91,12 @@ namespace GitHub.Runner.Common.Tests.Listener
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
@@ -143,10 +152,12 @@ namespace GitHub.Runner.Common.Tests.Listener
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
@@ -194,10 +205,12 @@ namespace GitHub.Runner.Common.Tests.Listener
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
@@ -247,10 +260,12 @@ namespace GitHub.Runner.Common.Tests.Listener
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
@@ -290,6 +305,495 @@ namespace GitHub.Runner.Common.Tests.Listener
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_CloneHash_RuntimeAndExternals()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = new List<TrimmedPackageMetadata>() { new TrimmedPackageMetadata() } }));
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, ulong a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));
try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
FieldInfo contentHashesProperty = updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(contentHashesProperty);
Dictionary<string, string> contentHashes = (Dictionary<string, string>)contentHashesProperty.GetValue(updater);
hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes));
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
Assert.Equal(File.ReadAllText(dotnetRuntimeHashFile).Trim(), contentHashes["dotnetRuntime"]);
Assert.Equal(File.ReadAllText(externalsHashFile).Trim(), contentHashes["externals"]);
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_Cancel_CloneHashTask_WhenNotNeeded()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new Mock<IHttpClientHandlerFactory>().Object);
var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, ulong a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));
try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
FieldInfo contentHashesProperty = updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(contentHashesProperty);
Dictionary<string, string> contentHashes = (Dictionary<string, string>)contentHashesProperty.GetValue(updater);
hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes));
Assert.NotEqual(2, contentHashes.Count);
}
catch (Exception ex)
{
hc.GetTrace().Error(ex);
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_UseExternalsTrimmedPackage()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
var p1 = new ProcessInvokerWrapper(); // hashfiles
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper(); // hashfiles
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper(); // un-tar
p3.Initialize(hc);
var p4 = new ProcessInvokerWrapper(); // node -v
p4.Initialize(hc);
var p5 = new ProcessInvokerWrapper(); // node -v
p5.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
hc.EnqueueInstance<IProcessInvoker>(p4);
hc.EnqueueInstance<IProcessInvoker>(p5);
updater.Initialize(hc);
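// Keep only the externals-trimmed packages (no "dotnetRuntime" entry) so the updater is exercised on the externals-only trim path.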
var trim = _trimmedPackages.Where(x => !x.TrimmedContents.ContainsKey("dotnetRuntime")).ToList();
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, ulong a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));
try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
}
var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
var externalsHash = await File.ReadAllTextAsync(externalsHashFile);
if (externalsHash == trim[0].TrimmedContents["externals"])
{
Assert.Contains("Use trimmed (externals) package", File.ReadAllText(traceFile));
}
else
{
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async Task TestSelfUpdateAsync_UseExternalsRuntimeTrimmedPackage()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
var p1 = new ProcessInvokerWrapper(); // hashfiles
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper(); // hashfiles
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper(); // un-tar
p3.Initialize(hc);
var p4 = new ProcessInvokerWrapper(); // node -v
p4.Initialize(hc);
var p5 = new ProcessInvokerWrapper(); // node -v
p5.Initialize(hc);
var p6 = new ProcessInvokerWrapper(); // runner -v
p6.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
hc.EnqueueInstance<IProcessInvoker>(p4);
hc.EnqueueInstance<IProcessInvoker>(p5);
hc.EnqueueInstance<IProcessInvoker>(p6);
updater.Initialize(hc);
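// Keep only packages trimmed of both the dotnet runtime and externals to exercise the runtime+externals trim path.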
var trim = _trimmedPackages.Where(x => x.TrimmedContents.ContainsKey("dotnetRuntime") && x.TrimmedContents.ContainsKey("externals")).ToList();
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, ulong a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));
try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
}
var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
var externalsHash = await File.ReadAllTextAsync(externalsHashFile);
var runtimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
var runtimeHash = await File.ReadAllTextAsync(runtimeHashFile);
if (externalsHash == trim[0].TrimmedContents["externals"] &&
runtimeHash == trim[0].TrimmedContents["dotnetRuntime"])
{
Assert.Contains("Use trimmed (runtime+externals) package", File.ReadAllText(traceFile));
}
else
{
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async Task TestSelfUpdateAsync_NotUseExternalsRuntimeTrimmedPackageOnHashMismatch()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
var p1 = new ProcessInvokerWrapper(); // hashfiles
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper(); // hashfiles
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper(); // un-tar
p3.Initialize(hc);
var p4 = new ProcessInvokerWrapper(); // node -v
p4.Initialize(hc);
var p5 = new ProcessInvokerWrapper(); // node -v
p5.Initialize(hc);
var p6 = new ProcessInvokerWrapper(); // runner -v
p6.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
hc.EnqueueInstance<IProcessInvoker>(p4);
hc.EnqueueInstance<IProcessInvoker>(p5);
hc.EnqueueInstance<IProcessInvoker>(p6);
updater.Initialize(hc);
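// Overwrite every trimmed-content hash with a bogus value so the updater detects the mismatch and does not use the trimmed packages.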
var trim = _trimmedPackages.ToList();
foreach (var package in trim)
{
foreach (var hash in package.TrimmedContents.Keys)
{
package.TrimmedContents[hash] = "mismatch";
}
}
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, ulong a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));
try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
}
var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async Task TestSelfUpdateAsync_FallbackToFullPackage()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
var p1 = new ProcessInvokerWrapper(); // hashfiles
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper(); // hashfiles
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper(); // un-tar trim
p3.Initialize(hc);
var p4 = new ProcessInvokerWrapper(); // un-tar full
p4.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
hc.EnqueueInstance<IProcessInvoker>(p4);
updater.Initialize(hc);
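// Corrupt the package-level hash so the trimmed package is rejected and the updater falls back to the full package.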
var trim = _trimmedPackages.ToList();
foreach (var package in trim)
{
package.HashValue = "mismatch";
}
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.999.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.999.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, ulong a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));
try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.999.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.999.0"), CancellationToken.None);
}
var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);
if (File.ReadAllText(traceFile).Contains("Use trimmed (runtime+externals) package"))
{
Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
}
else
{
hc.GetTrace().Warning("Skipping the 'TestSelfUpdateAsync_FallbackToFullPackage' test, as the `externals` or `runtime` hashes have been updated");
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
}
}
#endif

View File

@@ -23,6 +23,7 @@ namespace GitHub.Runner.Common.Tests.Listener
private Mock<IConfigurationStore> _configStore;
private Mock<IJobDispatcher> _jobDispatcher;
private AgentRefreshMessage _refreshMessage = new(1, "2.999.0");
private List<TrimmedPackageMetadata> _trimmedPackages = new();
#if !OS_WINDOWS
private string _packageUrl = null;
@@ -80,10 +81,12 @@ namespace GitHub.Runner.Common.Tests.Listener
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdaterV2();
@@ -140,10 +143,12 @@ namespace GitHub.Runner.Common.Tests.Listener
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdaterV2();
@@ -189,10 +194,12 @@ namespace GitHub.Runner.Common.Tests.Listener
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdaterV2();

View File

@@ -0,0 +1,301 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using Xunit;
namespace GitHub.Runner.Common.Tests
{
public sealed class PackagesTrimL0
{
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_NewFilesCrossAll()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets");
var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets");
string layoutBin = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
var newFiles = new List<string>();
if (Directory.Exists(layoutBin))
{
var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile);
var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile);
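// Any file under _layout/bin not listed in either asset manifest will break runner updates that use trimmed packages.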
foreach (var file in Directory.GetFiles(layoutBin, "*", SearchOption.AllDirectories))
{
if (!coreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x)) &&
!runtimeAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x)))
{
newFiles.Add(file);
}
}
if (newFiles.Count > 0)
{
Assert.True(false, $"Found new files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages.");
}
}
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_OverlapFiles()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets");
var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets");
var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile);
var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile);
foreach (var line in coreAssets)
{
if (runtimeAssets.Contains(line, StringComparer.OrdinalIgnoreCase))
{
Assert.True(false, $"'Misc/runnercoreassets' and 'Misc/runnerdotnetruntimeassets' should not overlap.");
}
}
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_NewRunnerCoreAssets()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerCoreAssetsFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnercoreassets");
var coreAssets = await File.ReadAllLinesAsync(runnerCoreAssetsFile);
string layoutBin = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
var newFiles = new List<string>();
if (Directory.Exists(layoutBin))
{
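// Check every net6.0 build output (skipping Test and obj directories) for files missing from the runnercoreassets manifest.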
var binDirs = Directory.GetDirectories(TestUtil.GetSrcPath(), "net6.0", SearchOption.AllDirectories);
foreach (var binDir in binDirs)
{
if (binDir.Contains("Test") || binDir.Contains("obj"))
{
continue;
}
Directory.GetFiles(binDir, "*", SearchOption.TopDirectoryOnly).ToList().ForEach(x =>
{
if (!x.Contains("runtimeconfig.dev.json"))
{
if (!coreAssets.Any(y => x.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(y)))
{
newFiles.Add(x);
}
}
});
}
if (newFiles.Count > 0)
{
Assert.True(false, $"Found new files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages. You might need to update `Misc/runnercoreassets`.");
}
}
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_NewDotnetRuntimeAssets()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var runnerDotnetRuntimeFile = Path.Combine(TestUtil.GetSrcPath(), @"Misc/runnerdotnetruntimeassets");
var runtimeAssets = await File.ReadAllLinesAsync(runnerDotnetRuntimeFile);
string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runnerdotnetruntimeassets");
var newFiles = new List<string>();
if (File.Exists(layoutTrimsRuntimeAssets))
{
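// Compare the freshly generated runtime asset list against the checked-in Misc/runnerdotnetruntimeassets manifest.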
var runtimeAssetsCurrent = await File.ReadAllLinesAsync(layoutTrimsRuntimeAssets);
foreach (var runtimeFile in runtimeAssetsCurrent)
{
if (runtimeAssets.Any(x => runtimeFile.EndsWith(x, StringComparison.OrdinalIgnoreCase)))
{
continue;
}
else
{
newFiles.Add(runtimeFile);
}
}
if (newFiles.Count > 0)
{
Assert.True(false, $"Found new dotnet runtime files '{string.Join('\n', newFiles)}'. These will break runner update using trimmed packages. You might need to update `Misc/runnerdotnetruntimeassets`.");
}
}
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckDotnetRuntimeHash()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(dotnetRuntimeHashFile)}");
string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runtime");
string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node16\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node16/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
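// The hashFiles script reports its result on stderr wrapped in __OUTPUT__ markers,
// e.g. "__OUTPUT__<sha256-hex>__OUTPUT__"; everything else is just logged as trace.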
p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};
p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};
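// The glob for hashFiles is passed via the "patterns" environment variable; "**" covers every file under the working directory.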
var env = new Dictionary<string, string>
{
["patterns"] = "**"
};
int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsRuntimeAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);
Assert.True(string.Equals(hashResult, File.ReadAllText(dotnetRuntimeHashFile).Trim()), $"Hash mismatch for dotnet runtime. You might need to update `Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}` or check whether `hashFiles.ts` has changed recently.");
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckExternalsHash()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(externalsHashFile)}");
string layoutTrimsExternalsAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/externals");
string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node16\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node16/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
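// Same as the runtime-hash test: the hash result arrives on stderr wrapped in __OUTPUT__ markers.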
p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};
p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};
var env = new Dictionary<string, string>
{
["patterns"] = "**"
};
int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsExternalsAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);
Assert.True(string.Equals(hashResult, File.ReadAllText(externalsHashFile).Trim()), $"Hash mismatch for externals. You might need to update `Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}` or check whether `hashFiles.ts` has changed recently.");
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public Task RunnerLayoutParts_ContentHashFilesNoNewline()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
var dotnetRuntimeHash = File.ReadAllText(dotnetRuntimeHashFile);
trace.Info($"Current hash: {dotnetRuntimeHash}");
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
var externalsHash = File.ReadAllText(externalsHashFile);
trace.Info($"Current hash: {externalsHash}");
Assert.False(externalsHash.Any(char.IsWhiteSpace), "Found whitespace in externals hash file.");
Assert.False(dotnetRuntimeHash.Any(char.IsWhiteSpace), "Found whitespace in dotnet runtime hash file.");
return Task.CompletedTask;
}
}
}
}

View File

@@ -960,33 +960,6 @@ namespace GitHub.Runner.Common.Tests.Util
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void ReplaceInvalidFileNameChars()
{
Assert.Equal(string.Empty, IOUtil.ReplaceInvalidFileNameChars(null));
Assert.Equal(string.Empty, IOUtil.ReplaceInvalidFileNameChars(string.Empty));
Assert.Equal("hello.txt", IOUtil.ReplaceInvalidFileNameChars("hello.txt"));
#if OS_WINDOWS
// Refer https://github.com/dotnet/runtime/blob/ce84f1d8a3f12711bad678a33efbc37b461f684f/src/libraries/System.Private.CoreLib/src/System/IO/Path.Windows.cs#L15
Assert.Equal(
"1_ 2_ 3_ 4_ 5_ 6_ 7_ 8_ 9_ 10_ 11_ 12_ 13_ 14_ 15_ 16_ 17_ 18_ 19_ 20_ 21_ 22_ 23_ 24_ 25_ 26_ 27_ 28_ 29_ 30_ 31_ 32_ 33_ 34_ 35_ 36_ 37_ 38_ 39_ 40_ 41_",
IOUtil.ReplaceInvalidFileNameChars($"1\" 2< 3> 4| 5\0 6{(char)1} 7{(char)2} 8{(char)3} 9{(char)4} 10{(char)5} 11{(char)6} 12{(char)7} 13{(char)8} 14{(char)9} 15{(char)10} 16{(char)11} 17{(char)12} 18{(char)13} 19{(char)14} 20{(char)15} 21{(char)16} 22{(char)17} 23{(char)18} 24{(char)19} 25{(char)20} 26{(char)21} 27{(char)22} 28{(char)23} 29{(char)24} 30{(char)25} 31{(char)26} 32{(char)27} 33{(char)28} 34{(char)29} 35{(char)30} 36{(char)31} 37: 38* 39? 40\\ 41/"));
#else
// Refer https://github.com/dotnet/runtime/blob/ce84f1d8a3f12711bad678a33efbc37b461f684f/src/libraries/System.Private.CoreLib/src/System/IO/Path.Unix.cs#L12
Assert.Equal("1_ 2_", IOUtil.ReplaceInvalidFileNameChars("1\0 2/"));
#endif
Assert.Equal("_leading", IOUtil.ReplaceInvalidFileNameChars("/leading"));
Assert.Equal("__consecutive leading", IOUtil.ReplaceInvalidFileNameChars("//consecutive leading"));
Assert.Equal("trailing_", IOUtil.ReplaceInvalidFileNameChars("trailing/"));
Assert.Equal("consecutive trailing__", IOUtil.ReplaceInvalidFileNameChars("consecutive trailing//"));
Assert.Equal("middle_middle", IOUtil.ReplaceInvalidFileNameChars("middle/middle"));
Assert.Equal("consecutive middle__consecutive middle", IOUtil.ReplaceInvalidFileNameChars("consecutive middle//consecutive middle"));
Assert.Equal("_leading_middle_trailing_", IOUtil.ReplaceInvalidFileNameChars("/leading/middle/trailing/"));
Assert.Equal("__consecutive leading__consecutive middle__consecutive trailing__", IOUtil.ReplaceInvalidFileNameChars("//consecutive leading//consecutive middle//consecutive trailing//"));
}
private static async Task CreateDirectoryReparsePoint(IHostContext context, string link, string target)
{
#if OS_WINDOWS

View File

@@ -212,5 +212,210 @@ namespace GitHub.Runner.Common.Tests.Util
File.Delete(brokenSymlink);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void UseWhich2FindGit()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
string gitPath = WhichUtil.Which2("git", trace: trace);
trace.Info($"Which(\"git\") returns: {gitPath ?? string.Empty}");
// Assert.
Assert.True(!string.IsNullOrEmpty(gitPath) && File.Exists(gitPath), $"Unable to find Git through: {nameof(WhichUtil.Which2)}");
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2ReturnsNullWhenNotFound()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
string nosuch = WhichUtil.Which2("no-such-file-cf7e351f", trace: trace);
trace.Info($"result: {nosuch ?? string.Empty}");
// Assert.
Assert.True(string.IsNullOrEmpty(nosuch), "Path should not be resolved");
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2ThrowsWhenRequireAndNotFound()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
try
{
WhichUtil.Which2("no-such-file-cf7e351f", require: true, trace: trace);
throw new Exception("which should have thrown");
}
catch (FileNotFoundException ex)
{
Assert.Equal("no-such-file-cf7e351f", ex.FileName);
}
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2HandleFullyQualifiedPath()
{
using (TestHostContext hc = new(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
var gitPath = WhichUtil.Which2("git", require: true, trace: trace);
var gitPath2 = WhichUtil.Which2(gitPath, require: true, trace: trace);
// Assert.
Assert.Equal(gitPath, gitPath2);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2HandlesSymlinkToTargetFullPath()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}.exe";
string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}.exe";
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}";
string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}";
#endif
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);
using (File.Create(target))
{
File.CreateSymbolicLink(symlink, target);
// Act.
var result = WhichUtil.Which2(symlinkName, require: true, trace: trace);
// Assert
Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find symlink through: {nameof(WhichUtil.Which2)}");
}
// Cleanup
File.Delete(symlink);
File.Delete(target);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2HandlesSymlinkToTargetRelativePath()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}.exe";
string targetName = $"target-{Guid.NewGuid()}.exe";
string target = Path.GetTempPath() + targetName;
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}";
string targetName = $"target-{Guid.NewGuid()}";
string target = Path.GetTempPath() + targetName;
#endif
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);
using (File.Create(target))
{
File.CreateSymbolicLink(symlink, targetName);
// Act.
var result = WhichUtil.Which2(symlinkName, require: true, trace: trace);
// Assert
Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find {symlinkName} through: {nameof(WhichUtil.Which2)}");
}
// Cleanup
File.Delete(symlink);
File.Delete(target);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Which2ThrowsWhenSymlinkBroken()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}";
string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}.exe";
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}";
string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}";
#endif
string target = "no-such-file-cf7e351f";
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);
File.CreateSymbolicLink(brokenSymlink, target);
// Act.
var exception = Assert.Throws<FileNotFoundException>(() => WhichUtil.Which2(brokenSymlinkName, require: true, trace: trace));
// Assert
Assert.Equal(brokenSymlinkName, exception.FileName);
// Cleanup
File.Delete(brokenSymlink);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
}
}

View File

@@ -232,7 +232,7 @@ namespace GitHub.Runner.Common.Tests.Worker
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,

View File

@@ -382,6 +382,8 @@ runs:
}
};
_ec.Object.Global.Variables.Set("DistributedTask.UseActionArchiveCache", bool.TrueString);
//Act
await _actionManager.PrepareActionsAsync(_ec.Object, actions);
@@ -2373,6 +2375,10 @@ runs:
_ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token);
_ec.Setup(x => x.Root).Returns(new GitHub.Runner.Worker.ExecutionContext());
var variables = new Dictionary<string, VariableValue>();
if (enableComposite)
{
variables["DistributedTask.EnableCompositeActions"] = "true";
}
_ec.Object.Global.Variables = new Variables(_hc, variables);
_ec.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());

View File

@@ -757,7 +757,7 @@ namespace GitHub.Runner.Common.Tests.Worker
//Assert
var err = Assert.Throws<ArgumentException>(() => actionManifest.Load(_ec.Object, action_path));
Assert.Contains($"Failed to load {action_path}", err.Message);
Assert.Contains($"Fail to load {action_path}", err.Message);
_ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16' or 'node20'.")), It.IsAny<ExecutionContextLogOptions>()), Times.Once);
}
finally

Some files were not shown because too many files have changed in this diff.